Auto-commit 2026-04-29 16:31

This commit is contained in:
2026-04-29 16:31:27 -04:00
parent e8687bb6b2
commit 0495ee5bd2
19691 changed files with 3272886 additions and 138 deletions

View File

@@ -0,0 +1 @@
// Re-exports the Edge build of ReactDOM's server renderer so the rest of the
// bundle can import it from a single local module path.
export * from 'react-dom/server.edge'

View File

@@ -0,0 +1,15 @@
// Resolves a ReactDOMServer implementation, preferring the Edge build and
// falling back to the browser build for React versions that don't ship
// `react-dom/server.edge`.
let ReactDOMServer;
try {
    // TODO: Use Node.js build unless we're in an Edge runtime.
    ReactDOMServer = require('react-dom/server.edge');
} catch (error) {
    // Only a missing/unexported module is recoverable; anything else is a real failure.
    const isMissingExport = error.code === 'MODULE_NOT_FOUND' || error.code === 'ERR_PACKAGE_PATH_NOT_EXPORTED';
    if (!isMissingExport) {
        throw error;
    }
    // In React versions without react-dom/server.edge, the browser build works in Node.js.
    // The Node.js build does not support renderToReadableStream.
    ReactDOMServer = require('react-dom/server.browser');
}
module.exports = ReactDOMServer;
//# sourceMappingURL=ReactDOMServerPages.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/ReactDOMServerPages.js"],"sourcesContent":["let ReactDOMServer\n\ntry {\n // TODO: Use Node.js build unless we're in an Edge runtime.\n ReactDOMServer = require('react-dom/server.edge')\n} catch (error) {\n if (\n error.code !== 'MODULE_NOT_FOUND' &&\n error.code !== 'ERR_PACKAGE_PATH_NOT_EXPORTED'\n ) {\n throw error\n }\n // In React versions without react-dom/server.edge, the browser build works in Node.js.\n // The Node.js build does not support renderToReadableStream.\n ReactDOMServer = require('react-dom/server.browser')\n}\n\nmodule.exports = ReactDOMServer\n"],"names":["ReactDOMServer","require","error","code","module","exports"],"mappings":"AAAA,IAAIA;AAEJ,IAAI;IACF,2DAA2D;IAC3DA,iBAAiBC,QAAQ;AAC3B,EAAE,OAAOC,OAAO;IACd,IACEA,MAAMC,IAAI,KAAK,sBACfD,MAAMC,IAAI,KAAK,iCACf;QACA,MAAMD;IACR;IACA,uFAAuF;IACvF,6DAA6D;IAC7DF,iBAAiBC,QAAQ;AAC3B;AAEAG,OAAOC,OAAO,GAAGL","ignoreList":[0]}

122
node_modules/next/dist/esm/server/accept-header.js generated vendored Normal file
View File

@@ -0,0 +1,122 @@
/**
 * Parses an Accept-* style header value into an ordered list of tokens.
 *
 * Without `preferences`, returns the header's tokens sorted by quality (`q`)
 * then preference position then header position. With `preferences`, returns
 * the original preference strings that match the negotiated tokens; `*`
 * expands to every preference entry not explicitly listed in the header.
 * Entries with `q=0` are excluded (explicitly rejected by the client).
 *
 * Throws when an entry has more than one parameter, an empty token, or a
 * malformed/non-`q` parameter.
 */
function parse(raw, preferences, options) {
    const lowers = new Map();
    const header = raw.replace(/[ \t]/g, '');
    if (preferences) {
        let pos = 0;
        for (const preference of preferences) {
            const lower = preference.toLowerCase();
            lowers.set(lower, {
                orig: preference,
                pos: pos++
            });
            if (options.prefixMatch) {
                // Also index every dash-separated prefix (e.g. "en" for "en-us")
                // so a less-specific header token can still match this preference.
                const segments = lower.split('-');
                segments.pop();
                while (segments.length > 0) {
                    const prefix = segments.join('-');
                    if (!lowers.has(prefix)) {
                        lowers.set(prefix, {
                            orig: preference,
                            pos: pos++
                        });
                    }
                    segments.pop();
                }
            }
        }
    }
    // Builds the error thrown for any malformed header entry.
    const invalid = () =>
        Object.defineProperty(new Error(`Invalid ${options.type} header`), "__NEXT_ERROR_CODE", {
            value: "E77",
            enumerable: false,
            configurable: true
        });
    const entries = header.split(',');
    const selections = [];
    const seen = new Set();
    for (const [index, entry] of entries.entries()) {
        if (!entry) {
            continue;
        }
        const pieces = entry.split(';');
        if (pieces.length > 2) {
            throw invalid();
        }
        const token = pieces[0].toLowerCase();
        if (!token) {
            throw invalid();
        }
        const selection = {
            token,
            pos: index,
            q: 1
        };
        if (preferences && lowers.has(token)) {
            selection.pref = lowers.get(token).pos;
        }
        // Record the token even if it ends up excluded below (q=0), so the
        // wildcard expansion won't re-add an explicitly rejected entry.
        seen.add(token);
        if (pieces.length === 2) {
            const [qKey, qValue] = pieces[1].split('=');
            if (!qValue || (qKey !== 'q' && qKey !== 'Q')) {
                throw invalid();
            }
            const score = parseFloat(qValue);
            if (score === 0) {
                // q=0 means "not acceptable" — drop the selection entirely.
                continue;
            }
            if (Number.isFinite(score) && score <= 1 && score >= 0.001) {
                selection.q = score;
            }
        }
        selections.push(selection);
    }
    selections.sort((a, b) => {
        // Highest quality first; then preference order; then header order.
        if (a.q !== b.q) {
            return b.q - a.q;
        }
        if (a.pref !== b.pref) {
            if (a.pref === undefined) {
                return 1;
            }
            if (b.pref === undefined) {
                return -1;
            }
            return a.pref - b.pref;
        }
        return a.pos - b.pos;
    });
    const tokens = selections.map((selection) => selection.token);
    if (!preferences || !preferences.length) {
        return tokens;
    }
    const preferred = [];
    for (const token of tokens) {
        if (token === '*') {
            // Wildcard: take every preference entry the header didn't name explicitly.
            for (const [lower, entry] of lowers) {
                if (!seen.has(lower)) {
                    preferred.push(entry.orig);
                }
            }
        } else {
            const lower = token.toLowerCase();
            if (lowers.has(lower)) {
                preferred.push(lowers.get(lower).orig);
            }
        }
    }
    return preferred;
}
/**
 * Negotiates the best matching language from an `Accept-Language` header value
 * against the given preference list. Returns '' when nothing matches.
 */
export function acceptLanguage(header = '', preferences) {
    const matches = parse(header, preferences, {
        type: 'accept-language',
        prefixMatch: true
    });
    return matches[0] || '';
}
//# sourceMappingURL=accept-header.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,125 @@
import PromiseQueue from 'next/dist/compiled/p-queue';
import { InvariantError } from '../../shared/lib/invariant-error';
import { isThenable } from '../../shared/lib/is-thenable';
import { workAsyncStorage } from '../app-render/work-async-storage.external';
import { withExecuteRevalidates } from '../revalidation-utils';
import { bindSnapshot } from '../app-render/async-local-storage';
import { workUnitAsyncStorage } from '../app-render/work-unit-async-storage.external';
import { afterTaskAsyncStorage } from '../app-render/after-task-async-storage.external';
export class AfterContext {
constructor({ waitUntil, onClose, onTaskError }){
this.workUnitStores = new Set();
this.waitUntil = waitUntil;
this.onClose = onClose;
this.onTaskError = onTaskError;
this.callbackQueue = new PromiseQueue();
this.callbackQueue.pause();
}
after(task) {
if (isThenable(task)) {
if (!this.waitUntil) {
errorWaitUntilNotAvailable();
}
this.waitUntil(task.catch((error)=>this.reportTaskError('promise', error)));
} else if (typeof task === 'function') {
// TODO(after): implement tracing
this.addCallback(task);
} else {
throw Object.defineProperty(new Error('`after()`: Argument must be a promise or a function'), "__NEXT_ERROR_CODE", {
value: "E50",
enumerable: false,
configurable: true
});
}
}
addCallback(callback) {
// if something is wrong, throw synchronously, bubbling up to the `after` callsite.
if (!this.waitUntil) {
errorWaitUntilNotAvailable();
}
const workUnitStore = workUnitAsyncStorage.getStore();
if (workUnitStore) {
this.workUnitStores.add(workUnitStore);
}
const afterTaskStore = afterTaskAsyncStorage.getStore();
// This is used for checking if request APIs can be called inside `after`.
// Note that we need to check the phase in which the *topmost* `after` was called (which should be "action"),
// not the current phase (which might be "after" if we're in a nested after).
// Otherwise, we might allow `after(() => headers())`, but not `after(() => after(() => headers()))`.
const rootTaskSpawnPhase = afterTaskStore ? afterTaskStore.rootTaskSpawnPhase // nested after
: workUnitStore == null ? void 0 : workUnitStore.phase // topmost after
;
// this should only happen once.
if (!this.runCallbacksOnClosePromise) {
this.runCallbacksOnClosePromise = this.runCallbacksOnClose();
this.waitUntil(this.runCallbacksOnClosePromise);
}
// Bind the callback to the current execution context (i.e. preserve all currently available ALS-es).
// We do this because we want all of these to be equivalent in every regard except timing:
// after(() => x())
// after(x())
// await x()
const wrappedCallback = bindSnapshot(// WARNING: Don't make this a named function. It must be anonymous.
// See: https://github.com/facebook/react/pull/34911
async ()=>{
try {
await afterTaskAsyncStorage.run({
rootTaskSpawnPhase
}, ()=>callback());
} catch (error) {
this.reportTaskError('function', error);
}
});
this.callbackQueue.add(wrappedCallback);
}
async runCallbacksOnClose() {
await new Promise((resolve)=>this.onClose(resolve));
return this.runCallbacks();
}
async runCallbacks() {
if (this.callbackQueue.size === 0) return;
for (const workUnitStore of this.workUnitStores){
workUnitStore.phase = 'after';
}
const workStore = workAsyncStorage.getStore();
if (!workStore) {
throw Object.defineProperty(new InvariantError('Missing workStore in AfterContext.runCallbacks'), "__NEXT_ERROR_CODE", {
value: "E547",
enumerable: false,
configurable: true
});
}
return withExecuteRevalidates(workStore, ()=>{
this.callbackQueue.start();
return this.callbackQueue.onIdle();
});
}
reportTaskError(taskKind, error) {
// TODO(after): this is fine for now, but will need better intergration with our error reporting.
// TODO(after): should we log this if we have a onTaskError callback?
console.error(taskKind === 'promise' ? `A promise passed to \`after()\` rejected:` : `An error occurred in a function passed to \`after()\`:`, error);
if (this.onTaskError) {
// this is very defensive, but we really don't want anything to blow up in an error handler
try {
this.onTaskError == null ? void 0 : this.onTaskError.call(this, error);
} catch (handlerError) {
console.error(Object.defineProperty(new InvariantError('`onTaskError` threw while handling an error thrown from an `after` task', {
cause: handlerError
}), "__NEXT_ERROR_CODE", {
value: "E569",
enumerable: false,
configurable: true
}));
}
}
}
}
function errorWaitUntilNotAvailable() {
throw Object.defineProperty(new Error('`after()` will not work correctly, because `waitUntil` is not available in the current environment.'), "__NEXT_ERROR_CODE", {
value: "E91",
enumerable: false,
configurable: true
});
}
//# sourceMappingURL=after-context.js.map

File diff suppressed because one or more lines are too long

18
node_modules/next/dist/esm/server/after/after.js generated vendored Normal file
View File

@@ -0,0 +1,18 @@
import { workAsyncStorage } from '../app-render/work-async-storage.external';
/**
 * This function allows you to schedule callbacks to be executed after the current request finishes.
 * Must be called inside a request scope (a work store must be available);
 * otherwise it throws.
 */
export function after(task) {
    const workStore = workAsyncStorage.getStore();
    if (workStore) {
        return workStore.afterContext.after(task);
    }
    // TODO(after): the linked docs page talks about *dynamic* APIs, which after soon won't be anymore
    throw Object.defineProperty(new Error('`after` was called outside a request scope. Read more: https://nextjs.org/docs/messages/next-dynamic-api-wrong-context'), "__NEXT_ERROR_CODE", {
        value: "E468",
        enumerable: false,
        configurable: true
    });
}
//# sourceMappingURL=after.js.map

1
node_modules/next/dist/esm/server/after/after.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/after/after.ts"],"sourcesContent":["import { workAsyncStorage } from '../app-render/work-async-storage.external'\n\nexport type AfterTask<T = unknown> = Promise<T> | AfterCallback<T>\nexport type AfterCallback<T = unknown> = () => T | Promise<T>\n\n/**\n * This function allows you to schedule callbacks to be executed after the current request finishes.\n */\nexport function after<T>(task: AfterTask<T>): void {\n const workStore = workAsyncStorage.getStore()\n\n if (!workStore) {\n // TODO(after): the linked docs page talks about *dynamic* APIs, which after soon won't be anymore\n throw new Error(\n '`after` was called outside a request scope. Read more: https://nextjs.org/docs/messages/next-dynamic-api-wrong-context'\n )\n }\n\n const { afterContext } = workStore\n return afterContext.after(task)\n}\n"],"names":["workAsyncStorage","after","task","workStore","getStore","Error","afterContext"],"mappings":"AAAA,SAASA,gBAAgB,QAAQ,4CAA2C;AAK5E;;CAEC,GACD,OAAO,SAASC,MAASC,IAAkB;IACzC,MAAMC,YAAYH,iBAAiBI,QAAQ;IAE3C,IAAI,CAACD,WAAW;QACd,kGAAkG;QAClG,MAAM,qBAEL,CAFK,IAAIE,MACR,2HADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,MAAM,EAAEC,YAAY,EAAE,GAAGH;IACzB,OAAOG,aAAaL,KAAK,CAACC;AAC5B","ignoreList":[0]}

60
node_modules/next/dist/esm/server/after/awaiter.js generated vendored Normal file
View File

@@ -0,0 +1,60 @@
import { InvariantError } from '../../shared/lib/invariant-error';
/**
* Provides a `waitUntil` implementation which gathers promises to be awaited later (via {@link AwaiterMulti.awaiting}).
* Unlike a simple `Promise.all`, {@link AwaiterMulti} works recursively --
* if a promise passed to {@link AwaiterMulti.waitUntil} calls `waitUntil` again,
* that second promise will also be awaited.
*/ export class AwaiterMulti {
constructor({ onError } = {}){
this.promises = new Set();
this.waitUntil = (promise)=>{
// if a promise settles before we await it, we should drop it --
// storing them indefinitely could result in a memory leak.
const cleanup = ()=>{
this.promises.delete(promise);
};
promise.then(cleanup, (err)=>{
cleanup();
this.onError(err);
});
this.promises.add(promise);
};
this.onError = onError ?? console.error;
}
async awaiting() {
while(this.promises.size > 0){
const promises = Array.from(this.promises);
this.promises.clear();
await Promise.allSettled(promises);
}
}
}
/**
 * Like {@link AwaiterMulti}, but can only be awaited once.
 * If {@link AwaiterOnce.waitUntil} is called after that, it will throw.
 */
export class AwaiterOnce {
    constructor(options = {}){
        this.done = false;
        this.awaiter = new AwaiterMulti(options);
        // Instance arrow so `waitUntil` can be passed around without binding.
        this.waitUntil = (promise)=>{
            if (this.done) {
                throw Object.defineProperty(new InvariantError('Cannot call waitUntil() on an AwaiterOnce that was already awaited'), "__NEXT_ERROR_CODE", {
                    value: "E563",
                    enumerable: false,
                    configurable: true
                });
            }
            return this.awaiter.waitUntil(promise);
        };
    }
    /**
     * Drains the underlying awaiter, then marks this instance as done so no
     * further promises can be registered. Repeated calls share one promise.
     */
    async awaiting() {
        if (!this.pending) {
            this.pending = this.awaiter.awaiting().finally(()=>{
                this.done = true;
            });
        }
        return this.pending;
    }
}
//# sourceMappingURL=awaiter.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/after/awaiter.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\n\n/**\n * Provides a `waitUntil` implementation which gathers promises to be awaited later (via {@link AwaiterMulti.awaiting}).\n * Unlike a simple `Promise.all`, {@link AwaiterMulti} works recursively --\n * if a promise passed to {@link AwaiterMulti.waitUntil} calls `waitUntil` again,\n * that second promise will also be awaited.\n */\nexport class AwaiterMulti {\n private promises: Set<Promise<unknown>> = new Set()\n private onError: (error: unknown) => void\n\n constructor({ onError }: { onError?: (error: unknown) => void } = {}) {\n this.onError = onError ?? console.error\n }\n\n public waitUntil = (promise: Promise<unknown>): void => {\n // if a promise settles before we await it, we should drop it --\n // storing them indefinitely could result in a memory leak.\n const cleanup = () => {\n this.promises.delete(promise)\n }\n\n promise.then(cleanup, (err) => {\n cleanup()\n this.onError(err)\n })\n\n this.promises.add(promise)\n }\n\n public async awaiting(): Promise<void> {\n while (this.promises.size > 0) {\n const promises = Array.from(this.promises)\n this.promises.clear()\n await Promise.allSettled(promises)\n }\n }\n}\n\n/**\n * Like {@link AwaiterMulti}, but can only be awaited once.\n * If {@link AwaiterOnce.waitUntil} is called after that, it will throw.\n */\nexport class AwaiterOnce {\n private awaiter: AwaiterMulti\n private done: boolean = false\n private pending: Promise<void> | undefined\n\n constructor(options: { onError?: (error: unknown) => void } = {}) {\n this.awaiter = new AwaiterMulti(options)\n }\n\n public waitUntil = (promise: Promise<unknown>): void => {\n if (this.done) {\n throw new InvariantError(\n 'Cannot call waitUntil() on an AwaiterOnce that was already awaited'\n )\n }\n return this.awaiter.waitUntil(promise)\n }\n\n public async awaiting(): Promise<void> {\n if 
(!this.pending) {\n this.pending = this.awaiter.awaiting().finally(() => {\n this.done = true\n })\n }\n return this.pending\n }\n}\n"],"names":["InvariantError","AwaiterMulti","constructor","onError","promises","Set","waitUntil","promise","cleanup","delete","then","err","add","console","error","awaiting","size","Array","from","clear","Promise","allSettled","AwaiterOnce","options","done","awaiter","pending","finally"],"mappings":"AAAA,SAASA,cAAc,QAAQ,mCAAkC;AAEjE;;;;;CAKC,GACD,OAAO,MAAMC;IAIXC,YAAY,EAAEC,OAAO,EAA0C,GAAG,CAAC,CAAC,CAAE;aAH9DC,WAAkC,IAAIC;aAOvCC,YAAY,CAACC;YAClB,gEAAgE;YAChE,2DAA2D;YAC3D,MAAMC,UAAU;gBACd,IAAI,CAACJ,QAAQ,CAACK,MAAM,CAACF;YACvB;YAEAA,QAAQG,IAAI,CAACF,SAAS,CAACG;gBACrBH;gBACA,IAAI,CAACL,OAAO,CAACQ;YACf;YAEA,IAAI,CAACP,QAAQ,CAACQ,GAAG,CAACL;QACpB;QAhBE,IAAI,CAACJ,OAAO,GAAGA,WAAWU,QAAQC,KAAK;IACzC;IAiBA,MAAaC,WAA0B;QACrC,MAAO,IAAI,CAACX,QAAQ,CAACY,IAAI,GAAG,EAAG;YAC7B,MAAMZ,WAAWa,MAAMC,IAAI,CAAC,IAAI,CAACd,QAAQ;YACzC,IAAI,CAACA,QAAQ,CAACe,KAAK;YACnB,MAAMC,QAAQC,UAAU,CAACjB;QAC3B;IACF;AACF;AAEA;;;CAGC,GACD,OAAO,MAAMkB;IAKXpB,YAAYqB,UAAkD,CAAC,CAAC,CAAE;aAH1DC,OAAgB;aAOjBlB,YAAY,CAACC;YAClB,IAAI,IAAI,CAACiB,IAAI,EAAE;gBACb,MAAM,qBAEL,CAFK,IAAIxB,eACR,uEADI,qBAAA;2BAAA;gCAAA;kCAAA;gBAEN;YACF;YACA,OAAO,IAAI,CAACyB,OAAO,CAACnB,SAAS,CAACC;QAChC;QAVE,IAAI,CAACkB,OAAO,GAAG,IAAIxB,aAAasB;IAClC;IAWA,MAAaR,WAA0B;QACrC,IAAI,CAAC,IAAI,CAACW,OAAO,EAAE;YACjB,IAAI,CAACA,OAAO,GAAG,IAAI,CAACD,OAAO,CAACV,QAAQ,GAAGY,OAAO,CAAC;gBAC7C,IAAI,CAACH,IAAI,GAAG;YACd;QACF;QACA,OAAO,IAAI,CAACE,OAAO;IACrB;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,19 @@
import { createAsyncLocalStorage } from '../app-render/async-local-storage';
export function getBuiltinRequestContext() {
const _globalThis = globalThis;
const ctx = _globalThis[NEXT_REQUEST_CONTEXT_SYMBOL];
return ctx == null ? void 0 : ctx.get();
}
const NEXT_REQUEST_CONTEXT_SYMBOL = Symbol.for('@next/request-context');
/** "@next/request-context" has a different signature from AsyncLocalStorage,
 * matching [AsyncContext.Variable](https://github.com/tc39/proposal-async-context).
 * We don't need a full AsyncContext adapter here, just having `.get()` is enough
 */
export function createLocalRequestContext() {
    const storage = createAsyncLocalStorage();
    return {
        get() {
            return storage.getStore();
        },
        run(value, callback) {
            return storage.run(value, callback);
        }
    };
}
//# sourceMappingURL=builtin-request-context.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/after/builtin-request-context.ts"],"sourcesContent":["import { createAsyncLocalStorage } from '../app-render/async-local-storage'\n\nexport function getBuiltinRequestContext():\n | BuiltinRequestContextValue\n | undefined {\n const _globalThis = globalThis as GlobalThisWithRequestContext\n const ctx = _globalThis[NEXT_REQUEST_CONTEXT_SYMBOL]\n return ctx?.get()\n}\n\nconst NEXT_REQUEST_CONTEXT_SYMBOL = Symbol.for('@next/request-context')\n\ntype GlobalThisWithRequestContext = typeof globalThis & {\n [NEXT_REQUEST_CONTEXT_SYMBOL]?: BuiltinRequestContext\n}\n\n/** A request context provided by the platform. */\nexport type BuiltinRequestContext = {\n get(): BuiltinRequestContextValue | undefined\n}\n\nexport type RunnableBuiltinRequestContext = BuiltinRequestContext & {\n run<T>(value: BuiltinRequestContextValue, callback: () => T): T\n}\n\nexport type BuiltinRequestContextValue = {\n waitUntil?: WaitUntil\n}\nexport type WaitUntil = (promise: Promise<any>) => void\n\n/** \"@next/request-context\" has a different signature from AsyncLocalStorage,\n * matching [AsyncContext.Variable](https://github.com/tc39/proposal-async-context).\n * We don't need a full AsyncContext adapter here, just having `.get()` is enough\n */\nexport function createLocalRequestContext(): RunnableBuiltinRequestContext {\n const storage = createAsyncLocalStorage<BuiltinRequestContextValue>()\n return {\n get: () => storage.getStore(),\n run: (value, callback) => storage.run(value, callback),\n 
}\n}\n"],"names":["createAsyncLocalStorage","getBuiltinRequestContext","_globalThis","globalThis","ctx","NEXT_REQUEST_CONTEXT_SYMBOL","get","Symbol","for","createLocalRequestContext","storage","getStore","run","value","callback"],"mappings":"AAAA,SAASA,uBAAuB,QAAQ,oCAAmC;AAE3E,OAAO,SAASC;IAGd,MAAMC,cAAcC;IACpB,MAAMC,MAAMF,WAAW,CAACG,4BAA4B;IACpD,OAAOD,uBAAAA,IAAKE,GAAG;AACjB;AAEA,MAAMD,8BAA8BE,OAAOC,GAAG,CAAC;AAoB/C;;;CAGC,GACD,OAAO,SAASC;IACd,MAAMC,UAAUV;IAChB,OAAO;QACLM,KAAK,IAAMI,QAAQC,QAAQ;QAC3BC,KAAK,CAACC,OAAOC,WAAaJ,QAAQE,GAAG,CAACC,OAAOC;IAC/C;AACF","ignoreList":[0]}

3
node_modules/next/dist/esm/server/after/index.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
// Barrel module: re-exports the public `after()` API.
export * from './after';
//# sourceMappingURL=index.js.map

1
node_modules/next/dist/esm/server/after/index.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/after/index.ts"],"sourcesContent":["export * from './after'\n"],"names":[],"mappings":"AAAA,cAAc,UAAS","ignoreList":[0]}

View File

@@ -0,0 +1,25 @@
import { DetachedPromise } from '../../lib/detached-promise';
import { CloseController } from '../web/web-on-close';
import { AwaiterOnce } from './awaiter';
/**
 * Wires together the pieces needed to run `after()` callbacks for one request:
 * an {@link AwaiterOnce} for `waitUntil`, a close controller for `onClose`,
 * and a promise that rejects if any task errors.
 */
export class AfterRunner {
    constructor(){
        this.awaiter = new AwaiterOnce();
        this.closeController = new CloseController();
        this.finishedWithoutErrors = new DetachedPromise();
        // Pre-bound hooks handed to AfterContext.
        this.context = {
            waitUntil: this.awaiter.waitUntil.bind(this.awaiter),
            onClose: this.closeController.onClose.bind(this.closeController),
            onTaskError: (error)=>this.finishedWithoutErrors.reject(error)
        };
    }
    /**
     * Signals close, waits for all scheduled work, and resolves/rejects
     * depending on whether any task reported an error.
     */
    async executeAfter() {
        this.closeController.dispatchClose();
        await this.awaiter.awaiting();
        // If a task already errored, the promise is already rejected via
        // `onTaskError`, so this resolve() is a no-op.
        this.finishedWithoutErrors.resolve();
        return this.finishedWithoutErrors.promise;
    }
}
//# sourceMappingURL=run-with-after.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/after/run-with-after.ts"],"sourcesContent":["import { DetachedPromise } from '../../lib/detached-promise'\nimport { CloseController } from '../web/web-on-close'\nimport type { AfterContextOpts } from './after-context'\nimport { AwaiterOnce } from './awaiter'\n\ntype Ctx = {\n waitUntil: NonNullable<AfterContextOpts['waitUntil']>\n onClose: NonNullable<AfterContextOpts['onClose']>\n onTaskError: NonNullable<AfterContextOpts['onTaskError']>\n}\n\nexport class AfterRunner {\n private awaiter = new AwaiterOnce()\n private closeController = new CloseController()\n private finishedWithoutErrors = new DetachedPromise<void>()\n\n readonly context: Ctx = {\n waitUntil: this.awaiter.waitUntil.bind(this.awaiter),\n onClose: this.closeController.onClose.bind(this.closeController),\n onTaskError: (error) => this.finishedWithoutErrors.reject(error),\n }\n\n public async executeAfter() {\n this.closeController.dispatchClose()\n await this.awaiter.awaiting()\n\n // if we got an error while running the callbacks,\n // thenthis is a noop, because the promise is already rejected\n this.finishedWithoutErrors.resolve()\n\n return this.finishedWithoutErrors.promise\n 
}\n}\n"],"names":["DetachedPromise","CloseController","AwaiterOnce","AfterRunner","executeAfter","closeController","dispatchClose","awaiter","awaiting","finishedWithoutErrors","resolve","promise","context","waitUntil","bind","onClose","onTaskError","error","reject"],"mappings":"AAAA,SAASA,eAAe,QAAQ,6BAA4B;AAC5D,SAASC,eAAe,QAAQ,sBAAqB;AAErD,SAASC,WAAW,QAAQ,YAAW;AAQvC,OAAO,MAAMC;IAWX,MAAaC,eAAe;QAC1B,IAAI,CAACC,eAAe,CAACC,aAAa;QAClC,MAAM,IAAI,CAACC,OAAO,CAACC,QAAQ;QAE3B,kDAAkD;QAClD,8DAA8D;QAC9D,IAAI,CAACC,qBAAqB,CAACC,OAAO;QAElC,OAAO,IAAI,CAACD,qBAAqB,CAACE,OAAO;IAC3C;;aAnBQJ,UAAU,IAAIL;aACdG,kBAAkB,IAAIJ;aACtBQ,wBAAwB,IAAIT;aAE3BY,UAAe;YACtBC,WAAW,IAAI,CAACN,OAAO,CAACM,SAAS,CAACC,IAAI,CAAC,IAAI,CAACP,OAAO;YACnDQ,SAAS,IAAI,CAACV,eAAe,CAACU,OAAO,CAACD,IAAI,CAAC,IAAI,CAACT,eAAe;YAC/DW,aAAa,CAACC,QAAU,IAAI,CAACR,qBAAqB,CAACS,MAAM,CAACD;QAC5D;;AAYF","ignoreList":[0]}

View File

@@ -0,0 +1,15 @@
/**
 * Parse cookies from the `headers` of request
 * @param req request object
 * @returns a lazy parser: the cookie module is only loaded when a `cookie`
 * header is actually present.
 */
export function getCookieParser(headers) {
    return function parseCookie() {
        const cookie = headers.cookie;
        if (!cookie) {
            return {};
        }
        const { parse: parseCookieFn } = require('next/dist/compiled/cookie');
        // Multiple cookie headers are joined per RFC 6265 before parsing.
        const raw = Array.isArray(cookie) ? cookie.join('; ') : cookie;
        return parseCookieFn(raw);
    };
}
//# sourceMappingURL=get-cookie-parser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/api-utils/get-cookie-parser.ts"],"sourcesContent":["import type { NextApiRequestCookies } from '.'\n\n/**\n * Parse cookies from the `headers` of request\n * @param req request object\n */\n\nexport function getCookieParser(headers: {\n [key: string]: string | string[] | null | undefined\n}): () => NextApiRequestCookies {\n return function parseCookie(): NextApiRequestCookies {\n const { cookie } = headers\n\n if (!cookie) {\n return {}\n }\n\n const { parse: parseCookieFn } =\n require('next/dist/compiled/cookie') as typeof import('next/dist/compiled/cookie')\n return parseCookieFn(Array.isArray(cookie) ? cookie.join('; ') : cookie)\n }\n}\n"],"names":["getCookieParser","headers","parseCookie","cookie","parse","parseCookieFn","require","Array","isArray","join"],"mappings":"AAEA;;;CAGC,GAED,OAAO,SAASA,gBAAgBC,OAE/B;IACC,OAAO,SAASC;QACd,MAAM,EAAEC,MAAM,EAAE,GAAGF;QAEnB,IAAI,CAACE,QAAQ;YACX,OAAO,CAAC;QACV;QAEA,MAAM,EAAEC,OAAOC,aAAa,EAAE,GAC5BC,QAAQ;QACV,OAAOD,cAAcE,MAAMC,OAAO,CAACL,UAAUA,OAAOM,IAAI,CAAC,QAAQN;IACnE;AACF","ignoreList":[0]}

156
node_modules/next/dist/esm/server/api-utils/index.js generated vendored Normal file
View File

@@ -0,0 +1,156 @@
import { HeadersAdapter } from '../web/spec-extension/adapters/headers';
import { PRERENDER_REVALIDATE_HEADER, PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER } from '../../lib/constants';
import { getTracer } from '../lib/trace/tracer';
import { NodeSpan } from '../lib/trace/constants';
/**
 * Wraps a Pages Router API handler so every invocation is traced: the root
 * span is tagged with the route and the handler runs inside a `runHandler` span.
 */
export function wrapApiHandler(page, handler) {
    return (...args)=>{
        getTracer().setRootSpanAttribute('next.route', page);
        // Call API route method
        const invoke = ()=>handler(...args);
        return getTracer().trace(NodeSpan.runHandler, {
            spanName: `executing api route (pages) ${page}`
        }, invoke);
    };
}
/**
*
* @param res response object
* @param statusCode `HTTP` status code of response
*/ export function sendStatusCode(res, statusCode) {
res.statusCode = statusCode;
return res;
}
/**
*
* @param res response object
* @param [statusOrUrl] `HTTP` status code of redirect
* @param url URL of redirect
*/ export function redirect(res, statusOrUrl, url) {
if (typeof statusOrUrl === 'string') {
url = statusOrUrl;
statusOrUrl = 307;
}
if (typeof statusOrUrl !== 'number' || typeof url !== 'string') {
throw Object.defineProperty(new Error(`Invalid redirect arguments. Please use a single argument URL, e.g. res.redirect('/destination') or use a status code and URL, e.g. res.redirect(307, '/destination').`), "__NEXT_ERROR_CODE", {
value: "E389",
enumerable: false,
configurable: true
});
}
res.writeHead(statusOrUrl, {
Location: url
});
res.write(url);
res.end();
return res;
}
/**
 * Inspects the request headers to determine whether this is an on-demand
 * revalidation request (revalidate header matches the configured preview id)
 * and whether revalidation should only apply to already-generated paths.
 */
export function checkIsOnDemandRevalidate(req, previewProps) {
    const headers = HeadersAdapter.from(req.headers);
    const providedId = headers.get(PRERENDER_REVALIDATE_HEADER);
    return {
        isOnDemandRevalidate: providedId === previewProps.previewModeId,
        revalidateOnlyGenerated: headers.has(PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER)
    };
}
// Cookie that lets a request bypass the prerender cache (preview/draft mode).
export const COOKIE_NAME_PRERENDER_BYPASS = `__prerender_bypass`;
// Cookie carrying the preview-mode payload.
export const COOKIE_NAME_PRERENDER_DATA = `__next_preview_data`;
// Default API response size limit: 4 MB.
export const RESPONSE_LIMIT_DEFAULT = 4 * 1024 * 1024;
// Symbols used to tag request/response objects with preview state
// (e.g. SYMBOL_CLEARED_COOKIES marks a response whose preview cookies
// were already cleared — see clearPreviewData).
export const SYMBOL_PREVIEW_DATA = Symbol(COOKIE_NAME_PRERENDER_DATA);
export const SYMBOL_CLEARED_COOKIES = Symbol(COOKIE_NAME_PRERENDER_BYPASS);
/**
 * Appends `Set-Cookie` headers that expire both preview-mode cookies, then
 * marks the response so repeated calls are no-ops.
 * @param res response object
 * @param options optional `path` override for the expired cookies
 * @returns `res`, for chaining
 */
export function clearPreviewData(res, options = {}) {
    // Already cleared on this response — don't append duplicate headers.
    if (SYMBOL_CLEARED_COOKIES in res) {
        return res;
    }
    const { serialize } = require('next/dist/compiled/cookie');
    // To delete a cookie, set `expires` to a date in the past:
    // https://tools.ietf.org/html/rfc6265#section-4.1.1
    // `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
    const expireCookie = (name)=>{
        const cookieOptions = {
            expires: new Date(0),
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/'
        };
        if (options.path !== undefined) {
            cookieOptions.path = options.path;
        }
        return serialize(name, '', cookieOptions);
    };
    // Preserve any Set-Cookie values already on the response.
    const previous = res.getHeader('Set-Cookie');
    const existing = typeof previous === 'string' ? [
        previous
    ] : Array.isArray(previous) ? previous : [];
    res.setHeader(`Set-Cookie`, [
        ...existing,
        expireCookie(COOKIE_NAME_PRERENDER_BYPASS),
        expireCookie(COOKIE_NAME_PRERENDER_DATA)
    ]);
    // Non-enumerable marker so the next call short-circuits above.
    Object.defineProperty(res, SYMBOL_CLEARED_COOKIES, {
        value: true,
        enumerable: false
    });
    return res;
}
/**
 * Custom error class pairing an HTTP status code with a message, used by API
 * routes to signal failures that map directly to a response status.
 */
export class ApiError extends Error {
    /**
     * @param statusCode HTTP status code to respond with
     * @param message error message (also used as the response body)
     */
    constructor(statusCode, message){
        super(message);
        this.statusCode = statusCode;
    }
}
/**
* Sends error in `response`
* @param res response object
* @param statusCode of response
* @param message of response
*/ export function sendError(res, statusCode, message) {
res.statusCode = statusCode;
res.statusMessage = message;
res.end(message);
}
/**
* Execute getter function only if its needed
* @param LazyProps `req` and `params` for lazyProp
* @param prop name of property
* @param getter function to get data
*/ export function setLazyProp({ req }, prop, getter) {
const opts = {
configurable: true,
enumerable: true
};
const optsReset = {
...opts,
writable: true
};
Object.defineProperty(req, prop, {
...opts,
get: ()=>{
const value = getter();
// we set the property on the object to avoid recalculating it
Object.defineProperty(req, prop, {
...optsReset,
value
});
return value;
},
set: (value)=>{
Object.defineProperty(req, prop, {
...optsReset,
value
});
}
});
}
//# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,380 @@
import bytes from 'next/dist/compiled/bytes';
import { generateETag } from '../../lib/etag';
import { sendEtagResponse } from '../../send-payload';
import { Stream } from 'stream';
import isError from '../../../lib/is-error';
import { isResSent } from '../../../shared/lib/utils';
import { interopDefault } from '../../../lib/interop-default';
import { setLazyProp, sendStatusCode, redirect, clearPreviewData, sendError, ApiError, COOKIE_NAME_PRERENDER_BYPASS, COOKIE_NAME_PRERENDER_DATA, RESPONSE_LIMIT_DEFAULT } from './../index';
import { getCookieParser } from './../get-cookie-parser';
import { JSON_CONTENT_TYPE_HEADER, PRERENDER_REVALIDATE_HEADER, PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER } from '../../../lib/constants';
import { tryGetPreviewData } from './try-get-preview-data';
import { parseBody } from './parse-body';
/**
 * Resolves the response size limit in bytes. A string/number limit is parsed
 * via `bytes`; `false`/`true`/missing falls back to the 4 MB default.
 */
function getMaxContentLength(responseLimit) {
    if (!responseLimit || typeof responseLimit === 'boolean') {
        return RESPONSE_LIMIT_DEFAULT;
    }
    return bytes.parse(responseLimit);
}
/**
 * Send `any` body to response
 * @param req request object
 * @param res response object
 * @param body of response
 */ function sendData(req, res, body) {
    // Nothing to send: just finish the response.
    if (body === null || body === undefined) {
        res.end();
        return;
    }
    // strip irrelevant headers/body
    // 204 (No Content) and 304 (Not Modified) must not carry a body, so drop
    // entity headers and discard whatever body was passed in.
    if (res.statusCode === 204 || res.statusCode === 304) {
        res.removeHeader('Content-Type');
        res.removeHeader('Content-Length');
        res.removeHeader('Transfer-Encoding');
        if (process.env.NODE_ENV === 'development' && body) {
            console.warn(`A body was attempted to be set with a 204 statusCode for ${req.url}, this is invalid and the body was ignored.\n` + `See more info here https://nextjs.org/docs/messages/invalid-api-status-body`);
        }
        res.end();
        return;
    }
    const contentType = res.getHeader('Content-Type');
    // Streams are piped through as-is (no ETag / Content-Length handling).
    if (body instanceof Stream) {
        if (!contentType) {
            res.setHeader('Content-Type', 'application/octet-stream');
        }
        body.pipe(res);
        return;
    }
    // Objects, numbers and booleans are JSON-serialized; strings pass through.
    const isJSONLike = [
        'object',
        'number',
        'boolean'
    ].includes(typeof body);
    const stringifiedBody = isJSONLike ? JSON.stringify(body) : body;
    // Conditional-request support: sendEtagResponse may answer with 304 and
    // skip the body entirely.
    const etag = generateETag(stringifiedBody);
    if (sendEtagResponse(req, res, etag)) {
        return;
    }
    // NOTE(review): for Buffers, the ETag above is computed from the JSON
    // stringification while the raw buffer bytes are what is sent — confirm
    // this is intentional.
    if (Buffer.isBuffer(body)) {
        if (!contentType) {
            res.setHeader('Content-Type', 'application/octet-stream');
        }
        res.setHeader('Content-Length', body.length);
        res.end(body);
        return;
    }
    if (isJSONLike) {
        res.setHeader('Content-Type', JSON_CONTENT_TYPE_HEADER);
    }
    res.setHeader('Content-Length', Buffer.byteLength(stringifiedBody));
    res.end(stringifiedBody);
}
/**
 * Serialize `jsonBody` and send it as an `application/json` response.
 * @param res response object
 * @param jsonBody data to serialize
 */ function sendJson(res, jsonBody) {
    // Advertise the JSON content type first, then delegate to `send` so the
    // ETag and Content-Length handling stay in one place.
    res.setHeader('Content-Type', JSON_CONTENT_TYPE_HEADER);
    const serialized = JSON.stringify(jsonBody);
    res.send(serialized);
}
/**
 * Preview-mode secrets (ids and keys) must be strings of at least 16
 * characters; any other value is treated as invalid.
 */ function isValidData(str) {
    if (typeof str !== 'string') {
        return false;
    }
    return str.length >= 16;
}
/**
 * Enables or disables Draft Mode by setting (or expiring) the preview-bypass
 * cookie on the response. Draft Mode reuses the Preview Mode bypass cookie
 * but stores no data cookie.
 * @param res response the Set-Cookie header is appended to
 * @param options carries `previewModeId` (validated) and `enable`
 * @returns the same response object for chaining
 */ function setDraftMode(res, options) {
    if (!isValidData(options.previewModeId)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeId'), "__NEXT_ERROR_CODE", {
            value: "E169",
            enumerable: false,
            configurable: true
        });
    }
    const expires = options.enable ? undefined : new Date(0);
    // To delete a cookie, set `expires` to a date in the past:
    // https://tools.ietf.org/html/rfc6265#section-4.1.1
    // `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
    const { serialize } = require('next/dist/compiled/cookie');
    // Preserve any Set-Cookie headers already written to this response.
    const previous = res.getHeader('Set-Cookie');
    res.setHeader(`Set-Cookie`, [
        ...typeof previous === 'string' ? [
            previous
        ] : Array.isArray(previous) ? previous : [],
        serialize(COOKIE_NAME_PRERENDER_BYPASS, options.previewModeId, {
            httpOnly: true,
            // SameSite=None (with Secure) outside development so the cookie
            // survives cross-site navigations, e.g. CMS preview iframes.
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            expires
        })
    ]);
    return res;
}
/**
 * Signs and encrypts `data` into the preview-mode cookies on the response.
 * The payload is encrypted with `previewModeEncryptionKey` and wrapped in a
 * JWT signed with `previewModeSigningKey`; the bypass cookie carries the
 * preview mode id.
 * @param res response the Set-Cookie headers are appended to
 * @param data JSON-serializable preview data
 * @param options preview secrets plus optional `maxAge`/`path` cookie options
 * @returns the same response object for chaining
 */ function setPreviewData(res, data, options) {
    if (!isValidData(options.previewModeId)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeId'), "__NEXT_ERROR_CODE", {
            value: "E169",
            enumerable: false,
            configurable: true
        });
    }
    if (!isValidData(options.previewModeEncryptionKey)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeEncryptionKey'), "__NEXT_ERROR_CODE", {
            value: "E334",
            enumerable: false,
            configurable: true
        });
    }
    if (!isValidData(options.previewModeSigningKey)) {
        throw Object.defineProperty(new Error('invariant: invalid previewModeSigningKey'), "__NEXT_ERROR_CODE", {
            value: "E436",
            enumerable: false,
            configurable: true
        });
    }
    const jsonwebtoken = require('next/dist/compiled/jsonwebtoken');
    const { encryptWithSecret } = require('../../crypto-utils');
    // Encrypt the data, then wrap it in a signed JWT so the client can neither
    // read nor tamper with it.
    const payload = jsonwebtoken.sign({
        data: encryptWithSecret(Buffer.from(options.previewModeEncryptionKey), JSON.stringify(data))
    }, options.previewModeSigningKey, {
        algorithm: 'HS256',
        ...options.maxAge !== undefined ? {
            expiresIn: options.maxAge
        } : undefined
    });
    // limit preview mode cookie to 2KB since we shouldn't store too much
    // data here and browsers drop cookies over 4KB
    if (payload.length > 2048) {
        throw Object.defineProperty(new Error(`Preview data is limited to 2KB currently, reduce how much data you are storing as preview data to continue`), "__NEXT_ERROR_CODE", {
            value: "E465",
            enumerable: false,
            configurable: true
        });
    }
    const { serialize } = require('next/dist/compiled/cookie');
    // Append the bypass and data cookies to any Set-Cookie headers already
    // present on the response.
    const previous = res.getHeader('Set-Cookie');
    res.setHeader(`Set-Cookie`, [
        ...typeof previous === 'string' ? [
            previous
        ] : Array.isArray(previous) ? previous : [],
        serialize(COOKIE_NAME_PRERENDER_BYPASS, options.previewModeId, {
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            ...options.maxAge !== undefined ? {
                maxAge: options.maxAge
            } : undefined,
            ...options.path !== undefined ? {
                path: options.path
            } : undefined
        }),
        serialize(COOKIE_NAME_PRERENDER_DATA, payload, {
            httpOnly: true,
            sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
            secure: process.env.NODE_ENV !== 'development',
            path: '/',
            ...options.maxAge !== undefined ? {
                maxAge: options.maxAge
            } : undefined,
            ...options.path !== undefined ? {
                path: options.path
            } : undefined
        })
    ]);
    return res;
}
/**
 * Triggers on-demand revalidation for `urlPath`, either through the internal
 * router-server method when available, or via a HEAD request to the
 * deployment when the host header is trusted.
 * @param urlPath path to revalidate; must start with '/'
 * @param opts may set `unstable_onlyGenerated` to skip not-yet-generated paths
 * @param req the originating request (an allow-listed subset of its headers is forwarded)
 * @param context revalidation context: previewModeId, allowed header keys, etc.
 */ async function revalidate(urlPath, opts, req, context) {
    if (typeof urlPath !== 'string' || !urlPath.startsWith('/')) {
        throw Object.defineProperty(new Error(`Invalid urlPath provided to revalidate(), must be a path e.g. /blog/post-1, received ${urlPath}`), "__NEXT_ERROR_CODE", {
            value: "E153",
            enumerable: false,
            configurable: true
        });
    }
    // The preview mode id authenticates the revalidation request.
    const headers = {
        [PRERENDER_REVALIDATE_HEADER]: context.previewModeId,
        ...opts.unstable_onlyGenerated ? {
            [PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER]: '1'
        } : {}
    };
    // Only forward an allow-listed subset of the incoming request headers.
    const allowedRevalidateHeaderKeys = [
        ...context.allowedRevalidateHeaderKeys || []
    ];
    if (context.trustHostHeader || context.dev) {
        allowedRevalidateHeaderKeys.push('cookie');
    }
    if (context.trustHostHeader) {
        allowedRevalidateHeaderKeys.push('x-vercel-protection-bypass');
    }
    for (const key of Object.keys(req.headers)){
        if (allowedRevalidateHeaderKeys.includes(key)) {
            headers[key] = req.headers[key];
        }
    }
    const internalRevalidate = context.internalRevalidate;
    try {
        // We use the revalidate in router-server if available.
        // If we are operating without router-server (serverless)
        // we must go through network layer with fetch request
        if (internalRevalidate) {
            return await internalRevalidate({
                urlPath,
                headers,
                opts
            });
        }
        if (context.trustHostHeader) {
            const res = await fetch(`https://${req.headers.host}${urlPath}`, {
                method: 'HEAD',
                headers
            });
            // we use the cache header to determine successful revalidate as
            // a non-200 status code can be returned from a successful revalidate
            // e.g. notFound: true returns 404 status code but is successful
            const cacheHeader = res.headers.get('x-vercel-cache') || res.headers.get('x-nextjs-cache');
            if ((cacheHeader == null ? void 0 : cacheHeader.toUpperCase()) !== 'REVALIDATED' && res.status !== 200 && !(res.status === 404 && opts.unstable_onlyGenerated)) {
                throw Object.defineProperty(new Error(`Invalid response ${res.status}`), "__NEXT_ERROR_CODE", {
                    value: "E175",
                    enumerable: false,
                    configurable: true
                });
            }
        } else {
            throw Object.defineProperty(new Error(`Invariant: missing internal router-server-methods this is an internal bug`), "__NEXT_ERROR_CODE", {
                value: "E676",
                enumerable: false,
                configurable: true
            });
        }
    } catch (err) {
        // Wrap any failure (including the invariants above) with path context.
        throw Object.defineProperty(new Error(`Failed to revalidate ${urlPath}: ${isError(err) ? err.message : err}`), "__NEXT_ERROR_CODE", {
            value: "E240",
            enumerable: false,
            configurable: true
        });
    }
}
/**
 * Resolves a Pages Router API request: wires the Next.js helper methods onto
 * the req/res pair, parses cookies/query/preview data/body, then invokes the
 * route module's default export.
 * @param req incoming request
 * @param res outgoing response, augmented with status/send/json/redirect/etc.
 * @param query parsed query params copied onto `req.query`
 * @param resolverModule the API route module; a missing module yields a 404
 * @param apiContext preview/draft-mode secrets and revalidation context
 * @param propagateError when true, rethrow handler errors instead of sending a 500
 * @param dev development-mode flag; errors are rethrown and tagged with the page
 * @param page the page path, used for error reporting
 * @param onError optional error reporter invoked before the error response
 */ export async function apiResolver(req, res, query, resolverModule, apiContext, propagateError, dev, page, onError) {
    const apiReq = req;
    const apiRes = res;
    try {
        var _config_api, _config_api1, _config_api2;
        if (!resolverModule) {
            res.statusCode = 404;
            res.end('Not Found');
            return;
        }
        const config = resolverModule.config || {};
        // Route-level config: bodyParser defaults to enabled, responseLimit to true.
        const bodyParser = ((_config_api = config.api) == null ? void 0 : _config_api.bodyParser) !== false;
        const responseLimit = ((_config_api1 = config.api) == null ? void 0 : _config_api1.responseLimit) ?? true;
        const externalResolver = ((_config_api2 = config.api) == null ? void 0 : _config_api2.externalResolver) || false;
        // Parsing of cookies
        setLazyProp({
            req: apiReq
        }, 'cookies', getCookieParser(req.headers));
        // Ensure req.query is a writable, enumerable property by using Object.defineProperty.
        // This addresses Express 5.x, which defines query as a getter only (read-only).
        Object.defineProperty(apiReq, 'query', {
            value: {
                ...query
            },
            writable: true,
            enumerable: true,
            configurable: true
        });
        // Parsing preview data
        setLazyProp({
            req: apiReq
        }, 'previewData', ()=>tryGetPreviewData(req, res, apiContext, !!apiContext.multiZoneDraftMode));
        // Checking if preview mode is enabled
        setLazyProp({
            req: apiReq
        }, 'preview', ()=>apiReq.previewData !== false ? true : undefined);
        // Set draftMode to the same value as preview
        setLazyProp({
            req: apiReq
        }, 'draftMode', ()=>apiReq.preview);
        // Parsing of body
        if (bodyParser && !apiReq.body) {
            apiReq.body = await parseBody(apiReq, config.api && config.api.bodyParser && config.api.bodyParser.sizeLimit ? config.api.bodyParser.sizeLimit : '1mb');
        }
        // Wrap write/end to track bytes written, so we can warn when the
        // response exceeds the configured limit.
        let contentLength = 0;
        const maxContentLength = getMaxContentLength(responseLimit);
        const writeData = apiRes.write;
        const endResponse = apiRes.end;
        apiRes.write = (...args)=>{
            contentLength += Buffer.byteLength(args[0] || '');
            return writeData.apply(apiRes, args);
        };
        apiRes.end = (...args)=>{
            // `end` may receive a callback as its first argument; only count data.
            if (args.length && typeof args[0] !== 'function') {
                contentLength += Buffer.byteLength(args[0] || '');
            }
            if (responseLimit && contentLength >= maxContentLength) {
                console.warn(`API response for ${req.url} exceeds ${bytes.format(maxContentLength)}. API Routes are meant to respond quickly. https://nextjs.org/docs/messages/api-routes-response-size-limit`);
            }
            return endResponse.apply(apiRes, args);
        };
        // Attach the Next.js convenience helpers to the response.
        apiRes.status = (statusCode)=>sendStatusCode(apiRes, statusCode);
        apiRes.send = (data)=>sendData(apiReq, apiRes, data);
        apiRes.json = (data)=>sendJson(apiRes, data);
        apiRes.redirect = (statusOrUrl, url)=>redirect(apiRes, statusOrUrl, url);
        apiRes.setDraftMode = (options = {
            enable: true
        })=>setDraftMode(apiRes, Object.assign({}, apiContext, options));
        apiRes.setPreviewData = (data, options = {})=>setPreviewData(apiRes, data, Object.assign({}, apiContext, options));
        apiRes.clearPreviewData = (options = {})=>clearPreviewData(apiRes, options);
        apiRes.revalidate = (urlPath, opts)=>revalidate(urlPath, opts || {}, req, apiContext);
        const resolver = interopDefault(resolverModule);
        let wasPiped = false;
        if (process.env.NODE_ENV !== 'production') {
            // listen for pipe event and don't show resolve warning
            res.once('pipe', ()=>wasPiped = true);
        }
        const apiRouteResult = await resolver(req, res);
        if (process.env.NODE_ENV !== 'production') {
            if (typeof apiRouteResult !== 'undefined') {
                if (apiRouteResult instanceof Response) {
                    throw Object.defineProperty(new Error('API route returned a Response object in the Node.js runtime, this is not supported. Please use `runtime: "edge"` instead: https://nextjs.org/docs/api-routes/edge-api-routes'), "__NEXT_ERROR_CODE", {
                        value: "E36",
                        enumerable: false,
                        configurable: true
                    });
                }
                console.warn(`API handler should not return a value, received ${typeof apiRouteResult}.`);
            }
            if (!externalResolver && !isResSent(res) && !wasPiped) {
                console.warn(`API resolved without sending a response for ${req.url}, this may result in stalled requests.`);
            }
        }
    } catch (err) {
        // Report the error first, then translate it into a response (or rethrow).
        await (onError == null ? void 0 : onError(err, {
            method: req.method || 'GET',
            headers: req.headers,
            path: req.url || '/'
        }, {
            routerKind: 'Pages Router',
            routePath: page || '',
            routeType: 'route',
            revalidateReason: undefined
        }));
        if (err instanceof ApiError) {
            sendError(apiRes, err.statusCode, err.message);
        } else {
            if (dev) {
                if (isError(err)) {
                    err.page = page;
                }
                throw err;
            }
            console.error(err);
            if (propagateError) {
                throw err;
            }
            sendError(apiRes, 500, 'Internal Server Error');
        }
    }
}
//# sourceMappingURL=api-resolver.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,67 @@
import { parse } from 'next/dist/compiled/content-type';
import isError from '../../../lib/is-error';
import { ApiError } from '../index';
/**
 * Parse a `JSON` string, tolerating an empty body and mapping syntax errors
 * to a 400 `ApiError`.
 * @param str `JSON` string
 */ function parseJson(str) {
    if (str.length === 0) {
        // special-case empty json body, as it's a common client-side mistake
        return {};
    }
    try {
        return JSON.parse(str);
    } catch {
        const invalidJsonError = new ApiError(400, 'Invalid JSON');
        Object.defineProperty(invalidJsonError, "__NEXT_ERROR_CODE", {
            value: "E394",
            enumerable: false,
            configurable: true
        });
        throw invalidJsonError;
    }
}
/**
 * Parse incoming message like `json` or `urlencoded`.
 * Content type is taken from the `content-type` header, defaulting to
 * `text/plain` when missing or malformed; the raw body is read with the
 * charset advertised by the header.
 * @param req request object
 * @param limit maximum accepted body size (e.g. '1mb'); exceeding it raises a 413 ApiError
 */ export async function parseBody(req, limit) {
    let contentType;
    try {
        contentType = parse(req.headers['content-type'] || 'text/plain');
    } catch {
        // Malformed content-type header: fall back to text/plain.
        contentType = parse('text/plain');
    }
    const { type, parameters } = contentType;
    const encoding = parameters.charset || 'utf-8';
    let buffer;
    try {
        const getRawBody = require('next/dist/compiled/raw-body');
        buffer = await getRawBody(req, {
            encoding,
            limit
        });
    } catch (e) {
        // raw-body flags oversized payloads with `type: 'entity.too.large'`;
        // everything else is treated as a malformed body.
        if (isError(e) && e.type === 'entity.too.large') {
            throw Object.defineProperty(new ApiError(413, `Body exceeded ${limit} limit`), "__NEXT_ERROR_CODE", {
                value: "E394",
                enumerable: false,
                configurable: true
            });
        } else {
            throw Object.defineProperty(new ApiError(400, 'Invalid body'), "__NEXT_ERROR_CODE", {
                value: "E394",
                enumerable: false,
                configurable: true
            });
        }
    }
    const body = buffer.toString();
    if (type === 'application/json' || type === 'application/ld+json') {
        return parseJson(body);
    } else if (type === 'application/x-www-form-urlencoded') {
        const qs = require('querystring');
        return qs.decode(body);
    } else {
        // Unknown content types are returned as the raw string body.
        return body;
    }
}
//# sourceMappingURL=parse-body.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/api-utils/node/parse-body.ts"],"sourcesContent":["import type { IncomingMessage } from 'http'\n\nimport { parse } from 'next/dist/compiled/content-type'\nimport isError from '../../../lib/is-error'\nimport type { SizeLimit } from '../../../types'\nimport { ApiError } from '../index'\n\n/**\n * Parse `JSON` and handles invalid `JSON` strings\n * @param str `JSON` string\n */\nfunction parseJson(str: string): object {\n if (str.length === 0) {\n // special-case empty json body, as it's a common client-side mistake\n return {}\n }\n\n try {\n return JSON.parse(str)\n } catch (e) {\n throw new ApiError(400, 'Invalid JSON')\n }\n}\n\n/**\n * Parse incoming message like `json` or `urlencoded`\n * @param req request object\n */\nexport async function parseBody(\n req: IncomingMessage,\n limit: SizeLimit\n): Promise<any> {\n let contentType\n try {\n contentType = parse(req.headers['content-type'] || 'text/plain')\n } catch {\n contentType = parse('text/plain')\n }\n const { type, parameters } = contentType\n const encoding = parameters.charset || 'utf-8'\n\n let buffer\n\n try {\n const getRawBody =\n require('next/dist/compiled/raw-body') as typeof import('next/dist/compiled/raw-body')\n buffer = await getRawBody(req, { encoding, limit })\n } catch (e) {\n if (isError(e) && e.type === 'entity.too.large') {\n throw new ApiError(413, `Body exceeded ${limit} limit`)\n } else {\n throw new ApiError(400, 'Invalid body')\n }\n }\n\n const body = buffer.toString()\n\n if (type === 'application/json' || type === 'application/ld+json') {\n return parseJson(body)\n } else if (type === 'application/x-www-form-urlencoded') {\n const qs = require('querystring') as typeof import('querystring')\n return qs.decode(body)\n } else {\n return body\n 
}\n}\n"],"names":["parse","isError","ApiError","parseJson","str","length","JSON","e","parseBody","req","limit","contentType","headers","type","parameters","encoding","charset","buffer","getRawBody","require","body","toString","qs","decode"],"mappings":"AAEA,SAASA,KAAK,QAAQ,kCAAiC;AACvD,OAAOC,aAAa,wBAAuB;AAE3C,SAASC,QAAQ,QAAQ,WAAU;AAEnC;;;CAGC,GACD,SAASC,UAAUC,GAAW;IAC5B,IAAIA,IAAIC,MAAM,KAAK,GAAG;QACpB,qEAAqE;QACrE,OAAO,CAAC;IACV;IAEA,IAAI;QACF,OAAOC,KAAKN,KAAK,CAACI;IACpB,EAAE,OAAOG,GAAG;QACV,MAAM,qBAAiC,CAAjC,IAAIL,SAAS,KAAK,iBAAlB,qBAAA;mBAAA;wBAAA;0BAAA;QAAgC;IACxC;AACF;AAEA;;;CAGC,GACD,OAAO,eAAeM,UACpBC,GAAoB,EACpBC,KAAgB;IAEhB,IAAIC;IACJ,IAAI;QACFA,cAAcX,MAAMS,IAAIG,OAAO,CAAC,eAAe,IAAI;IACrD,EAAE,OAAM;QACND,cAAcX,MAAM;IACtB;IACA,MAAM,EAAEa,IAAI,EAAEC,UAAU,EAAE,GAAGH;IAC7B,MAAMI,WAAWD,WAAWE,OAAO,IAAI;IAEvC,IAAIC;IAEJ,IAAI;QACF,MAAMC,aACJC,QAAQ;QACVF,SAAS,MAAMC,WAAWT,KAAK;YAAEM;YAAUL;QAAM;IACnD,EAAE,OAAOH,GAAG;QACV,IAAIN,QAAQM,MAAMA,EAAEM,IAAI,KAAK,oBAAoB;YAC/C,MAAM,qBAAiD,CAAjD,IAAIX,SAAS,KAAK,CAAC,cAAc,EAAEQ,MAAM,MAAM,CAAC,GAAhD,qBAAA;uBAAA;4BAAA;8BAAA;YAAgD;QACxD,OAAO;YACL,MAAM,qBAAiC,CAAjC,IAAIR,SAAS,KAAK,iBAAlB,qBAAA;uBAAA;4BAAA;8BAAA;YAAgC;QACxC;IACF;IAEA,MAAMkB,OAAOH,OAAOI,QAAQ;IAE5B,IAAIR,SAAS,sBAAsBA,SAAS,uBAAuB;QACjE,OAAOV,UAAUiB;IACnB,OAAO,IAAIP,SAAS,qCAAqC;QACvD,MAAMS,KAAKH,QAAQ;QACnB,OAAOG,GAAGC,MAAM,CAACH;IACnB,OAAO;QACL,OAAOA;IACT;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,76 @@
import { checkIsOnDemandRevalidate } from '../.';
import { clearPreviewData, COOKIE_NAME_PRERENDER_BYPASS, COOKIE_NAME_PRERENDER_DATA, SYMBOL_PREVIEW_DATA } from '../index';
import { RequestCookies } from '../../web/spec-extension/cookies';
import { HeadersAdapter } from '../../web/spec-extension/adapters/headers';
/**
 * Reads and validates the preview-mode cookies on the request, returning the
 * decrypted preview data, an empty object for Draft Mode, or `false` when
 * preview mode is not (validly) enabled. A successful result is cached on the
 * request under a symbol so repeated lookups are cheap.
 * @param req incoming request whose cookies are inspected
 * @param res response used to clear stale preview cookies
 * @param options preview secrets (previewModeId, signing and encryption keys)
 * @param multiZoneDraftMode when true, stale cookies are left intact for other zones
 */ export function tryGetPreviewData(req, res, options, multiZoneDraftMode) {
    var _cookies_get, _cookies_get1;
    // if an On-Demand revalidation is being done preview mode
    // is disabled
    if (options && checkIsOnDemandRevalidate(req, options).isOnDemandRevalidate) {
        return false;
    }
    // Read cached preview data if present
    // TODO: use request metadata instead of a symbol
    if (SYMBOL_PREVIEW_DATA in req) {
        return req[SYMBOL_PREVIEW_DATA];
    }
    const headers = HeadersAdapter.from(req.headers);
    const cookies = new RequestCookies(headers);
    const previewModeId = (_cookies_get = cookies.get(COOKIE_NAME_PRERENDER_BYPASS)) == null ? void 0 : _cookies_get.value;
    const tokenPreviewData = (_cookies_get1 = cookies.get(COOKIE_NAME_PRERENDER_DATA)) == null ? void 0 : _cookies_get1.value;
    // Case: preview mode cookie set but data cookie is not set
    if (previewModeId && !tokenPreviewData && previewModeId === options.previewModeId) {
        // This is "Draft Mode" which doesn't use
        // previewData, so we return an empty object
        // for backwards compat with "Preview Mode".
        const data = {};
        Object.defineProperty(req, SYMBOL_PREVIEW_DATA, {
            value: data,
            enumerable: false
        });
        return data;
    }
    // Case: neither cookie is set.
    if (!previewModeId && !tokenPreviewData) {
        return false;
    }
    // Case: one cookie is set, but not the other.
    if (!previewModeId || !tokenPreviewData) {
        if (!multiZoneDraftMode) {
            clearPreviewData(res);
        }
        return false;
    }
    // Case: preview session is for an old build.
    if (previewModeId !== options.previewModeId) {
        if (!multiZoneDraftMode) {
            clearPreviewData(res);
        }
        return false;
    }
    let encryptedPreviewData;
    try {
        // Verify the JWT signature before trusting the payload.
        const jsonwebtoken = require('next/dist/compiled/jsonwebtoken');
        encryptedPreviewData = jsonwebtoken.verify(tokenPreviewData, options.previewModeSigningKey);
    } catch {
        // TODO: warn
        clearPreviewData(res);
        return false;
    }
    const { decryptWithSecret } = require('../../crypto-utils');
    const decryptedPreviewData = decryptWithSecret(Buffer.from(options.previewModeEncryptionKey), encryptedPreviewData.data);
    try {
        // TODO: strict runtime type checking
        const data = JSON.parse(decryptedPreviewData);
        // Cache lookup
        Object.defineProperty(req, SYMBOL_PREVIEW_DATA, {
            value: data,
            enumerable: false
        });
        return data;
    } catch {
        return false;
    }
}
//# sourceMappingURL=try-get-preview-data.js.map

File diff suppressed because one or more lines are too long

7
node_modules/next/dist/esm/server/api-utils/web.js generated vendored Normal file
View File

@@ -0,0 +1,7 @@
// Buffer.byteLength polyfill in the Edge runtime, with only utf8 strings
// supported at the moment.
export function byteLength(payload) {
    // Encode to UTF-8 and report the size of the backing buffer.
    const utf8Bytes = new TextEncoder().encode(payload);
    return utf8Bytes.buffer.byteLength;
}
//# sourceMappingURL=web.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/api-utils/web.ts"],"sourcesContent":["// Buffer.byteLength polyfill in the Edge runtime, with only utf8 strings\n// supported at the moment.\nexport function byteLength(payload: string): number {\n return new TextEncoder().encode(payload).buffer.byteLength\n}\n"],"names":["byteLength","payload","TextEncoder","encode","buffer"],"mappings":"AAAA,yEAAyE;AACzE,2BAA2B;AAC3B,OAAO,SAASA,WAAWC,OAAe;IACxC,OAAO,IAAIC,cAAcC,MAAM,CAACF,SAASG,MAAM,CAACJ,UAAU;AAC5D","ignoreList":[0]}

View File

@@ -0,0 +1,4 @@
import { createAsyncLocalStorage } from './async-local-storage';
// Module-scoped singleton: a single AsyncLocalStorage instance shared by all
// importers of this module for the lifetime of the process.
export const actionAsyncStorageInstance = createAsyncLocalStorage();
//# sourceMappingURL=action-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/action-async-storage-instance.ts"],"sourcesContent":["import type { ActionAsyncStorage } from './action-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const actionAsyncStorageInstance: ActionAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["createAsyncLocalStorage","actionAsyncStorageInstance"],"mappings":"AACA,SAASA,uBAAuB,QAAQ,wBAAuB;AAE/D,OAAO,MAAMC,6BACXD,0BAAyB","ignoreList":[0]}

View File

@@ -0,0 +1,7 @@
// Share the instance module in the next-shared layer
// The import attribute below moves the instance module into the `next-shared`
// bundler layer so every consumer resolves the same storage instance.
import { actionAsyncStorageInstance } from './action-async-storage-instance' with {
    'turbopack-transition': 'next-shared'
};
export { actionAsyncStorageInstance as actionAsyncStorage };
//# sourceMappingURL=action-async-storage.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/action-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { actionAsyncStorageInstance } from './action-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nexport interface ActionStore {\n readonly isAction?: boolean\n readonly isAppRoute?: boolean\n}\n\nexport type ActionAsyncStorage = AsyncLocalStorage<ActionStore>\n\nexport { actionAsyncStorageInstance as actionAsyncStorage }\n"],"names":["actionAsyncStorageInstance","actionAsyncStorage"],"mappings":"AAEA,qDAAqD;AACrD,SAASA,0BAA0B,QAAQ,uCAAuC;IAAE,wBAAwB;AAAc,EAAC;AAQ3H,SAASA,8BAA8BC,kBAAkB,GAAE","ignoreList":[0]}

View File

@@ -0,0 +1,993 @@
import { RSC_HEADER, RSC_CONTENT_TYPE_HEADER, NEXT_ROUTER_STATE_TREE_HEADER, ACTION_HEADER, NEXT_ACTION_NOT_FOUND_HEADER, NEXT_ROUTER_PREFETCH_HEADER, NEXT_ROUTER_SEGMENT_PREFETCH_HEADER, NEXT_URL, NEXT_ACTION_REVALIDATED_HEADER } from '../../client/components/app-router-headers';
import { getAccessFallbackHTTPStatus, isHTTPAccessFallbackError } from '../../client/components/http-access-fallback/http-access-fallback';
import { getRedirectTypeFromError, getURLFromRedirectError } from '../../client/components/redirect';
import { isRedirectError } from '../../client/components/redirect-error';
import RenderResult from '../render-result';
import { FlightRenderResult } from './flight-render-result';
import { filterReqHeaders, actionsForbiddenHeaders } from '../lib/server-ipc/utils';
import { getModifiedCookieValues } from '../web/spec-extension/adapters/request-cookies';
import { JSON_CONTENT_TYPE_HEADER, NEXT_CACHE_REVALIDATED_TAGS_HEADER, NEXT_CACHE_REVALIDATE_TAG_TOKEN_HEADER } from '../../lib/constants';
import { getServerActionRequestMetadata } from '../lib/server-action-request-meta';
import { isCsrfOriginAllowed } from './csrf-protection';
import { warn } from '../../build/output/log';
import { RequestCookies, ResponseCookies } from '../web/spec-extension/cookies';
import { HeadersAdapter } from '../web/spec-extension/adapters/headers';
import { fromNodeOutgoingHttpHeaders } from '../web/utils';
import { selectWorkerForForwarding, getServerActionsManifest, getServerModuleMap } from './manifests-singleton';
import { isNodeNextRequest, isWebNextRequest } from '../base-http/helpers';
import { normalizeFilePath } from './segment-explorer-path';
import { extractInfoFromServerReferenceId } from '../../shared/lib/server-reference-info';
import { RedirectStatusCode } from '../../client/components/redirect-status-code';
import { synchronizeMutableCookies } from '../async-storage/request-store';
import { workUnitAsyncStorage } from '../app-render/work-unit-async-storage.external';
import { InvariantError } from '../../shared/lib/invariant-error';
import { executeRevalidates } from '../revalidation-utils';
import { addRequestMeta, getRequestMeta } from '../request-meta';
import { setCacheBustingSearchParam } from '../../client/components/router-reducer/set-cache-busting-search-param';
import { ActionDidNotRevalidate, ActionDidRevalidateStaticAndDynamic } from '../../shared/lib/action-revalidation-kind';
const INLINE_ACTION_PREFIX = '$$RSC_SERVER_ACTION_';
/**
 * Checks if the app has any server actions defined in any runtime.
 */ function hasServerActions() {
    const { node, edge } = getServerActionsManifest();
    const totalActionCount = Object.keys(node).length + Object.keys(edge).length;
    return totalActionCount > 0;
}
/**
 * Flattens Node.js-style headers (values of string, string[], or undefined)
 * into a plain string record. Array values are joined with ', ' and
 * undefined entries are dropped.
 */ function nodeHeadersToRecord(headers) {
    const normalizedEntries = Object.entries(headers)
        .filter(([, value])=>value !== undefined)
        .map(([name, value])=>[
            name,
            Array.isArray(value) ? value.join(', ') : `${value}`
        ]);
    return Object.fromEntries(normalizedEntries);
}
/**
 * Builds the outgoing `Headers` for a forwarded Server Action request by
 * merging request headers with response headers, merging cookies so response
 * cookies override request cookies, and stripping forbidden headers.
 */ function getForwardedHeaders(req, res) {
    // Get request headers and cookies
    const requestHeaders = req.headers;
    const requestCookies = new RequestCookies(HeadersAdapter.from(requestHeaders));
    // Get response headers and cookies
    const responseHeaders = res.getHeaders();
    const responseCookies = new ResponseCookies(fromNodeOutgoingHttpHeaders(responseHeaders));
    // Merge request and response headers (response values win on key clashes)
    const mergedHeaders = filterReqHeaders({
        ...nodeHeadersToRecord(requestHeaders),
        ...nodeHeadersToRecord(responseHeaders)
    }, actionsForbiddenHeaders);
    // Merge cookies into requestCookies, so responseCookies always take precedence
    // and overwrite/delete those from requestCookies.
    responseCookies.getAll().forEach((cookie)=>{
        if (typeof cookie.value === 'undefined') {
            requestCookies.delete(cookie.name);
        } else {
            requestCookies.set(cookie);
        }
    });
    // Update the 'cookie' header with the merged cookies
    mergedHeaders['cookie'] = requestCookies.toString();
    // Remove headers that should not be forwarded
    delete mergedHeaders['transfer-encoding'];
    return new Headers(mergedHeaders);
}
/**
 * Sets the action-revalidation response header so the client router knows
 * whether (and how much of) its cache to invalidate after a Server Action.
 * @param res response the header is written to
 * @param workStore carries pending revalidated tags/paths from the action
 * @param requestStore carries mutable cookies modified by the action
 */ function addRevalidationHeader(res, { workStore, requestStore }) {
    var _workStore_pendingRevalidatedTags;
    // If a tag was revalidated, the client router needs to invalidate all the
    // client router cache as they may be stale. And if a path was revalidated, the
    // client needs to invalidate all subtrees below that path.
    // TODO: Currently we don't send the specific tags or paths to the client,
    // we just send a flag indicating that all the static data on the client
    // should be invalidated. In the future, this will likely be a Bloom filter
    // or bitmask of some kind.
    // TODO-APP: Currently the prefetch cache doesn't have subtree information,
    // so we need to invalidate the entire cache if a path was revalidated.
    // TODO-APP: Currently paths are treated as tags, so the second element of the tuple
    // is always empty.
    // Only count tags without a profile (updateTag) as requiring client cache invalidation
    // Tags with a profile (revalidateTag) use stale-while-revalidate and shouldn't
    // trigger immediate client-side cache invalidation
    const isTagRevalidated = ((_workStore_pendingRevalidatedTags = workStore.pendingRevalidatedTags) == null ? void 0 : _workStore_pendingRevalidatedTags.some((item)=>item.profile === undefined)) ? 1 : 0;
    const isCookieRevalidated = getModifiedCookieValues(requestStore.mutableCookies).length ? 1 : 0;
    // First check if a tag, cookie, or path was revalidated.
    if (isTagRevalidated || isCookieRevalidated) {
        res.setHeader(NEXT_ACTION_REVALIDATED_HEADER, JSON.stringify(ActionDidRevalidateStaticAndDynamic));
    } else if (// Check for refresh() actions. This will invalidate only the dynamic data.
    workStore.pathWasRevalidated !== undefined && workStore.pathWasRevalidated !== ActionDidNotRevalidate) {
        res.setHeader(NEXT_ACTION_REVALIDATED_HEADER, JSON.stringify(workStore.pathWasRevalidated));
    }
}
/**
 * Forwards a server action request to a separate worker. Used when the requested action is not available in the current worker.
 * When the worker replies with an RSC payload its response is streamed back;
 * otherwise (including on any failure) an empty JSON result is returned.
 */ async function createForwardedActionResponse(req, res, host, workerPathname, basePath) {
    var _getRequestMeta;
    if (!host) {
        throw Object.defineProperty(new Error('Invariant: Missing `host` header from a forwarded Server Actions request.'), "__NEXT_ERROR_CODE", {
            value: "E226",
            enumerable: false,
            configurable: true
        });
    }
    const forwardedHeaders = getForwardedHeaders(req, res);
    // indicate that this action request was forwarded from another worker
    // we use this to skip rendering the flight tree so that we don't update the UI
    // with the response from the forwarded worker
    forwardedHeaders.set('x-action-forwarded', '1');
    const proto = ((_getRequestMeta = getRequestMeta(req, 'initProtocol')) == null ? void 0 : _getRequestMeta.replace(/:+$/, '')) || 'https';
    // For standalone or the serverful mode, use the internal origin directly
    // other than the host headers from the request.
    const origin = process.env.__NEXT_PRIVATE_ORIGIN || `${proto}://${host.value}`;
    const fetchUrl = new URL(`${origin}${basePath}${workerPathname}`);
    try {
        var _response_headers_get;
        let body;
        if (// The type check here ensures that `req` is correctly typed, and the
        // environment variable check provides dead code elimination.
        process.env.NEXT_RUNTIME === 'edge' && isWebNextRequest(req)) {
            if (!req.body) {
                throw Object.defineProperty(new Error('Invariant: missing request body.'), "__NEXT_ERROR_CODE", {
                    value: "E333",
                    enumerable: false,
                    configurable: true
                });
            }
            body = req.body;
        } else if (// The type check here ensures that `req` is correctly typed, and the
        // environment variable check provides dead code elimination.
        process.env.NEXT_RUNTIME !== 'edge' && isNodeNextRequest(req)) {
            body = req.stream();
        } else {
            throw Object.defineProperty(new Error('Invariant: Unknown request type.'), "__NEXT_ERROR_CODE", {
                value: "E114",
                enumerable: false,
                configurable: true
            });
        }
        // Forward the request to the new worker
        const response = await fetch(fetchUrl, {
            method: 'POST',
            body,
            duplex: 'half',
            headers: forwardedHeaders,
            redirect: 'manual',
            next: {
                // @ts-ignore
                internal: 1
            }
        });
        if ((_response_headers_get = response.headers.get('content-type')) == null ? void 0 : _response_headers_get.startsWith(RSC_CONTENT_TYPE_HEADER)) {
            // copy the headers from the redirect response to the response we're sending
            for (const [key, value] of response.headers){
                if (!actionsForbiddenHeaders.includes(key)) {
                    res.setHeader(key, value);
                }
            }
            return new FlightRenderResult(response.body);
        } else {
            var // Since we aren't consuming the response body, we cancel it to avoid memory leaks
            _response_body;
            (_response_body = response.body) == null ? void 0 : _response_body.cancel();
        }
    } catch (err) {
        // we couldn't stream the forwarded response, so we'll just return an empty response
        console.error(`failed to forward action response`, err);
    }
    return RenderResult.fromStatic('{}', JSON_CONTENT_TYPE_HEADER);
}
/**
 * Returns the parsed redirect URL if we deem that it is hosted by us.
 *
 * Handles absolute paths ('/x'), relative paths ('./x', '../x'), and fully
 * qualified URLs. Returns `null` when the redirect does not target this
 * application (different host, or a pathname outside the `basePath`).
 */ function getAppRelativeRedirectUrl(basePath, host, redirectUrl, currentPathname) {
    const DUMMY_ORIGIN = 'http://n';
    if (redirectUrl.startsWith('/')) {
        // App-absolute path: just prefix with basePath.
        return new URL(basePath + redirectUrl, DUMMY_ORIGIN);
    }
    if (redirectUrl.startsWith('.')) {
        // Relative path: resolve against the current pathname. The base must
        // end with a slash so './sub' from '/dir' resolves to '/dir/sub'
        // rather than '/sub'.
        let resolveBase = currentPathname || '/';
        if (!resolveBase.endsWith('/')) {
            resolveBase += '/';
        }
        const target = new URL(redirectUrl, `http://n${resolveBase}`);
        // Include basePath in the final URL.
        return new URL(
            basePath + target.pathname + target.search + target.hash,
            DUMMY_ORIGIN
        );
    }
    // Fully qualified URL: it is app-relative only if the host matches and
    // the pathname lives under the configured basePath.
    const absoluteUrl = new URL(redirectUrl);
    if ((host == null ? void 0 : host.value) !== absoluteUrl.host) {
        return null;
    }
    return absoluteUrl.pathname.startsWith(basePath) ? absoluteUrl : null;
}
/**
 * Builds the response for a Server Action that ended in a redirect.
 *
 * Always sets the `x-action-redirect` header (carrying the URL and redirect
 * type) for the client router. When the redirect target is hosted by this
 * application, the target's RSC payload is fetched server-side and streamed
 * back so the client can skip an extra roundtrip; on any failure (or for
 * external targets) an empty result is returned and the client performs the
 * redirect itself.
 */ async function createRedirectRenderResult(req, res, originalHost, redirectUrl, redirectType, basePath, workStore, currentPathname) {
    res.setHeader('x-action-redirect', `${redirectUrl};${redirectType}`);
    // If we're redirecting to another route of this Next.js application, we'll
    // try to stream the response from the other worker path. When that works,
    // we can save an extra roundtrip and avoid a full page reload.
    // When the redirect URL starts with a `/` or is to the same host, under the
    // `basePath` we treat it as an app-relative redirect;
    const appRelativeRedirectUrl = getAppRelativeRedirectUrl(basePath, originalHost, redirectUrl, currentPathname);
    if (appRelativeRedirectUrl) {
        var _getRequestMeta;
        if (!originalHost) {
            throw Object.defineProperty(new Error('Invariant: Missing `host` header from a forwarded Server Actions request.'), "__NEXT_ERROR_CODE", {
                value: "E226",
                enumerable: false,
                configurable: true
            });
        }
        const forwardedHeaders = getForwardedHeaders(req, res);
        // Mark the internal fetch as an RSC request.
        forwardedHeaders.set(RSC_HEADER, '1');
        // Derive the protocol from request meta (trailing colons stripped), defaulting to https.
        const proto = ((_getRequestMeta = getRequestMeta(req, 'initProtocol')) == null ? void 0 : _getRequestMeta.replace(/:+$/, '')) || 'https';
        // For standalone or the serverful mode, use the internal origin directly
        // other than the host headers from the request.
        const origin = process.env.__NEXT_PRIVATE_ORIGIN || `${proto}://${originalHost.value}`;
        const fetchUrl = new URL(`${origin}${appRelativeRedirectUrl.pathname}${appRelativeRedirectUrl.search}`);
        if (workStore.pendingRevalidatedTags) {
            var _workStore_incrementalCache_prerenderManifest_preview, _workStore_incrementalCache_prerenderManifest, _workStore_incrementalCache;
            // Forward pending tag revalidations (and the preview-mode token that
            // authorizes them) to the internal request.
            forwardedHeaders.set(NEXT_CACHE_REVALIDATED_TAGS_HEADER, workStore.pendingRevalidatedTags.map((item)=>item.tag).join(','));
            forwardedHeaders.set(NEXT_CACHE_REVALIDATE_TAG_TOKEN_HEADER, ((_workStore_incrementalCache = workStore.incrementalCache) == null ? void 0 : (_workStore_incrementalCache_prerenderManifest = _workStore_incrementalCache.prerenderManifest) == null ? void 0 : (_workStore_incrementalCache_prerenderManifest_preview = _workStore_incrementalCache_prerenderManifest.preview) == null ? void 0 : _workStore_incrementalCache_prerenderManifest_preview.previewModeId) || '');
        }
        // Ensures that when the path was revalidated we don't return a partial response on redirects
        forwardedHeaders.delete(NEXT_ROUTER_STATE_TREE_HEADER);
        // When an action follows a redirect, it's no longer handling an action: it's just a normal RSC request
        // to the requested URL. We should remove the `next-action` header so that it's not treated as an action
        forwardedHeaders.delete(ACTION_HEADER);
        try {
            var _response_headers_get;
            setCacheBustingSearchParam(fetchUrl, {
                [NEXT_ROUTER_PREFETCH_HEADER]: forwardedHeaders.get(NEXT_ROUTER_PREFETCH_HEADER) ? '1' : undefined,
                [NEXT_ROUTER_SEGMENT_PREFETCH_HEADER]: forwardedHeaders.get(NEXT_ROUTER_SEGMENT_PREFETCH_HEADER) ?? undefined,
                [NEXT_ROUTER_STATE_TREE_HEADER]: forwardedHeaders.get(NEXT_ROUTER_STATE_TREE_HEADER) ?? undefined,
                [NEXT_URL]: forwardedHeaders.get(NEXT_URL) ?? undefined
            });
            const response = await fetch(fetchUrl, {
                method: 'GET',
                headers: forwardedHeaders,
                next: {
                    // @ts-ignore
                    internal: 1
                }
            });
            // Only stream the body through when it actually is an RSC payload.
            if ((_response_headers_get = response.headers.get('content-type')) == null ? void 0 : _response_headers_get.startsWith(RSC_CONTENT_TYPE_HEADER)) {
                // copy the headers from the redirect response to the response we're sending
                for (const [key, value] of response.headers){
                    if (!actionsForbiddenHeaders.includes(key)) {
                        res.setHeader(key, value);
                    }
                }
                return new FlightRenderResult(response.body);
            } else {
                var // Since we aren't consuming the response body, we cancel it to avoid memory leaks
                _response_body;
                (_response_body = response.body) == null ? void 0 : _response_body.cancel();
            }
        } catch (err) {
            // we couldn't stream the redirect response, so we'll just do a normal redirect
            console.error(`failed to get redirect response`, err);
        }
    }
    return RenderResult.EMPTY;
}
/**
 * Ensures the value of the header can't create long logs.
 */ function limitUntrustedHeaderValueForLogs(value) {
    const MAX_LOGGED_LENGTH = 100;
    if (value.length <= MAX_LOGGED_LENGTH) {
        return value;
    }
    return `${value.slice(0, MAX_LOGGED_LENGTH)}...`;
}
/**
 * Picks the host header to trust for a Server Actions request, preferring
 * `x-forwarded-host` over `host`. When `originDomain` is given, only a header
 * that exactly matches it is returned; otherwise the first present header wins.
 * Returns `{ type, value }` identifying which header was used, or `undefined`.
 */ export function parseHostHeader(headers, originDomain) {
    const forwardedHostHeader = headers['x-forwarded-host'];
    // Arrays take their first entry as-is; strings may be a comma-separated
    // list, in which case only the first (trimmed) entry counts.
    let forwardedHostHeaderValue;
    if (forwardedHostHeader && Array.isArray(forwardedHostHeader)) {
        forwardedHostHeaderValue = forwardedHostHeader[0];
    } else {
        forwardedHostHeaderValue = forwardedHostHeader?.split(',')?.[0]?.trim();
    }
    const hostHeader = headers['host'];
    if (originDomain) {
        if (forwardedHostHeaderValue === originDomain) {
            return {
                type: "x-forwarded-host",
                value: forwardedHostHeaderValue
            };
        }
        if (hostHeader === originDomain) {
            return {
                type: "host",
                value: hostHeader
            };
        }
        return undefined;
    }
    if (forwardedHostHeaderValue) {
        return {
            type: "x-forwarded-host",
            value: forwardedHostHeaderValue
        };
    }
    if (hostHeader) {
        return {
            type: "host",
            value: hostHeader
        };
    }
    return undefined;
}
/**
 * Entry point for handling a POST request that may be a Server Action.
 *
 * Selects the payload decoder for the current runtime (Edge web streams vs.
 * Node streams), enforces the origin/host CSRF check and the configured body
 * size limit (Node path), decodes the action arguments, executes the action,
 * and encodes the result.
 *
 * Returns:
 * - `null` when the request turns out not to be a server action,
 * - `{ type: 'not-found' }` when an error page should be rendered instead,
 * - `{ type: 'done', result, formState? }` when a response (or, for MPA
 *   actions, form state for the subsequent page render) was produced.
 */ export async function handleAction({ req, res, ComponentMod, generateFlight, workStore, requestStore, serverActions, ctx, metadata }) {
    const contentType = req.headers['content-type'];
    const { page } = ctx.renderOpts;
    const serverModuleMap = getServerModuleMap();
    const { actionId, isMultipartAction, isFetchAction, isURLEncodedAction, isPossibleServerAction } = getServerActionRequestMetadata(req);
    const handleUnrecognizedFetchAction = (err)=>{
        // If the deployment doesn't have skew protection, this is expected to occasionally happen,
        // so we use a warning instead of an error.
        console.warn(err);
        // Return an empty response with a header that the client router will interpret.
        // We don't need to waste time encoding a flight response, and using a blank body + header
        // means that unrecognized actions can also be handled at the infra level
        // (i.e. without needing to invoke a lambda)
        res.setHeader(NEXT_ACTION_NOT_FOUND_HEADER, '1');
        res.setHeader('content-type', 'text/plain');
        res.statusCode = 404;
        return {
            type: 'done',
            result: RenderResult.fromStatic('Server action not found.', 'text/plain')
        };
    };
    // If it can't be a Server Action, skip handling.
    // Note that this can be a false positive -- any multipart/urlencoded POST can get us here,
    // But won't know if it's an MPA action or not until we call `decodeAction` below.
    if (!isPossibleServerAction) {
        return null;
    }
    // We don't currently support URL encoded actions, so we bail out early.
    // Depending on if it's a fetch action or an MPA, we return a different response.
    if (isURLEncodedAction) {
        if (isFetchAction) {
            return {
                type: 'not-found'
            };
        } else {
            // This is an MPA action, so we return null
            return null;
        }
    }
    // If the app has no server actions at all, we can 404 early.
    if (!hasServerActions()) {
        return handleUnrecognizedFetchAction(getActionNotFoundError(actionId));
    }
    if (workStore.isStaticGeneration) {
        throw Object.defineProperty(new Error("Invariant: server actions can't be handled during static rendering"), "__NEXT_ERROR_CODE", {
            value: "E359",
            enumerable: false,
            configurable: true
        });
    }
    // Temporary reference set shared between the reply decoder and the flight
    // encoder; created once a decoder implementation is selected below.
    let temporaryReferences;
    // When running actions the default is no-store, you can still `cache: 'force-cache'`
    workStore.fetchCache = 'default-no-store';
    const originHeader = req.headers['origin'];
    const originHost = typeof originHeader === 'string' ? // However, these contexts can still send along credentials like cookies,
    // so we need to check if they're allowed cross-origin requests.
    originHeader === 'null' ? 'null' : new URL(originHeader).host : undefined;
    const host = parseHostHeader(req.headers);
    let warning = undefined;
    function warnBadServerActionRequest() {
        if (warning) {
            warn(warning);
        }
    }
    // This is to prevent CSRF attacks. If `x-forwarded-host` is set, we need to
    // ensure that the request is coming from the same host.
    if (!originHost) {
        // This is a handcrafted request without an origin or a request from an unsafe browser.
        // We'll let this through but log a warning.
        // We can't guard against unsafe browsers and handcrafted requests can't contain
        // user credentials that haven't been shared willingly.
        warning = 'Missing `origin` header from a forwarded Server Actions request.';
    } else if (!host || originHost !== host.value) {
        // If the customer sets a list of allowed origins, we'll allow the request.
        // These are considered safe but might be different from forwarded host set
        // by the infra (i.e. reverse proxies).
        if (isCsrfOriginAllowed(originHost, serverActions == null ? void 0 : serverActions.allowedOrigins)) {
        // Ignore it
        } else {
            if (host) {
                // This seems to be an CSRF attack. We should not proceed the action.
                console.error(`\`${host.type}\` header with value \`${limitUntrustedHeaderValueForLogs(host.value)}\` does not match \`origin\` header with value \`${limitUntrustedHeaderValueForLogs(originHost)}\` from a forwarded Server Actions request. Aborting the action.`);
            } else {
                // This is an attack. We should not proceed the action.
                console.error(`\`x-forwarded-host\` or \`host\` headers are not provided. One of these is needed to compare the \`origin\` header from a forwarded Server Actions request. Aborting the action.`);
            }
            const error = Object.defineProperty(new Error('Invalid Server Actions request.'), "__NEXT_ERROR_CODE", {
                value: "E80",
                enumerable: false,
                configurable: true
            });
            if (isFetchAction) {
                res.statusCode = 500;
                metadata.statusCode = 500;
                const promise = Promise.reject(error);
                try {
                    // we need to await the promise to trigger the rejection early
                    // so that it's already handled by the time we call
                    // the RSC runtime. Otherwise, it will throw an unhandled
                    // promise rejection error in the renderer.
                    await promise;
                } catch {
                // swallow error, it's gonna be handled on the client
                }
                return {
                    type: 'done',
                    result: await generateFlight(req, ctx, requestStore, {
                        actionResult: promise,
                        // We didn't execute an action, so no revalidations could have
                        // occurred. We can skip rendering the page.
                        skipPageRendering: true,
                        temporaryReferences
                    })
                };
            }
            throw error;
        }
    }
    // ensure we avoid caching server actions unexpectedly
    res.setHeader('Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate');
    const { actionAsyncStorage } = ComponentMod;
    const actionWasForwarded = Boolean(req.headers['x-action-forwarded']);
    if (actionId) {
        const forwardedWorker = selectWorkerForForwarding(actionId, page);
        // If forwardedWorker is truthy, it means there isn't a worker for the action
        // in the current handler, so we forward the request to a worker that has the action.
        if (forwardedWorker) {
            return {
                type: 'done',
                result: await createForwardedActionResponse(req, res, host, forwardedWorker, ctx.renderOpts.basePath)
            };
        }
    }
    try {
        return await actionAsyncStorage.run({
            isAction: true
        }, async ()=>{
            // We only use these for fetch actions -- MPA actions handle them inside `decodeAction`.
            let actionModId;
            let boundActionArguments = [];
            if (// The type check here ensures that `req` is correctly typed, and the
            // environment variable check provides dead code elimination.
            process.env.NEXT_RUNTIME === 'edge' && isWebNextRequest(req)) {
                if (!req.body) {
                    throw Object.defineProperty(new Error('invariant: Missing request body.'), "__NEXT_ERROR_CODE", {
                        value: "E364",
                        enumerable: false,
                        configurable: true
                    });
                }
                // TODO: add body limit
                // Use react-server-dom-webpack/server
                const { createTemporaryReferenceSet, decodeReply, decodeAction, decodeFormState } = ComponentMod;
                temporaryReferences = createTemporaryReferenceSet();
                if (isMultipartAction) {
                    // TODO-APP: Add streaming support
                    const formData = await req.request.formData();
                    if (isFetchAction) {
                        // A fetch action with a multipart body.
                        try {
                            actionModId = getActionModIdOrError(actionId, serverModuleMap);
                        } catch (err) {
                            return handleUnrecognizedFetchAction(err);
                        }
                        boundActionArguments = await decodeReply(formData, serverModuleMap, {
                            temporaryReferences
                        });
                    } else {
                        // Multipart POST, but not a fetch action.
                        // Potentially an MPA action, we have to try decoding it to check.
                        if (areAllActionIdsValid(formData, serverModuleMap) === false) {
                            // TODO: This can be from skew or manipulated input. We should handle this case
                            // more gracefully but this preserves the prior behavior where decodeAction would throw instead.
                            throw Object.defineProperty(new Error(`Failed to find Server Action. This request might be from an older or newer deployment.\nRead more: https://nextjs.org/docs/messages/failed-to-find-server-action`), "__NEXT_ERROR_CODE", {
                                value: "E975",
                                enumerable: false,
                                configurable: true
                            });
                        }
                        const action = await decodeAction(formData, serverModuleMap);
                        if (typeof action === 'function') {
                            // an MPA action.
                            // Only warn if it's a server action, otherwise skip for other post requests
                            warnBadServerActionRequest();
                            const { actionResult } = await executeActionAndPrepareForRender(action, [], workStore, requestStore, actionWasForwarded);
                            const formState = await decodeFormState(actionResult, formData, serverModuleMap);
                            // Skip the fetch path.
                            // We need to render a full HTML version of the page for the response, we'll handle that in app-render.
                            return {
                                type: 'done',
                                result: undefined,
                                formState
                            };
                        } else {
                            // We couldn't decode an action, so this POST request turned out not to be a server action request.
                            return null;
                        }
                    }
                } else {
                    // POST with non-multipart body.
                    // If it's not multipart AND not a fetch action,
                    // then it can't be an action request.
                    if (!isFetchAction) {
                        return null;
                    }
                    try {
                        actionModId = getActionModIdOrError(actionId, serverModuleMap);
                    } catch (err) {
                        return handleUnrecognizedFetchAction(err);
                    }
                    // A fetch action with a non-multipart body.
                    // In practice, this happens if `encodeReply` returned a string instead of FormData,
                    // which can happen for very simple JSON-like values that don't need multiple flight rows.
                    const chunks = [];
                    const reader = req.body.getReader();
                    while(true){
                        const { done, value } = await reader.read();
                        if (done) {
                            break;
                        }
                        chunks.push(value);
                    }
                    const actionData = Buffer.concat(chunks).toString('utf-8');
                    boundActionArguments = await decodeReply(actionData, serverModuleMap, {
                        temporaryReferences
                    });
                }
            } else if (// The type check here ensures that `req` is correctly typed, and the
            // environment variable check provides dead code elimination.
            process.env.NEXT_RUNTIME !== 'edge' && isNodeNextRequest(req)) {
                // Use react-server-dom-webpack/server.node which supports streaming
                const { createTemporaryReferenceSet, decodeReply, decodeReplyFromBusboy, decodeAction, decodeFormState } = require(`./react-server.node`);
                temporaryReferences = createTemporaryReferenceSet();
                const { PassThrough, Readable, Transform } = require('node:stream');
                const { pipeline } = require('node:stream/promises');
                // If actionBody was stashed in request meta (from parsing the postponed
                // state prefix in minimal mode), use it instead of req.body
                const actionBodyFromMeta = getRequestMeta(req, 'actionBody');
                const body = actionBodyFromMeta ? Readable.from(actionBodyFromMeta) : req.body;
                const defaultBodySizeLimit = '1 MB';
                const bodySizeLimit = (serverActions == null ? void 0 : serverActions.bodySizeLimit) ?? defaultBodySizeLimit;
                const bodySizeLimitBytes = bodySizeLimit !== defaultBodySizeLimit ? require('next/dist/compiled/bytes').parse(bodySizeLimit) : 1024 * 1024 // 1 MB
                ;
                let size = 0;
                // Transform stream that counts bytes flowing through and fails the
                // pipeline with a 413 ApiError once the limit is exceeded.
                const sizeLimitTransform = new Transform({
                    transform (chunk, encoding, callback) {
                        size += Buffer.byteLength(chunk, encoding);
                        if (size > bodySizeLimitBytes) {
                            const { ApiError } = require('../api-utils');
                            callback(Object.defineProperty(new ApiError(413, `Body exceeded ${bodySizeLimit} limit.\n` + `To configure the body size limit for Server Actions, see: https://nextjs.org/docs/app/api-reference/next-config-js/serverActions#bodysizelimit`), "__NEXT_ERROR_CODE", {
                                value: "E394",
                                enumerable: false,
                                configurable: true
                            }));
                            return;
                        }
                        callback(null, chunk);
                    }
                });
                if (isMultipartAction) {
                    if (isFetchAction) {
                        // A fetch action with a multipart body.
                        try {
                            actionModId = getActionModIdOrError(actionId, serverModuleMap);
                        } catch (err) {
                            return handleUnrecognizedFetchAction(err);
                        }
                        const busboy = require('next/dist/compiled/busboy')({
                            defParamCharset: 'utf8',
                            headers: req.headers,
                            limits: {
                                fieldSize: bodySizeLimitBytes
                            }
                        });
                        const abortController = new AbortController();
                        try {
                            ;
                            [, boundActionArguments] = await Promise.all([
                                pipeline(body, sizeLimitTransform, busboy, {
                                    signal: abortController.signal
                                }),
                                decodeReplyFromBusboy(busboy, serverModuleMap, {
                                    temporaryReferences
                                })
                            ]);
                        } catch (err) {
                            abortController.abort();
                            throw err;
                        }
                    } else {
                        // Multipart POST, but not a fetch action.
                        // Potentially an MPA action, we have to try decoding it to check.
                        const sizeLimitedBody = new PassThrough();
                        // React doesn't yet publish a busboy version of decodeAction
                        // so we polyfill the parsing of FormData.
                        const fakeRequest = new Request('http://localhost', {
                            method: 'POST',
                            // @ts-expect-error
                            headers: {
                                'Content-Type': contentType
                            },
                            body: Readable.toWeb(sizeLimitedBody),
                            duplex: 'half'
                        });
                        let formData;
                        const abortController = new AbortController();
                        try {
                            ;
                            [, formData] = await Promise.all([
                                pipeline(body, sizeLimitTransform, sizeLimitedBody, {
                                    signal: abortController.signal
                                }),
                                fakeRequest.formData()
                            ]);
                        } catch (err) {
                            abortController.abort();
                            throw err;
                        }
                        if (areAllActionIdsValid(formData, serverModuleMap) === false) {
                            // TODO: This can be from skew or manipulated input. We should handle this case
                            // more gracefully but this preserves the prior behavior where decodeAction would throw instead.
                            throw Object.defineProperty(new Error(`Failed to find Server Action. This request might be from an older or newer deployment.\nRead more: https://nextjs.org/docs/messages/failed-to-find-server-action`), "__NEXT_ERROR_CODE", {
                                value: "E975",
                                enumerable: false,
                                configurable: true
                            });
                        }
                        // TODO: Refactor so it is harder to accidentally decode an action before you have validated that the
                        // action referred to is available.
                        const action = await decodeAction(formData, serverModuleMap);
                        if (typeof action === 'function') {
                            // an MPA action.
                            // Only warn if it's a server action, otherwise skip for other post requests
                            warnBadServerActionRequest();
                            const { actionResult } = await executeActionAndPrepareForRender(action, [], workStore, requestStore, actionWasForwarded);
                            const formState = await decodeFormState(actionResult, formData, serverModuleMap);
                            // Skip the fetch path.
                            // We need to render a full HTML version of the page for the response, we'll handle that in app-render.
                            return {
                                type: 'done',
                                result: undefined,
                                formState
                            };
                        } else {
                            // We couldn't decode an action, so this POST request turned out not to be a server action request.
                            return null;
                        }
                    }
                } else {
                    // POST with non-multipart body.
                    // If it's not multipart AND not a fetch action,
                    // then it can't be an action request.
                    if (!isFetchAction) {
                        return null;
                    }
                    try {
                        actionModId = getActionModIdOrError(actionId, serverModuleMap);
                    } catch (err) {
                        return handleUnrecognizedFetchAction(err);
                    }
                    // A fetch action with a non-multipart body.
                    // In practice, this happens if `encodeReply` returned a string instead of FormData,
                    // which can happen for very simple JSON-like values that don't need multiple flight rows.
                    const sizeLimitedBody = new PassThrough();
                    const chunks = [];
                    await Promise.all([
                        pipeline(body, sizeLimitTransform, sizeLimitedBody),
                        (async ()=>{
                            for await (const chunk of sizeLimitedBody){
                                chunks.push(Buffer.from(chunk));
                            }
                        })()
                    ]);
                    const actionData = Buffer.concat(chunks).toString('utf-8');
                    boundActionArguments = await decodeReply(actionData, serverModuleMap, {
                        temporaryReferences
                    });
                }
            } else {
                throw Object.defineProperty(new Error('Invariant: Unknown request type.'), "__NEXT_ERROR_CODE", {
                    value: "E114",
                    enumerable: false,
                    configurable: true
                });
            }
            // actions.js
            // app/page.js
            // action worker1
            // appRender1
            // app/foo/page.js
            // action worker2
            // appRender
            // / -> fire action -> POST / -> appRender1 -> modId for the action file
            // /foo -> fire action -> POST /foo -> appRender2 -> modId for the action file
            const actionMod = await ComponentMod.__next_app__.require(actionModId);
            const actionHandler = actionMod[// `actionId` must exist if we got here, as otherwise we would have thrown an error above
            actionId];
            // Log server action call in development when enabled
            let logInfo = null;
            const { type: actionType } = extractInfoFromServerReferenceId(actionId);
            if (process.env.NODE_ENV === 'development' && ctx.renderOpts.logServerFunctions && // TODO: For now, skip logging for 'use cache' Server Functions as the
            // output needs more work, or a different approach entirely.
            actionType !== 'use-cache') {
                var _serverActionsManifest_runtime;
                const serverActionsManifest = getServerActionsManifest();
                const runtime = process.env.NEXT_RUNTIME === 'edge' ? 'edge' : 'node';
                const actionInfo = (_serverActionsManifest_runtime = serverActionsManifest[runtime]) == null ? void 0 : _serverActionsManifest_runtime[actionId];
                if (actionInfo) {
                    var _actionInfo_exportedName;
                    const isInlineAction = (_actionInfo_exportedName = actionInfo.exportedName) == null ? void 0 : _actionInfo_exportedName.startsWith(INLINE_ACTION_PREFIX);
                    const projectDir = ctx.renderOpts.dir || (process.env.NEXT_RUNTIME === 'edge' ? '' : process.cwd());
                    const location = normalizeFilePath(projectDir, actionInfo.filename);
                    // Format function name for display
                    let functionName;
                    if (isInlineAction) {
                        functionName = '<inline action>';
                    } else if (actionInfo.exportedName === 'default') {
                        functionName = 'default';
                    } else {
                        functionName = actionInfo.exportedName || '<action>';
                    }
                    logInfo = {
                        functionName,
                        args: boundActionArguments,
                        location
                    };
                }
            }
            // Timestamp used to report the action's duration in the dev log below.
            const startTime = performance.now();
            const { actionResult, skipPageRendering } = await executeActionAndPrepareForRender(actionHandler, boundActionArguments, workStore, requestStore, actionWasForwarded).finally(()=>{
                addRevalidationHeader(res, {
                    workStore,
                    requestStore
                });
                if (logInfo) {
                    // Store server action log info to be logged after the request log
                    const duration = Math.round(performance.now() - startTime);
                    addRequestMeta(req, 'devServerActionLog', {
                        functionName: logInfo.functionName,
                        args: logInfo.args,
                        location: logInfo.location,
                        duration
                    });
                }
            });
            // For form actions, we need to continue rendering the page.
            if (isFetchAction) {
                // If we skip page rendering, we need to ensure pending revalidates
                // are awaited before closing the response. Otherwise, this will be
                // done after rendering the page.
                const maybeRevalidatesPromise = skipPageRendering ? executeRevalidates(workStore) : false;
                return {
                    type: 'done',
                    result: await generateFlight(req, ctx, requestStore, {
                        actionResult: Promise.resolve(actionResult),
                        skipPageRendering,
                        temporaryReferences,
                        waitUntil: maybeRevalidatesPromise === false ? undefined : maybeRevalidatesPromise
                    })
                };
            } else {
                // TODO: this shouldn't be reachable, because all non-fetch codepaths return early.
                // this will be handled in a follow-up refactor PR.
                return null;
            }
        });
    } catch (err) {
        if (isRedirectError(err)) {
            const redirectUrl = getURLFromRedirectError(err);
            const redirectType = getRedirectTypeFromError(err);
            // if it's a fetch action, we'll set the status code for logging/debugging purposes
            // but we won't set a Location header, as the redirect will be handled by the client router
            res.statusCode = RedirectStatusCode.SeeOther;
            metadata.statusCode = RedirectStatusCode.SeeOther;
            if (isFetchAction) {
                return {
                    type: 'done',
                    result: await createRedirectRenderResult(req, res, host, redirectUrl, redirectType, ctx.renderOpts.basePath, workStore, requestStore.url.pathname)
                };
            }
            // For an MPA action, the redirect doesn't need a body, just a Location header.
            res.setHeader('Location', redirectUrl);
            return {
                type: 'done',
                result: RenderResult.EMPTY
            };
        } else if (isHTTPAccessFallbackError(err)) {
            res.statusCode = getAccessFallbackHTTPStatus(err);
            metadata.statusCode = res.statusCode;
            if (isFetchAction) {
                const promise = Promise.reject(err);
                try {
                    // we need to await the promise to trigger the rejection early
                    // so that it's already handled by the time we call
                    // the RSC runtime. Otherwise, it will throw an unhandled
                    // promise rejection error in the renderer.
                    await promise;
                } catch {
                // swallow error, it's gonna be handled on the client
                }
                return {
                    type: 'done',
                    result: await generateFlight(req, ctx, requestStore, {
                        skipPageRendering: false,
                        actionResult: promise,
                        temporaryReferences
                    })
                };
            }
            // For an MPA action, we need to render a HTML response. We'll handle that in app-render.
            return {
                type: 'not-found'
            };
        }
        // An error that didn't come from `redirect()` or `notFound()`, likely thrown from user code
        // (but it could also be a bug in our code!)
        if (isFetchAction) {
            // TODO: consider checking if the error is an `ApiError` and change status code
            // so that we can respond with a 413 to requests that break the body size limit
            // (but if we do that, we also need to make sure that whatever handles the non-fetch error path below does the same)
            res.statusCode = 500;
            metadata.statusCode = 500;
            const promise = Promise.reject(err);
            try {
                // we need to await the promise to trigger the rejection early
                // so that it's already handled by the time we call
                // the RSC runtime. Otherwise, it will throw an unhandled
                // promise rejection error in the renderer.
                await promise;
            } catch {
            // swallow error, it's gonna be handled on the client
            }
            return {
                type: 'done',
                result: await generateFlight(req, ctx, requestStore, {
                    actionResult: promise,
                    // If the page was not revalidated, or if the action was forwarded
                    // from another worker, we can skip rendering the page.
                    skipPageRendering: workStore.pathWasRevalidated === undefined || workStore.pathWasRevalidated === ActionDidNotRevalidate || actionWasForwarded,
                    temporaryReferences
                })
            };
        }
        // For an MPA action, we need to render a HTML response. We'll rethrow the error and let it be handled above.
        throw err;
    }
}
/**
 * Limit on the number of arguments passed to a server action. This prevents
 * stack overflow during `action.apply()` from malicious requests.
 */ const SERVER_ACTION_ARGS_LIMIT = 1000;
/**
 * Runs a decoded server action within the request's work-unit async storage
 * and prepares the stores for a possible subsequent page render.
 *
 * Returns the action's result plus `skipPageRendering`, which is true when the
 * action was forwarded from another worker or did not revalidate the current
 * path. When the page WILL be rendered afterwards, the `finally` block
 * switches the request store back to the render phase, syncs cookie writes,
 * reflects draft-mode toggles, and applies pending revalidations first.
 */ async function executeActionAndPrepareForRender(action, args, workStore, requestStore, actionWasForwarded) {
    requestStore.phase = 'action';
    let skipPageRendering = actionWasForwarded;
    // Reject absurdly long argument lists before they reach `action.apply()`.
    if (args.length > SERVER_ACTION_ARGS_LIMIT) {
        throw Object.defineProperty(new Error(`Server Action arguments list is too long (${args.length}). Maximum allowed is ${SERVER_ACTION_ARGS_LIMIT}.`), "__NEXT_ERROR_CODE", {
            value: "E986",
            enumerable: false,
            configurable: true
        });
    }
    try {
        const actionResult = await workUnitAsyncStorage.run(requestStore, ()=>action.apply(null, args));
        // If the page was not revalidated, or if the action was forwarded from
        // another worker, we can skip rendering the page.
        skipPageRendering ||= workStore.pathWasRevalidated === undefined || workStore.pathWasRevalidated === ActionDidNotRevalidate;
        return {
            actionResult,
            skipPageRendering
        };
    } finally{
        if (!skipPageRendering) {
            requestStore.phase = 'render';
            // When we switch to the render phase, cookies() will return
            // `workUnitStore.cookies` instead of
            // `workUnitStore.userspaceMutableCookies`. We want the render to see any
            // cookie writes that we performed during the action, so we need to update
            // the immutable cookies to reflect the changes.
            synchronizeMutableCookies(requestStore);
            // The server action might have toggled draft mode, so we need to reflect
            // that in the work store to be up-to-date for subsequent rendering.
            workStore.isDraftMode = requestStore.draftMode.isEnabled;
            // If the action called revalidateTag/revalidatePath, then that might
            // affect data used by the subsequent render, so we need to make sure all
            // revalidations are applied before that.
            await executeRevalidates(workStore);
        }
    }
}
/**
 * Attempts to find the module ID for the action from the module map. When this fails, it could be a deployment skew where
 * the action came from a different deployment. It could also simply be an invalid POST request that is not a server action.
 * In either case, we'll throw an error to be handled by the caller.
 */ function getActionModIdOrError(actionId, serverModuleMap) {
    // Without the action ID header there is nothing to look up.
    if (!actionId) {
        throw Object.defineProperty(new InvariantError("Missing 'next-action' header."), "__NEXT_ERROR_CODE", {
            value: "E664",
            enumerable: false,
            configurable: true
        });
    }
    const mapEntry = serverModuleMap[actionId];
    const actionModId = mapEntry?.id;
    if (!actionModId) {
        // Unknown ID: likely deployment skew, or a bogus request.
        throw getActionNotFoundError(actionId);
    }
    return actionModId;
}
function getActionNotFoundError(actionId) {
return Object.defineProperty(new Error(`Failed to find Server Action${actionId ? ` "${actionId}"` : ''}. This request might be from an older or newer deployment.\nRead more: https://nextjs.org/docs/messages/failed-to-find-server-action`), "__NEXT_ERROR_CODE", {
value: "E974",
enumerable: false,
configurable: true
});
}
// Prefix shared by all action-related form field names in MPA payloads
// (mirrors the field names React's decodeAction consumes).
const $ACTION_ = '$ACTION_';
// Prefix for fields that reference a bound-arguments action descriptor by key.
const $ACTION_REF_ = '$ACTION_REF_';
// Prefix for fields whose key embeds a bare action ID (no bound arguments).
const $ACTION_ID_ = '$ACTION_ID_';
// Length every server reference ID is expected to have (validated below).
const ACTION_ID_EXPECTED_LENGTH = 42;
/**
 * This function mirrors logic inside React's decodeAction and should be kept in sync with that.
 * It pre-parses the FormData to ensure that any action IDs referred to are actual action IDs for
 * this Next.js application.
 */ function areAllActionIdsValid(mpaFormData, serverModuleMap) {
    let foundAction = false;
    // Before attempting to decode the payload for a possible MPA action, make
    // sure every referenced action ID resolves in our module map; otherwise
    // the payload must be disregarded.
    for (const fieldName of mpaFormData.keys()){
        if (!fieldName.startsWith($ACTION_)) {
            continue;
        }
        if (fieldName.startsWith($ACTION_ID_)) {
            // No bound arguments: the action ID is embedded in the key itself.
            if (isInvalidActionIdFieldName(fieldName, serverModuleMap)) {
                return false;
            }
            foundAction = true;
        } else if (fieldName.startsWith($ACTION_REF_)) {
            // Bound arguments: the key points at a descriptor field whose
            // first flight row (":0") carries the JSON action descriptor.
            const descriptorKey = $ACTION_ + fieldName.slice($ACTION_REF_.length) + ':0';
            const descriptorValues = mpaFormData.getAll(descriptorKey);
            if (descriptorValues.length !== 1) {
                return false;
            }
            const descriptor = descriptorValues[0];
            if (typeof descriptor !== 'string') {
                return false;
            }
            if (isInvalidStringActionDescriptor(descriptor, serverModuleMap)) {
                return false;
            }
            foundAction = true;
        }
    }
    return foundAction;
}
// A valid descriptor serializes as '{"id":"<actionId>",...}'.
const ACTION_DESCRIPTOR_ID_PREFIX = '{"id":"';
// Returns true when the descriptor string does not carry a well-formed action
// ID that resolves in this deployment's module map.
function isInvalidStringActionDescriptor(actionDescriptor, serverModuleMap) {
    if (!actionDescriptor.startsWith(ACTION_DESCRIPTOR_ID_PREFIX)) {
        return true;
    }
    const idStart = ACTION_DESCRIPTOR_ID_PREFIX.length;
    const idEnd = idStart + ACTION_ID_EXPECTED_LENGTH;
    const actionId = actionDescriptor.slice(idStart, idEnd);
    // The ID must be exactly the expected length and must be terminated by the
    // closing quote of the JSON string value.
    if (actionId.length !== ACTION_ID_EXPECTED_LENGTH || actionDescriptor[idEnd] !== '"') {
        return true;
    }
    // Finally, the ID must resolve to a known module.
    return serverModuleMap[actionId] == null;
}
// Returns true when a `$ACTION_ID_<id>` field name does not carry a valid,
// resolvable action ID. The `$ACTION_ID_` prefix itself has already been
// verified by the caller before this function is entered.
function isInvalidActionIdFieldName(actionIdFieldName, serverModuleMap) {
    // The ID is whatever follows the prefix, so the total key length must be
    // exactly prefix length + expected ID length — no more, no less.
    const expectedFieldLength = $ACTION_ID_.length + ACTION_ID_EXPECTED_LENGTH;
    if (actionIdFieldName.length !== expectedFieldLength) {
        return true;
    }
    const actionId = actionIdFieldName.slice($ACTION_ID_.length);
    // The extracted ID must resolve in this deployment's module map.
    return serverModuleMap[actionId] == null;
}
//# sourceMappingURL=action-handler.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,4 @@
import { createAsyncLocalStorage } from './async-local-storage';
// Shared singleton storage instance created via the project's
// async-local-storage helper and exported for use by the external wrapper.
export const afterTaskAsyncStorageInstance = createAsyncLocalStorage();
//# sourceMappingURL=after-task-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/after-task-async-storage-instance.ts"],"sourcesContent":["import type { AfterTaskAsyncStorage } from './after-task-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const afterTaskAsyncStorageInstance: AfterTaskAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["createAsyncLocalStorage","afterTaskAsyncStorageInstance"],"mappings":"AACA,SAASA,uBAAuB,QAAQ,wBAAuB;AAE/D,OAAO,MAAMC,gCACXD,0BAAyB","ignoreList":[0]}

View File

@@ -0,0 +1,7 @@
// Share the instance module in the next-shared layer
import { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance' with {
'turbopack-transition': 'next-shared'
};
export { afterTaskAsyncStorage };
//# sourceMappingURL=after-task-async-storage.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/after-task-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nimport type { WorkUnitStore } from './work-unit-async-storage.external'\n\nexport interface AfterTaskStore {\n /** The phase in which the topmost `after` was called.\n *\n * NOTE: Can be undefined when running `generateStaticParams`,\n * where we only have a `workStore`, no `workUnitStore`.\n */\n readonly rootTaskSpawnPhase: WorkUnitStore['phase'] | undefined\n}\n\nexport type AfterTaskAsyncStorage = AsyncLocalStorage<AfterTaskStore>\n\nexport { afterTaskAsyncStorage }\n"],"names":["afterTaskAsyncStorageInstance","afterTaskAsyncStorage"],"mappings":"AAEA,qDAAqD;AACrD,SAASA,iCAAiCC,qBAAqB,QAAQ,2CAA2C;IAAE,wBAAwB;AAAc,EAAC;AAc3J,SAASA,qBAAqB,GAAE","ignoreList":[0]}

View File

@@ -0,0 +1,137 @@
import { InvariantError } from '../../shared/lib/invariant-error';
// React's RSC prerender function will emit an incomplete flight stream when using `prerender`. If the connection
// closes then whatever hanging chunks exist will be errored. This is because prerender (an experimental feature)
// has not yet implemented a concept of resume. For now we will simulate a paused connection by wrapping the stream
// in one that doesn't close even when the underlying is complete.
/**
 * Wraps a Flight (RSC) stream so it can be either split (`tee`) or handed off
 * exactly once (`consume`). After `consume()` the result is spent and any
 * further `tee()`/`consume()` throws.
 */ export class ReactServerResult {
    constructor(stream){
        this._stream = stream;
    }
    /**
     * Splits the underlying stream: one branch is retained internally so the
     * result stays usable, the other branch is returned to the caller.
     */ tee() {
        if (this._stream === null) {
            throw Object.defineProperty(new Error('Cannot tee a ReactServerResult that has already been consumed'), "__NEXT_ERROR_CODE", {
                value: "E106",
                enumerable: false,
                configurable: true
            });
        }
        const [retained, returned] = this._stream.tee();
        this._stream = retained;
        return returned;
    }
    /**
     * Hands the underlying stream to the caller and marks this result as
     * consumed. May only be called once.
     */ consume() {
        if (this._stream === null) {
            throw Object.defineProperty(new Error('Cannot consume a ReactServerResult that has already been consumed'), "__NEXT_ERROR_CODE", {
                value: "E470",
                enumerable: false,
                configurable: true
            });
        }
        const pending = this._stream;
        this._stream = null;
        return pending;
    }
}
/**
 * Awaits a prerender result, drains its `prelude` stream fully into memory,
 * and wraps the buffered chunks in a ReactServerPrerenderResult.
 */ export async function createReactServerPrerenderResult(underlying) {
    const { prelude } = await underlying;
    const reader = prelude.getReader();
    const collected = [];
    for (;;) {
        const next = await reader.read();
        if (next.done) {
            break;
        }
        collected.push(next.value);
    }
    return new ReactServerPrerenderResult(collected);
}
/**
 * Drains a render's ReadableStream fully into memory and wraps the buffered
 * chunks in a ReactServerPrerenderResult.
 */ export async function createReactServerPrerenderResultFromRender(underlying) {
    const collected = [];
    const reader = underlying.getReader();
    let next = await reader.read();
    while (!next.done) {
        collected.push(next.value);
        next = await reader.read();
    }
    return new ReactServerPrerenderResult(collected);
}
/**
 * Holds the fully-buffered chunks of a prerendered Flight stream and can
 * replay them as either a closing or a non-closing ReadableStream. The
 * `consume*` variants release the buffer so it can be garbage collected;
 * accessing the chunks after consumption is an invariant violation.
 */ export class ReactServerPrerenderResult {
    constructor(chunks){
        this._chunks = chunks;
    }
    // Returns the buffered chunks, throwing if they were already consumed.
    assertChunks(expression) {
        const buffered = this._chunks;
        if (buffered === null) {
            throw Object.defineProperty(new InvariantError(`Cannot \`${expression}\` on a ReactServerPrerenderResult that has already been consumed.`), "__NEXT_ERROR_CODE", {
                value: "E593",
                enumerable: false,
                configurable: true
            });
        }
        return buffered;
    }
    // Like assertChunks, but also marks this result as consumed.
    consumeChunks(expression) {
        const buffered = this.assertChunks(expression);
        this.consume();
        return buffered;
    }
    consume() {
        this._chunks = null;
    }
    asUnclosingStream() {
        return createUnclosingStream(this.assertChunks('asUnclosingStream()'));
    }
    consumeAsUnclosingStream() {
        return createUnclosingStream(this.consumeChunks('consumeAsUnclosingStream()'));
    }
    asStream() {
        return createClosingStream(this.assertChunks('asStream()'));
    }
    consumeAsStream() {
        return createClosingStream(this.consumeChunks('consumeAsStream()'));
    }
}
// Replays `chunks` in order but never closes the stream — this simulates a
// paused connection (see the module comment at the top of this file).
function createUnclosingStream(chunks) {
    let nextIndex = 0;
    return new ReadableStream({
        async pull (controller) {
            if (nextIndex < chunks.length) {
                controller.enqueue(chunks[nextIndex]);
                nextIndex += 1;
            }
            // we intentionally keep the stream open. The consumer will clear
            // out chunks once finished and the remaining memory will be GC'd
            // when this object goes out of scope
        }
    });
}
// Replays `chunks` in order, then closes the stream once they are exhausted.
function createClosingStream(chunks) {
    let nextIndex = 0;
    return new ReadableStream({
        async pull (controller) {
            if (nextIndex === chunks.length) {
                controller.close();
            } else {
                controller.enqueue(chunks[nextIndex]);
                nextIndex += 1;
            }
        }
    });
}
/**
 * Tees the prelude stream so the first chunk can be peeked without consuming
 * the branch handed back to the caller. Returns the untouched branch plus a
 * flag indicating whether the prelude produced no chunks at all.
 */ export async function processPrelude(unprocessedPrelude) {
    const [prelude, peekBranch] = unprocessedPrelude.tee();
    const peekReader = peekBranch.getReader();
    const firstRead = await peekReader.read();
    peekReader.cancel();
    return {
        prelude,
        preludeIsEmpty: firstRead.done === true
    };
}
//# sourceMappingURL=app-render-prerender-utils.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,72 @@
import { InvariantError } from '../../shared/lib/invariant-error';
import { createAtomicTimerGroup } from './app-render-scheduling';
import { DANGEROUSLY_runPendingImmediatesAfterCurrentTask, expectNoPendingImmediates } from '../node-environment-extensions/fast-set-immediate.external';
import { isThenable } from '../../shared/lib/is-thenable';
function noop() {}
/**
 * This is a utility function to make scheduling sequential tasks that run back to back easier.
 * We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.
 *
 * The first function runs in the first task. Each subsequent function runs in its own task.
 * The returned promise resolves after the last task completes.
 *
 * If any task throws, every not-yet-run timer in the group is cancelled and
 * the returned promise rejects with that error. Only `first`'s return value
 * is observed; the `rest` functions run for their side effects only.
 */ export function runInSequentialTasks(first, ...rest) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new InvariantError('`runInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
            value: "E1054",
            enumerable: false,
            configurable: true
        });
    } else {
        return new Promise((resolve, reject)=>{
            // All timers are scheduled through one atomic group so they run in
            // the same timer phase (see createAtomicTimerGroup).
            const scheduleTimeout = createAtomicTimerGroup();
            // Timer handles for every scheduled task; used to cancel the
            // remaining tasks when one of them throws.
            const ids = [];
            let result;
            ids.push(scheduleTimeout(()=>{
                try {
                    DANGEROUSLY_runPendingImmediatesAfterCurrentTask();
                    result = first();
                    // If the first function returns a thenable, suppress unhandled
                    // rejections. A later task in the sequence (e.g. an abort) may
                    // cause the promise to reject, and we don't want that to surface
                    // as an unhandled rejection — the caller will observe the
                    // rejection when they await the returned promise.
                    if (isThenable(result)) {
                        result.then(noop, noop);
                    }
                } catch (err) {
                    // Cancel every timer scheduled after this (first) one.
                    for(let i = 1; i < ids.length; i++){
                        clearTimeout(ids[i]);
                    }
                    reject(err);
                }
            }));
            for(let i = 0; i < rest.length; i++){
                const fn = rest[i];
                // Index this task's own timer will occupy in `ids` once pushed;
                // on error we cancel only the timers scheduled after it.
                let index = ids.length;
                ids.push(scheduleTimeout(()=>{
                    try {
                        DANGEROUSLY_runPendingImmediatesAfterCurrentTask();
                        fn();
                    } catch (err) {
                        // clear remaining timeouts
                        while(++index < ids.length){
                            clearTimeout(ids[index]);
                        }
                        reject(err);
                    }
                }));
            }
            // We wait a task before resolving
            ids.push(scheduleTimeout(()=>{
                try {
                    expectNoPendingImmediates();
                    resolve(result);
                } catch (err) {
                    reject(err);
                }
            }));
        });
    }
}
//# sourceMappingURL=app-render-render-utils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/app-render-render-utils.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\nimport { createAtomicTimerGroup } from './app-render-scheduling'\nimport {\n DANGEROUSLY_runPendingImmediatesAfterCurrentTask,\n expectNoPendingImmediates,\n} from '../node-environment-extensions/fast-set-immediate.external'\nimport { isThenable } from '../../shared/lib/is-thenable'\n\nfunction noop() {}\n\n/**\n * This is a utility function to make scheduling sequential tasks that run back to back easier.\n * We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.\n *\n * The first function runs in the first task. Each subsequent function runs in its own task.\n * The returned promise resolves after the last task completes.\n */\nexport function runInSequentialTasks<R>(\n first: () => R,\n ...rest: Array<() => void>\n): Promise<Awaited<R>> {\n if (process.env.NEXT_RUNTIME === 'edge') {\n throw new InvariantError(\n '`runInSequentialTasks` should not be called in edge runtime.'\n )\n } else {\n return new Promise((resolve, reject) => {\n const scheduleTimeout = createAtomicTimerGroup()\n const ids: ReturnType<typeof scheduleTimeout>[] = []\n\n let result: R\n ids.push(\n scheduleTimeout(() => {\n try {\n DANGEROUSLY_runPendingImmediatesAfterCurrentTask()\n result = first()\n // If the first function returns a thenable, suppress unhandled\n // rejections. A later task in the sequence (e.g. 
an abort) may\n // cause the promise to reject, and we don't want that to surface\n // as an unhandled rejection — the caller will observe the\n // rejection when they await the returned promise.\n if (isThenable(result)) {\n result.then(noop, noop)\n }\n } catch (err) {\n for (let i = 1; i < ids.length; i++) {\n clearTimeout(ids[i])\n }\n reject(err)\n }\n })\n )\n\n for (let i = 0; i < rest.length; i++) {\n const fn = rest[i]\n let index = ids.length\n\n ids.push(\n scheduleTimeout(() => {\n try {\n DANGEROUSLY_runPendingImmediatesAfterCurrentTask()\n fn()\n } catch (err) {\n // clear remaining timeouts\n while (++index < ids.length) {\n clearTimeout(ids[index])\n }\n reject(err)\n }\n })\n )\n }\n\n // We wait a task before resolving\n ids.push(\n scheduleTimeout(() => {\n try {\n expectNoPendingImmediates()\n resolve(result as Awaited<R>)\n } catch (err) {\n reject(err)\n }\n })\n )\n })\n }\n}\n"],"names":["InvariantError","createAtomicTimerGroup","DANGEROUSLY_runPendingImmediatesAfterCurrentTask","expectNoPendingImmediates","isThenable","noop","runInSequentialTasks","first","rest","process","env","NEXT_RUNTIME","Promise","resolve","reject","scheduleTimeout","ids","result","push","then","err","i","length","clearTimeout","fn","index"],"mappings":"AAAA,SAASA,cAAc,QAAQ,mCAAkC;AACjE,SAASC,sBAAsB,QAAQ,0BAAyB;AAChE,SACEC,gDAAgD,EAChDC,yBAAyB,QACpB,6DAA4D;AACnE,SAASC,UAAU,QAAQ,+BAA8B;AAEzD,SAASC,QAAQ;AAEjB;;;;;;CAMC,GACD,OAAO,SAASC,qBACdC,KAAc,EACd,GAAGC,IAAuB;IAE1B,IAAIC,QAAQC,GAAG,CAACC,YAAY,KAAK,QAAQ;QACvC,MAAM,qBAEL,CAFK,IAAIX,eACR,iEADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF,OAAO;QACL,OAAO,IAAIY,QAAQ,CAACC,SAASC;YAC3B,MAAMC,kBAAkBd;YACxB,MAAMe,MAA4C,EAAE;YAEpD,IAAIC;YACJD,IAAIE,IAAI,CACNH,gBAAgB;gBACd,IAAI;oBACFb;oBACAe,SAASV;oBACT,+DAA+D;oBAC/D,+DAA+D;oBAC/D,iEAAiE;oBACjE,0DAA0D;oBAC1D,kDAAkD;oBAClD,IAAIH,WAAWa,SAAS;wBACtBA,OAAOE,IAAI,CAACd,MAAMA;oBACpB;gBACF,EAAE,OAAOe,KAAK;oBACZ,IAAK,IAAIC,IAAI,GAAGA,IAAIL,IAAIM,MAAM,EAAED,IAAK;wBACnCE,aAAaP,GAAG,CAACK,EAAE;oBACrB;
oBACAP,OAAOM;gBACT;YACF;YAGF,IAAK,IAAIC,IAAI,GAAGA,IAAIb,KAAKc,MAAM,EAAED,IAAK;gBACpC,MAAMG,KAAKhB,IAAI,CAACa,EAAE;gBAClB,IAAII,QAAQT,IAAIM,MAAM;gBAEtBN,IAAIE,IAAI,CACNH,gBAAgB;oBACd,IAAI;wBACFb;wBACAsB;oBACF,EAAE,OAAOJ,KAAK;wBACZ,2BAA2B;wBAC3B,MAAO,EAAEK,QAAQT,IAAIM,MAAM,CAAE;4BAC3BC,aAAaP,GAAG,CAACS,MAAM;wBACzB;wBACAX,OAAOM;oBACT;gBACF;YAEJ;YAEA,kCAAkC;YAClCJ,IAAIE,IAAI,CACNH,gBAAgB;gBACd,IAAI;oBACFZ;oBACAU,QAAQI;gBACV,EAAE,OAAOG,KAAK;oBACZN,OAAOM;gBACT;YACF;QAEJ;IACF;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,184 @@
import { InvariantError } from '../../shared/lib/invariant-error';
import { unpatchedSetImmediate } from '../node-environment-extensions/fast-set-immediate.external';
/*
==========================
| Background |
==========================
Node.js does not guarantee that two timers scheduled back to back will run
on the same iteration of the event loop:
```ts
setTimeout(one, 0)
setTimeout(two, 0)
```
Internally, each timer is assigned a `_idleStart` property that holds
an internal libuv timestamp in millisecond resolution.
This will be used to determine if the timer is already "expired" and should be executed.
However, even in sync code, it's possible for two timers to get different `_idleStart` values.
This can cause one of the timers to be executed, and the other to be delayed until the next timer phase.
The delaying happens [here](https://github.com/nodejs/node/blob/c208ffc66bb9418ff026c4e3fa82e5b4387bd147/lib/internal/timers.js#L556-L564).
and can be debugged by running node with `NODE_DEBUG=timer`.
The easiest way to observe it is to run this program in a loop until it exits with status 1:
```
// test.js
let immediateRan = false
const t1 = setTimeout(() => {
console.log('timeout 1')
setImmediate(() => {
console.log('immediate 1')
immediateRan = true
})
})
const t2 = setTimeout(() => {
console.log('timeout 2')
if (immediateRan) {
console.log('immediate ran before the second timeout!')
console.log(
`t1._idleStart: ${t1._idleStart}, t2_idleStart: ${t2._idleStart}`
);
process.exit(1)
}
})
```
```bash
#!/usr/bin/env bash
i=1;
while true; do
output="$(NODE_DEBUG=timer node test.js 2>&1)";
if [ "$?" -eq 1 ]; then
echo "failed after $i iterations";
echo "$output";
break;
fi;
i=$((i+1));
done
```
If `t2` is deferred to the next iteration of the event loop,
then the immediate scheduled from inside `t1` will run first.
When this occurs, `_idleStart` is reliably different between `t1` and `t2`.
==========================
| Solution |
==========================
We can guarantee that multiple timers (with the same delay, usually `0`)
run together without any delays by making sure that their `_idleStart`s are the same,
because that's what's used to determine if a timer should be deferred or not.
Luckily, this property is currently exposed to userland and mutable,
so we can patch it.
Another related trick we could potentially apply is making
a timer immediately be considered expired by doing `timer._idleStart -= 2`.
(the value must be more than `1`, the delay that actually gets set for `setTimeout(cb, 0)`).
This makes node view this timer as "a 1ms timer scheduled 2ms ago",
meaning that it should definitely run in the next timer phase.
However, I'm not confident we know all the side effects of doing this,
so for now, simply ensuring coordination is enough.
*/ let shouldAttemptPatching = true;
// Flipped to false (permanently, module-wide) once `_idleStart` patching is
// observed to be unavailable or ineffective in the current runtime.
// Warns that the timer-grouping guarantee cannot be provided; called from
// createAtomicTimerGroup whenever patching is abandoned.
function warnAboutTimers() {
    console.warn("Next.js cannot guarantee that Cache Components will run as expected due to the current runtime's implementation of `setTimeout()`.\nPlease report a github issue here: https://github.com/vercel/next.js/issues/new/");
}
/**
 * Allows scheduling multiple timers (equivalent to `setTimeout(cb, delayMs)`)
 * that are guaranteed to run in the same iteration of the event loop.
 *
 * @param delayMs - the delay to pass to `setTimeout`. (default: 0)
 * @returns a `scheduleTimeout(callback)` function; every callback scheduled
 * through it runs in the same timer phase. Achieved by copying the first
 * timer's `_idleStart` onto subsequent timers (see the module comment above).
 *
 * */ export function createAtomicTimerGroup(delayMs = 0) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new InvariantError('createAtomicTimerGroup cannot be called in the edge runtime'), "__NEXT_ERROR_CODE", {
            value: "E934",
            enumerable: false,
            configurable: true
        });
    } else {
        let isFirstCallback = true;
        let firstTimerIdleStart = null;
        let didFirstTimerRun = false;
        // As a sanity check, we schedule an immediate from the first timeout
        // to check if the execution was interrupted (i.e. if it ran between the timeouts).
        // Note that we're deliberately bypassing the "fast setImmediate" patch here --
        // otherwise, this check would always fail, because the immediate
        // would always run before the second timeout.
        let didImmediateRun = false;
        // Wrapper for the first scheduled callback: records that the group has
        // started running and schedules the sanity-check immediate.
        function runFirstCallback(callback) {
            didFirstTimerRun = true;
            if (shouldAttemptPatching) {
                unpatchedSetImmediate(()=>{
                    didImmediateRun = true;
                });
            }
            return callback();
        }
        // Wrapper for every later callback: verifies the sanity-check immediate
        // did not sneak in between the grouped timers.
        function runSubsequentCallback(callback) {
            if (shouldAttemptPatching) {
                if (didImmediateRun) {
                    // If the immediate managed to run between the timers, then we're not
                    // able to provide the guarantees that we're supposed to
                    shouldAttemptPatching = false;
                    warnAboutTimers();
                }
            }
            return callback();
        }
        return function scheduleTimeout(callback) {
            if (didFirstTimerRun) {
                throw Object.defineProperty(new InvariantError('Cannot schedule more timers into a group that already executed'), "__NEXT_ERROR_CODE", {
                    value: "E935",
                    enumerable: false,
                    configurable: true
                });
            }
            const timer = setTimeout(isFirstCallback ? runFirstCallback : runSubsequentCallback, delayMs, callback);
            isFirstCallback = false;
            if (!shouldAttemptPatching) {
                // We already tried patching some timers, and it didn't work.
                // No point trying again.
                return timer;
            }
            // NodeJS timers have a `_idleStart` property, but it doesn't exist e.g. in Bun.
            // If it's not present, we'll warn and try to continue.
            try {
                if ('_idleStart' in timer && typeof timer._idleStart === 'number') {
                    // If this is the first timer that was scheduled, save its `_idleStart`.
                    // We'll copy it onto subsequent timers to guarantee that they'll all be
                    // considered expired in the same iteration of the event loop
                    // and thus will all be executed in the same timer phase.
                    if (firstTimerIdleStart === null) {
                        firstTimerIdleStart = timer._idleStart;
                    } else {
                        timer._idleStart = firstTimerIdleStart;
                    }
                } else {
                    shouldAttemptPatching = false;
                    warnAboutTimers();
                }
            } catch (err) {
                // This should never fail in current Node, but it might start failing in the future.
                // We might be okay even without tweaking the timers, so warn and try to continue.
                console.error(Object.defineProperty(new InvariantError('An unexpected error occurred while adjusting `_idleStart` on an atomic timer', {
                    cause: err
                }), "__NEXT_ERROR_CODE", {
                    value: "E933",
                    enumerable: false,
                    configurable: true
                }));
                shouldAttemptPatching = false;
                warnAboutTimers();
            }
            return timer;
        };
    }
}
//# sourceMappingURL=app-render-scheduling.js.map

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,50 @@
const sharedAsyncLocalStorageNotAvailableError = Object.defineProperty(new Error('Invariant: AsyncLocalStorage accessed in runtime where it is not available'), "__NEXT_ERROR_CODE", {
value: "E504",
enumerable: false,
configurable: true
});
// Stand-in used when the runtime exposes no global AsyncLocalStorage:
// `getStore()` returns undefined, static `bind()` is a pass-through, and
// every other operation throws the shared "not available" invariant error.
class FakeAsyncLocalStorage {
    disable() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    getStore() {
        // This fake implementation of AsyncLocalStorage always returns `undefined`.
        return undefined;
    }
    run() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    exit() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    enterWith() {
        throw sharedAsyncLocalStorageNotAvailableError;
    }
    // With no real storage there is no context to capture, so the function
    // is returned unchanged.
    static bind(fn) {
        return fn;
    }
}
const maybeGlobalAsyncLocalStorage = typeof globalThis !== 'undefined' && globalThis.AsyncLocalStorage;
// Creates a real AsyncLocalStorage when the runtime provides one, otherwise
// falls back to the inert FakeAsyncLocalStorage.
export function createAsyncLocalStorage() {
    return maybeGlobalAsyncLocalStorage ? new maybeGlobalAsyncLocalStorage() : new FakeAsyncLocalStorage();
}
// Binds `fn` to the current async context when a real AsyncLocalStorage is
// available; otherwise the fake's pass-through `bind` returns `fn` unchanged.
export function bindSnapshot(// WARNING: Don't pass a named function to this argument! See: https://github.com/facebook/react/pull/34911
fn) {
    const storage = maybeGlobalAsyncLocalStorage;
    return storage ? storage.bind(fn) : FakeAsyncLocalStorage.bind(fn);
}
// Captures the current async context as a function that runs callbacks inside
// it. Without a real AsyncLocalStorage there is no context to capture, so the
// fallback simply invokes the callback directly.
export function createSnapshot() {
    if (maybeGlobalAsyncLocalStorage) {
        return maybeGlobalAsyncLocalStorage.snapshot();
    }
    return function runDirectly(fn, ...args) {
        return fn(...args);
    };
}
//# sourceMappingURL=async-local-storage.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/async-local-storage.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\nconst sharedAsyncLocalStorageNotAvailableError = new Error(\n 'Invariant: AsyncLocalStorage accessed in runtime where it is not available'\n)\n\nclass FakeAsyncLocalStorage<Store extends {}>\n implements AsyncLocalStorage<Store>\n{\n disable(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n getStore(): Store | undefined {\n // This fake implementation of AsyncLocalStorage always returns `undefined`.\n return undefined\n }\n\n run<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n exit<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n enterWith(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n static bind<T>(fn: T): T {\n return fn\n }\n}\n\nconst maybeGlobalAsyncLocalStorage =\n typeof globalThis !== 'undefined' && (globalThis as any).AsyncLocalStorage\n\nexport function createAsyncLocalStorage<\n Store extends {},\n>(): AsyncLocalStorage<Store> {\n if (maybeGlobalAsyncLocalStorage) {\n return new maybeGlobalAsyncLocalStorage()\n }\n return new FakeAsyncLocalStorage()\n}\n\nexport function bindSnapshot<T>(\n // WARNING: Don't pass a named function to this argument! 
See: https://github.com/facebook/react/pull/34911\n fn: T\n): T {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.bind(fn)\n }\n return FakeAsyncLocalStorage.bind(fn)\n}\n\nexport function createSnapshot(): <R, TArgs extends any[]>(\n fn: (...args: TArgs) => R,\n ...args: TArgs\n) => R {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.snapshot()\n }\n return function (fn: any, ...args: any[]) {\n return fn(...args)\n }\n}\n"],"names":["sharedAsyncLocalStorageNotAvailableError","Error","FakeAsyncLocalStorage","disable","getStore","undefined","run","exit","enterWith","bind","fn","maybeGlobalAsyncLocalStorage","globalThis","AsyncLocalStorage","createAsyncLocalStorage","bindSnapshot","createSnapshot","snapshot","args"],"mappings":"AAEA,MAAMA,2CAA2C,qBAEhD,CAFgD,IAAIC,MACnD,+EAD+C,qBAAA;WAAA;gBAAA;kBAAA;AAEjD;AAEA,MAAMC;IAGJC,UAAgB;QACd,MAAMH;IACR;IAEAI,WAA8B;QAC5B,4EAA4E;QAC5E,OAAOC;IACT;IAEAC,MAAY;QACV,MAAMN;IACR;IAEAO,OAAa;QACX,MAAMP;IACR;IAEAQ,YAAkB;QAChB,MAAMR;IACR;IAEA,OAAOS,KAAQC,EAAK,EAAK;QACvB,OAAOA;IACT;AACF;AAEA,MAAMC,+BACJ,OAAOC,eAAe,eAAe,AAACA,WAAmBC,iBAAiB;AAE5E,OAAO,SAASC;IAGd,IAAIH,8BAA8B;QAChC,OAAO,IAAIA;IACb;IACA,OAAO,IAAIT;AACb;AAEA,OAAO,SAASa,aACd,2GAA2G;AAC3GL,EAAK;IAEL,IAAIC,8BAA8B;QAChC,OAAOA,6BAA6BF,IAAI,CAACC;IAC3C;IACA,OAAOR,sBAAsBO,IAAI,CAACC;AACpC;AAEA,OAAO,SAASM;IAId,IAAIL,8BAA8B;QAChC,OAAOA,6BAA6BM,QAAQ;IAC9C;IACA,OAAO,SAAUP,EAAO,EAAE,GAAGQ,IAAW;QACtC,OAAOR,MAAMQ;IACf;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,171 @@
/**
* This class is used to detect when all cache reads for a given render are settled.
* We do this to allow for cache warming the prerender without having to continue rendering
* the remainder of the page. This feature is really only useful when the cacheComponents flag is on
* and should only be used in codepaths gated with this feature.
*/ import { InvariantError } from '../../shared/lib/invariant-error';
export class CacheSignal {
    constructor(){
        // Number of cache reads currently in flight.
        this.count = 0;
        // Resolvers for inputReady() waiters — notified as soon as the count
        // hits zero (after a nextTick), before `listeners`.
        this.earlyListeners = [];
        // Resolvers for cacheReady() waiters — notified only after the count
        // stays zero across the immediate+timeout scheduled below.
        this.listeners = [];
        // Guards against scheduling more than one nextTick for earlyListeners.
        this.tickPending = false;
        // Cancels the pending immediate/timeout pair for `listeners`, if any.
        this.pendingTimeoutCleanup = null;
        // Other CacheSignals mirroring this signal's begin/end reads, or null.
        this.subscribedSignals = null;
        // Runs after the immediate+timeout round trip; only notifies if no new
        // reads started while we waited.
        this.invokeListenersIfNoPendingReads = ()=>{
            this.pendingTimeoutCleanup = null;
            if (this.count === 0) {
                for(let i = 0; i < this.listeners.length; i++){
                    this.listeners[i]();
                }
                this.listeners.length = 0;
            }
        };
        if (process.env.NEXT_RUNTIME === 'edge') {
            // we rely on `process.nextTick`, which is not supported in edge
            throw Object.defineProperty(new InvariantError('CacheSignal cannot be used in the edge runtime, because `cacheComponents` does not support it.'), "__NEXT_ERROR_CODE", {
                value: "E728",
                enumerable: false,
                configurable: true
            });
        }
    }
    // Called whenever the in-flight count reaches zero: notifies earlyListeners
    // after a nextTick and (re)schedules the slower notification of listeners.
    noMorePendingCaches() {
        if (!this.tickPending) {
            this.tickPending = true;
            queueMicrotask(()=>process.nextTick(()=>{
                this.tickPending = false;
                if (this.count === 0) {
                    for(let i = 0; i < this.earlyListeners.length; i++){
                        this.earlyListeners[i]();
                    }
                    this.earlyListeners.length = 0;
                }
            }));
        }
        // After a cache resolves, React will schedule new rendering work:
        // - in a microtask (when prerendering)
        // - in setImmediate (when rendering)
        // To cover both of these, we have to make sure that we let immediates execute at least once after each cache resolved.
        // We don't know when the pending timeout was scheduled (and if it's about to resolve),
        // so by scheduling a new one, we can be sure that we'll go around the event loop at least once.
        if (this.pendingTimeoutCleanup) {
            // We cancel the timeout in beginRead, so this shouldn't ever be active here,
            // but we still cancel it defensively.
            this.pendingTimeoutCleanup();
        }
        this.pendingTimeoutCleanup = scheduleImmediateAndTimeoutWithCleanup(this.invokeListenersIfNoPendingReads);
    }
    /**
     * This promise waits until there are no more in progress cache reads but no later.
     * This allows for adding more cache reads after to delay cacheReady.
     */ inputReady() {
        return new Promise((resolve)=>{
            this.earlyListeners.push(resolve);
            if (this.count === 0) {
                this.noMorePendingCaches();
            }
        });
    }
    /**
     * If there are inflight cache reads this Promise can resolve in a microtask however
     * if there are no inflight cache reads then we wait at least one task to allow initial
     * cache reads to be initiated.
     */ cacheReady() {
        return new Promise((resolve)=>{
            this.listeners.push(resolve);
            if (this.count === 0) {
                this.noMorePendingCaches();
            }
        });
    }
    // Records the start of a cache read and propagates it to any subscribers.
    beginRead() {
        this.count++;
        // There's a new pending cache, so if there's a `noMorePendingCaches` timeout running,
        // we should cancel it.
        if (this.pendingTimeoutCleanup) {
            this.pendingTimeoutCleanup();
            this.pendingTimeoutCleanup = null;
        }
        if (this.subscribedSignals !== null) {
            for (const subscriber of this.subscribedSignals){
                subscriber.beginRead();
            }
        }
    }
    // Records the end of a cache read; must be balanced with beginRead().
    endRead() {
        if (this.count === 0) {
            throw Object.defineProperty(new InvariantError('CacheSignal got more endRead() calls than beginRead() calls'), "__NEXT_ERROR_CODE", {
                value: "E678",
                enumerable: false,
                configurable: true
            });
        }
        // If this is the last read we need to wait a task before we can claim the cache is settled.
        // The cache read will likely ping a Server Component which can read from the cache again and this
        // will play out in a microtask so we need to only resolve pending listeners if we're still at 0
        // after at least one task.
        // We only want one task scheduled at a time so when we hit count 1 we don't decrement the counter immediately.
        // If intervening reads happen before the scheduled task runs they will never observe count 1 preventing reentrency
        this.count--;
        if (this.count === 0) {
            this.noMorePendingCaches();
        }
        if (this.subscribedSignals !== null) {
            for (const subscriber of this.subscribedSignals){
                subscriber.endRead();
            }
        }
    }
    hasPendingReads() {
        return this.count > 0;
    }
    // Wraps a cache-read promise in a beginRead/endRead pair; the promise is
    // returned unchanged so callers can still await (and observe rejections of)
    // the original.
    trackRead(promise) {
        this.beginRead();
        // `promise.finally()` still rejects, so don't use it here to avoid unhandled rejections
        const onFinally = this.endRead.bind(this);
        promise.then(onFinally, onFinally);
        return promise;
    }
    // Mirrors this signal's begin/end reads onto `subscriber`. Returns an
    // unsubscribe function.
    subscribeToReads(subscriber) {
        if (subscriber === this) {
            throw Object.defineProperty(new InvariantError('A CacheSignal cannot subscribe to itself'), "__NEXT_ERROR_CODE", {
                value: "E679",
                enumerable: false,
                configurable: true
            });
        }
        if (this.subscribedSignals === null) {
            this.subscribedSignals = new Set();
        }
        this.subscribedSignals.add(subscriber);
        // we'll notify the subscriber of each endRead() on this signal,
        // so we need to give it a corresponding beginRead() for each read we have in flight now.
        for(let i = 0; i < this.count; i++){
            subscriber.beginRead();
        }
        return this.unsubscribeFromReads.bind(this, subscriber);
    }
    unsubscribeFromReads(subscriber) {
        if (!this.subscribedSignals) {
            return;
        }
        this.subscribedSignals.delete(subscriber);
        // we don't need to set the set back to `null` if it's empty --
        // if other signals are subscribing to this one, it'll likely get more subscriptions later,
        // so we'd have to allocate a fresh set again when that happens.
    }
}
// Schedules `cb` to run after one setImmediate followed by one setTimeout(0),
// returning a cancel function that clears whichever handle (immediate or
// timeout) is currently pending.
function scheduleImmediateAndTimeoutWithCleanup(cb) {
    let cancelCurrent;
    const immediateHandle = setImmediate(()=>{
        // The immediate fired; the timeout is now the pending handle.
        const timeoutHandle = setTimeout(cb, 0);
        cancelCurrent = ()=>clearTimeout(timeoutHandle);
    });
    cancelCurrent = ()=>clearImmediate(immediateHandle);
    return ()=>cancelCurrent();
}
//# sourceMappingURL=cache-signal.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,588 @@
/* eslint-disable @next/internal/no-ambiguous-jsx -- Bundled in entry-base so it gets the right JSX runtime. */ import { jsx as _jsx } from "react/jsx-runtime";
import { PrefetchHint } from '../../shared/lib/app-router-types';
import { readVaryParams } from '../../shared/lib/segment-cache/vary-params-decoding';
import { PAGE_SEGMENT_KEY } from '../../shared/lib/segment';
// eslint-disable-next-line import/no-extraneous-dependencies
import { createFromReadableStream } from 'react-server-dom-webpack/client';
// eslint-disable-next-line import/no-extraneous-dependencies
import { prerender } from 'react-server-dom-webpack/static';
import { streamFromBuffer, streamToBuffer } from '../stream-utils/node-web-streams-helper';
import { waitAtLeastOneReactRenderTask } from '../../lib/scheduler';
import { createSegmentRequestKeyPart, appendSegmentRequestKeyPart, ROOT_SEGMENT_REQUEST_KEY, HEAD_REQUEST_KEY } from '../../shared/lib/segment-cache/segment-value-encoding';
import { getDigestForWellKnownError } from './create-error-handler';
import { Phase, printDebugThrownValueForProspectiveRender } from './prospective-render-utils';
import { workAsyncStorage } from './work-async-storage.external';
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined;
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').findSourceMapURLDEV : undefined;
// Error handler for the segment prerender pass. Forwards the digest of
// well-known errors; otherwise optionally prints debug output and returns
// undefined (no digest).
function onSegmentPrerenderError(error) {
    const knownDigest = getDigestForWellKnownError(error);
    if (knownDigest) {
        return knownDigest;
    }
    // We don't need to log the errors because we would have already done that
    // when generating the original Flight stream for the whole page.
    if (process.env.NEXT_DEBUG_BUILD || process.env.__NEXT_VERBOSE_LOGGING) {
        const workStore = workAsyncStorage.getStore();
        const route = workStore == null ? undefined : workStore.route;
        printDebugThrownValueForProspectiveRender(error, route ?? 'unknown route', Phase.SegmentCollection);
    }
}
/**
 * Extract the FlightRouterState, seed data, and head from a prerendered
 * InitialRSCPayload. Returns null if the payload doesn't match the expected
 * shape (single path with 3 elements).
 */ function extractFlightData(initialRSCPayload) {
    const flightDataPaths = initialRSCPayload.f;
    // FlightDataPath is an unsound type, hence the additional checks.
    // NOTE: reject when EITHER the number of paths is not exactly 1 OR the
    // single path does not have 3 elements. The previous `&&` both let
    // multi-path payloads through and crashed with a TypeError on an empty
    // `f` array (reading `.length` of `undefined`).
    if (flightDataPaths.length !== 1 || flightDataPaths[0].length !== 3) {
        console.error('Internal Next.js error: InitialRSCPayload does not match the expected ' + 'shape for a prerendered page during segment prefetch generation.');
        return null;
    }
    return {
        buildId: initialRSCPayload.b,
        flightRouterState: flightDataPaths[0][0],
        seedData: flightDataPaths[0][1],
        head: flightDataPaths[0][2]
    };
}
/**
 * Generate the segment prefetch responses for a fully prerendered page.
 *
 * Decodes the full-page Flight stream once to warm the module cache, renders
 * a '/_tree' route-tree response, stores a '/_full' copy of the whole page
 * data, and collects one response per segment (tasks spawned while walking
 * the tree inside PrefetchTreeData). Returns a map of segment path ->
 * serialized Flight buffer.
 */ export async function collectSegmentData(isCacheComponentsEnabled, fullPageDataBuffer, staleTime, clientModules, serverConsumerManifest, prefetchInlining, hints) {
    // Traverse the router tree and generate a prefetch response for each segment.
    // A mutable map to collect the results as we traverse the route tree.
    const resultMap = new Map();
    // Before we start, warm up the module cache by decoding the page data once.
    // Then we can assume that any remaining async tasks that occur the next time
    // are due to hanging promises caused by dynamic data access. Note we only
    // have to do this once per page, not per individual segment.
    //
    // Errors here are deliberately swallowed: any real problem will surface
    // again below when the segment streams are actually produced.
    try {
        await createFromReadableStream(streamFromBuffer(fullPageDataBuffer), {
            findSourceMapURL,
            serverConsumerManifest
        });
        await waitAtLeastOneReactRenderTask();
    } catch {}
    // Create an abort controller that we'll use to stop the stream.
    const abortController = new AbortController();
    const onCompletedProcessingRouteTree = async ()=>{
        // Since all we're doing is decoding and re-encoding a cached prerender, if
        // serializing the stream takes longer than a microtask, it must be because
        // of hanging promises caused by dynamic data.
        await waitAtLeastOneReactRenderTask();
        abortController.abort();
    };
    // Generate a stream for the route tree prefetch. While we're walking the
    // tree, we'll also spawn additional tasks to generate the segment prefetches.
    // The promises for these tasks are pushed to a mutable array that we will
    // await once the route tree is fully rendered.
    const segmentTasks = [];
    const { prelude: treeStream } = await prerender(// RootTreePrefetch is not a valid return type for a React component, but
    // we need to use a component so that when we decode the original stream
    // inside of it, the side effects are transferred to the new stream.
    // @ts-expect-error
    /*#__PURE__*/ _jsx(PrefetchTreeData, {
        isClientParamParsingEnabled: isCacheComponentsEnabled,
        fullPageDataBuffer: fullPageDataBuffer,
        serverConsumerManifest: serverConsumerManifest,
        clientModules: clientModules,
        staleTime: staleTime,
        segmentTasks: segmentTasks,
        onCompletedProcessingRouteTree: onCompletedProcessingRouteTree,
        prefetchInlining: prefetchInlining,
        hints: hints
    }), clientModules, {
        filterStackFrame,
        signal: abortController.signal,
        onError: onSegmentPrerenderError
    });
    // Write the route tree to a special `/_tree` segment.
    const treeBuffer = await streamToBuffer(treeStream);
    resultMap.set('/_tree', treeBuffer);
    // Also output the entire full page data response
    resultMap.set('/_full', fullPageDataBuffer);
    // Now that we've finished rendering the route tree, all the segment tasks
    // should have been spawned. Await them in parallel and write the segment
    // prefetches to the result map.
    for (const [segmentPath, buffer] of (await Promise.all(segmentTasks))){
        resultMap.set(segmentPath, buffer);
    }
    return resultMap;
}
/**
* Compute prefetch hints for a route by measuring segment sizes and deciding
* which segments should be inlined. Only runs at build time. The results are
* written to prefetch-hints.json and loaded at server startup.
*
* This is a separate pass from collectSegmentData so that the inlining
* decisions can be fed back into collectSegmentData to control which segments
* are output as separate entries vs. inlined into their parent.
 */ export async function collectPrefetchHints(fullPageDataBuffer, staleTime, clientModules, serverConsumerManifest, maxSize, maxBundleSize) {
    // Warm up the module cache, same as collectSegmentData. Errors are
    // swallowed on purpose — this pass is only priming the cache.
    try {
        await createFromReadableStream(streamFromBuffer(fullPageDataBuffer), {
            findSourceMapURL,
            serverConsumerManifest
        });
        await waitAtLeastOneReactRenderTask();
    } catch {}
    // Decode the Flight data to walk the route tree.
    const initialRSCPayload = await createFromReadableStream(createUnclosingPrefetchStream(streamFromBuffer(fullPageDataBuffer)), {
        findSourceMapURL,
        serverConsumerManifest
    });
    const flightData = extractFlightData(initialRSCPayload);
    if (flightData === null) {
        // Malformed payload: fall back to an empty hint tree rather than
        // failing the build.
        return {
            hints: 0,
            slots: null
        };
    }
    const { buildId, flightRouterState, seedData, head } = flightData;
    // Measure the head (metadata/viewport) gzip size so the main traversal
    // can decide whether to inline it into a page's bundle.
    const headVaryParamsThenable = initialRSCPayload.h;
    const headVaryParams = headVaryParamsThenable !== null ? readVaryParams(headVaryParamsThenable) : null;
    const [, headBuffer] = await renderSegmentPrefetch(buildId, staleTime, head, HEAD_REQUEST_KEY, headVaryParams, clientModules);
    const headGzipSize = await getGzipSize(headBuffer);
    // Mutable accumulator: the first page leaf that can fit the head sets
    // this to true. Once set, subsequent leaves skip the check.
    const headInlineState = {
        inlined: false
    };
    // Walk the tree with the parent-first, child-decides algorithm.
    const { node } = await collectPrefetchHintsImpl(flightRouterState, buildId, staleTime, seedData, clientModules, ROOT_SEGMENT_REQUEST_KEY, null, maxSize, maxBundleSize, headGzipSize, headInlineState);
    if (!headInlineState.inlined) {
        // No page could accept the head. Set HeadOutlined on the root so the
        // client knows to fetch the head separately.
        node.hints |= PrefetchHint.HeadOutlined;
    }
    return node;
}
// Measure a segment's gzip size and decide whether it should be inlined.
//
// These hints are computed once during build and never change for the
// lifetime of that deployment. The client can assume that hints delivered as
// part of one request will be the same during a subsequent request, given
// the same build ID. There's no skew to worry about as long as the build
// itself is consistent.
//
// In the Segment Cache, we split page prefetches into multiple requests so
// that each one can be cached and deduped independently. However, some
// segments are small enough that the potential caching benefits are not worth
// the additional network overhead. For these, we inline a parent's data into
// one of its children's responses, avoiding a separate request. The parent
// is inlined into the child (not the other way around) because the parent's
// response is more likely to be shared across multiple pages. The child's
// response is already page-specific, so adding the parent's data there
// doesn't meaningfully reduce deduplication. It's similar to how JS bundlers
// decide whether to inline a module into a chunk.
//
// The algorithm is parent-first, child-decides: the parent measures itself
// and passes its gzip size down. Each child decides whether to accept. A
// child rejects if the parent exceeds maxSize or if accepting would push
// the cumulative inlined bytes past maxBundleSize. This produces
// both ParentInlinedIntoSelf (on the child) and InlinedIntoChild (on the
// parent) in a single pass.
// Walk one route subtree, measuring segment sizes and assigning inlining
// hints (see the algorithm description above). Returns
// `{ node, inlinedBytes }` where `node` is the hint tree for this segment
// and `inlinedBytes` is the total gzipped parent/head bytes already inlined
// along this branch's best path.
async function collectPrefetchHintsImpl(route, buildId, staleTime, seedData, clientModules, // TODO: Consider persisting the computed requestKey into the hints output
// so it doesn't need to be recomputed during the build. This might also
// suggest renaming prefetch-hints.json to something like
// segment-manifest.json, since it would contain more than just hints.
requestKey, parentGzipSize, maxSize, maxBundleSize, headGzipSize, headInlineState) {
    // Render current segment and measure its gzip size.
    let currentGzipSize = null;
    if (seedData !== null) {
        const varyParamsThenable = seedData[4];
        const varyParams = varyParamsThenable !== null ? readVaryParams(varyParamsThenable) : null;
        const [, buffer] = await renderSegmentPrefetch(buildId, staleTime, seedData[0], requestKey, varyParams, clientModules);
        currentGzipSize = await getGzipSize(buffer);
    }
    // Only offer this segment to its children for inlining if its gzip size
    // is below maxSize. Segments above this get their own response.
    const sizeToInline = currentGzipSize !== null && currentGzipSize < maxSize ? currentGzipSize : null;
    // Process children serially (not in parallel) to ensure deterministic
    // results. Since this only runs at build time and the rendering is just
    // re-encoding cached prerenders, this won't impact build times. Each child
    // receives our gzip size and decides whether to inline us. Once a child
    // accepts, we stop offering to remaining siblings — the parent is only
    // inlined into one child. In parallel routes, this avoids duplicating the
    // parent's data across multiple sibling responses.
    const children = route[1];
    const seedDataChildren = seedData !== null ? seedData[1] : null;
    let slots = null;
    let didInlineIntoChild = false;
    let acceptingChildInlinedBytes = 0;
    // Track the smallest inlinedBytes across all children so we know how much
    // budget remains along the best path. When our own parent asks whether we
    // can accept its data, the parent's bytes would flow through to the child
    // with the most remaining headroom.
    let smallestChildInlinedBytes = Infinity;
    let hasChildren = false;
    for(const parallelRouteKey in children){
        hasChildren = true;
        const childRoute = children[parallelRouteKey];
        const childSegment = childRoute[0];
        const childSeedData = seedDataChildren !== null ? seedDataChildren[parallelRouteKey] : null;
        const childRequestKey = appendSegmentRequestKeyPart(requestKey, parallelRouteKey, createSegmentRequestKeyPart(childSegment));
        const childResult = await collectPrefetchHintsImpl(childRoute, buildId, staleTime, childSeedData, clientModules, childRequestKey, // Once a child has accepted us, stop offering to remaining siblings.
        didInlineIntoChild ? null : sizeToInline, maxSize, maxBundleSize, headGzipSize, headInlineState);
        if (slots === null) {
            slots = {};
        }
        slots[parallelRouteKey] = childResult.node;
        if (childResult.node.hints & PrefetchHint.ParentInlinedIntoSelf) {
            // This child accepted our data — it will include our segment's
            // response in its own. No need to track headroom anymore since
            // we already know which child we're inlined into.
            didInlineIntoChild = true;
            acceptingChildInlinedBytes = childResult.inlinedBytes;
        } else if (!didInlineIntoChild) {
            // Track the child with the most remaining headroom. Used below
            // when deciding whether to accept our own parent's data.
            if (childResult.inlinedBytes < smallestChildInlinedBytes) {
                smallestChildInlinedBytes = childResult.inlinedBytes;
            }
        }
    }
    // Leaf segment: no children have consumed any budget yet.
    if (!hasChildren) {
        smallestChildInlinedBytes = 0;
    }
    // Mark this segment as InlinedIntoChild if one of its children accepted.
    // This means this segment doesn't need its own prefetch response — its
    // data is included in the accepting child's response instead.
    let hints = 0;
    if (didInlineIntoChild) {
        hints |= PrefetchHint.InlinedIntoChild;
    }
    // inlinedBytes represents the total gzipped bytes of parent data inlined
    // into the deepest "inlining target" along this branch. It starts at 0 at
    // the leaves and grows as parents are inlined going back up the tree. If a
    // child accepted us, our size is already counted in that child's value.
    let inlinedBytes = didInlineIntoChild ? acceptingChildInlinedBytes : smallestChildInlinedBytes;
    // At leaf nodes (pages), try to inline the head (metadata/viewport) into
    // this page's response. The head is treated like an additional inlined
    // entry — it counts against the same total budget. Only the first page
    // that has room gets the head; subsequent pages skip via the shared
    // headInlineState accumulator.
    if (!hasChildren && !headInlineState.inlined) {
        if (inlinedBytes + headGzipSize < maxBundleSize) {
            hints |= PrefetchHint.HeadInlinedIntoSelf;
            inlinedBytes += headGzipSize;
            headInlineState.inlined = true;
        }
    }
    // Decide whether to accept our own parent's data. Two conditions:
    //
    // 1. The parent offered us a size (parentGzipSize is not null). It's null
    //    when the parent is too large to inline or when this is the root.
    //
    // 2. The total inlined bytes along this branch wouldn't exceed the budget.
    //    Even if each segment is individually small, at some point it no
    //    longer makes sense to keep adding bytes because the combined response
    //    is unique per URL and can't be deduped.
    //
    // A node can be both InlinedIntoChild and ParentInlinedIntoSelf. This
    // happens in multi-level chains: GP → P → C where all are small. C
    // accepts P (P is InlinedIntoChild), then P also accepts GP (P is
    // ParentInlinedIntoSelf). The result: C's response includes both P's
    // and GP's data. The parent's data flows through to the deepest
    // accepting descendant.
    if (parentGzipSize !== null) {
        if (inlinedBytes + parentGzipSize < maxBundleSize) {
            hints |= PrefetchHint.ParentInlinedIntoSelf;
            inlinedBytes += parentGzipSize;
        }
    }
    return {
        node: {
            hints,
            slots
        },
        inlinedBytes
    };
}
// We use gzip size rather than raw size because it better reflects the actual
// transfer cost. The inlining trade-off is about whether the overhead of an
// additional HTTP request (connection setup, headers, round trip) is worth
// the deduplication benefit of keeping a segment separate. Below some
// compressed size, the request overhead dominates and inlining is better.
// Above it, the deduplication benefit of a cacheable standalone response
// wins out.
// Gzip-compress the buffer (via the web CompressionStream API) and return
// the compressed byte count. Gzip size is used instead of raw size because
// it better approximates actual transfer cost — see the rationale above.
async function getGzipSize(buffer) {
    const compressed = new Blob([
        new Uint8Array(buffer)
    ]).stream().pipeThrough(new CompressionStream('gzip'));
    // Sum the compressed chunks directly rather than materializing a Blob.
    const reader = compressed.getReader();
    let totalBytes = 0;
    for(;;){
        const { done, value } = await reader.read();
        if (done) {
            return totalBytes;
        }
        totalBytes += value.byteLength;
    }
}
// Server Component that renders the `/_tree` route-tree prefetch payload.
// Rendering it as a component (rather than a plain value) is what transfers
// the original stream's side effects (e.g. Float preloads) onto the new
// stream — see the comment below. As it walks the tree it also spawns the
// per-segment serialization tasks into the shared `segmentTasks` array.
async function PrefetchTreeData({ isClientParamParsingEnabled, fullPageDataBuffer, serverConsumerManifest, clientModules, staleTime, segmentTasks, onCompletedProcessingRouteTree, prefetchInlining, hints }) {
    // We're currently rendering a Flight response for the route tree prefetch.
    // Inside this component, decode the Flight stream for the whole page. This is
    // a hack to transfer the side effects from the original Flight stream (e.g.
    // Float preloads) onto the Flight stream for the tree prefetch.
    // TODO: React needs a better way to do this. Needed for Server Actions, too.
    const initialRSCPayload = await createFromReadableStream(createUnclosingPrefetchStream(streamFromBuffer(fullPageDataBuffer)), {
        findSourceMapURL,
        serverConsumerManifest
    });
    const flightData = extractFlightData(initialRSCPayload);
    if (flightData === null) {
        return null;
    }
    const { buildId, flightRouterState, seedData, head } = flightData;
    // Extract the head vary params from the decoded response.
    // The head vary params thenable should be fulfilled by now; if not, treat
    // as unknown (null).
    const headVaryParamsThenable = initialRSCPayload.h;
    const headVaryParams = headVaryParamsThenable !== null ? readVaryParams(headVaryParamsThenable) : null;
    // Compute the route metadata tree by traversing the FlightRouterState. As we
    // walk the tree, we will also spawn a task to produce a prefetch response for
    // each segment (unless prefetch inlining is enabled, in which case all
    // segments are bundled into a single /_inlined response).
    const tree = collectSegmentDataImpl(isClientParamParsingEnabled, flightRouterState, buildId, staleTime, seedData, clientModules, ROOT_SEGMENT_REQUEST_KEY, segmentTasks, prefetchInlining, hints);
    if (prefetchInlining) {
        // When prefetch inlining is enabled, bundle all segment data into a single
        // /_inlined response instead of individual per-segment responses. The head
        // is also included in the inlined response.
        segmentTasks.push(waitAtLeastOneReactRenderTask().then(()=>renderInlinedPrefetchResponse(flightRouterState, buildId, staleTime, seedData, head, headVaryParams, clientModules)));
    } else {
        // Also spawn a task to produce a prefetch response for the "head" segment.
        // The head contains metadata, like the title; it's not really a route
        // segment, but it contains RSC data, so it's treated like a segment by
        // the client cache.
        segmentTasks.push(waitAtLeastOneReactRenderTask().then(()=>renderSegmentPrefetch(buildId, staleTime, head, HEAD_REQUEST_KEY, headVaryParams, clientModules)));
    }
    // Notify the abort controller that we're done processing the route tree.
    // Anything async that happens after this point must be due to hanging
    // promises in the original stream.
    onCompletedProcessingRouteTree();
    // Render the route tree to a special `/_tree` segment.
    const treePrefetch = {
        tree,
        staleTime
    };
    if (buildId) {
        treePrefetch.buildId = buildId;
    }
    return treePrefetch;
}
// Recursively build the route-tree metadata node for one segment (sent to
// the client in the '/_tree' response) and, unless prefetch inlining is
// enabled, push a serialization task per segment into `segmentTasks`.
function collectSegmentDataImpl(isClientParamParsingEnabled, route, buildId, staleTime, seedData, clientModules, requestKey, segmentTasks, prefetchInlining, hintTree) {
    // Metadata about the segment. Sent as part of the tree prefetch. Null if
    // there are no children.
    let slotMetadata = null;
    const children = route[1];
    const seedDataChildren = seedData !== null ? seedData[1] : null;
    for(const parallelRouteKey in children){
        const childRoute = children[parallelRouteKey];
        const childSegment = childRoute[0];
        const childSeedData = seedDataChildren !== null ? seedDataChildren[parallelRouteKey] : null;
        const childRequestKey = appendSegmentRequestKeyPart(requestKey, parallelRouteKey, createSegmentRequestKeyPart(childSegment));
        const childHintTree = hintTree !== null && hintTree.slots !== null ? hintTree.slots[parallelRouteKey] ?? null : null;
        const childTree = collectSegmentDataImpl(isClientParamParsingEnabled, childRoute, buildId, staleTime, childSeedData, clientModules, childRequestKey, segmentTasks, prefetchInlining, childHintTree);
        if (slotMetadata === null) {
            slotMetadata = {};
        }
        slotMetadata[parallelRouteKey] = childTree;
    }
    // Union the hints already embedded in the FlightRouterState with the
    // separately-computed build-time hints. During the initial build, the
    // FlightRouterState was produced before collectPrefetchHints ran, so
    // inlining hints (ParentInlinedIntoSelf, InlinedIntoChild) won't be in
    // route[4] yet. On subsequent renders the hints are already in the
    // FlightRouterState, so the union is idempotent.
    const prefetchHints = (route[4] ?? 0) | (hintTree !== null ? hintTree.hints : 0);
    // Determine which params this segment varies on.
    // Read the vary params thenable directly from the seed data. By the time
    // collectSegmentData runs, the thenable should be fulfilled. If it's not
    // fulfilled or null, treat as unknown (null means we can't share cache
    // entries across param values).
    const varyParamsThenable = seedData !== null ? seedData[4] : null;
    const varyParams = varyParamsThenable !== null ? readVaryParams(varyParamsThenable) : null;
    if (!prefetchInlining) {
        // When prefetch inlining is disabled, spawn individual segment tasks.
        // When enabled, segment data is bundled into the /_inlined response
        // instead, so we skip per-segment tasks here.
        if (seedData !== null) {
            // Spawn a task to write the segment data to a new Flight stream.
            segmentTasks.push(// Since we're already in the middle of a render, wait until after the
            // current task to escape the current rendering context.
            waitAtLeastOneReactRenderTask().then(()=>renderSegmentPrefetch(buildId, staleTime, seedData[0], requestKey, varyParams, clientModules)));
        } else {
        // This segment does not have any seed data. Skip generating a prefetch
        // response for it. We'll still include it in the route tree, though.
        // TODO: We should encode in the route tree whether a segment is missing
        // so we don't attempt to fetch it for no reason. As of now this shouldn't
        // ever happen in practice, though.
        }
    }
    // A segment is either a plain string or a dynamic-param tuple
    // [name, value, type, siblings].
    const segment = route[0];
    let name;
    let param;
    if (typeof segment === 'string') {
        name = segment;
        param = null;
    } else {
        name = segment[0];
        param = {
            type: segment[2],
            // This value is omitted from the prefetch response when cacheComponents
            // is enabled.
            key: isClientParamParsingEnabled ? null : segment[1],
            siblings: segment[3]
        };
    }
    // Metadata about the segment. Sent to the client as part of the
    // tree prefetch.
    return {
        name,
        param,
        prefetchHints,
        slots: slotMetadata
    };
}
// Serialize a single segment's RSC data into its own Flight prefetch
// response. Returns a [segmentPath, buffer] pair; the root segment is
// stored under the special '/_index' key.
async function renderSegmentPrefetch(buildId, staleTime, rsc, requestKey, varyParams, clientModules) {
    const partial = await isPartialRSCData(rsc, clientModules);
    const segmentPrefetch = {
        rsc,
        isPartial: partial,
        staleTime,
        varyParams
    };
    if (buildId) {
        segmentPrefetch.buildId = buildId;
    }
    // Since all we're doing is decoding and re-encoding a cached prerender,
    // anything that takes longer than a single task must be a hanging promise
    // caused by dynamic data — sever the stream at the end of this task.
    const controller = new AbortController();
    waitAtLeastOneReactRenderTask().then(()=>controller.abort());
    const { prelude } = await prerender(segmentPrefetch, clientModules, {
        filterStackFrame,
        signal: controller.signal,
        onError: onSegmentPrerenderError
    });
    const segmentBuffer = await streamToBuffer(prelude);
    const segmentPath = requestKey === ROOT_SEGMENT_REQUEST_KEY ? '/_index' : requestKey;
    return [
        segmentPath,
        segmentBuffer
    ];
}
// Bundle the entire route — every segment plus the head — into one Flight
// response, keyed under '/' + PAGE_SEGMENT_KEY. Used when prefetch inlining
// is enabled instead of per-segment responses.
async function renderInlinedPrefetchResponse(route, buildId, staleTime, seedData, head, headVaryParams, clientModules) {
    // Recursively gather all segment data for the route.
    const inlinedTree = await buildInlinedSegmentPrefetch(route, buildId, staleTime, seedData, clientModules);
    // The head (metadata/viewport) rides along in the same response.
    const headIsPartial = await isPartialRSCData(head, clientModules);
    const headPrefetch = {
        rsc: head,
        isPartial: headIsPartial,
        staleTime,
        varyParams: headVaryParams
    };
    if (buildId) {
        headPrefetch.buildId = buildId;
    }
    const response = {
        tree: inlinedTree,
        head: headPrefetch
    };
    // Encode as a single Flight response; abort after one task to sever any
    // hanging promises introduced by dynamic data.
    const controller = new AbortController();
    waitAtLeastOneReactRenderTask().then(()=>controller.abort());
    const { prelude } = await prerender(response, clientModules, {
        filterStackFrame,
        signal: controller.signal,
        onError: onSegmentPrerenderError
    });
    const responseBuffer = await streamToBuffer(prelude);
    return [
        '/' + PAGE_SEGMENT_KEY,
        responseBuffer
    ];
}
// Recursively convert a route subtree plus its seed data into the nested
// { segment, slots } structure used by the inlined prefetch response.
async function buildInlinedSegmentPrefetch(route, buildId, staleTime, seedData, clientModules) {
    const childRoutes = route[1];
    const childSeedDatas = seedData !== null ? seedData[1] : null;
    let slots = null;
    for(const parallelRouteKey in childRoutes){
        const childSeed = childSeedDatas !== null ? childSeedDatas[parallelRouteKey] : null;
        const childNode = await buildInlinedSegmentPrefetch(childRoutes[parallelRouteKey], buildId, staleTime, childSeed, clientModules);
        if (slots === null) {
            slots = {};
        }
        slots[parallelRouteKey] = childNode;
    }
    // A segment without seed data is encoded with a null rsc and treated
    // as partial.
    const rsc = seedData !== null ? seedData[0] : null;
    const varyThenable = seedData !== null ? seedData[4] : null;
    const varyParams = varyThenable !== null ? readVaryParams(varyThenable) : null;
    const segment = {
        rsc,
        isPartial: rsc !== null ? await isPartialRSCData(rsc, clientModules) : true,
        staleTime,
        varyParams
    };
    if (buildId) {
        segment.buildId = buildId;
    }
    return {
        segment,
        slots
    };
}
// Determine whether a segment's RSC data contains dynamic holes, by timing
// how long a throwaway Flight prerender of it takes (see below).
async function isPartialRSCData(rsc, clientModules) {
    // We can determine if a segment contains only partial data if it takes longer
    // than a task to encode, because dynamic data is encoded as an infinite
    // promise. We must do this in a separate Flight prerender from the one that
    // actually generates the prefetch stream because we need to include
    // `isPartial` in the stream itself.
    let isPartial = false;
    const abortController = new AbortController();
    waitAtLeastOneReactRenderTask().then(()=>{
        // If we haven't yet finished the outer task, then it must be because we
        // accessed dynamic data.
        isPartial = true;
        abortController.abort();
    });
    // Errors are deliberately ignored (onError no-op): this render exists only
    // for its timing, not for its output.
    await prerender(rsc, clientModules, {
        filterStackFrame,
        signal: abortController.signal,
        onError () {}
    });
    return isPartial;
}
// Wrap a Flight stream in one that forwards every chunk but never closes.
//
// When PPR is enabled, prefetch streams may contain references that never
// resolve — that's how dynamic data access is encoded. The Flight client
// reifies these into hanging promises that suspend during render, which is
// the desired behavior; the UI resolves once it switches to the dynamic
// stream (via useDeferredValue(dynamic, static)). However, Flight currently
// errors if the server response closes while references are still
// unresolved. Leaving the wrapper stream open works around that.
function createUnclosingPrefetchStream(originalFlightStream) {
    const sourceReader = originalFlightStream.getReader();
    return new ReadableStream({
        async pull (controller) {
            for(;;){
                const result = await sourceReader.read();
                if (result.done) {
                    // The source has closed. Exit, but intentionally leave
                    // the target stream open so it never errors.
                    return;
                }
                // Forward the chunk and keep draining the source.
                controller.enqueue(result.value);
            }
        }
    });
}
//# sourceMappingURL=collect-segment-data.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,4 @@
// Module-level singleton AsyncLocalStorage for the console rendering context
// (the embedded TS source documents a ConsoleStore with a `dim` flag used to
// dim log output from repeat/validation renders).
import { createAsyncLocalStorage } from './async-local-storage';
export const consoleAsyncStorageInstance = createAsyncLocalStorage();
//# sourceMappingURL=console-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/console-async-storage-instance.ts"],"sourcesContent":["import { createAsyncLocalStorage } from './async-local-storage'\nimport type { ConsoleAsyncStorage } from './console-async-storage.external'\n\nexport const consoleAsyncStorageInstance: ConsoleAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["createAsyncLocalStorage","consoleAsyncStorageInstance"],"mappings":"AAAA,SAASA,uBAAuB,QAAQ,wBAAuB;AAG/D,OAAO,MAAMC,8BACXD,0BAAyB","ignoreList":[0]}

View File

@@ -0,0 +1,7 @@
// Share the instance module in the next-shared layer. The import attribute
// instructs Turbopack to place the instance module in the 'next-shared'
// layer — presumably so every bundle sees the same AsyncLocalStorage
// instance; confirm against the Turbopack transition docs.
import { consoleAsyncStorageInstance } from './console-async-storage-instance' with {
    'turbopack-transition': 'next-shared'
};
export { consoleAsyncStorageInstance as consoleAsyncStorage };
//# sourceMappingURL=console-async-storage.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/console-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { consoleAsyncStorageInstance } from './console-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\n\nexport interface ConsoleStore {\n /**\n * if true the color of logs output will be dimmed to indicate the log is\n * from a repeat or validation render that is not typically relevant to\n * the primary action the server is taking.\n */\n readonly dim: boolean\n}\n\nexport type ConsoleAsyncStorage = AsyncLocalStorage<ConsoleStore>\n\nexport { consoleAsyncStorageInstance as consoleAsyncStorage }\n"],"names":["consoleAsyncStorageInstance","consoleAsyncStorage"],"mappings":"AAEA,qDAAqD;AACrD,SAASA,2BAA2B,QAAQ,wCAAwC;IAAE,wBAAwB;AAAc,EAAC;AAa7H,SAASA,+BAA+BC,mBAAmB,GAAE","ignoreList":[0]}

View File

@@ -0,0 +1,23 @@
import { interopDefault } from './interop-default';
import { getLinkAndScriptTags } from './get-css-inlined-link-tags';
import { getAssetQueryString } from './get-asset-query-string';
import { encodeURIPath } from '../../shared/lib/encode-uri-path';
import { renderCssResource } from './render-css-resource';
// Load a layout/page component along with the style and script elements for
// its CSS/JS assets. Returns [Component, styles, scripts].
export async function createComponentStylesAndScripts({ filePath, getComponent, injectedCSS, injectedJS, ctx }) {
    const { componentMod: { createElement } } = ctx;
    // Resolve which CSS/JS assets belong to this file, excluding anything
    // already injected higher in the tree.
    const { styles: entryCssFiles, scripts: jsHrefs } = getLinkAndScriptTags(filePath, injectedCSS, injectedJS);
    const styles = renderCssResource(entryCssFiles, ctx);
    // Turn each JS href into an async <script> element.
    let scripts = null;
    if (jsHrefs) {
        scripts = jsHrefs.map((href, index)=>{
            const src = `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`;
            return createElement('script', {
                src,
                async: true,
                key: `script-${index}`
            });
        });
    }
    // The module may be a CJS default export, so unwrap it.
    const Comp = interopDefault(await getComponent());
    return [
        Comp,
        styles,
        scripts
    ];
}
//# sourceMappingURL=create-component-styles-and-scripts.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/create-component-styles-and-scripts.tsx"],"sourcesContent":["import { interopDefault } from './interop-default'\nimport { getLinkAndScriptTags } from './get-css-inlined-link-tags'\nimport type { AppRenderContext } from './app-render'\nimport { getAssetQueryString } from './get-asset-query-string'\nimport { encodeURIPath } from '../../shared/lib/encode-uri-path'\nimport { renderCssResource } from './render-css-resource'\n\nexport async function createComponentStylesAndScripts({\n filePath,\n getComponent,\n injectedCSS,\n injectedJS,\n ctx,\n}: {\n filePath: string\n getComponent: () => any\n injectedCSS: Set<string>\n injectedJS: Set<string>\n ctx: AppRenderContext\n}): Promise<[React.ComponentType<any>, React.ReactNode, React.ReactNode]> {\n const {\n componentMod: { createElement },\n } = ctx\n const { styles: entryCssFiles, scripts: jsHrefs } = getLinkAndScriptTags(\n filePath,\n injectedCSS,\n injectedJS\n )\n\n const styles = renderCssResource(entryCssFiles, ctx)\n\n const scripts = jsHrefs\n ? 
jsHrefs.map((href, index) =>\n createElement('script', {\n src: `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`,\n async: true,\n key: `script-${index}`,\n })\n )\n : null\n\n const Comp = interopDefault(await getComponent())\n\n return [Comp, styles, scripts]\n}\n"],"names":["interopDefault","getLinkAndScriptTags","getAssetQueryString","encodeURIPath","renderCssResource","createComponentStylesAndScripts","filePath","getComponent","injectedCSS","injectedJS","ctx","componentMod","createElement","styles","entryCssFiles","scripts","jsHrefs","map","href","index","src","assetPrefix","async","key","Comp"],"mappings":"AAAA,SAASA,cAAc,QAAQ,oBAAmB;AAClD,SAASC,oBAAoB,QAAQ,8BAA6B;AAElE,SAASC,mBAAmB,QAAQ,2BAA0B;AAC9D,SAASC,aAAa,QAAQ,mCAAkC;AAChE,SAASC,iBAAiB,QAAQ,wBAAuB;AAEzD,OAAO,eAAeC,gCAAgC,EACpDC,QAAQ,EACRC,YAAY,EACZC,WAAW,EACXC,UAAU,EACVC,GAAG,EAOJ;IACC,MAAM,EACJC,cAAc,EAAEC,aAAa,EAAE,EAChC,GAAGF;IACJ,MAAM,EAAEG,QAAQC,aAAa,EAAEC,SAASC,OAAO,EAAE,GAAGf,qBAClDK,UACAE,aACAC;IAGF,MAAMI,SAAST,kBAAkBU,eAAeJ;IAEhD,MAAMK,UAAUC,UACZA,QAAQC,GAAG,CAAC,CAACC,MAAMC,QACjBP,cAAc,UAAU;YACtBQ,KAAK,GAAGV,IAAIW,WAAW,CAAC,OAAO,EAAElB,cAAce,QAAQhB,oBAAoBQ,KAAK,OAAO;YACvFY,OAAO;YACPC,KAAK,CAAC,OAAO,EAAEJ,OAAO;QACxB,MAEF;IAEJ,MAAMK,OAAOxB,eAAe,MAAMO;IAElC,OAAO;QAACiB;QAAMX;QAAQE;KAAQ;AAChC","ignoreList":[0]}

View File

@@ -0,0 +1,821 @@
import { isClientReference, isUseCacheFunction } from '../../lib/client-and-server-references';
import { getLayoutOrPageModule } from '../lib/app-dir-module';
import { interopDefault } from './interop-default';
import { parseLoaderTree } from '../../shared/lib/router/utils/parse-loader-tree';
import { createComponentStylesAndScripts } from './create-component-styles-and-scripts';
import { getLayerAssets } from './get-layer-assets';
import { hasLoadingComponentInTree } from './has-loading-component-in-tree';
import { validateRevalidate } from '../lib/patch-fetch';
import { PARALLEL_ROUTE_DEFAULT_PATH } from '../../client/components/builtin/default';
import { getTracer } from '../lib/trace/tracer';
import { NextNodeServerSpan } from '../lib/trace/constants';
import { StaticGenBailoutError } from '../../client/components/static-generation-bailout';
import { workUnitAsyncStorage } from './work-unit-async-storage.external';
import { createVaryParamsAccumulator, emptyVaryParamsAccumulator, getVaryParamsThenable } from './vary-params';
import { DEFAULT_SEGMENT_KEY } from '../../shared/lib/segment';
import { BOUNDARY_PREFIX, BOUNDARY_SUFFIX, BUILTIN_PREFIX, getConventionPathByType, isNextjsBuiltinFilePath } from './segment-explorer-path';
import { RenderStage } from './staged-rendering';
/**
* Use the provided loader tree to create the React Component tree.
*/ // TODO convert these arguments to non-object form. the entrypoint doesn't need most of them
// Entry point: build the React component tree from the loader tree, wrapped
// in a tracing span for observability.
export function createComponentTree(props) {
    const buildTree = ()=>createComponentTreeInternal(props, true);
    return getTracer().trace(NextNodeServerSpan.createComponentTree, {
        spanName: 'build component tree'
    }, buildTree);
}
// Throw a well-known error for a layout/page file whose default export is
// not a React component. The "__NEXT_ERROR_CODE" property is attached
// non-enumerably so it is not picked up by serializers.
function errorMissingDefaultExport(pagePath, convention) {
    const pathPrefix = pagePath === '/' ? '' : pagePath;
    const err = new Error(`The default export is not a React Component in "${pathPrefix}/${convention}"`);
    Object.defineProperty(err, "__NEXT_ERROR_CODE", {
        value: "E45",
        enumerable: false,
        configurable: true
    });
    throw err;
}
// NOTE(review): single-character property key; its use site is inside
// createComponentTreeInternal (beyond this view) — presumably keys the cache
// node slot on a segment element. Confirm before renaming or reusing.
const cacheNodeKey = 'c';
/**
 * Recursively converts a loader tree into a CacheNodeSeedData tuple:
 * `[rsc, parallelRoutes, loading, isPossiblyPartial, varyParams]`
 * (see `createSeedData` for the exact shape).
 *
 * For each segment it: resolves convention modules (layout/page/template/
 * error/loading/not-found/forbidden/unauthorized) and their styles/scripts,
 * applies the segment's route config (`dynamic`, `fetchCache`, `revalidate`,
 * `unstable_dynamicStaleTime`) by mutating the work store / work unit store,
 * recurses into all parallel routes, and finally renders the segment's
 * component (client or server, page or layout) wrapped in the appropriate
 * boundaries and dev-only SegmentViewNode wrappers.
 *
 * NOTE(review): this function mutates shared stores while traversing, so the
 * order of the config sections below is significant — do not reorder.
 */ async function createComponentTreeInternal({ loaderTree: tree, parentParams, parentOptionalCatchAllParamName, parentRuntimePrefetchable, rootLayoutIncluded, injectedCSS, injectedJS, injectedFontPreloadTags, ctx, missingSlots, preloadCallbacks, authInterrupts, MetadataOutlet }, isRoot) {
    const { renderOpts: { nextConfigOutput, experimental, cacheComponents }, workStore, componentMod: { createElement, Fragment, SegmentViewNode, HTTPAccessFallbackBoundary, LayoutRouter, RenderFromTemplateContext, ClientPageRoot, ClientSegmentRoot, createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage, createServerParamsForServerSegment, createPrerenderParamsForClientSegment, serverHooks: { DynamicServerError }, Postpone }, pagePath, getDynamicParamFromSegment, isPrefetch, query } = ctx;
    const { page, conventionPath, segment, modules, parallelRoutes } = parseLoaderTree(tree);
    const { layout, template, error, loading, 'not-found': notFound, forbidden, unauthorized } = modules;
    // Copy the injected-asset sets so additions at this level don't leak into
    // sibling parallel routes.
    const injectedCSSWithCurrentLayout = new Set(injectedCSS);
    const injectedJSWithCurrentLayout = new Set(injectedJS);
    const injectedFontPreloadTagsWithCurrentLayout = new Set(injectedFontPreloadTags);
    const layerAssets = getLayerAssets({
        preloadCallbacks,
        ctx,
        layoutOrPagePath: conventionPath,
        injectedCSS: injectedCSSWithCurrentLayout,
        injectedJS: injectedJSWithCurrentLayout,
        injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout
    });
    // Resolve each convention module into [Component, styles, scripts];
    // template falls back to Fragment so it can always be rendered.
    const [Template, templateStyles, templateScripts] = template ? await createComponentStylesAndScripts({
        ctx,
        filePath: template[1],
        getComponent: template[0],
        injectedCSS: injectedCSSWithCurrentLayout,
        injectedJS: injectedJSWithCurrentLayout
    }) : [
        Fragment
    ];
    const [ErrorComponent, errorStyles, errorScripts] = error ? await createComponentStylesAndScripts({
        ctx,
        filePath: error[1],
        getComponent: error[0],
        injectedCSS: injectedCSSWithCurrentLayout,
        injectedJS: injectedJSWithCurrentLayout
    }) : [];
    const [Loading, loadingStyles, loadingScripts] = loading ? await createComponentStylesAndScripts({
        ctx,
        filePath: loading[1],
        getComponent: loading[0],
        injectedCSS: injectedCSSWithCurrentLayout,
        injectedJS: injectedJSWithCurrentLayout
    }) : [];
    const isLayout = typeof layout !== 'undefined';
    const isPage = typeof page !== 'undefined';
    const { mod: layoutOrPageMod, modType } = await getTracer().trace(NextNodeServerSpan.getLayoutOrPageModule, {
        hideSpan: !(isLayout || isPage),
        spanName: 'resolve segment modules',
        attributes: {
            'next.segment': segment
        }
    }, ()=>getLayoutOrPageModule(tree));
    /**
   * Checks if the current segment is a root layout.
   */ const rootLayoutAtThisLevel = isLayout && !rootLayoutIncluded;
    /**
   * Checks if the current segment or any level above it has a root layout.
   */ const rootLayoutIncludedAtThisLevelOrAbove = rootLayoutIncluded || rootLayoutAtThisLevel;
    const [NotFound, notFoundStyles] = notFound ? await createComponentStylesAndScripts({
        ctx,
        filePath: notFound[1],
        getComponent: notFound[0],
        injectedCSS: injectedCSSWithCurrentLayout,
        injectedJS: injectedJSWithCurrentLayout
    }) : [];
    // `unstable_instant.prefetch === 'runtime'` marks this segment — and,
    // via `parentRuntimePrefetchable`, its entire subtree — as
    // runtime-prefetchable.
    const instantConfig = layoutOrPageMod ? layoutOrPageMod.unstable_instant : undefined;
    const hasRuntimePrefetch = instantConfig && typeof instantConfig === 'object' ? instantConfig.prefetch === 'runtime' : false;
    const isRuntimePrefetchable = hasRuntimePrefetch || parentRuntimePrefetchable;
    // forbidden/unauthorized boundaries only exist behind the authInterrupts flag.
    const [Forbidden, forbiddenStyles] = authInterrupts && forbidden ? await createComponentStylesAndScripts({
        ctx,
        filePath: forbidden[1],
        getComponent: forbidden[0],
        injectedCSS: injectedCSSWithCurrentLayout,
        injectedJS: injectedJSWithCurrentLayout
    }) : [];
    const [Unauthorized, unauthorizedStyles] = authInterrupts && unauthorized ? await createComponentStylesAndScripts({
        ctx,
        filePath: unauthorized[1],
        getComponent: unauthorized[0],
        injectedCSS: injectedCSSWithCurrentLayout,
        injectedJS: injectedJSWithCurrentLayout
    }) : [];
    // Apply the segment's `dynamic` route config. `output: "export"` forces
    // static rendering and is incompatible with force-dynamic.
    let dynamic = layoutOrPageMod == null ? void 0 : layoutOrPageMod.dynamic;
    if (nextConfigOutput === 'export') {
        if (!dynamic || dynamic === 'auto') {
            dynamic = 'error';
        } else if (dynamic === 'force-dynamic') {
            // force-dynamic is always incompatible with 'export'. We must interrupt the build
            throw Object.defineProperty(new StaticGenBailoutError(`Page with \`dynamic = "force-dynamic"\` couldn't be exported. \`output: "export"\` requires all pages be renderable statically because there is no runtime server to dynamically render routes in this output format. Learn more: https://nextjs.org/docs/app/building-your-application/deploying/static-exports`), "__NEXT_ERROR_CODE", {
                value: "E527",
                enumerable: false,
                configurable: true
            });
        }
    }
    if (typeof dynamic === 'string') {
        // the nested most config wins so we only force-static
        // if it's configured above any parent that configured
        // otherwise
        if (dynamic === 'error') {
            workStore.dynamicShouldError = true;
        } else if (dynamic === 'force-dynamic') {
            workStore.forceDynamic = true;
            // TODO: (PPR) remove this bailout once PPR is the default
            if (workStore.isStaticGeneration && !experimental.isRoutePPREnabled) {
                // If the postpone API isn't available, we can't postpone the render and
                // therefore we can't use the dynamic API.
                const err = Object.defineProperty(new DynamicServerError(`Page with \`dynamic = "force-dynamic"\` won't be rendered statically.`), "__NEXT_ERROR_CODE", {
                    value: "E585",
                    enumerable: false,
                    configurable: true
                });
                workStore.dynamicUsageDescription = err.message;
                workStore.dynamicUsageStack = err.stack;
                throw err;
            }
        } else {
            workStore.dynamicShouldError = false;
            workStore.forceStatic = dynamic === 'force-static';
        }
    }
    if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache) === 'string') {
        workStore.fetchCache = layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache;
    }
    if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) !== 'undefined') {
        validateRevalidate(layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate, workStore.route);
    }
    if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) === 'number') {
        const defaultRevalidate = layoutOrPageMod.revalidate;
        const workUnitStore = workUnitAsyncStorage.getStore();
        if (workUnitStore) {
            switch(workUnitStore.type){
                case 'prerender':
                case 'prerender-runtime':
                case 'prerender-legacy':
                case 'prerender-ppr':
                    // Clamp the work unit's revalidate to the smallest value
                    // configured across the visited segments.
                    if (workUnitStore.revalidate > defaultRevalidate) {
                        workUnitStore.revalidate = defaultRevalidate;
                    }
                    break;
                case 'request':
                    break;
                // createComponentTree is not called for these stores:
                case 'cache':
                case 'private-cache':
                case 'prerender-client':
                case 'validation-client':
                case 'unstable-cache':
                case 'generate-static-params':
                    break;
                default:
                    workUnitStore;
            }
        }
        if (!workStore.forceStatic && workStore.isStaticGeneration && defaultRevalidate === 0 && // If the postpone API isn't available, we can't postpone the render and
        // therefore we can't use the dynamic API.
        !experimental.isRoutePPREnabled) {
            const dynamicUsageDescription = `revalidate: 0 configured ${segment}`;
            workStore.dynamicUsageDescription = dynamicUsageDescription;
            throw Object.defineProperty(new DynamicServerError(dynamicUsageDescription), "__NEXT_ERROR_CODE", {
                value: "E1005",
                enumerable: false,
                configurable: true
            });
        }
    }
    // Read unstable_dynamicStaleTime from page modules (not layouts) and track it on
    // the store's stale field. This affects the segment cache stale time via
    // the StaleTimeIterable.
    if (isPage && typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.unstable_dynamicStaleTime) === 'number') {
        const pageStaleTime = layoutOrPageMod.unstable_dynamicStaleTime;
        const workUnitStore = workUnitAsyncStorage.getStore();
        if (workUnitStore) {
            switch(workUnitStore.type){
                case 'prerender':
                case 'prerender-runtime':
                case 'prerender-legacy':
                case 'prerender-ppr':
                    if (workUnitStore.stale > pageStaleTime) {
                        workUnitStore.stale = pageStaleTime;
                    }
                    break;
                case 'request':
                    if (workUnitStore.stale === undefined || workUnitStore.stale > pageStaleTime) {
                        workUnitStore.stale = pageStaleTime;
                    }
                    break;
                // createComponentTree is not called for these stores:
                case 'cache':
                case 'private-cache':
                case 'prerender-client':
                case 'validation-client':
                case 'unstable-cache':
                case 'generate-static-params':
                    break;
                default:
                    workUnitStore;
            }
        }
    }
    const isStaticGeneration = workStore.isStaticGeneration;
    // Assume the segment we're rendering contains only partial data if PPR is
    // enabled and this is a statically generated response. This is used by the
    // client Segment Cache after a prefetch to determine if it can skip the
    // second request to fill in the dynamic data.
    //
    // It's OK for this to be `true` when the data is actually fully static, but
    // it's not OK for this to be `false` when the data possibly contains holes.
    // Although the value here is overly pessimistic, for prefetches, it will be
    // replaced by a more specific value when the data is later processed into
    // per-segment responses (see collect-segment-data.tsx)
    //
    // For dynamic requests, this must always be `false` because dynamic responses
    // are never partial.
    const isPossiblyPartialResponse = isStaticGeneration && experimental.isRoutePPREnabled === true;
    const LayoutOrPage = layoutOrPageMod ? interopDefault(layoutOrPageMod) : undefined;
    /**
   * The React Component to render.
   */ let MaybeComponent = LayoutOrPage;
    // Validate that every resolved convention module default-exports a real
    // React component (dev and static generation only).
    if (process.env.NODE_ENV === 'development' || isStaticGeneration) {
        const { isValidElementType } = require('next/dist/compiled/react-is');
        if (typeof MaybeComponent !== 'undefined' && !isValidElementType(MaybeComponent)) {
            errorMissingDefaultExport(pagePath, modType ?? 'page');
        }
        if (typeof ErrorComponent !== 'undefined' && !isValidElementType(ErrorComponent)) {
            errorMissingDefaultExport(pagePath, 'error');
        }
        if (typeof Loading !== 'undefined' && !isValidElementType(Loading)) {
            errorMissingDefaultExport(pagePath, 'loading');
        }
        if (typeof NotFound !== 'undefined' && !isValidElementType(NotFound)) {
            errorMissingDefaultExport(pagePath, 'not-found');
        }
        if (typeof Forbidden !== 'undefined' && !isValidElementType(Forbidden)) {
            errorMissingDefaultExport(pagePath, 'forbidden');
        }
        if (typeof Unauthorized !== 'undefined' && !isValidElementType(Unauthorized)) {
            errorMissingDefaultExport(pagePath, 'unauthorized');
        }
    }
    // Handle dynamic segment params.
    const segmentParam = getDynamicParamFromSegment(tree);
    // Create object holding the parent params and current params
    let currentParams = parentParams;
    if (segmentParam && segmentParam.value !== null) {
        currentParams = {
            ...parentParams,
            [segmentParam.param]: segmentParam.value
        };
    }
    // Track optional catch-all params with no value (e.g., [[...slug]] at /).
    // These params won't exist as properties on the params object, so vary
    // params tracking needs to use a Proxy to detect access. We propagate this
    // through the tree so that child segments (like __PAGE__) also know about
    // the missing param. In practice, this only gets passed down one level —
    // from the optional catch-all layout segment to the page segment — so it's
    // always very close to the leaf of the tree.
    const optionalCatchAllParamName = (segmentParam == null ? void 0 : segmentParam.type) === 'oc' && segmentParam.value === null ? segmentParam.param : parentOptionalCatchAllParamName;
    // Resolve the segment param
    const isSegmentViewEnabled = !!process.env.__NEXT_DEV_SERVER;
    const dir = (process.env.NEXT_RUNTIME === 'edge' ? process.env.__NEXT_EDGE_PROJECT_DIR : ctx.renderOpts.dir) || '';
    const [notFoundElement, notFoundFilePath] = await createBoundaryConventionElement({
        ctx,
        conventionName: 'not-found',
        Component: NotFound,
        styles: notFoundStyles,
        tree
    });
    const [forbiddenElement] = await createBoundaryConventionElement({
        ctx,
        conventionName: 'forbidden',
        Component: Forbidden,
        styles: forbiddenStyles,
        tree
    });
    const [unauthorizedElement] = await createBoundaryConventionElement({
        ctx,
        conventionName: 'unauthorized',
        Component: Unauthorized,
        styles: unauthorizedStyles,
        tree
    });
    // TODO: Combine this `map` traversal with the loop below that turns the array
    // into an object.
    const parallelRouteMap = await Promise.all(Object.keys(parallelRoutes).map(async (parallelRouteKey)=>{
        const isChildrenRouteKey = parallelRouteKey === 'children';
        const parallelRoute = parallelRoutes[parallelRouteKey];
        // HTTP access fallbacks are only attached to the `children` slot.
        const notFoundComponent = isChildrenRouteKey ? notFoundElement : undefined;
        const forbiddenComponent = isChildrenRouteKey ? forbiddenElement : undefined;
        const unauthorizedComponent = isChildrenRouteKey ? unauthorizedElement : undefined;
        // if we're prefetching and that there's a Loading component, we bail out
        // otherwise we keep rendering for the prefetch.
        // We also want to bail out if there's no Loading component in the tree.
        let childCacheNodeSeedData = null;
        if (// Before PPR, the way instant navigations work in Next.js is we
        // prefetch everything up to the first route segment that defines a
        // loading.tsx boundary. (We do the same if there's no loading
        // boundary in the entire tree, because we don't want to prefetch too
        // much) The rest of the tree is deferred until the actual navigation.
        // It does not take into account whether the data is dynamic — even if
        // the tree is completely static, it will still defer everything
        // inside the loading boundary.
        //
        // This behavior predates PPR and is only relevant if the
        // PPR flag is not enabled.
        isPrefetch && (Loading || !hasLoadingComponentInTree(parallelRoute)) && // The approach with PPR is different — loading.tsx behaves like a
        // regular Suspense boundary and has no special behavior.
        //
        // With PPR, we prefetch as deeply as possible, and only defer when
        // dynamic data is accessed. If so, we only defer the nearest parent
        // Suspense boundary of the dynamic data access, regardless of whether
        // the boundary is defined by loading.tsx or a normal <Suspense>
        // component in userspace.
        //
        // NOTE: In practice this usually means we'll end up prefetching more
        // than we were before PPR, which may or may not be considered a
        // performance regression by some apps. The plan is to address this
        // before General Availability of PPR by introducing granular
        // per-segment fetching, so we can reuse as much of the tree as
        // possible during both prefetches and dynamic navigations. But during
        // the beta period, we should be clear about this trade off in our
        // communications.
        !experimental.isRoutePPREnabled) {
        // Don't prefetch this child. This will trigger a lazy fetch by the
        // client router.
        } else {
            // Create the child component
            if (process.env.NODE_ENV === 'development' && missingSlots) {
                var _parsedTree_conventionPath;
                // When we detect the default fallback (which triggers a 404), we collect the missing slots
                // to provide more helpful debug information during development mode.
                const parsedTree = parseLoaderTree(parallelRoute);
                if ((_parsedTree_conventionPath = parsedTree.conventionPath) == null ? void 0 : _parsedTree_conventionPath.endsWith(PARALLEL_ROUTE_DEFAULT_PATH)) {
                    missingSlots.add(parallelRouteKey);
                }
            }
            const seedData = await createComponentTreeInternal({
                loaderTree: parallelRoute,
                parentParams: currentParams,
                parentOptionalCatchAllParamName: optionalCatchAllParamName,
                parentRuntimePrefetchable: isRuntimePrefetchable,
                rootLayoutIncluded: rootLayoutIncludedAtThisLevelOrAbove,
                injectedCSS: injectedCSSWithCurrentLayout,
                injectedJS: injectedJSWithCurrentLayout,
                injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout,
                ctx,
                missingSlots,
                preloadCallbacks,
                authInterrupts,
                // `StreamingMetadataOutlet` is used to conditionally throw. In the case of parallel routes we will have more than one page
                // but we only want to throw on the first one.
                MetadataOutlet: isChildrenRouteKey ? MetadataOutlet : null
            }, false);
            childCacheNodeSeedData = seedData;
        }
        const templateNode = createElement(Template, null, createElement(RenderFromTemplateContext, null));
        const templateFilePath = getConventionPathByType(tree, dir, 'template');
        const errorFilePath = getConventionPathByType(tree, dir, 'error');
        const loadingFilePath = getConventionPathByType(tree, dir, 'loading');
        const globalErrorFilePath = isRoot ? getConventionPathByType(tree, dir, 'global-error') : undefined;
        const wrappedErrorStyles = isSegmentViewEnabled && errorFilePath ? createElement(SegmentViewNode, {
            type: 'error',
            pagePath: errorFilePath
        }, errorStyles) : errorStyles;
        // Add a suffix to avoid conflict with the segment view node representing rendered file.
        // existence: not-found.tsx@boundary
        // rendered: not-found.tsx
        const fileNameSuffix = BOUNDARY_SUFFIX;
        const segmentViewBoundaries = isSegmentViewEnabled ? createElement(Fragment, null, notFoundFilePath && createElement(SegmentViewNode, {
            type: `${BOUNDARY_PREFIX}not-found`,
            pagePath: notFoundFilePath + fileNameSuffix
        }), loadingFilePath && createElement(SegmentViewNode, {
            type: `${BOUNDARY_PREFIX}loading`,
            pagePath: loadingFilePath + fileNameSuffix
        }), errorFilePath && createElement(SegmentViewNode, {
            type: `${BOUNDARY_PREFIX}error`,
            pagePath: errorFilePath + fileNameSuffix
        }), globalErrorFilePath && createElement(SegmentViewNode, {
            type: `${BOUNDARY_PREFIX}global-error`,
            pagePath: isNextjsBuiltinFilePath(globalErrorFilePath) ? `${BUILTIN_PREFIX}global-error.js${fileNameSuffix}` : globalErrorFilePath
        })) : null;
        return [
            parallelRouteKey,
            createElement(LayoutRouter, {
                parallelRouterKey: parallelRouteKey,
                error: ErrorComponent,
                errorStyles: wrappedErrorStyles,
                errorScripts: errorScripts,
                template: isSegmentViewEnabled && templateFilePath ? createElement(SegmentViewNode, {
                    type: 'template',
                    pagePath: templateFilePath
                }, templateNode) : templateNode,
                templateStyles: templateStyles,
                templateScripts: templateScripts,
                notFound: notFoundComponent,
                forbidden: forbiddenComponent,
                unauthorized: unauthorizedComponent,
                ...isSegmentViewEnabled && {
                    segmentViewBoundaries
                }
            }),
            childCacheNodeSeedData
        ];
    }));
    // Convert the parallel route map into an object after all promises have been resolved.
    let parallelRouteProps = {};
    let parallelRouteCacheNodeSeedData = {};
    for (const parallelRoute of parallelRouteMap){
        const [parallelRouteKey, parallelRouteProp, flightData] = parallelRoute;
        parallelRouteProps[parallelRouteKey] = parallelRouteProp;
        parallelRouteCacheNodeSeedData[parallelRouteKey] = flightData;
    }
    let loadingElement = Loading ? createElement(Loading, {
        key: 'l'
    }) : null;
    const loadingFilePath = getConventionPathByType(tree, dir, 'loading');
    if (isSegmentViewEnabled && loadingElement) {
        if (loadingFilePath) {
            loadingElement = createElement(SegmentViewNode, {
                key: cacheNodeKey + '-loading',
                type: 'loading',
                pagePath: loadingFilePath
            }, loadingElement);
        }
    }
    const loadingData = loadingElement ? [
        loadingElement,
        loadingStyles,
        loadingScripts
    ] : null;
    // When the segment does not have a layout or page we still have to add the layout router to ensure the path holds the loading component
    if (!MaybeComponent) {
        return createSeedData(ctx, createElement(Fragment, {
            key: cacheNodeKey
        }, layerAssets, parallelRouteProps.children), parallelRouteCacheNodeSeedData, loadingData, isPossiblyPartialResponse, isRuntimePrefetchable, // No user-provided component, so no params will be accessed. Use the
        // pre-resolved empty tracker.
        emptyVaryParamsAccumulator);
    }
    const Component = MaybeComponent;
    // If force-dynamic is used and the current render supports postponing, we
    // replace it with a node that will postpone the render. This ensures that the
    // postpone is invoked during the react render phase and not during the next
    // render phase.
    // @TODO this does not actually do what it seems like it would or should do. The idea is that
    // if we are rendering in a force-dynamic mode and we can postpone we should only make the segments
    // that ask for force-dynamic to be dynamic, allowing other segments to still prerender. However
    // because this comes after the children traversal and the static generation store is mutated every segment
    // along the parent path of a force-dynamic segment will hit this condition effectively making the entire
    // render force-dynamic. We should refactor this function so that we can correctly track which segments
    // need to be dynamic
    if (workStore.isStaticGeneration && workStore.forceDynamic && experimental.isRoutePPREnabled) {
        return createSeedData(ctx, createElement(Fragment, {
            key: cacheNodeKey
        }, createElement(Postpone, {
            reason: 'dynamic = "force-dynamic" was used',
            route: workStore.route
        }), layerAssets), parallelRouteCacheNodeSeedData, loadingData, true, isRuntimePrefetchable, // force-dynamic postpones without rendering the component, so no params
        // are accessed. The vary params are empty.
        emptyVaryParamsAccumulator);
    }
    const isClientComponent = isClientReference(layoutOrPageMod);
    const varyParamsAccumulator = isClientComponent && cacheComponents ? // from the server, so they have an empty vary params set.
    emptyVaryParamsAccumulator : createVaryParamsAccumulator();
    if (process.env.NODE_ENV === 'development' && 'params' in parallelRouteProps) {
        // @TODO consider making this an error and running the check in build as well
        console.error(`"params" is a reserved prop in Layouts and Pages and cannot be used as the name of a parallel route in ${segment}`);
    }
    // Page segment: render the page component with params/searchParams.
    if (isPage) {
        const PageComponent = Component;
        // Assign searchParams to props if this is a page
        let pageElement;
        if (isClientComponent) {
            if (cacheComponents) {
                // Params are omitted when Cache Components is enabled
                pageElement = createElement(ClientPageRoot, {
                    Component: PageComponent,
                    serverProvidedParams: null
                });
            } else if (isStaticGeneration) {
                const promiseOfParams = createPrerenderParamsForClientSegment(currentParams);
                const promiseOfSearchParams = createPrerenderSearchParamsForClientPage();
                pageElement = createElement(ClientPageRoot, {
                    Component: PageComponent,
                    serverProvidedParams: {
                        searchParams: query,
                        params: currentParams,
                        promises: [
                            promiseOfSearchParams,
                            promiseOfParams
                        ]
                    }
                });
            } else {
                pageElement = createElement(ClientPageRoot, {
                    Component: PageComponent,
                    serverProvidedParams: {
                        searchParams: query,
                        params: currentParams,
                        promises: null
                    }
                });
            }
        } else {
            // If we are passing params to a server component Page we need to track
            // their usage in case the current render mode tracks dynamic API usage.
            const params = createServerParamsForServerSegment(currentParams, optionalCatchAllParamName, varyParamsAccumulator, isRuntimePrefetchable);
            // If we are passing searchParams to a server component Page we need to
            // track their usage in case the current render mode tracks dynamic API
            // usage.
            let searchParams = createServerSearchParamsForServerPage(query, varyParamsAccumulator, isRuntimePrefetchable);
            if (isUseCacheFunction(PageComponent)) {
                const UseCachePageComponent = PageComponent;
                pageElement = createElement(UseCachePageComponent, {
                    params: params,
                    searchParams: searchParams,
                    $$isPage: true
                });
            } else {
                pageElement = createElement(PageComponent, {
                    params: params,
                    searchParams: searchParams
                });
            }
        }
        const isDefaultSegment = segment === DEFAULT_SEGMENT_KEY;
        const pageFilePath = getConventionPathByType(tree, dir, 'page') ?? getConventionPathByType(tree, dir, 'defaultPage');
        const segmentType = isDefaultSegment ? 'default' : 'page';
        const wrappedPageElement = isSegmentViewEnabled && pageFilePath ? createElement(SegmentViewNode, {
            key: cacheNodeKey + '-' + segmentType,
            type: segmentType,
            pagePath: pageFilePath
        }, pageElement) : pageElement;
        return createSeedData(ctx, createElement(Fragment, {
            key: cacheNodeKey
        }, wrappedPageElement, layerAssets, MetadataOutlet ? createElement(MetadataOutlet, null) : null), parallelRouteCacheNodeSeedData, loadingData, isPossiblyPartialResponse, isRuntimePrefetchable, varyParamsAccumulator);
    } else {
        // Layout segment: render the layout with its parallel route slots.
        const SegmentComponent = Component;
        const isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot = rootLayoutAtThisLevel && 'children' in parallelRoutes && Object.keys(parallelRoutes).length > 1;
        let segmentNode;
        if (isClientComponent) {
            let clientSegment;
            if (cacheComponents) {
                // Params are omitted when Cache Components is enabled
                clientSegment = createElement(ClientSegmentRoot, {
                    Component: SegmentComponent,
                    slots: parallelRouteProps,
                    serverProvidedParams: null
                });
            } else if (isStaticGeneration) {
                const promiseOfParams = createPrerenderParamsForClientSegment(currentParams);
                clientSegment = createElement(ClientSegmentRoot, {
                    Component: SegmentComponent,
                    slots: parallelRouteProps,
                    serverProvidedParams: {
                        params: currentParams,
                        promises: [
                            promiseOfParams
                        ]
                    }
                });
            } else {
                clientSegment = createElement(ClientSegmentRoot, {
                    Component: SegmentComponent,
                    slots: parallelRouteProps,
                    serverProvidedParams: {
                        params: currentParams,
                        promises: null
                    }
                });
            }
            if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
                let notfoundClientSegment;
                let forbiddenClientSegment;
                let unauthorizedClientSegment;
                // TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
                // This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
                // but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
                // We should instead look into handling the fallback behavior differently in development mode so that it doesn't
                // rely on the `NotFound` behavior.
                notfoundClientSegment = createErrorBoundaryClientSegmentRoot({
                    ctx,
                    ErrorBoundaryComponent: NotFound,
                    errorElement: notFoundElement,
                    ClientSegmentRoot,
                    layerAssets,
                    SegmentComponent,
                    currentParams
                });
                forbiddenClientSegment = createErrorBoundaryClientSegmentRoot({
                    ctx,
                    ErrorBoundaryComponent: Forbidden,
                    errorElement: forbiddenElement,
                    ClientSegmentRoot,
                    layerAssets,
                    SegmentComponent,
                    currentParams
                });
                unauthorizedClientSegment = createErrorBoundaryClientSegmentRoot({
                    ctx,
                    ErrorBoundaryComponent: Unauthorized,
                    errorElement: unauthorizedElement,
                    ClientSegmentRoot,
                    layerAssets,
                    SegmentComponent,
                    currentParams
                });
                if (notfoundClientSegment || forbiddenClientSegment || unauthorizedClientSegment) {
                    segmentNode = createElement(HTTPAccessFallbackBoundary, {
                        key: cacheNodeKey,
                        notFound: notfoundClientSegment,
                        forbidden: forbiddenClientSegment,
                        unauthorized: unauthorizedClientSegment
                    }, layerAssets, clientSegment);
                } else {
                    segmentNode = createElement(Fragment, {
                        key: cacheNodeKey
                    }, layerAssets, clientSegment);
                }
            } else {
                segmentNode = createElement(Fragment, {
                    key: cacheNodeKey
                }, layerAssets, clientSegment);
            }
        } else {
            const params = createServerParamsForServerSegment(currentParams, optionalCatchAllParamName, varyParamsAccumulator, isRuntimePrefetchable);
            let serverSegment;
            if (isUseCacheFunction(SegmentComponent)) {
                const UseCacheLayoutComponent = SegmentComponent;
                serverSegment = createElement(UseCacheLayoutComponent, {
                    ...parallelRouteProps,
                    params: params,
                    $$isLayout: true
                }, // Force static children here so that they're validated.
                // See https://github.com/facebook/react/pull/34846
                parallelRouteProps.children);
            } else {
                serverSegment = createElement(SegmentComponent, {
                    ...parallelRouteProps,
                    params: params
                }, // Force static children here so that they're validated.
                // See https://github.com/facebook/react/pull/34846
                parallelRouteProps.children);
            }
            if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
                // TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
                // This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
                // but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
                // We should instead look into handling the fallback behavior differently in development mode so that it doesn't
                // rely on the `NotFound` behavior.
                segmentNode = createElement(HTTPAccessFallbackBoundary, {
                    key: cacheNodeKey,
                    notFound: notFoundElement ? createElement(Fragment, null, layerAssets, createElement(SegmentComponent, {
                        params: params
                    }, notFoundStyles, notFoundElement)) : undefined
                }, layerAssets, serverSegment);
            } else {
                segmentNode = createElement(Fragment, {
                    key: cacheNodeKey
                }, layerAssets, serverSegment);
            }
        }
        const layoutFilePath = getConventionPathByType(tree, dir, 'layout');
        const wrappedSegmentNode = isSegmentViewEnabled && layoutFilePath ? createElement(SegmentViewNode, {
            key: 'layout',
            type: 'layout',
            pagePath: layoutFilePath
        }, segmentNode) : segmentNode;
        // For layouts we just render the component
        return createSeedData(ctx, wrappedSegmentNode, parallelRouteCacheNodeSeedData, loadingData, isPossiblyPartialResponse, isRuntimePrefetchable, varyParamsAccumulator);
    }
}
/**
 * Builds the fallback tree for an HTTP access boundary
 * (not-found/forbidden/unauthorized) of a client root layout: the layout is
 * re-rendered with the boundary element as its only `children` slot, wrapped
 * in a Fragment together with the layer assets. Returns null when no boundary
 * component is configured.
 */ function createErrorBoundaryClientSegmentRoot({ ctx, ErrorBoundaryComponent, errorElement, ClientSegmentRoot, layerAssets, SegmentComponent, currentParams }) {
    const { createElement, Fragment } = ctx.componentMod;
    if (!ErrorBoundaryComponent) {
        return null;
    }
    const segmentRoot = createElement(ClientSegmentRoot, {
        Component: SegmentComponent,
        slots: {
            children: errorElement
        },
        params: currentParams
    });
    return createElement(Fragment, null, layerAssets, segmentRoot);
}
/**
 * Collects the dynamic route params belonging to segments at or above the
 * root layout of the given loader tree (the "root params").
 */ export function getRootParams(loaderTree, getDynamicParamFromSegment) {
    const initialParams = {};
    return getRootParamsImpl(initialParams, loaderTree, getDynamicParamFromSegment);
}
/**
 * Walks down the `children` spine of the loader tree, accumulating dynamic
 * params, and stops at the first segment that defines a layout (the root
 * layout). Parallel routes cannot appear above the root layout, so `children`
 * is the only slot that can exist on the way down.
 */ function getRootParamsImpl(parentParams, loaderTree, getDynamicParamFromSegment) {
    let accumulatedParams = parentParams;
    let currentTree = loaderTree;
    while (true) {
        const { modules: { layout }, parallelRoutes } = parseLoaderTree(currentTree);
        const segmentParam = getDynamicParamFromSegment(currentTree);
        if (segmentParam && segmentParam.value !== null) {
            accumulatedParams = {
                ...accumulatedParams,
                [segmentParam.param]: segmentParam.value
            };
        }
        if (typeof layout !== 'undefined') {
            // Found the root layout; params gathered so far are the root params.
            return accumulatedParams;
        }
        if (!parallelRoutes.children) {
            // This should really be an error but there are bugs in Turbopack that cause
            // the _not-found LoaderTree to not have any layouts. For rootParams sake
            // this is somewhat irrelevant when you are not customizing the 404 page.
            // If you are customizing 404
            // TODO update rootParams to make all params optional if `/app/not-found.tsx` is defined
            return accumulatedParams;
        }
        currentTree = parallelRoutes.children;
    }
}
/**
 * Resolves a boundary convention (not-found/forbidden/unauthorized) into a
 * `[element, filePath]` tuple. The element renders the Component plus its
 * styles inside a Fragment, and is wrapped in a SegmentViewNode when the dev
 * segment explorer is enabled. The element is undefined when no Component
 * exists for this convention.
 */ async function createBoundaryConventionElement({ ctx, conventionName, Component, styles, tree }) {
    const { createElement, Fragment, SegmentViewNode } = ctx.componentMod;
    const segmentViewEnabled = Boolean(process.env.__NEXT_DEV_SERVER);
    // In the Edge runtime the project dir comes from an env var injected at
    // build time; otherwise it comes from the render options.
    const projectDir = (process.env.NEXT_RUNTIME === 'edge' ? process.env.__NEXT_EDGE_PROJECT_DIR : ctx.renderOpts.dir) || '';
    let element;
    if (Component) {
        element = createElement(Fragment, null, createElement(Component, null), styles);
    }
    const pagePath = getConventionPathByType(tree, projectDir, conventionName);
    let wrappedElement = element;
    if (segmentViewEnabled && element) {
        wrappedElement = createElement(SegmentViewNode, {
            key: cacheNodeKey + '-' + conventionName,
            type: conventionName,
            // TODO: Discovered when moving to `createElement`.
            // `SegmentViewNode` doesn't support undefined `pagePath`
            pagePath: pagePath
        }, element);
    }
    return [
        wrappedElement,
        pagePath
    ];
}
/**
 * Assembles the CacheNodeSeedData tuple for a segment:
 * `[rsc, parallelRoutes, head (always null here), isPossiblyPartialResponse,
 * varyParamsThenable | null]`.
 *
 * Also applies two wrappers around the RSC node:
 * - stage gating: non-runtime-prefetchable segments are deferred to the
 *   Static render stage via a thenable (see inline comment below);
 * - loading boundary: when `loading` data exists, the node is wrapped in a
 *   LoadingBoundaryProvider so descendants can access it.
 */ function createSeedData(ctx, rsc, parallelRoutes, loading, isPossiblyPartialResponse, isRuntimePrefetchable, varyParamsAccumulator) {
    const createElement = ctx.componentMod.createElement;
    // When this segment is NOT runtime-prefetchable, delay it until the Static
    // stage by wrapping the node in a promise. This allows runtime-prefetchable
    // segments (the lower tree) to render first during EarlyStatic, so their
    // runtime data resolves in EarlyRuntime where sync IO can be checked.
    // React will suspend on the thenable and resume when the stage advances.
    if (!isRuntimePrefetchable) {
        const workUnitStore = workUnitAsyncStorage.getStore();
        if (workUnitStore) {
            let stagedRendering;
            // Only request-time and runtime-prerender stores carry a staged
            // rendering controller; all other store types skip the deferral.
            switch(workUnitStore.type){
                case 'request':
                case 'prerender-runtime':
                    stagedRendering = workUnitStore.stagedRendering;
                    if (stagedRendering) {
                        const deferredRsc = rsc;
                        rsc = stagedRendering.waitForStage(RenderStage.Static).then(()=>deferredRsc);
                    }
                    break;
                case 'prerender':
                case 'prerender-client':
                case 'validation-client':
                case 'prerender-ppr':
                case 'prerender-legacy':
                case 'cache':
                case 'private-cache':
                case 'unstable-cache':
                case 'generate-static-params':
                    break;
                default:
                    // Exhaustiveness check: reaching here means a new store type
                    // was added without updating this switch.
                    workUnitStore;
            }
        }
    }
    if (loading !== null) {
        // If a loading.tsx boundary is present, wrap the component data in an
        // additional context provider to pass the loading data to the next
        // set of children.
        // NOTE: The reason this is a separate wrapper from LayoutRouter is because
        // not all segments render a LayoutRouter component, e.g. the root segment.
        const LoadingBoundaryProvider = ctx.componentMod.LoadingBoundaryProvider;
        rsc = createElement(LoadingBoundaryProvider, {
            loading: loading,
            children: rsc
        });
    }
    return [
        rsc,
        parallelRoutes,
        null,
        isPossiblyPartialResponse,
        varyParamsAccumulator ? getVaryParamsThenable(varyParamsAccumulator) : null
    ];
}
//# sourceMappingURL=create-component-tree.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,158 @@
import stringHash from 'next/dist/compiled/string-hash';
import { formatServerError } from '../../lib/format-server-error';
import { SpanStatusCode, getTracer } from '../lib/trace/tracer';
import { isAbortError } from '../pipe-readable';
import { isBailoutToCSRError } from '../../shared/lib/lazy-dynamic/bailout-to-csr';
import { isDynamicServerError } from '../../client/components/hooks-server-context';
import { isNextRouterError } from '../../client/components/is-next-router-error';
import { isPrerenderInterruptedError } from './dynamic-rendering';
import { getProperError } from '../../lib/is-error';
import { createDigestWithErrorCode } from '../../lib/error-telemetry-utils';
import { isReactLargeShellError } from './react-large-shell-error';
import { isInstantValidationError } from './instant-validation/instant-validation-error';
/**
 * Returns a digest for well-known Next.js errors, otherwise `undefined`. If a
 * digest is returned this also means that the error does not need to be
 * reported.
 */ export function getDigestForWellKnownError(error) {
    // Each of these is a framework control-flow signal rather than a real
    // application failure, so its existing digest is passed through unchanged:
    // - bailout to client-side rendering
    // - router navigation errors
    // - dynamic server usage (stops static generation when PPR is disabled;
    //   not actionable for the user)
    // - interrupted prerender
    // - instant validation
    if (isBailoutToCSRError(error) || isNextRouterError(error) || isDynamicServerError(error) || isPrerenderInterruptedError(error) || isInstantValidationError(error)) {
        return error.digest;
    }
    return undefined;
}
/**
 * Creates the error handler used during the React Server Components (Flight)
 * render. The returned callback maps a thrown value to a stable digest string
 * that React transports to the client instead of the error itself.
 *
 * @param shouldFormatError - when true, rewrites known server errors with more
 *   helpful messages before reporting (development behavior).
 * @param isBuildTimePrerendering - when true, suppresses logging of the
 *   sanitized production placeholder error during export.
 * @param reactServerErrors - shared digest -> error map used to recover
 *   original errors that were obfuscated by another react-server environment.
 * @param onReactServerRenderError - reporting callback invoked for errors that
 *   should be surfaced.
 * @param spanToRecordOn - optional tracing span to record the exception on;
 *   falls back to the active scope span when omitted.
 */ export function createReactServerErrorHandler(shouldFormatError, isBuildTimePrerendering, reactServerErrors, onReactServerRenderError, spanToRecordOn) {
    return (thrownValue)=>{
        // Compiler-hoisted temp for the optional-chain check on `err.message` below.
        var _err_message;
        // React may throw plain strings; hash them directly into a digest.
        if (typeof thrownValue === 'string') {
            // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            return stringHash(thrownValue).toString();
        }
        // If the response was closed, we don't need to log the error.
        if (isAbortError(thrownValue)) return;
        // Well-known framework errors already carry a digest and need no reporting.
        const digest = getDigestForWellKnownError(thrownValue);
        if (digest) {
            return digest;
        }
        if (isReactLargeShellError(thrownValue)) {
            // TODO: Aggregate
            console.error(thrownValue);
            return undefined;
        }
        let err = getProperError(thrownValue);
        let silenceLog = false;
        // If the error already has a digest, respect the original digest,
        // so it won't get re-generated into another new error.
        if (err.digest) {
            if (process.env.NODE_ENV === 'production' && reactServerErrors.has(err.digest)) {
                // This error is likely an obfuscated error from another react-server
                // environment (e.g. 'use cache'). We recover the original error here
                // for reporting purposes.
                err = reactServerErrors.get(err.digest);
                // We don't log it again though, as it was already logged in the
                // original environment.
                silenceLog = true;
            } else {
            // Either we're in development (where we want to keep the transported
            // error with environmentName), or the error is not in reactServerErrors
            // but has a digest from other means. Keep the error as-is.
            }
        } else {
            // No digest yet: derive one from the message + stack so the same
            // error maps to the same digest across renders.
            err.digest = createDigestWithErrorCode(err, // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            stringHash(err.message + (err.stack || '')).toString());
        }
        // @TODO by putting this here and not at the top it is possible that
        // we don't error the build in places we actually expect to
        if (!reactServerErrors.has(err.digest)) {
            reactServerErrors.set(err.digest, err);
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            formatServerError(err);
        }
        // Don't log the suppressed error during export
        if (!(isBuildTimePrerendering && (err == null ? void 0 : (_err_message = err.message) == null ? void 0 : _err_message.includes('The specific message is omitted in production builds to avoid leaking sensitive details.')))) {
            // Record exception on the provided span if available, otherwise try active span.
            const span = spanToRecordOn ?? getTracer().getActiveScopeSpan();
            if (span) {
                span.recordException(err);
                span.setAttribute('error.type', err.name);
                span.setStatus({
                    code: SpanStatusCode.ERROR,
                    message: err.message
                });
            }
            onReactServerRenderError(err, silenceLog);
        }
        return err.digest;
    };
}
/**
 * Creates the error handler used during the HTML (SSR/Fizz) render pass.
 * Mirrors `createReactServerErrorHandler`, with two differences: every thrown
 * value is collected into `allCapturedErrors`, and errors whose digest is
 * found in `reactServerErrors` are treated as RSC-originated (already
 * reported) and filtered out of SSR reporting.
 *
 * @param shouldFormatError - when true, rewrites known server errors with more
 *   helpful messages before reporting.
 * @param isBuildTimePrerendering - suppresses logging of the sanitized
 *   production placeholder error during export.
 * @param reactServerErrors - digest -> error map populated by the RSC handler.
 * @param allCapturedErrors - accumulator receiving every thrown value.
 * @param onHTMLRenderSSRError - reporting callback for genuine SSR errors.
 * @param spanToRecordOn - optional tracing span; falls back to the active
 *   scope span when omitted.
 */ export function createHTMLErrorHandler(shouldFormatError, isBuildTimePrerendering, reactServerErrors, allCapturedErrors, onHTMLRenderSSRError, spanToRecordOn) {
    return (thrownValue, errorInfo)=>{
        // Compiler-hoisted temp for the optional-chain check on `err.message` below.
        var _err_message;
        if (isReactLargeShellError(thrownValue)) {
            // TODO: Aggregate
            console.error(thrownValue);
            return undefined;
        }
        // Assume the error originated during SSR until proven to be an RSC error.
        let isSSRError = true;
        allCapturedErrors.push(thrownValue);
        // If the response was closed, we don't need to log the error.
        if (isAbortError(thrownValue)) return;
        // Well-known framework errors already carry a digest and need no reporting.
        const digest = getDigestForWellKnownError(thrownValue);
        if (digest) {
            return digest;
        }
        const err = getProperError(thrownValue);
        // If the error already has a digest, respect the original digest,
        // so it won't get re-generated into another new error.
        if (err.digest) {
            if (reactServerErrors.has(err.digest)) {
                // This error is likely an obfuscated error from react-server.
                // We recover the original error here.
                thrownValue = reactServerErrors.get(err.digest);
                isSSRError = false;
            } else {
            // The error is not from react-server but has a digest
            // from other means so we don't need to produce a new one
            }
        } else {
            // No digest yet: derive one from the message plus the component
            // stack (falling back to the error stack) for stability.
            err.digest = createDigestWithErrorCode(err, stringHash(err.message + ((errorInfo == null ? void 0 : errorInfo.componentStack) || err.stack || '')).toString());
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            formatServerError(err);
        }
        // Don't log the suppressed error during export
        if (!(isBuildTimePrerendering && (err == null ? void 0 : (_err_message = err.message) == null ? void 0 : _err_message.includes('The specific message is omitted in production builds to avoid leaking sensitive details.')))) {
            // HTML errors contain RSC errors as well, filter them out before reporting
            if (isSSRError) {
                // Record exception on the provided span if available, otherwise try active span.
                const span = spanToRecordOn ?? getTracer().getActiveScopeSpan();
                if (span) {
                    span.recordException(err);
                    span.setAttribute('error.type', err.name);
                    span.setStatus({
                        code: SpanStatusCode.ERROR,
                        message: err.message
                    });
                }
                onHTMLRenderSSRError(err, errorInfo);
            }
        }
        return err.digest;
    };
}
/**
 * True when `err` is a genuine application error — i.e. not one of the
 * framework's internal control-flow errors (aborted response, bailout to
 * client-side rendering, or router navigation).
 */ export function isUserLandError(err) {
    if (isAbortError(err)) return false;
    if (isBailoutToCSRError(err)) return false;
    return !isNextRouterError(err);
}
//# sourceMappingURL=create-error-handler.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,80 @@
import { PrefetchHint } from '../../shared/lib/app-router-types';
import { addSearchParamsIfPageSegment } from '../../shared/lib/segment';
/**
 * Recursively converts a loader tree into a FlightRouterState-style segment
 * tuple. Slot [0] is the (possibly dynamic/search-param-augmented) segment,
 * slot [1] the parallel-route children, and slot [4] an optional bitmask of
 * PrefetchHint flags; slots [2] and [3] are intentionally left unset here.
 *
 * `hintTree` (when present) is traversed in lockstep with the loader tree and
 * its precomputed build-time hints are unioned into the mask. Child flags for
 * instant config and loading boundaries are propagated upward as
 * "Subtree*" hints.
 *
 * NOTE: the first segment that declares a layout is tagged IsRootLayout;
 * `didFindRootLayout` carries that state down the recursion.
 */ async function createFlightRouterStateFromLoaderTreeImpl(loaderTree, hintTree, getDynamicParamFromSegment, searchParams, didFindRootLayout) {
    const [segment, parallelRoutes, { layout, loading, page }] = loaderTree;
    const dynamicParam = getDynamicParamFromSegment(loaderTree);
    // Dynamic segments are represented by their tree segment; static ones keep
    // the raw segment string.
    const treeSegment = dynamicParam ? dynamicParam.treeSegment : segment;
    const segmentTree = [
        addSearchParamsIfPageSegment(treeSegment, searchParams),
        {}
    ];
    // Load the layout or page module to check for unstable_instant config
    const mod = layout ? await layout[0]() : page ? await page[0]() : undefined;
    const instantConfig = mod ? mod.unstable_instant : undefined;
    let prefetchHints = 0;
    // Union in the precomputed build-time hints (e.g. segment inlining
    // decisions) if available. When hints are not available (e.g. dev mode or
    // if prefetch-hints.json was not generated), we fall through and still
    // compute the other hints below. In the future this should be a build
    // error, but for now we gracefully degrade.
    //
    // TODO: Move more of the hints computation (IsRootLayout, instant config,
    // loading boundary detection) into the build-time measurement step in
    // collectPrefetchHints, so this function only needs to union the
    // precomputed bitmask rather than re-derive hints on every render.
    if (hintTree !== null) {
        prefetchHints |= hintTree.hints;
    }
    // Mark the first segment that has a layout as the "root" layout
    if (!didFindRootLayout && typeof layout !== 'undefined') {
        didFindRootLayout = true;
        prefetchHints |= PrefetchHint.IsRootLayout;
    }
    if (instantConfig && typeof instantConfig === 'object') {
        prefetchHints |= PrefetchHint.SubtreeHasInstant;
        if (instantConfig.prefetch === 'runtime') {
            prefetchHints |= PrefetchHint.HasRuntimePrefetch;
        }
    }
    // Check if this segment has a loading boundary
    if (loading) {
        prefetchHints |= PrefetchHint.SegmentHasLoadingBoundary;
    }
    const children = {};
    for(const parallelRouteKey in parallelRoutes){
        var _hintTree_slots;
        // Look up the child hint node by parallel route key, traversing the
        // hint tree in parallel with the loader tree.
        const childHintNode = (hintTree == null ? void 0 : (_hintTree_slots = hintTree.slots) == null ? void 0 : _hintTree_slots[parallelRouteKey]) ?? null;
        const child = await createFlightRouterStateFromLoaderTreeImpl(parallelRoutes[parallelRouteKey], childHintNode, getDynamicParamFromSegment, searchParams, didFindRootLayout);
        // Propagate subtree flags from children
        if (child[4] !== undefined) {
            prefetchHints |= child[4] & (PrefetchHint.SubtreeHasInstant | PrefetchHint.SubtreeHasLoadingBoundary);
            // If a child has a loading boundary (either directly or in its subtree),
            // propagate that as SubtreeHasLoadingBoundary to this segment.
            if (child[4] & (PrefetchHint.SegmentHasLoadingBoundary | PrefetchHint.SubtreeHasLoadingBoundary)) {
                prefetchHints |= PrefetchHint.SubtreeHasLoadingBoundary;
            }
        }
        children[parallelRouteKey] = child;
    }
    segmentTree[1] = children;
    // Only materialize slot [4] when at least one hint is set.
    if (prefetchHints !== 0) {
        segmentTree[4] = prefetchHints;
    }
    return segmentTree;
}
/**
 * Public entry point: builds the flight router state for a loader tree.
 * Starts the recursion with "no root layout found yet".
 */ export async function createFlightRouterStateFromLoaderTree(loaderTree, hintTree, getDynamicParamFromSegment, searchParams) {
    return createFlightRouterStateFromLoaderTreeImpl(loaderTree, hintTree, getDynamicParamFromSegment, searchParams, false);
}
/**
 * Builds the router state for a route tree prefetch request.
 *
 * Search params are deliberately omitted from the page segment's cache key
 * here: they do not affect the structure of the route, and the client cache
 * applies its own search-param handling.
 */ export async function createRouteTreePrefetch(loaderTree, hintTree, getDynamicParamFromSegment) {
    const emptySearchParams = {};
    return createFlightRouterStateFromLoaderTreeImpl(loaderTree, hintTree, getDynamicParamFromSegment, emptySearchParams, false);
}
//# sourceMappingURL=create-flight-router-state-from-loader-tree.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,76 @@
// micromatch is only available at node runtime, so it cannot be used here since the code path that calls this function
// can be run from edge. This is a simple implementation that safely achieves the required functionality.
// the goal is to match the functionality for remotePatterns as defined here -
// https://nextjs.org/docs/app/api-reference/components/image#remotepatterns
// TODO - retrofit micromatch to work in edge and use that instead
/**
 * Matches a DNS name against a wildcard pattern, mirroring the semantics of
 * `remotePatterns` (see the Next.js image component docs). `*` matches exactly
 * one non-empty label, `**` (only valid as the left-most label) matches one or
 * more remaining labels. A bare wildcard pattern never matches a whole domain.
 */ function matchWildcardDomain(domain, pattern) {
    // DNS names are case-insensitive per RFC 1035. ASCII-only lowercasing
    // avoids unicode case-mapping surprises.
    const asciiLower = (s)=>s.replace(/[A-Z]/g, (c)=>c.toLowerCase());
    const domainParts = asciiLower(domain).split('.');
    const patternParts = asciiLower(pattern).split('.');
    if (patternParts.length < 1) {
        // Empty pattern: nothing to match against.
        return false;
    }
    if (domainParts.length < patternParts.length) {
        // The domain has fewer labels than the pattern requires.
        return false;
    }
    // Prevent wildcards from matching entire domains (e.g. '**' or '*'):
    // wildcards may only match subdomains, never the whole domain.
    if (patternParts.length === 1 && (patternParts[0] === '*' || patternParts[0] === '**')) {
        return false;
    }
    // Compare label-by-label from the right-hand (TLD) side.
    let di = domainParts.length - 1;
    for(let pi = patternParts.length - 1; pi >= 0; pi--, di--){
        const patternPart = patternParts[pi];
        const domainPart = domainParts[di];
        if (patternPart === '') {
            // Invalid pattern: labels must be non-empty.
            return false;
        }
        if (patternPart === '*') {
            // Single wildcard matches any one non-empty label.
            if (domainPart) continue;
            return false;
        }
        if (patternPart === '**') {
            // Recursive wildcard must be the left-most pattern label...
            if (pi > 0) return false;
            // ...and consumes everything that remains of the domain.
            return domainPart !== undefined;
        }
        // Literal label: must match exactly.
        if (domainPart !== patternPart) return false;
    }
    // Pattern exhausted; it's a match only if the domain is fully consumed too.
    return di < 0;
}
/**
 * Returns true when `originDomain` matches any entry of `allowedOrigins`,
 * either exactly (case-insensitively) or via wildcard pattern matching.
 */ export const isCsrfOriginAllowed = (originDomain, allowedOrigins = [])=>{
    // DNS names are case-insensitive per RFC 1035. ASCII-only lowercasing
    // avoids unicode case-mapping surprises.
    const asciiLower = (s)=>s.replace(/[A-Z]/g, (c)=>c.toLowerCase());
    const normalizedOrigin = asciiLower(originDomain);
    for (const allowedOrigin of allowedOrigins){
        if (!allowedOrigin) continue;
        // Exact (case-insensitive) match, then wildcard match.
        if (asciiLower(allowedOrigin) === normalizedOrigin) return true;
        if (matchWildcardDomain(originDomain, allowedOrigin)) return true;
    }
    return false;
};
//# sourceMappingURL=csrf-protection.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/csrf-protection.ts"],"sourcesContent":["// micromatch is only available at node runtime, so it cannot be used here since the code path that calls this function\n// can be run from edge. This is a simple implementation that safely achieves the required functionality.\n// the goal is to match the functionality for remotePatterns as defined here -\n// https://nextjs.org/docs/app/api-reference/components/image#remotepatterns\n// TODO - retrofit micromatch to work in edge and use that instead\nfunction matchWildcardDomain(domain: string, pattern: string) {\n // DNS names are case-insensitive per RFC 1035\n // Use ASCII-only toLowerCase to avoid unicode issues\n const normalizedDomain = domain.replace(/[A-Z]/g, (c) => c.toLowerCase())\n const normalizedPattern = pattern.replace(/[A-Z]/g, (c) => c.toLowerCase())\n\n const domainParts = normalizedDomain.split('.')\n const patternParts = normalizedPattern.split('.')\n\n if (patternParts.length < 1) {\n // pattern is empty and therefore invalid to match against\n return false\n }\n\n if (domainParts.length < patternParts.length) {\n // domain has too few segments and thus cannot match\n return false\n }\n\n // Prevent wildcards from matching entire domains (e.g. '**' or '*.com')\n // This ensures wildcards can only match subdomains, not the main domain\n if (\n patternParts.length === 1 &&\n (patternParts[0] === '*' || patternParts[0] === '**')\n ) {\n return false\n }\n\n while (patternParts.length) {\n const patternPart = patternParts.pop()\n const domainPart = domainParts.pop()\n\n switch (patternPart) {\n case '': {\n // invalid pattern. 
pattern segments must be non empty\n return false\n }\n case '*': {\n // wildcard matches anything so we continue if the domain part is non-empty\n if (domainPart) {\n continue\n } else {\n return false\n }\n }\n case '**': {\n // if this is not the last item in the pattern the pattern is invalid\n if (patternParts.length > 0) {\n return false\n }\n // recursive wildcard matches anything so we terminate here if the domain part is non empty\n return domainPart !== undefined\n }\n case undefined:\n default: {\n if (domainPart !== patternPart) {\n return false\n }\n }\n }\n }\n\n // We exhausted the pattern. If we also exhausted the domain we have a match\n return domainParts.length === 0\n}\n\nexport const isCsrfOriginAllowed = (\n originDomain: string,\n allowedOrigins: string[] = []\n): boolean => {\n // DNS names are case-insensitive per RFC 1035\n // Use ASCII-only toLowerCase to avoid unicode issues\n const normalizedOrigin = originDomain.replace(/[A-Z]/g, (c) =>\n c.toLowerCase()\n )\n\n return allowedOrigins.some((allowedOrigin) => {\n if (!allowedOrigin) return false\n\n const normalizedAllowed = allowedOrigin.replace(/[A-Z]/g, (c) =>\n c.toLowerCase()\n )\n\n return (\n normalizedAllowed === normalizedOrigin ||\n matchWildcardDomain(originDomain, allowedOrigin)\n )\n 
})\n}\n"],"names":["matchWildcardDomain","domain","pattern","normalizedDomain","replace","c","toLowerCase","normalizedPattern","domainParts","split","patternParts","length","patternPart","pop","domainPart","undefined","isCsrfOriginAllowed","originDomain","allowedOrigins","normalizedOrigin","some","allowedOrigin","normalizedAllowed"],"mappings":"AAAA,uHAAuH;AACvH,yGAAyG;AACzG,8EAA8E;AAC9E,4EAA4E;AAC5E,kEAAkE;AAClE,SAASA,oBAAoBC,MAAc,EAAEC,OAAe;IAC1D,8CAA8C;IAC9C,qDAAqD;IACrD,MAAMC,mBAAmBF,OAAOG,OAAO,CAAC,UAAU,CAACC,IAAMA,EAAEC,WAAW;IACtE,MAAMC,oBAAoBL,QAAQE,OAAO,CAAC,UAAU,CAACC,IAAMA,EAAEC,WAAW;IAExE,MAAME,cAAcL,iBAAiBM,KAAK,CAAC;IAC3C,MAAMC,eAAeH,kBAAkBE,KAAK,CAAC;IAE7C,IAAIC,aAAaC,MAAM,GAAG,GAAG;QAC3B,0DAA0D;QAC1D,OAAO;IACT;IAEA,IAAIH,YAAYG,MAAM,GAAGD,aAAaC,MAAM,EAAE;QAC5C,oDAAoD;QACpD,OAAO;IACT;IAEA,wEAAwE;IACxE,wEAAwE;IACxE,IACED,aAAaC,MAAM,KAAK,KACvBD,CAAAA,YAAY,CAAC,EAAE,KAAK,OAAOA,YAAY,CAAC,EAAE,KAAK,IAAG,GACnD;QACA,OAAO;IACT;IAEA,MAAOA,aAAaC,MAAM,CAAE;QAC1B,MAAMC,cAAcF,aAAaG,GAAG;QACpC,MAAMC,aAAaN,YAAYK,GAAG;QAElC,OAAQD;YACN,KAAK;gBAAI;oBACP,sDAAsD;oBACtD,OAAO;gBACT;YACA,KAAK;gBAAK;oBACR,2EAA2E;oBAC3E,IAAIE,YAAY;wBACd;oBACF,OAAO;wBACL,OAAO;oBACT;gBACF;YACA,KAAK;gBAAM;oBACT,qEAAqE;oBACrE,IAAIJ,aAAaC,MAAM,GAAG,GAAG;wBAC3B,OAAO;oBACT;oBACA,2FAA2F;oBAC3F,OAAOG,eAAeC;gBACxB;YACA,KAAKA;YACL;gBAAS;oBACP,IAAID,eAAeF,aAAa;wBAC9B,OAAO;oBACT;gBACF;QACF;IACF;IAEA,4EAA4E;IAC5E,OAAOJ,YAAYG,MAAM,KAAK;AAChC;AAEA,OAAO,MAAMK,sBAAsB,CACjCC,cACAC,iBAA2B,EAAE;IAE7B,8CAA8C;IAC9C,qDAAqD;IACrD,MAAMC,mBAAmBF,aAAab,OAAO,CAAC,UAAU,CAACC,IACvDA,EAAEC,WAAW;IAGf,OAAOY,eAAeE,IAAI,CAAC,CAACC;QAC1B,IAAI,CAACA,eAAe,OAAO;QAE3B,MAAMC,oBAAoBD,cAAcjB,OAAO,CAAC,UAAU,CAACC,IACzDA,EAAEC,WAAW;QAGf,OACEgB,sBAAsBH,oBACtBnB,oBAAoBiB,cAAcI;IAEtC;AACF,EAAC","ignoreList":[0]}

View File

@@ -0,0 +1,8 @@
/**
* Compile-time switcher for debug channel operations.
*
* Simple re-export from the web implementation.
* A future change will add a conditional branch for node streams.
*/ export { createDebugChannel, toNodeDebugChannel } from './debug-channel-server.web';
//# sourceMappingURL=debug-channel-server.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/debug-channel-server.ts"],"sourcesContent":["/**\n * Compile-time switcher for debug channel operations.\n *\n * Simple re-export from the web implementation.\n * A future change will add a conditional branch for node streams.\n */\nexport type {\n DebugChannelPair,\n DebugChannelServer,\n} from './debug-channel-server.web'\n\nexport {\n createDebugChannel,\n toNodeDebugChannel,\n} from './debug-channel-server.web'\n"],"names":["createDebugChannel","toNodeDebugChannel"],"mappings":"AAAA;;;;;CAKC,GAMD,SACEA,kBAAkB,EAClBC,kBAAkB,QACb,6BAA4B","ignoreList":[0]}

View File

@@ -0,0 +1,48 @@
/**
* Web debug channel implementation.
* Loaded by debug-channel-server.ts.
*/ // Types defined inline for now; will move to debug-channel-server.node.ts later.
/**
 * Creates a debug channel pair, or `undefined` in production builds where
 * debug channels are disabled.
 */ export function createDebugChannel() {
    return process.env.NODE_ENV === 'production' ? undefined : createWebDebugChannel();
}
/**
 * Creates a one-directional debug channel built on web streams: whatever the
 * server writes into `serverSide.writable` is forwarded to
 * `clientSide.readable` (including close and error/abort signals).
 */ export function createWebDebugChannel() {
    let controllerRef;
    // The readable captures its controller so the writable sink can feed it.
    const readable = new ReadableStream({
        start (controller) {
            controllerRef = controller;
        }
    });
    const writable = new WritableStream({
        write (chunk) {
            if (controllerRef) controllerRef.enqueue(chunk);
        },
        close () {
            if (controllerRef) controllerRef.close();
        },
        abort (err) {
            if (controllerRef) controllerRef.error(err);
        }
    });
    return {
        serverSide: {
            writable
        },
        clientSide: {
            readable
        }
    };
}
/**
 * toNodeDebugChannel is a no-op stub on the web path.
 * It should never be called in edge/web builds.
 */ export function toNodeDebugChannel(_webDebugChannel) {
    const error = new Error('toNodeDebugChannel cannot be used in edge/web runtime, this is a bug in the Next.js codebase');
    // Tag the error with its stable Next.js error code; non-enumerable so it
    // does not show up when the error is serialized or iterated.
    Object.defineProperty(error, "__NEXT_ERROR_CODE", {
        value: "E1071",
        enumerable: false,
        configurable: true
    });
    throw error;
}
//# sourceMappingURL=debug-channel-server.web.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/debug-channel-server.web.ts"],"sourcesContent":["/**\n * Web debug channel implementation.\n * Loaded by debug-channel-server.ts.\n */\n\n// Types defined inline for now; will move to debug-channel-server.node.ts later.\nexport type DebugChannelPair = {\n serverSide: DebugChannelServer\n clientSide: DebugChannelClient\n}\n\nexport type DebugChannelServer = {\n readable?: ReadableStream<Uint8Array>\n writable: WritableStream<Uint8Array>\n}\n\ntype DebugChannelClient = {\n readable: ReadableStream<Uint8Array>\n writable?: WritableStream<Uint8Array>\n}\n\nexport function createDebugChannel(): DebugChannelPair | undefined {\n if (process.env.NODE_ENV === 'production') {\n return undefined\n }\n return createWebDebugChannel()\n}\n\nexport function createWebDebugChannel(): DebugChannelPair {\n let readableController: ReadableStreamDefaultController | undefined\n\n const clientSideReadable = new ReadableStream<Uint8Array>({\n start(controller) {\n readableController = controller\n },\n })\n\n return {\n serverSide: {\n writable: new WritableStream<Uint8Array>({\n write(chunk) {\n readableController?.enqueue(chunk)\n },\n close() {\n readableController?.close()\n },\n abort(err) {\n readableController?.error(err)\n },\n }),\n },\n clientSide: { readable: clientSideReadable },\n }\n}\n\n/**\n * toNodeDebugChannel is a no-op stub on the web path.\n * It should never be called in edge/web builds.\n */\nexport function toNodeDebugChannel(\n _webDebugChannel: DebugChannelServer\n): never {\n throw new Error(\n 'toNodeDebugChannel cannot be used in edge/web runtime, this is a bug in the Next.js codebase'\n 
)\n}\n"],"names":["createDebugChannel","process","env","NODE_ENV","undefined","createWebDebugChannel","readableController","clientSideReadable","ReadableStream","start","controller","serverSide","writable","WritableStream","write","chunk","enqueue","close","abort","err","error","clientSide","readable","toNodeDebugChannel","_webDebugChannel","Error"],"mappings":"AAAA;;;CAGC,GAED,iFAAiF;AAgBjF,OAAO,SAASA;IACd,IAAIC,QAAQC,GAAG,CAACC,QAAQ,KAAK,cAAc;QACzC,OAAOC;IACT;IACA,OAAOC;AACT;AAEA,OAAO,SAASA;IACd,IAAIC;IAEJ,MAAMC,qBAAqB,IAAIC,eAA2B;QACxDC,OAAMC,UAAU;YACdJ,qBAAqBI;QACvB;IACF;IAEA,OAAO;QACLC,YAAY;YACVC,UAAU,IAAIC,eAA2B;gBACvCC,OAAMC,KAAK;oBACTT,sCAAAA,mBAAoBU,OAAO,CAACD;gBAC9B;gBACAE;oBACEX,sCAAAA,mBAAoBW,KAAK;gBAC3B;gBACAC,OAAMC,GAAG;oBACPb,sCAAAA,mBAAoBc,KAAK,CAACD;gBAC5B;YACF;QACF;QACAE,YAAY;YAAEC,UAAUf;QAAmB;IAC7C;AACF;AAEA;;;CAGC,GACD,OAAO,SAASgB,mBACdC,gBAAoC;IAEpC,MAAM,qBAEL,CAFK,IAAIC,MACR,iGADI,qBAAA;eAAA;oBAAA;sBAAA;IAEN;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,4 @@
import { createAsyncLocalStorage } from './async-local-storage';
export const dynamicAccessAsyncStorageInstance = createAsyncLocalStorage();
//# sourceMappingURL=dynamic-access-async-storage-instance.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/dynamic-access-async-storage-instance.ts"],"sourcesContent":["import { createAsyncLocalStorage } from './async-local-storage'\nimport type { DynamicAccessStorage } from './dynamic-access-async-storage.external'\n\nexport const dynamicAccessAsyncStorageInstance: DynamicAccessStorage =\n createAsyncLocalStorage()\n"],"names":["createAsyncLocalStorage","dynamicAccessAsyncStorageInstance"],"mappings":"AAAA,SAASA,uBAAuB,QAAQ,wBAAuB;AAG/D,OAAO,MAAMC,oCACXD,0BAAyB","ignoreList":[0]}

View File

@@ -0,0 +1,7 @@
// Share the instance module in the next-shared layer
import { dynamicAccessAsyncStorageInstance } from './dynamic-access-async-storage-instance' with {
'turbopack-transition': 'next-shared'
};
export { dynamicAccessAsyncStorageInstance as dynamicAccessAsyncStorage };
//# sourceMappingURL=dynamic-access-async-storage.external.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/dynamic-access-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { dynamicAccessAsyncStorageInstance } from './dynamic-access-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\n\nexport interface DynamicAccessAsyncStore {\n readonly abortController: AbortController\n}\n\nexport type DynamicAccessStorage = AsyncLocalStorage<DynamicAccessAsyncStore>\nexport { dynamicAccessAsyncStorageInstance as dynamicAccessAsyncStorage }\n"],"names":["dynamicAccessAsyncStorageInstance","dynamicAccessAsyncStorage"],"mappings":"AAEA,qDAAqD;AACrD,SAASA,iCAAiC,QAAQ,+CAA+C;IAAE,wBAAwB;AAAc,EAAC;AAO1I,SAASA,qCAAqCC,yBAAyB,GAAE","ignoreList":[0]}

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,99 @@
// This file should never be bundled into application's runtime code and should
// stay in the Next.js server.
import path from 'path';
import fs from 'fs';
import { getStorageDirectory } from '../cache-dir';
import { arrayBufferToString } from './encryption-utils';
// Memoized promise for the encryption key. It is kept in memory as it should
// never change during the lifetime of the server, in both development and
// production.
let __next_encryption_key_generation_promise = null;
// Config file (written into the dist cache directory) that persists the key
// across runs.
const CONFIG_FILE = '.rscinfo';
// JSON field names used inside the config file.
const ENCRYPTION_KEY = 'encryption.key';
const ENCRYPTION_EXPIRE_AT = 'encryption.expire_at';
// How long a persisted key stays valid before it is rotated at build time.
const EXPIRATION = 1000 * 60 * 60 * 24 * 14 // 14 days
;
/**
 * Persists the encryption key (plus an expiration timestamp) to the
 * `.rscinfo` config file inside the dist cache directory. No-ops when no
 * persistent storage directory is available.
 */ async function writeCache(distDir, configValue) {
    const cacheBaseDir = getStorageDirectory(distDir);
    if (!cacheBaseDir) return;
    const configPath = path.join(cacheBaseDir, CONFIG_FILE);
    // `recursive: true` makes mkdir a no-op when the directory already exists,
    // so the previous `fs.existsSync` pre-check was redundant and introduced a
    // TOCTOU race; always calling mkdir is both simpler and safer.
    await fs.promises.mkdir(cacheBaseDir, {
        recursive: true
    });
    await fs.promises.writeFile(configPath, JSON.stringify({
        [ENCRYPTION_KEY]: configValue,
        [ENCRYPTION_EXPIRE_AT]: Date.now() + EXPIRATION
    }));
}
// This utility is used to get a key for the cache directory. If the
// key is not present, it will generate a new one and store it in the
// cache directory inside dist.
// The key will also expire after a certain amount of time. Once it
// expires, a new one will be generated.
// During the lifetime of the server, it will be reused and never refreshed.
async function loadOrGenerateKey(distDir, isBuild, generateKey) {
    const cacheBaseDir = getStorageDirectory(distDir);
    if (!cacheBaseDir) {
        // There's no persistent storage available. We generate a new key.
        // This also covers development time.
        return await generateKey();
    }
    const configPath = path.join(cacheBaseDir, CONFIG_FILE);
    // Returns the cached key string when a valid, unexpired key exists,
    // otherwise `false` (including for any malformed config file).
    async function hasCachedKey() {
        if (!fs.existsSync(configPath)) return false;
        try {
            const config = JSON.parse(await fs.promises.readFile(configPath, 'utf8'));
            if (!config) return false;
            // Both fields must be present with the expected types.
            if (typeof config[ENCRYPTION_KEY] !== 'string' || typeof config[ENCRYPTION_EXPIRE_AT] !== 'number') {
                return false;
            }
            // For build time, we need to rotate the key if it's expired. Otherwise
            // (next start) we have to keep the key as it is so the runtime key matches
            // the build time key.
            if (isBuild && config[ENCRYPTION_EXPIRE_AT] < Date.now()) {
                return false;
            }
            const cachedKey = config[ENCRYPTION_KEY];
            // If encryption key is provided via env, and it's not same as valid cache,
            // we should not use the cached key and respect the env key.
            if (cachedKey && process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY && cachedKey !== process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY) {
                return false;
            }
            return cachedKey;
        } catch {
            // Broken config file. We should generate a new key and overwrite it.
            return false;
        }
    }
    const maybeValidKey = await hasCachedKey();
    if (typeof maybeValidKey === 'string') {
        return maybeValidKey;
    }
    // No valid cached key: generate a fresh one and persist it.
    const key = await generateKey();
    await writeCache(distDir, key);
    return key;
}
/**
 * Returns the base64-encoded Server Actions encryption key, loading it from
 * the persistent cache or generating a fresh AES-GCM 256-bit key. The result
 * is memoized as a module-level promise so concurrent callers share a single
 * load/generation instead of racing to produce different keys.
 */ export async function generateEncryptionKeyBase64({ isBuild, distDir }) {
    if (__next_encryption_key_generation_promise === null) {
        __next_encryption_key_generation_promise = loadOrGenerateKey(distDir, isBuild, async ()=>{
            // An explicitly provided key always takes precedence.
            const envKey = process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY;
            if (envKey) {
                return envKey;
            }
            // Otherwise mint a fresh extractable AES-GCM key and export it
            // as a base64 string.
            const generatedKey = await crypto.subtle.generateKey({
                name: 'AES-GCM',
                length: 256
            }, true, [
                'encrypt',
                'decrypt'
            ]);
            const rawKey = await crypto.subtle.exportKey('raw', generatedKey);
            return btoa(arrayBufferToString(rawKey));
        });
    }
    return __next_encryption_key_generation_promise;
}
//# sourceMappingURL=encryption-utils-server.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,59 @@
import { InvariantError } from '../../shared/lib/invariant-error';
import { getServerActionsManifest } from './manifests-singleton';
// Memoized CryptoKey for Server Actions; populated on the first call to
// getActionEncryptionKey and reused for the lifetime of the process.
let __next_loaded_action_key;
/**
 * Converts raw bytes (ArrayBuffer or typed array) to a binary string where
 * each byte becomes one char code. Inverse of `stringToUint8Array`.
 */ export function arrayBufferToString(buffer) {
    const bytes = new Uint8Array(buffer);
    const len = bytes.byteLength;
    // @anonrig: V8 has a limit of 65535 arguments in a function.
    // For len < 65535, the single `apply` call is faster.
    // https://github.com/vercel/next.js/pull/56377#pullrequestreview-1656181623
    if (len < 65535) {
        return String.fromCharCode.apply(null, bytes);
    }
    // Slow path for large inputs: convert one byte at a time.
    const chars = [];
    for(let i = 0; i < len; i++){
        chars.push(String.fromCharCode(bytes[i]));
    }
    return chars.join('');
}
/**
 * Converts a binary string back into bytes: each char code becomes one byte.
 * Inverse of `arrayBufferToString`.
 */ export function stringToUint8Array(binary) {
    const out = new Uint8Array(binary.length);
    for(let i = 0; i < out.length; i++){
        out[i] = binary.charCodeAt(i);
    }
    return out;
}
/**
 * AES-GCM encrypts `data` with the given key and IV. Returns the promise from
 * WebCrypto directly; the caller supplies the IV.
 */ export function encrypt(key, iv, data) {
    const algorithm = {
        name: 'AES-GCM',
        iv
    };
    return crypto.subtle.encrypt(algorithm, key, data);
}
/**
 * AES-GCM decrypts `data` with the given key and IV. Returns the promise from
 * WebCrypto directly; the caller supplies the IV used at encryption time.
 */ export function decrypt(key, iv, data) {
    const algorithm = {
        name: 'AES-GCM',
        iv
    };
    return crypto.subtle.decrypt(algorithm, key, data);
}
/**
 * Resolves the Server Actions encryption key as an imported AES-GCM
 * CryptoKey, memoizing it in module state. The raw key comes from the
 * NEXT_SERVER_ACTIONS_ENCRYPTION_KEY env var, falling back to the server
 * actions manifest; throws an InvariantError when neither is present.
 */ export async function getActionEncryptionKey() {
    if (__next_loaded_action_key) {
        return __next_loaded_action_key;
    }
    const manifest = getServerActionsManifest();
    // The env-provided key takes precedence over the manifest's key.
    const rawKey = process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY || manifest.encryptionKey;
    if (rawKey === undefined) {
        throw Object.defineProperty(new InvariantError('Missing encryption key for Server Actions'), "__NEXT_ERROR_CODE", {
            value: "E571",
            enumerable: false,
            configurable: true
        });
    }
    // The key is stored base64-encoded; decode it and import for AES-GCM use.
    const keyBytes = stringToUint8Array(atob(rawKey));
    __next_loaded_action_key = await crypto.subtle.importKey('raw', keyBytes, 'AES-GCM', true, [
        'encrypt',
        'decrypt'
    ]);
    return __next_loaded_action_key;
}
//# sourceMappingURL=encryption-utils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/encryption-utils.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\nimport { getServerActionsManifest } from './manifests-singleton'\n\nlet __next_loaded_action_key: CryptoKey\n\nexport function arrayBufferToString(\n buffer: ArrayBuffer | Uint8Array<ArrayBufferLike>\n) {\n const bytes = new Uint8Array(buffer)\n const len = bytes.byteLength\n\n // @anonrig: V8 has a limit of 65535 arguments in a function.\n // For len < 65535, this is faster.\n // https://github.com/vercel/next.js/pull/56377#pullrequestreview-1656181623\n if (len < 65535) {\n return String.fromCharCode.apply(null, bytes as unknown as number[])\n }\n\n let binary = ''\n for (let i = 0; i < len; i++) {\n binary += String.fromCharCode(bytes[i])\n }\n return binary\n}\n\nexport function stringToUint8Array(binary: string) {\n const len = binary.length\n const arr = new Uint8Array(len)\n\n for (let i = 0; i < len; i++) {\n arr[i] = binary.charCodeAt(i)\n }\n\n return arr\n}\n\nexport function encrypt(\n key: CryptoKey,\n iv: Uint8Array<ArrayBuffer>,\n data: Uint8Array<ArrayBuffer>\n) {\n return crypto.subtle.encrypt(\n {\n name: 'AES-GCM',\n iv,\n },\n key,\n data\n )\n}\n\nexport function decrypt(\n key: CryptoKey,\n iv: Uint8Array<ArrayBuffer>,\n data: Uint8Array<ArrayBuffer>\n) {\n return crypto.subtle.decrypt(\n {\n name: 'AES-GCM',\n iv,\n },\n key,\n data\n )\n}\n\nexport async function getActionEncryptionKey() {\n if (__next_loaded_action_key) {\n return __next_loaded_action_key\n }\n\n const serverActionsManifest = getServerActionsManifest()\n\n const rawKey =\n process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY ||\n serverActionsManifest.encryptionKey\n\n if (rawKey === undefined) {\n throw new InvariantError('Missing encryption key for Server Actions')\n }\n\n __next_loaded_action_key = await crypto.subtle.importKey(\n 'raw',\n stringToUint8Array(atob(rawKey)),\n 'AES-GCM',\n true,\n ['encrypt', 
'decrypt']\n )\n\n return __next_loaded_action_key\n}\n"],"names":["InvariantError","getServerActionsManifest","__next_loaded_action_key","arrayBufferToString","buffer","bytes","Uint8Array","len","byteLength","String","fromCharCode","apply","binary","i","stringToUint8Array","length","arr","charCodeAt","encrypt","key","iv","data","crypto","subtle","name","decrypt","getActionEncryptionKey","serverActionsManifest","rawKey","process","env","NEXT_SERVER_ACTIONS_ENCRYPTION_KEY","encryptionKey","undefined","importKey","atob"],"mappings":"AAAA,SAASA,cAAc,QAAQ,mCAAkC;AACjE,SAASC,wBAAwB,QAAQ,wBAAuB;AAEhE,IAAIC;AAEJ,OAAO,SAASC,oBACdC,MAAiD;IAEjD,MAAMC,QAAQ,IAAIC,WAAWF;IAC7B,MAAMG,MAAMF,MAAMG,UAAU;IAE5B,6DAA6D;IAC7D,mCAAmC;IACnC,4EAA4E;IAC5E,IAAID,MAAM,OAAO;QACf,OAAOE,OAAOC,YAAY,CAACC,KAAK,CAAC,MAAMN;IACzC;IAEA,IAAIO,SAAS;IACb,IAAK,IAAIC,IAAI,GAAGA,IAAIN,KAAKM,IAAK;QAC5BD,UAAUH,OAAOC,YAAY,CAACL,KAAK,CAACQ,EAAE;IACxC;IACA,OAAOD;AACT;AAEA,OAAO,SAASE,mBAAmBF,MAAc;IAC/C,MAAML,MAAMK,OAAOG,MAAM;IACzB,MAAMC,MAAM,IAAIV,WAAWC;IAE3B,IAAK,IAAIM,IAAI,GAAGA,IAAIN,KAAKM,IAAK;QAC5BG,GAAG,CAACH,EAAE,GAAGD,OAAOK,UAAU,CAACJ;IAC7B;IAEA,OAAOG;AACT;AAEA,OAAO,SAASE,QACdC,GAAc,EACdC,EAA2B,EAC3BC,IAA6B;IAE7B,OAAOC,OAAOC,MAAM,CAACL,OAAO,CAC1B;QACEM,MAAM;QACNJ;IACF,GACAD,KACAE;AAEJ;AAEA,OAAO,SAASI,QACdN,GAAc,EACdC,EAA2B,EAC3BC,IAA6B;IAE7B,OAAOC,OAAOC,MAAM,CAACE,OAAO,CAC1B;QACED,MAAM;QACNJ;IACF,GACAD,KACAE;AAEJ;AAEA,OAAO,eAAeK;IACpB,IAAIxB,0BAA0B;QAC5B,OAAOA;IACT;IAEA,MAAMyB,wBAAwB1B;IAE9B,MAAM2B,SACJC,QAAQC,GAAG,CAACC,kCAAkC,IAC9CJ,sBAAsBK,aAAa;IAErC,IAAIJ,WAAWK,WAAW;QACxB,MAAM,qBAA+D,CAA/D,IAAIjC,eAAe,8CAAnB,qBAAA;mBAAA;wBAAA;0BAAA;QAA8D;IACtE;IAEAE,2BAA2B,MAAMoB,OAAOC,MAAM,CAACW,SAAS,CACtD,OACApB,mBAAmBqB,KAAKP,UACxB,WACA,MACA;QAAC;QAAW;KAAU;IAGxB,OAAO1B;AACT","ignoreList":[0]}

View File

@@ -0,0 +1,237 @@
/* eslint-disable import/no-extraneous-dependencies */ import 'server-only';
/* eslint-disable import/no-extraneous-dependencies */ import { renderToReadableStream } from 'react-server-dom-webpack/server';
/* eslint-disable import/no-extraneous-dependencies */ import { createFromReadableStream } from 'react-server-dom-webpack/client';
import { streamToString } from '../stream-utils/node-web-streams-helper';
import { arrayBufferToString, decrypt, encrypt, getActionEncryptionKey, stringToUint8Array } from './encryption-utils';
import { getClientReferenceManifest, getServerModuleMap } from './manifests-singleton';
import { getCacheSignal, getPrerenderResumeDataCache, getRenderResumeDataCache, workUnitAsyncStorage } from './work-unit-async-storage.external';
import { createHangingInputAbortSignal } from './dynamic-rendering';
import React from 'react';
// True when running in the Edge runtime; selects which RSC module map is used
// when deserializing bound args below.
const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge';
// Shared codecs for converting between strings and UTF-8 bytes.
const textEncoder = new TextEncoder();
const textDecoder = new TextDecoder();
// Dev-only source map helpers; in production these stay undefined so Flight
// skips stack-frame filtering and source map lookups.
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined;
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').findSourceMapURLDEV : undefined;
/**
 * Decrypts a bound-arg payload produced by `encodeActionBoundArg`. The action
 * id acts as the salt: the plaintext must start with it, or decryption is
 * considered to have failed.
 */ async function decodeActionBoundArg(actionId, arg) {
    const key = await getActionEncryptionKey();
    if (typeof key === 'undefined') {
        throw Object.defineProperty(new Error(`Missing encryption key for Server Action. This is a bug in Next.js`), "__NEXT_ERROR_CODE", {
            value: "E65",
            enumerable: false,
            configurable: true
        });
    }
    // Payload layout: base64(iv (16 bytes) + ciphertext).
    const decoded = atob(arg);
    const iv = stringToUint8Array(decoded.slice(0, 16));
    const ciphertext = stringToUint8Array(decoded.slice(16));
    const plaintext = textDecoder.decode(await decrypt(key, iv, ciphertext));
    // The plaintext was prefixed with the action id before encryption; a
    // missing prefix means a wrong key or a tampered payload.
    if (!plaintext.startsWith(actionId)) {
        throw Object.defineProperty(new Error('Invalid Server Action payload: failed to decrypt.'), "__NEXT_ERROR_CODE", {
            value: "E191",
            enumerable: false,
            configurable: true
        });
    }
    return plaintext.slice(actionId.length);
}
/**
 * Encrypts the serialized bound args, prefixed with the action id so that
 * `decodeActionBoundArg` can later verify decryption (checksum-like).
 * Output layout: base64(iv (16 bytes) + ciphertext).
 */ async function encodeActionBoundArg(actionId, arg) {
    const key = await getActionEncryptionKey();
    if (key === undefined) {
        throw Object.defineProperty(new Error(`Missing encryption key for Server Action. This is a bug in Next.js`), "__NEXT_ERROR_CODE", {
            value: "E65",
            enumerable: false,
            configurable: true
        });
    }
    // Generate a fresh random 16-byte iv. The RNG call runs outside of the
    // work unit async context via workUnitAsyncStorage.exit.
    const iv = new Uint8Array(16);
    workUnitAsyncStorage.exit(()=>crypto.getRandomValues(iv));
    const ciphertext = await encrypt(key, iv, textEncoder.encode(actionId + arg));
    return btoa(arrayBufferToString(iv.buffer) + arrayBufferToString(ciphertext));
}
// Lifecycle of a cacheSignal read: not yet started, in flight, or finished.
// (Compiled TS enum shape: name -> number plus the reverse number -> name map.)
var ReadStatus = /*#__PURE__*/ (function() {
    const values = {};
    values[values["Ready"] = 0] = "Ready";
    values[values["Pending"] = 1] = "Pending";
    values[values["Complete"] = 2] = "Complete";
    return values;
})();
// Encrypts the action's bound args into a string. For the same combination of
// actionId and args the same cached promise is returned. This ensures reference
// equality for returned objects from "use cache" functions when they're invoked
// multiple times within one render pass using the same bound args.
export const encryptActionBoundArgs = React.cache(async function encryptActionBoundArgs(actionId, ...args) {
    const workUnitStore = workUnitAsyncStorage.getStore();
    // cacheSignal (when present) tracks in-flight cache reads via
    // beginRead/endRead so prerendering knows when all caches are filled.
    const cacheSignal = workUnitStore ? getCacheSignal(workUnitStore) : undefined;
    const { clientModules } = getClientReferenceManifest();
    // Create an error before any asynchronous calls, to capture the original
    // call stack in case we need it when the serialization errors.
    const error = new Error();
    Error.captureStackTrace(error, encryptActionBoundArgs);
    let didCatchError = false;
    const hangingInputAbortSignal = workUnitStore ? createHangingInputAbortSignal(workUnitStore) : undefined;
    // readStatus holds a ReadStatus value (0 = Ready, 1 = Pending, 2 = Complete)
    // guarding beginRead/endRead so each fires at most once.
    let readStatus = 0;
    // Transition Ready -> Pending and open the cache read (idempotent).
    function startReadOnce() {
        if (readStatus === 0) {
            readStatus = 1;
            cacheSignal == null ? void 0 : cacheSignal.beginRead();
        }
    }
    // Close the cache read if one was opened, then mark Complete.
    function endReadIfStarted() {
        if (readStatus === 1) {
            cacheSignal == null ? void 0 : cacheSignal.endRead();
        }
        readStatus = 2;
    }
    // streamToString might take longer than a microtask to resolve and then other things
    // waiting on the cache signal might not realize there is another cache to fill so if
    // we are no longer waiting on the bound args serialization via the hangingInputAbortSignal
    // we should eagerly start the cache read to prevent other readers of the cache signal from
    // missing this cache fill. We use a idempotent function to only start reading once because
    // it's also possible that streamToString finishes before the hangingInputAbortSignal aborts.
    if (hangingInputAbortSignal && cacheSignal) {
        hangingInputAbortSignal.addEventListener('abort', startReadOnce, {
            once: true
        });
    }
    const prerenderResumeDataCache = workUnitStore ? getPrerenderResumeDataCache(workUnitStore) : null;
    const renderResumeDataCache = workUnitStore ? getRenderResumeDataCache(workUnitStore) : null;
    // Using Flight to serialize the args into a string.
    const serialized = await streamToString(renderToReadableStream(args, clientModules, {
        filterStackFrame,
        signal: hangingInputAbortSignal,
        debugChannel: // In Cache Components, we want to cache the encrypted result,
        // and we use the unencrypted bound args as a cache key.
        // In order to do that we need to strip debug info, because it
        // contains timing information and thus changes each time we serialize the args.
        // We can do this by piping debug info into a debug channel that throws it away.
        //
        // Note that this can result in dangling debug info references when we decode the bound args,
        // but React ignores those as long as no debug channel is passed on the decode side, so it's fine:
        // https://github.com/facebook/react/blob/bb8a76c6cc77ea2976d690ea09f5a1b3d9b1792a/packages/react-client/src/ReactFlightClient.js#L1711-L1729
        // https://github.com/facebook/react/blob/bb8a76c6cc77ea2976d690ea09f5a1b3d9b1792a/packages/react-client/src/ReactFlightClient.js#L4005-L4025
        process.env.NODE_ENV === 'development' && (prerenderResumeDataCache || renderResumeDataCache) ? {
            writable: new WritableStream()
        } : undefined,
        onError (err) {
            if (hangingInputAbortSignal == null ? void 0 : hangingInputAbortSignal.aborted) {
                return;
            }
            // We're only reporting one error at a time, starting with the first.
            if (didCatchError) {
                return;
            }
            didCatchError = true;
            // Use the original error message together with the previously created
            // stack, because err.stack is a useless Flight Server call stack.
            error.message = err instanceof Error ? err.message : String(err);
        }
    }), // We pass the abort signal to `streamToString` so that no chunks are
    // included that are emitted after the signal was already aborted. This
    // ensures that we can encode hanging promises.
    hangingInputAbortSignal);
    if (didCatchError) {
        if (process.env.NODE_ENV === 'development') {
            // Logging the error is needed for server functions that are passed to the
            // client where the decryption is not done during rendering. Console
            // replaying allows us to still show the error dev overlay in this case.
            console.error(error);
        }
        endReadIfStarted();
        throw error;
    }
    if (!workUnitStore) {
        // We don't need to call cacheSignal.endRead here because we can't have a cacheSignal
        // if we do not have a workUnitStore.
        return encodeActionBoundArg(actionId, serialized);
    }
    startReadOnce();
    // The action id plus the unencrypted serialized args form the cache key for
    // the encrypted result (debug info was stripped above to keep this stable).
    const cacheKey = actionId + serialized;
    // Reuse a previously encrypted result from the resume data caches, if any.
    const cachedEncrypted = (prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.encryptedBoundArgs.get(cacheKey)) ?? (renderResumeDataCache == null ? void 0 : renderResumeDataCache.encryptedBoundArgs.get(cacheKey));
    if (cachedEncrypted) {
        return cachedEncrypted;
    }
    const encrypted = await encodeActionBoundArg(actionId, serialized);
    endReadIfStarted();
    // Store the result so later prerender passes can reuse it.
    prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.encryptedBoundArgs.set(cacheKey, encrypted);
    return encrypted;
});
// Decrypts the action's bound args from the encrypted string.
export async function decryptActionBoundArgs(actionId, encryptedPromise) {
    const encrypted = await encryptedPromise;
    const workUnitStore = workUnitAsyncStorage.getStore();
    let decrypted;
    if (workUnitStore) {
        const cacheSignal = getCacheSignal(workUnitStore);
        const prerenderResumeDataCache = getPrerenderResumeDataCache(workUnitStore);
        const renderResumeDataCache = getRenderResumeDataCache(workUnitStore);
        // Reuse a previously decrypted result, keyed by the encrypted string.
        decrypted = (prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.decryptedBoundArgs.get(encrypted)) ?? (renderResumeDataCache == null ? void 0 : renderResumeDataCache.decryptedBoundArgs.get(encrypted));
        if (!decrypted) {
            // Track the decryption as a cache read so prerendering waits for it.
            cacheSignal == null ? void 0 : cacheSignal.beginRead();
            decrypted = await decodeActionBoundArg(actionId, encrypted);
            cacheSignal == null ? void 0 : cacheSignal.endRead();
            prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.decryptedBoundArgs.set(encrypted, decrypted);
        }
    } else {
        decrypted = await decodeActionBoundArg(actionId, encrypted);
    }
    const { edgeRscModuleMapping, rscModuleMapping } = getClientReferenceManifest();
    // Using Flight to deserialize the args from the string.
    const deserialized = await createFromReadableStream(new ReadableStream({
        start (controller) {
            controller.enqueue(textEncoder.encode(decrypted));
            // How the stream is closed depends on the kind of work unit we're in.
            switch(workUnitStore == null ? void 0 : workUnitStore.type){
                case 'prerender':
                case 'prerender-runtime':
                    // Explicitly don't close the stream here (until prerendering is
                    // complete) so that hanging promises are not rejected.
                    if (workUnitStore.renderSignal.aborted) {
                        controller.close();
                    } else {
                        workUnitStore.renderSignal.addEventListener('abort', ()=>controller.close(), {
                            once: true
                        });
                    }
                    break;
                case 'prerender-client':
                case 'validation-client':
                case 'prerender-ppr':
                case 'prerender-legacy':
                case 'request':
                case 'cache':
                case 'private-cache':
                case 'unstable-cache':
                case 'generate-static-params':
                case undefined:
                    return controller.close();
                default:
                    // Compiled exhaustiveness check; unreachable for known types.
                    workUnitStore;
            }
        }
    }), {
        findSourceMapURL,
        // NOTE: When we serialized the bound args, we may have used a dummy debug channel to strip debug info.
        // In that case, it's important that we also *don't* pass a debug channel here, because that will make
        // the Flight Client ignore the dangling references:
        // https://github.com/facebook/react/blob/bb8a76c6cc77ea2976d690ea09f5a1b3d9b1792a/packages/react-client/src/ReactFlightClient.js#L1711-L1729
        // https://github.com/facebook/react/blob/bb8a76c6cc77ea2976d690ea09f5a1b3d9b1792a/packages/react-client/src/ReactFlightClient.js#L4005-L4025
        debugChannel: undefined,
        serverConsumerManifest: {
            // moduleLoading must be null because we don't want to trigger preloads of ClientReferences
            // to be added to the current execution. Instead, we'll wait for any ClientReference
            // to be emitted which themselves will handle the preloading.
            moduleLoading: null,
            moduleMap: isEdgeRuntime ? edgeRscModuleMapping : rscModuleMapping,
            serverModuleMap: getServerModuleMap()
        }
    });
    return deserialized;
}
//# sourceMappingURL=encryption.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,60 @@
// eslint-disable-next-line import/no-extraneous-dependencies
export { createTemporaryReferenceSet, renderToReadableStream, decodeReply, decodeAction, decodeFormState } from 'react-server-dom-webpack/server';
// eslint-disable-next-line import/no-extraneous-dependencies
export { prerender } from 'react-server-dom-webpack/static';
// TODO: Just re-export `* as ReactServer`
export { captureOwnerStack, createElement, Fragment } from 'react';
export { default as LayoutRouter, LoadingBoundaryProvider } from '../../client/components/layout-router';
export { default as RenderFromTemplateContext } from '../../client/components/render-from-template-context';
export { workAsyncStorage } from '../app-render/work-async-storage.external';
export { workUnitAsyncStorage } from './work-unit-async-storage.external';
export { actionAsyncStorage } from '../app-render/action-async-storage.external';
export { ClientPageRoot } from '../../client/components/client-page';
export { ClientSegmentRoot } from '../../client/components/client-segment';
export { createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage } from '../request/search-params';
export { createServerParamsForServerSegment, createPrerenderParamsForClientSegment } from '../request/params';
export * as serverHooks from '../../client/components/hooks-server-context';
export { HTTPAccessFallbackBoundary } from '../../client/components/http-access-fallback/error-boundary';
export { createMetadataComponents } from '../../lib/metadata/metadata';
export { RootLayoutBoundary } from '../../lib/framework/boundary-components';
export { preloadStyle, preloadFont, preconnect } from './rsc/preloads';
export { Postpone } from './rsc/postpone';
export { taintObjectReference } from './rsc/taint';
export { collectSegmentData, collectPrefetchHints } from './collect-segment-data';
// Lazily loads the instant-validation module. It is only available when Cache
// Components are enabled and we are not running in the Edge runtime.
export const InstantValidation = ()=>{
    const available = process.env.NEXT_RUNTIME !== 'edge' && process.env.__NEXT_CACHE_COMPONENTS;
    if (!available) {
        return undefined;
    }
    return require('./instant-validation/instant-validation');
};
import { workAsyncStorage } from '../app-render/work-async-storage.external';
import { workUnitAsyncStorage } from './work-unit-async-storage.external';
import { patchFetch as _patchFetch } from '../lib/patch-fetch';
// Segment explorer components are no-op stubs in production; in development
// they are replaced below with the real devtools implementations.
let SegmentViewNode = ()=>null;
let SegmentViewStateNode = ()=>null;
if (process.env.NODE_ENV === 'development') {
    const mod = require('../../next-devtools/userspace/app/segment-explorer-node');
    SegmentViewNode = mod.SegmentViewNode;
    SegmentViewStateNode = mod.SegmentViewStateNode;
}
// hot-reloader modules are not bundled so we need to inject `__next__clear_chunk_cache__`
// into globalThis from this file which is bundled.
if (process.env.TURBOPACK) {
    // Turbopack provides the injected chunk-cache clearing hook.
    globalThis.__next__clear_chunk_cache__ = __turbopack_clear_chunk_cache__;
} else {
    // Webpack does not have chunks on the server
    globalThis.__next__clear_chunk_cache__ = null;
}
// patchFetch makes use of APIs such as `React.unstable_postpone` which are only available
// in the experimental channel of React, so export it from here so that it comes from the bundled runtime
export function patchFetch() {
    const storages = {
        workAsyncStorage,
        workUnitAsyncStorage
    };
    return _patchFetch(storages);
}
// Development only
export { SegmentViewNode, SegmentViewStateNode };
//# sourceMappingURL=entry-base.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,15 @@
import { RSC_CONTENT_TYPE_HEADER } from '../../client/components/app-router-headers';
import RenderResult from '../render-result';
/**
 * Flight Response is always set to RSC_CONTENT_TYPE_HEADER to ensure it does not get interpreted as HTML.
 */ export class FlightRenderResult extends RenderResult {
    constructor(response, metadata = {}, waitUntil){
        // Force the RSC content type; everything else is passed through.
        const options = {
            contentType: RSC_CONTENT_TYPE_HEADER,
            metadata,
            waitUntil
        };
        super(response, options);
    }
}
//# sourceMappingURL=flight-render-result.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/flight-render-result.ts"],"sourcesContent":["import { RSC_CONTENT_TYPE_HEADER } from '../../client/components/app-router-headers'\nimport RenderResult, { type RenderResultMetadata } from '../render-result'\n\n/**\n * Flight Response is always set to RSC_CONTENT_TYPE_HEADER to ensure it does not get interpreted as HTML.\n */\nexport class FlightRenderResult extends RenderResult {\n constructor(\n response: string | ReadableStream<Uint8Array>,\n metadata: RenderResultMetadata = {},\n waitUntil?: Promise<unknown>\n ) {\n super(response, {\n contentType: RSC_CONTENT_TYPE_HEADER,\n metadata,\n waitUntil,\n })\n }\n}\n"],"names":["RSC_CONTENT_TYPE_HEADER","RenderResult","FlightRenderResult","constructor","response","metadata","waitUntil","contentType"],"mappings":"AAAA,SAASA,uBAAuB,QAAQ,6CAA4C;AACpF,OAAOC,kBAAiD,mBAAkB;AAE1E;;CAEC,GACD,OAAO,MAAMC,2BAA2BD;IACtCE,YACEC,QAA6C,EAC7CC,WAAiC,CAAC,CAAC,EACnCC,SAA4B,CAC5B;QACA,KAAK,CAACF,UAAU;YACdG,aAAaP;YACbK;YACAC;QACF;IACF;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,19 @@
const isDev = process.env.NODE_ENV === 'development';
const isTurbopack = !!process.env.TURBOPACK;
/**
 * Builds the query string appended to asset URLs: an optional `v` cache-buster
 * timestamp (webpack dev only) plus an optional `dpl` client asset token.
 */ export function getAssetQueryString(ctx, addTimestamp) {
    const params = [];
    // In development we add the request timestamp to allow react to
    // reload assets when a new RSC response is received.
    // Turbopack handles HMR of assets itself and react doesn't need to reload them
    // so this approach is not needed for Turbopack.
    if (isDev && !isTurbopack && addTimestamp) {
        params.push(`v=${ctx.requestTimestamp}`);
    }
    if (ctx.sharedContext.clientAssetToken) {
        params.push(`dpl=${ctx.sharedContext.clientAssetToken}`);
    }
    return params.length === 0 ? '' : `?${params.join('&')}`;
}
//# sourceMappingURL=get-asset-query-string.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/get-asset-query-string.ts"],"sourcesContent":["import type { AppRenderContext } from './app-render'\n\nconst isDev = process.env.NODE_ENV === 'development'\nconst isTurbopack = !!process.env.TURBOPACK\n\nexport function getAssetQueryString(\n ctx: AppRenderContext,\n addTimestamp: boolean\n) {\n let qs = ''\n\n // In development we add the request timestamp to allow react to\n // reload assets when a new RSC response is received.\n // Turbopack handles HMR of assets itself and react doesn't need to reload them\n // so this approach is not needed for Turbopack.\n const shouldAddVersion = isDev && !isTurbopack && addTimestamp\n if (shouldAddVersion) {\n qs += `?v=${ctx.requestTimestamp}`\n }\n\n if (ctx.sharedContext.clientAssetToken) {\n qs += `${shouldAddVersion ? '&' : '?'}dpl=${ctx.sharedContext.clientAssetToken}`\n }\n return qs\n}\n"],"names":["isDev","process","env","NODE_ENV","isTurbopack","TURBOPACK","getAssetQueryString","ctx","addTimestamp","qs","shouldAddVersion","requestTimestamp","sharedContext","clientAssetToken"],"mappings":"AAEA,MAAMA,QAAQC,QAAQC,GAAG,CAACC,QAAQ,KAAK;AACvC,MAAMC,cAAc,CAAC,CAACH,QAAQC,GAAG,CAACG,SAAS;AAE3C,OAAO,SAASC,oBACdC,GAAqB,EACrBC,YAAqB;IAErB,IAAIC,KAAK;IAET,gEAAgE;IAChE,qDAAqD;IACrD,+EAA+E;IAC/E,gDAAgD;IAChD,MAAMC,mBAAmBV,SAAS,CAACI,eAAeI;IAClD,IAAIE,kBAAkB;QACpBD,MAAM,CAAC,GAAG,EAAEF,IAAII,gBAAgB,EAAE;IACpC;IAEA,IAAIJ,IAAIK,aAAa,CAACC,gBAAgB,EAAE;QACtCJ,MAAM,GAAGC,mBAAmB,MAAM,IAAI,IAAI,EAAEH,IAAIK,aAAa,CAACC,gBAAgB,EAAE;IAClF;IACA,OAAOJ;AACT","ignoreList":[0]}

View File

@@ -0,0 +1,41 @@
import { getClientReferenceManifest } from './manifests-singleton';
/**
 * Collects the entry CSS and JS chunks for a layout/page file from the client
 * reference manifest, skipping any that were already injected by a parent
 * segment. When `collectNewImports` is set, newly found chunks are also
 * recorded into the injected sets.
 */ export function getLinkAndScriptTags(filePath, injectedCSS, injectedScripts, collectNewImports) {
    const entryKey = filePath.replace(/\.[^.]+$/, '');
    const { entryCSSFiles, entryJSFiles } = getClientReferenceManifest();
    const cssFiles = entryCSSFiles[entryKey];
    const jsFiles = entryJSFiles == null ? void 0 : entryJSFiles[entryKey];
    const cssChunks = new Set();
    const jsChunks = new Set();
    if (cssFiles) {
        for (const css of cssFiles){
            if (injectedCSS.has(css.path)) continue;
            if (collectNewImports) {
                injectedCSS.add(css.path);
            }
            cssChunks.add(css);
        }
    }
    if (jsFiles) {
        for (const file of jsFiles){
            if (injectedScripts.has(file)) continue;
            if (collectNewImports) {
                injectedScripts.add(file);
            }
            jsChunks.add(file);
        }
    }
    return {
        styles: Array.from(cssChunks),
        scripts: Array.from(jsChunks)
    };
}
//# sourceMappingURL=get-css-inlined-link-tags.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/get-css-inlined-link-tags.tsx"],"sourcesContent":["import type { CssResource } from '../../build/webpack/plugins/flight-manifest-plugin'\nimport { getClientReferenceManifest } from './manifests-singleton'\n\n/**\n * Get external stylesheet link hrefs based on server CSS manifest.\n */\nexport function getLinkAndScriptTags(\n filePath: string,\n injectedCSS: Set<string>,\n injectedScripts: Set<string>,\n collectNewImports?: boolean\n): { styles: CssResource[]; scripts: string[] } {\n const filePathWithoutExt = filePath.replace(/\\.[^.]+$/, '')\n const cssChunks = new Set<CssResource>()\n const jsChunks = new Set<string>()\n const { entryCSSFiles, entryJSFiles } = getClientReferenceManifest()\n const cssFiles = entryCSSFiles[filePathWithoutExt]\n const jsFiles = entryJSFiles?.[filePathWithoutExt]\n\n if (cssFiles) {\n for (const css of cssFiles) {\n if (!injectedCSS.has(css.path)) {\n if (collectNewImports) {\n injectedCSS.add(css.path)\n }\n cssChunks.add(css)\n }\n }\n }\n\n if (jsFiles) {\n for (const file of jsFiles) {\n if (!injectedScripts.has(file)) {\n if (collectNewImports) {\n injectedScripts.add(file)\n }\n jsChunks.add(file)\n }\n }\n }\n\n return { styles: [...cssChunks], scripts: [...jsChunks] 
}\n}\n"],"names":["getClientReferenceManifest","getLinkAndScriptTags","filePath","injectedCSS","injectedScripts","collectNewImports","filePathWithoutExt","replace","cssChunks","Set","jsChunks","entryCSSFiles","entryJSFiles","cssFiles","jsFiles","css","has","path","add","file","styles","scripts"],"mappings":"AACA,SAASA,0BAA0B,QAAQ,wBAAuB;AAElE;;CAEC,GACD,OAAO,SAASC,qBACdC,QAAgB,EAChBC,WAAwB,EACxBC,eAA4B,EAC5BC,iBAA2B;IAE3B,MAAMC,qBAAqBJ,SAASK,OAAO,CAAC,YAAY;IACxD,MAAMC,YAAY,IAAIC;IACtB,MAAMC,WAAW,IAAID;IACrB,MAAM,EAAEE,aAAa,EAAEC,YAAY,EAAE,GAAGZ;IACxC,MAAMa,WAAWF,aAAa,CAACL,mBAAmB;IAClD,MAAMQ,UAAUF,gCAAAA,YAAc,CAACN,mBAAmB;IAElD,IAAIO,UAAU;QACZ,KAAK,MAAME,OAAOF,SAAU;YAC1B,IAAI,CAACV,YAAYa,GAAG,CAACD,IAAIE,IAAI,GAAG;gBAC9B,IAAIZ,mBAAmB;oBACrBF,YAAYe,GAAG,CAACH,IAAIE,IAAI;gBAC1B;gBACAT,UAAUU,GAAG,CAACH;YAChB;QACF;IACF;IAEA,IAAID,SAAS;QACX,KAAK,MAAMK,QAAQL,QAAS;YAC1B,IAAI,CAACV,gBAAgBY,GAAG,CAACG,OAAO;gBAC9B,IAAId,mBAAmB;oBACrBD,gBAAgBc,GAAG,CAACC;gBACtB;gBACAT,SAASQ,GAAG,CAACC;YACf;QACF;IACF;IAEA,OAAO;QAAEC,QAAQ;eAAIZ;SAAU;QAAEa,SAAS;eAAIX;SAAS;IAAC;AAC1D","ignoreList":[0]}

View File

@@ -0,0 +1,55 @@
import { getLinkAndScriptTags } from './get-css-inlined-link-tags';
import { getPreloadableFonts } from './get-preloadable-fonts';
import { getAssetQueryString } from './get-asset-query-string';
import { encodeURIPath } from '../../shared/lib/encode-uri-path';
import { renderCssResource } from './render-css-resource';
/**
 * Collects the style elements, script elements, and preload callbacks (fonts /
 * preconnect) for one layout or page segment, deduplicating against what
 * parent segments already injected.
 */ export function getLayerAssets({ ctx, layoutOrPagePath, injectedCSS: injectedCSSWithCurrentLayout, injectedJS: injectedJSWithCurrentLayout, injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout, preloadCallbacks }) {
    const { componentMod: { createElement } } = ctx;
    const { styles: styleTags, scripts: scriptTags } = layoutOrPagePath ? getLinkAndScriptTags(layoutOrPagePath, injectedCSSWithCurrentLayout, injectedJSWithCurrentLayout, true) : {
        styles: [],
        scripts: []
    };
    const preloadedFontFiles = layoutOrPagePath ? getPreloadableFonts(ctx.renderOpts.nextFontManifest, layoutOrPagePath, injectedFontPreloadTagsWithCurrentLayout) : null;
    if (preloadedFontFiles) {
        if (preloadedFontFiles.length) {
            for(let i = 0; i < preloadedFontFiles.length; i++){
                const fontFilename = preloadedFontFiles[i];
                // NOTE(review): assumes every preloaded font filename matches one of
                // these extensions — `exec` would throw on a non-match (compiled from
                // a TS non-null assertion). Confirm against the font manifest writer.
                const ext = /\.(woff|woff2|eot|ttf|otf)$/.exec(fontFilename)[1];
                const type = `font/${ext}`;
                const href = `${ctx.assetPrefix}/_next/${encodeURIPath(fontFilename)}${getAssetQueryString(ctx, true)}`;
                preloadCallbacks.push(()=>{
                    ctx.componentMod.preloadFont(href, type, ctx.renderOpts.crossOrigin, ctx.nonce);
                });
            }
        } else {
            // No individual fonts to preload: at least warm up the connection to
            // the asset host instead.
            try {
                let url = new URL(ctx.assetPrefix);
                preloadCallbacks.push(()=>{
                    ctx.componentMod.preconnect(url.origin, 'anonymous', ctx.nonce);
                });
            } catch (error) {
                // assetPrefix must not be a fully qualified domain name. We assume
                // we should preconnect to same origin instead
                preloadCallbacks.push(()=>{
                    ctx.componentMod.preconnect('/', 'anonymous', ctx.nonce);
                });
            }
        }
    }
    const styles = renderCssResource(styleTags, ctx, preloadCallbacks);
    // Entry JS chunks become async <script> elements carrying the CSP nonce.
    const scripts = scriptTags ? scriptTags.map((href, index)=>{
        const fullSrc = `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`;
        return createElement('script', {
            src: fullSrc,
            async: true,
            key: `script-${index}`,
            nonce: ctx.nonce
        });
    }) : [];
    return styles.length || scripts.length ? [
        ...styles,
        ...scripts
    ] : null;
}
//# sourceMappingURL=get-layer-assets.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,35 @@
/**
 * Get hrefs for fonts to preload
 * Returns null if there are no fonts at all.
 * Returns string[] if there are fonts to preload (font paths)
 * Returns empty string[] if there are fonts but none to preload and no other fonts have been preloaded
 * Returns null if there are fonts but none to preload and at least some were previously preloaded
 */ export function getPreloadableFonts(nextFontManifest, filePath, injectedFontPreloadTags) {
    if (!nextFontManifest || !filePath) {
        return null;
    }
    const manifestKey = filePath.replace(/\.[^.]+$/, '');
    const newFontFiles = new Set();
    let foundFontUsage = false;
    const fontsForEntry = nextFontManifest.app[manifestKey];
    if (fontsForEntry) {
        foundFontUsage = true;
        for (const fontFile of fontsForEntry){
            // Only collect fonts not yet preloaded by another segment, and mark
            // them as injected so children skip them.
            if (injectedFontPreloadTags.has(fontFile)) continue;
            newFontFiles.add(fontFile);
            injectedFontPreloadTags.add(fontFile);
        }
    }
    if (newFontFiles.size) {
        // Sorted for deterministic output.
        return Array.from(newFontFiles).sort();
    }
    // Fonts exist for this entry but none are new: return [] only when nothing
    // at all has been preloaded yet, otherwise null.
    return foundFontUsage && injectedFontPreloadTags.size === 0 ? [] : null;
}
//# sourceMappingURL=get-preloadable-fonts.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/get-preloadable-fonts.tsx"],"sourcesContent":["import type { NextFontManifest } from '../../build/webpack/plugins/next-font-manifest-plugin'\nimport type { DeepReadonly } from '../../shared/lib/deep-readonly'\n\n/**\n * Get hrefs for fonts to preload\n * Returns null if there are no fonts at all.\n * Returns string[] if there are fonts to preload (font paths)\n * Returns empty string[] if there are fonts but none to preload and no other fonts have been preloaded\n * Returns null if there are fonts but none to preload and at least some were previously preloaded\n */\nexport function getPreloadableFonts(\n nextFontManifest: DeepReadonly<NextFontManifest> | undefined,\n filePath: string | undefined,\n injectedFontPreloadTags: Set<string>\n): string[] | null {\n if (!nextFontManifest || !filePath) {\n return null\n }\n const filepathWithoutExtension = filePath.replace(/\\.[^.]+$/, '')\n const fontFiles = new Set<string>()\n let foundFontUsage = false\n\n const preloadedFontFiles = nextFontManifest.app[filepathWithoutExtension]\n if (preloadedFontFiles) {\n foundFontUsage = true\n for (const fontFile of preloadedFontFiles) {\n if (!injectedFontPreloadTags.has(fontFile)) {\n fontFiles.add(fontFile)\n injectedFontPreloadTags.add(fontFile)\n }\n }\n }\n\n if (fontFiles.size) {\n return [...fontFiles].sort()\n } else if (foundFontUsage && injectedFontPreloadTags.size === 0) {\n return []\n } else {\n return null\n 
}\n}\n"],"names":["getPreloadableFonts","nextFontManifest","filePath","injectedFontPreloadTags","filepathWithoutExtension","replace","fontFiles","Set","foundFontUsage","preloadedFontFiles","app","fontFile","has","add","size","sort"],"mappings":"AAGA;;;;;;CAMC,GACD,OAAO,SAASA,oBACdC,gBAA4D,EAC5DC,QAA4B,EAC5BC,uBAAoC;IAEpC,IAAI,CAACF,oBAAoB,CAACC,UAAU;QAClC,OAAO;IACT;IACA,MAAME,2BAA2BF,SAASG,OAAO,CAAC,YAAY;IAC9D,MAAMC,YAAY,IAAIC;IACtB,IAAIC,iBAAiB;IAErB,MAAMC,qBAAqBR,iBAAiBS,GAAG,CAACN,yBAAyB;IACzE,IAAIK,oBAAoB;QACtBD,iBAAiB;QACjB,KAAK,MAAMG,YAAYF,mBAAoB;YACzC,IAAI,CAACN,wBAAwBS,GAAG,CAACD,WAAW;gBAC1CL,UAAUO,GAAG,CAACF;gBACdR,wBAAwBU,GAAG,CAACF;YAC9B;QACF;IACF;IAEA,IAAIL,UAAUQ,IAAI,EAAE;QAClB,OAAO;eAAIR;SAAU,CAACS,IAAI;IAC5B,OAAO,IAAIP,kBAAkBL,wBAAwBW,IAAI,KAAK,GAAG;QAC/D,OAAO,EAAE;IACX,OAAO;QACL,OAAO;IACT;AACF","ignoreList":[0]}

View File

@@ -0,0 +1,34 @@
import { ESCAPE_REGEX } from '../htmlescape';
/**
 * Extracts the script nonce from a Content-Security-Policy header value,
 * consulting the `script-src` directive first with `default-src` as fallback.
 * Returns undefined when no nonce source is present.
 */ export function getScriptNonceFromHeader(cspHeaderValue) {
    // Directives are split by ';'.
    const directives = cspHeaderValue.split(';').map((directive)=>directive.trim());
    // First try to find the directive for the 'script-src', otherwise try to
    // fallback to the 'default-src'.
    const directive = directives.find((dir)=>dir.startsWith('script-src')) || directives.find((dir)=>dir.startsWith('default-src'));
    // If no directive could be found, then we're done.
    if (!directive) {
        return;
    }
    // Skip the directive name itself, then look for the first source of the
    // form 'nonce-…' with at least one character between prefix and quote.
    const nonceSource = directive.split(' ').slice(1).map((source)=>source.trim()).find((source)=>source.startsWith("'nonce-") && source.length > 8 && source.endsWith("'"));
    // Grab the nonce by trimming the "'nonce-" prefix and the trailing quote.
    const nonce = nonceSource == null ? void 0 : nonceSource.slice(7, -1);
    if (!nonce) {
        return;
    }
    // Don't accept the nonce value if it contains HTML escape characters.
    // Technically, the spec requires a base64'd value, but this is just an
    // extra layer.
    if (ESCAPE_REGEX.test(nonce)) {
        throw Object.defineProperty(new Error('Nonce value from Content-Security-Policy contained HTML escape characters.\nLearn more: https://nextjs.org/docs/messages/nonce-contained-invalid-characters'), "__NEXT_ERROR_CODE", {
            value: "E440",
            enumerable: false,
            configurable: true
        });
    }
    return nonce;
}
//# sourceMappingURL=get-script-nonce-from-header.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/get-script-nonce-from-header.tsx"],"sourcesContent":["import { ESCAPE_REGEX } from '../htmlescape'\n\nexport function getScriptNonceFromHeader(\n cspHeaderValue: string\n): string | undefined {\n const directives = cspHeaderValue\n // Directives are split by ';'.\n .split(';')\n .map((directive) => directive.trim())\n\n // First try to find the directive for the 'script-src', otherwise try to\n // fallback to the 'default-src'.\n const directive =\n directives.find((dir) => dir.startsWith('script-src')) ||\n directives.find((dir) => dir.startsWith('default-src'))\n\n // If no directive could be found, then we're done.\n if (!directive) {\n return\n }\n\n // Extract the nonce from the directive\n const nonce = directive\n .split(' ')\n // Remove the 'strict-src'/'default-src' string, this can't be the nonce.\n .slice(1)\n .map((source) => source.trim())\n // Find the first source with the 'nonce-' prefix.\n .find(\n (source) =>\n source.startsWith(\"'nonce-\") &&\n source.length > 8 &&\n source.endsWith(\"'\")\n )\n // Grab the nonce by trimming the 'nonce-' prefix.\n ?.slice(7, -1)\n\n // If we could't find the nonce, then we're done.\n if (!nonce) {\n return\n }\n\n // Don't accept the nonce value if it contains HTML escape characters.\n // Technically, the spec requires a base64'd value, but this is just an\n // extra layer.\n if (ESCAPE_REGEX.test(nonce)) {\n throw new Error(\n 'Nonce value from Content-Security-Policy contained HTML escape characters.\\nLearn more: https://nextjs.org/docs/messages/nonce-contained-invalid-characters'\n )\n }\n\n return 
nonce\n}\n"],"names":["ESCAPE_REGEX","getScriptNonceFromHeader","cspHeaderValue","directive","directives","split","map","trim","find","dir","startsWith","nonce","slice","source","length","endsWith","test","Error"],"mappings":"AAAA,SAASA,YAAY,QAAQ,gBAAe;AAE5C,OAAO,SAASC,yBACdC,cAAsB;QAmBRC;IAjBd,MAAMC,aAAaF,cACjB,+BAA+B;KAC9BG,KAAK,CAAC,KACNC,GAAG,CAAC,CAACH,YAAcA,UAAUI,IAAI;IAEpC,yEAAyE;IACzE,iCAAiC;IACjC,MAAMJ,YACJC,WAAWI,IAAI,CAAC,CAACC,MAAQA,IAAIC,UAAU,CAAC,kBACxCN,WAAWI,IAAI,CAAC,CAACC,MAAQA,IAAIC,UAAU,CAAC;IAE1C,mDAAmD;IACnD,IAAI,CAACP,WAAW;QACd;IACF;IAEA,uCAAuC;IACvC,MAAMQ,SAAQR,kCAAAA,UACXE,KAAK,CAAC,IACP,yEAAyE;KACxEO,KAAK,CAAC,GACNN,GAAG,CAAC,CAACO,SAAWA,OAAON,IAAI,GAC5B,kDAAkD;KACjDC,IAAI,CACH,CAACK,SACCA,OAAOH,UAAU,CAAC,cAClBG,OAAOC,MAAM,GAAG,KAChBD,OAAOE,QAAQ,CAAC,0BAVRZ,gCAaVS,KAAK,CAAC,GAAG,CAAC;IAEd,iDAAiD;IACjD,IAAI,CAACD,OAAO;QACV;IACF;IAEA,sEAAsE;IACtE,uEAAuE;IACvE,eAAe;IACf,IAAIX,aAAagB,IAAI,CAACL,QAAQ;QAC5B,MAAM,qBAEL,CAFK,IAAIM,MACR,gKADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,OAAON;AACT","ignoreList":[0]}

View File

@@ -0,0 +1,15 @@
// Maps each long dynamic-param type name to its short single-purpose code
// (per the original source, typed as Record<DynamicParamTypes,
// DynamicParamTypesShort>). Presumably used to compactly serialize router
// segment types — TODO confirm against callers.
export const dynamicParamTypes = {
    // [...slug] style catch-all segments ('c'), including the intercepted
    // variants, whose route-interception marker is appended to the code.
    catchall: 'c',
    'catchall-intercepted-(..)(..)': 'ci(..)(..)',
    'catchall-intercepted-(.)': 'ci(.)',
    'catchall-intercepted-(..)': 'ci(..)',
    'catchall-intercepted-(...)': 'ci(...)',
    // [[...slug]] optional catch-all ('oc').
    'optional-catchall': 'oc',
    // [slug] single dynamic segments ('d'), plus intercepted variants.
    dynamic: 'd',
    'dynamic-intercepted-(..)(..)': 'di(..)(..)',
    'dynamic-intercepted-(.)': 'di(.)',
    'dynamic-intercepted-(..)': 'di(..)',
    'dynamic-intercepted-(...)': 'di(...)'
};
//# sourceMappingURL=get-short-dynamic-param-type.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/app-render/get-short-dynamic-param-type.tsx"],"sourcesContent":["import type {\n DynamicParamTypes,\n DynamicParamTypesShort,\n} from '../../shared/lib/app-router-types'\n\nexport const dynamicParamTypes: Record<\n DynamicParamTypes,\n DynamicParamTypesShort\n> = {\n catchall: 'c',\n 'catchall-intercepted-(..)(..)': 'ci(..)(..)',\n 'catchall-intercepted-(.)': 'ci(.)',\n 'catchall-intercepted-(..)': 'ci(..)',\n 'catchall-intercepted-(...)': 'ci(...)',\n 'optional-catchall': 'oc',\n dynamic: 'd',\n 'dynamic-intercepted-(..)(..)': 'di(..)(..)',\n 'dynamic-intercepted-(.)': 'di(.)',\n 'dynamic-intercepted-(..)': 'di(..)',\n 'dynamic-intercepted-(...)': 'di(...)',\n}\n"],"names":["dynamicParamTypes","catchall","dynamic"],"mappings":"AAKA,OAAO,MAAMA,oBAGT;IACFC,UAAU;IACV,iCAAiC;IACjC,4BAA4B;IAC5B,6BAA6B;IAC7B,8BAA8B;IAC9B,qBAAqB;IACrBC,SAAS;IACT,gCAAgC;IAChC,2BAA2B;IAC3B,4BAA4B;IAC5B,6BAA6B;AAC/B,EAAC","ignoreList":[0]}

View File

@@ -0,0 +1,9 @@
/**
 * Returns true when this router-tree node, or any node reachable through
 * its parallel routes, carries a `loading` module.
 *
 * A tree node is a tuple: [segment, parallelRoutes, modules] — only the
 * second and third entries are read here.
 */
export function hasLoadingComponentInTree(tree) {
    const parallelRoutes = tree[1];
    const modules = tree[2];
    // A loading module directly on this node satisfies the check.
    if (modules.loading) {
        return true;
    }
    // Otherwise search every parallel route slot depth-first.
    for (const childTree of Object.values(parallelRoutes)) {
        if (hasLoadingComponentInTree(childTree)) {
            return true;
        }
    }
    return false;
}
//# sourceMappingURL=has-loading-component-in-tree.js.map

Some files were not shown because too many files have changed in this diff Show More