diff --git a/.eslintrc.cjs b/.eslintrc.cjs index dbcc8975f8..602a670ef1 100644 --- a/.eslintrc.cjs +++ b/.eslintrc.cjs @@ -72,6 +72,14 @@ module.exports = { message: 'Please use `getTracer()` from `./handlers/tracer.cjs` instead', }, ], + patterns: [ + { + // only */storage/storage.cjs is allowed to be imported + // rest are implementation details that should not be used directly + group: ['*/storage/*', '!*/storage/storage.cjs'], + message: 'Import public `[...]/storage/storage.cjs` module instead.', + }, + ], }, ], }, diff --git a/package-lock.json b/package-lock.json index 021efb88d7..ec206e9e6d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -37,6 +37,7 @@ "fs-monkey": "^1.0.6", "get-port": "^7.1.0", "lambda-local": "^2.2.0", + "lru-cache": "^10.4.3", "memfs": "^4.9.2", "mock-require": "^3.0.3", "msw": "^2.0.7", @@ -243,6 +244,16 @@ "node": ">=6.9.0" } }, + "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, "node_modules/@babel/helper-compilation-targets/node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -14545,13 +14556,11 @@ } }, "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", "dev": true, - "dependencies": { - "yallist": "^3.0.2" - } + "license": "ISC" }, 
"node_modules/luxon": { "version": "3.4.4", @@ -32126,15 +32135,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", - "dev": true, - "engines": { - "node": "14 || >=16.14" - } - }, "node_modules/path-to-regexp": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", @@ -32649,15 +32649,6 @@ "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/read-package-up/node_modules/lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", - "dev": true, - "engines": { - "node": "14 || >=16.14" - } - }, "node_modules/read-package-up/node_modules/normalize-package-data": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.1.tgz", @@ -36057,7 +36048,8 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/yargs": { "version": "17.7.2", @@ -36350,6 +36342,15 @@ "semver": "^6.3.1" }, "dependencies": { + "lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "requires": { + "yallist": "^3.0.2" + } + }, "semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -46153,13 +46154,10 @@ "dev": true }, 
"lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, - "requires": { - "yallist": "^3.0.2" - } + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true }, "luxon": { "version": "3.4.4", @@ -58505,14 +58503,6 @@ "requires": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "dependencies": { - "lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", - "dev": true - } } }, "path-to-regexp": { @@ -58872,12 +58862,6 @@ "lru-cache": "^10.0.1" } }, - "lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", - "dev": true - }, "normalize-package-data": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.1.tgz", diff --git a/package.json b/package.json index d52cd4cff8..0981a13656 100644 --- a/package.json +++ b/package.json @@ -76,6 +76,7 @@ "fs-monkey": "^1.0.6", "get-port": "^7.1.0", "lambda-local": "^2.2.0", + "lru-cache": "^10.4.3", "memfs": "^4.9.2", "mock-require": "^3.0.3", "msw": "^2.0.7", diff --git a/src/build/content/static.ts b/src/build/content/static.ts index f972e8b8bd..d251a17d97 100644 --- a/src/build/content/static.ts +++ b/src/build/content/static.ts @@ -6,7 +6,7 @@ import { trace } from '@opentelemetry/api' import { wrapTracer } from '@opentelemetry/api/experimental' import glob 
from 'fast-glob' -import type { HtmlBlob } from '../../run/next.cjs' +import type { HtmlBlob } from '../../shared/blob-types.cjs' import { encodeBlobKey } from '../../shared/blobkey.js' import { PluginContext } from '../plugin-context.js' import { verifyNetlifyForms } from '../verification.js' diff --git a/src/run/config.ts b/src/run/config.ts index 9ca3f3dc5b..9791c728a9 100644 --- a/src/run/config.ts +++ b/src/run/config.ts @@ -5,6 +5,7 @@ import { join, resolve } from 'node:path' import type { NextConfigComplete } from 'next/dist/server/config-shared.js' import { PLUGIN_DIR, RUN_CONFIG } from './constants.js' +import { setInMemoryCacheMaxSizeFromNextConfig } from './storage/storage.cjs' /** * Get Next.js config from the build output @@ -13,10 +14,27 @@ export const getRunConfig = async () => { return JSON.parse(await readFile(resolve(PLUGIN_DIR, RUN_CONFIG), 'utf-8')) } +type NextConfigForMultipleVersions = NextConfigComplete & { + experimental: NextConfigComplete['experimental'] & { + // those are pre 14.1.0 options that were moved out of experimental in // https://github.com/vercel/next.js/pull/57953/files#diff-c49c4767e6ed8627e6e1b8f96b141ee13246153f5e9142e1da03450c8e81e96fL311 + + // https://github.com/vercel/next.js/blob/v14.0.4/packages/next/src/server/config-shared.ts#L182-L183 + // custom path to a cache handler to use + incrementalCacheHandlerPath?: string + // https://github.com/vercel/next.js/blob/v14.0.4/packages/next/src/server/config-shared.ts#L207-L212 + /** + * In-memory cache size in bytes. + * + * If `isrMemoryCacheSize: 0` disables in-memory caching. 
+ */ + isrMemoryCacheSize?: number + } +} + /** * Configure the custom cache handler at request time */ -export const setRunConfig = (config: NextConfigComplete) => { +export const setRunConfig = (config: NextConfigForMultipleVersions) => { const cacheHandler = join(PLUGIN_DIR, '.netlify/dist/run/handlers/cache.cjs') if (!existsSync(cacheHandler)) { throw new Error(`Cache handler not found at ${cacheHandler}`) @@ -25,15 +43,17 @@ export const setRunConfig = (config: NextConfigComplete) => { // set the path to the cache handler config.experimental = { ...config.experimental, - // @ts-expect-error incrementalCacheHandlerPath was removed from config type - // but we still need to set it for older Next.js versions + // Before Next.js 14.1.0 path to the cache handler was in experimental section, see NextConfigForMultipleVersions type incrementalCacheHandlerPath: cacheHandler, } - // Next.js 14.1.0 moved the cache handler from experimental to stable - // https://github.com/vercel/next.js/pull/57953/files#diff-c49c4767e6ed8627e6e1b8f96b141ee13246153f5e9142e1da03450c8e81e96fL311 + // Next.js 14.1.0 moved the cache handler from experimental to stable, see NextConfigForMultipleVersions type config.cacheHandler = cacheHandler - config.cacheMaxMemorySize = 0 + + // honor the in-memory cache size from next.config (either one set by user or Next.js default) + setInMemoryCacheMaxSizeFromNextConfig( + config.cacheMaxMemorySize ?? 
config.experimental?.isrMemoryCacheSize, + ) // set config process.env.__NEXT_PRIVATE_STANDALONE_CONFIG = JSON.stringify(config) diff --git a/src/run/handlers/cache.cts b/src/run/handlers/cache.cts index fc40ec4bdf..294700901c 100644 --- a/src/run/handlers/cache.cts +++ b/src/run/handlers/cache.cts @@ -5,13 +5,13 @@ import { Buffer } from 'node:buffer' import { join } from 'node:path' import { join as posixJoin } from 'node:path/posix' -import { Store } from '@netlify/blobs' import { purgeCache } from '@netlify/functions' import { type Span } from '@opentelemetry/api' import type { PrerenderManifest } from 'next/dist/build/index.js' import { NEXT_CACHE_TAGS_HEADER } from 'next/dist/lib/constants.js' import { name as nextRuntimePkgName, version as nextRuntimePkgVersion } from '../../../package.json' +import { type TagManifest } from '../../shared/blob-types.cjs' import { type CacheHandlerContext, type CacheHandlerForMultipleVersions, @@ -22,34 +22,26 @@ import { type NetlifyCacheHandlerValue, type NetlifyIncrementalCacheValue, } from '../../shared/cache-types.cjs' -import { getRegionalBlobStore } from '../regional-blob-store.cjs' +import { + getMemoizedKeyValueStoreBackedByRegionalBlobStore, + MemoizedKeyValueStoreBackedByRegionalBlobStore, +} from '../storage/storage.cjs' import { getLogger, getRequestContext } from './request-context.cjs' -import { getTracer } from './tracer.cjs' - -type TagManifest = { revalidatedAt: number } - -type TagManifestBlobCache = Record> +import { getTracer, recordWarning } from './tracer.cjs' const purgeCacheUserAgent = `${nextRuntimePkgName}@${nextRuntimePkgVersion}` export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { options: CacheHandlerContext revalidatedTags: string[] - blobStore: Store + cacheStore: MemoizedKeyValueStoreBackedByRegionalBlobStore tracer = getTracer() - tagManifestsFetchedFromBlobStoreInCurrentRequest: TagManifestBlobCache constructor(options: CacheHandlerContext) { this.options = options 
this.revalidatedTags = options.revalidatedTags - this.blobStore = getRegionalBlobStore({ consistency: 'strong' }) - this.tagManifestsFetchedFromBlobStoreInCurrentRequest = {} - } - - private async encodeBlobKey(key: string) { - const { encodeBlobKey } = await import('../../shared/blobkey.js') - return await encodeBlobKey(key) + this.cacheStore = getMemoizedKeyValueStoreBackedByRegionalBlobStore({ consistency: 'strong' }) } private getTTL(blob: NetlifyCacheHandlerValue) { @@ -89,13 +81,7 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { if (!requestContext) { // we will not be able to use request context for date header calculation // we will fallback to using blobs - getCacheKeySpan.recordException( - new Error('CacheHandler was called without a request context'), - ) - getCacheKeySpan.setAttributes({ - severity: 'alert', - warning: true, - }) + recordWarning(new Error('CacheHandler was called without a request context'), getCacheKeySpan) return } @@ -104,15 +90,13 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { // so as a safety measure we will not use any of them and let blobs be used // to calculate the date header requestContext.responseCacheGetLastModified = undefined - getCacheKeySpan.recordException( + recordWarning( new Error( `Multiple response cache keys used in single request: ["${requestContext.responseCacheKey}, "${key}"]`, ), + getCacheKeySpan, ) - getCacheKeySpan.setAttributes({ - severity: 'alert', - warning: true, - }) + return } @@ -245,19 +229,13 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { const [key, ctx = {}] = args getLogger().debug(`[NetlifyCacheHandler.get]: ${key}`) - const blobKey = await this.encodeBlobKey(key) - span.setAttributes({ key, blobKey }) + span.setAttributes({ key }) - const blob = (await this.tracer.withActiveSpan('blobStore.get', async (blobGetSpan) => { - blobGetSpan.setAttributes({ key, blobKey }) - return await 
this.blobStore.get(blobKey, { - type: 'json', - }) - })) as NetlifyCacheHandlerValue | null + const blob = await this.cacheStore.get(key, 'blobStore.get') // if blob is null then we don't have a cache entry if (!blob) { - span.addEvent('Cache miss', { key, blobKey }) + span.addEvent('Cache miss', { key }) return null } @@ -268,7 +246,6 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { // but opt to discard STALE data, so that Next.js generate fresh response span.addEvent('Discarding stale entry due to SWR background revalidation request', { key, - blobKey, ttl, }) getLogger() @@ -285,7 +262,7 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { const staleByTags = await this.checkCacheEntryStaleByTags(blob, ctx.tags, ctx.softTags) if (staleByTags) { - span.addEvent('Stale', { staleByTags, key, blobKey, ttl }) + span.addEvent('Stale', { staleByTags, key, ttl }) return null } @@ -403,9 +380,8 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { async set(...args: Parameters) { return this.tracer.withActiveSpan('set cache key', async (span) => { const [key, data, context] = args - const blobKey = await this.encodeBlobKey(key) const lastModified = Date.now() - span.setAttributes({ key, lastModified, blobKey }) + span.setAttributes({ key, lastModified }) getLogger().debug(`[NetlifyCacheHandler.set]: ${key}`) @@ -415,10 +391,7 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { // and we didn't yet capture cache tags, we try to get cache tags from freshly produced cache value this.captureCacheTags(value, key) - await this.blobStore.setJSON(blobKey, { - lastModified, - value, - }) + await this.cacheStore.set(key, { lastModified, value }, 'blobStore.set') if (data?.kind === 'PAGE' || data?.kind === 'PAGES') { const requestContext = getRequestContext() @@ -476,7 +449,7 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { await 
Promise.all( tags.map(async (tag) => { try { - await this.blobStore.setJSON(await this.encodeBlobKey(tag), data) + await this.cacheStore.set(tag, data, 'tagManifest.set') } catch (error) { getLogger().withError(error).log(`Failed to update tag manifest for ${tag}`) } @@ -492,7 +465,8 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { } resetRequestCache() { - this.tagManifestsFetchedFromBlobStoreInCurrentRequest = {} + // no-op because in-memory cache is scoped to requests and not global + // see getRequestScopedInMemoryCache } /** @@ -531,10 +505,9 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { } // 2. If any in-memory tags don't indicate that any of tags was invalidated - // we will check blob store, but memoize results for duration of current request - // so that we only check blob store once per tag within a single request - // full-route cache and fetch caches share a lot of tags so this might save - // some roundtrips to the blob store. + // we will check blob store. Full-route cache and fetch caches share a lot of tags + // but we will only do actual blob read once within a single request due to cacheStore + // memoization. // Additionally, we will resolve the promise as soon as we find first // stale tag, so that we don't wait for all of them to resolve (but keep all // running in case future `CacheHandler.get` calls would be able to use results). 
@@ -544,23 +517,17 @@ export class NetlifyCacheHandler implements CacheHandlerForMultipleVersions { const tagManifestPromises: Promise[] = [] for (const tag of cacheTags) { - let tagManifestPromise: Promise = - this.tagManifestsFetchedFromBlobStoreInCurrentRequest[tag] - - if (!tagManifestPromise) { - tagManifestPromise = this.encodeBlobKey(tag).then((blobKey) => { - return this.tracer.withActiveSpan(`get tag manifest`, async (span) => { - span.setAttributes({ tag, blobKey }) - return this.blobStore.get(blobKey, { type: 'json' }) - }) - }) - - this.tagManifestsFetchedFromBlobStoreInCurrentRequest[tag] = tagManifestPromise - } + const tagManifestPromise: Promise = this.cacheStore.get( + tag, + 'tagManifest.get', + ) tagManifestPromises.push( tagManifestPromise.then((tagManifest) => { - const isStale = tagManifest?.revalidatedAt >= (cacheEntry.lastModified || Date.now()) + if (!tagManifest) { + return false + } + const isStale = tagManifest.revalidatedAt >= (cacheEntry.lastModified || Date.now()) if (isStale) { resolve(true) return true diff --git a/src/run/handlers/request-context.cts b/src/run/handlers/request-context.cts index 36e27e35a9..1e46dab4c5 100644 --- a/src/run/handlers/request-context.cts +++ b/src/run/handlers/request-context.cts @@ -39,9 +39,22 @@ export type RequestContext = { */ backgroundWorkPromise: Promise logger: SystemLogger + requestID: string } type RequestContextAsyncLocalStorage = AsyncLocalStorage +const REQUEST_CONTEXT_GLOBAL_KEY = Symbol.for('nf-request-context-async-local-storage') +const REQUEST_COUNTER_KEY = Symbol.for('nf-request-counter') +const extendedGlobalThis = globalThis as typeof globalThis & { + [REQUEST_CONTEXT_GLOBAL_KEY]?: RequestContextAsyncLocalStorage + [REQUEST_COUNTER_KEY]?: number +} + +function getFallbackRequestID() { + const requestNumber = extendedGlobalThis[REQUEST_COUNTER_KEY] ?? 
0 + extendedGlobalThis[REQUEST_COUNTER_KEY] = requestNumber + 1 + return `#${requestNumber}` +} export function createRequestContext(request?: Request, context?: FutureContext): RequestContext { const backgroundWorkPromises: Promise[] = [] @@ -72,10 +85,10 @@ export function createRequestContext(request?: Request, context?: FutureContext) return Promise.allSettled(backgroundWorkPromises) }, logger, + requestID: request?.headers.get('x-nf-request-id') ?? getFallbackRequestID(), } } -const REQUEST_CONTEXT_GLOBAL_KEY = Symbol.for('nf-request-context-async-local-storage') let requestContextAsyncLocalStorage: RequestContextAsyncLocalStorage | undefined function getRequestContextAsyncLocalStorage() { if (requestContextAsyncLocalStorage) { @@ -85,9 +98,6 @@ function getRequestContextAsyncLocalStorage() { // AsyncLocalStorage in the module scope, because it will be different for each // copy - so first time an instance of this module is used, we store AsyncLocalStorage // in global scope and reuse it for all subsequent calls - const extendedGlobalThis = globalThis as typeof globalThis & { - [REQUEST_CONTEXT_GLOBAL_KEY]?: RequestContextAsyncLocalStorage - } if (extendedGlobalThis[REQUEST_CONTEXT_GLOBAL_KEY]) { return extendedGlobalThis[REQUEST_CONTEXT_GLOBAL_KEY] } diff --git a/src/run/handlers/server.ts b/src/run/handlers/server.ts index b5a9d0f800..72666dd5b3 100644 --- a/src/run/handlers/server.ts +++ b/src/run/handlers/server.ts @@ -13,8 +13,8 @@ import { setCacheTagsHeaders, setVaryHeaders, } from '../headers.js' -import { setFetchBeforeNextPatchedIt } from '../regional-blob-store.cjs' import { nextResponseProxy } from '../revalidate.js' +import { setFetchBeforeNextPatchedIt } from '../storage/storage.cjs' import { getLogger, type RequestContext } from './request-context.cjs' import { getTracer } from './tracer.cjs' @@ -127,7 +127,6 @@ export default async ( headers: response.headers, request, span, - tracer, requestContext, }) } diff --git 
a/src/run/handlers/tracer.cts b/src/run/handlers/tracer.cts index b27ec88b22..85562e7376 100644 --- a/src/run/handlers/tracer.cts +++ b/src/run/handlers/tracer.cts @@ -89,3 +89,16 @@ export function getTracer(): RuntimeTracer { return tracer } + +export function recordWarning(warning: Error, span?: Span) { + const spanToRecordWarningOn = span ?? trace.getActiveSpan() + if (!spanToRecordWarningOn) { + return + } + + spanToRecordWarningOn.recordException(warning) + spanToRecordWarningOn.setAttributes({ + severity: 'alert', + warning: true, + }) +} diff --git a/src/run/headers.ts b/src/run/headers.ts index 50d82d4c16..bfc386506c 100644 --- a/src/run/headers.ts +++ b/src/run/headers.ts @@ -1,12 +1,11 @@ import type { Span } from '@opentelemetry/api' import type { NextConfigComplete } from 'next/dist/server/config-shared.js' -import { encodeBlobKey } from '../shared/blobkey.js' -import type { NetlifyCachedRouteValue } from '../shared/cache-types.cjs' +import type { NetlifyCachedRouteValue, NetlifyCacheHandlerValue } from '../shared/cache-types.cjs' import { getLogger, RequestContext } from './handlers/request-context.cjs' -import type { RuntimeTracer } from './handlers/tracer.cjs' -import { getRegionalBlobStore } from './regional-blob-store.cjs' +import { recordWarning } from './handlers/tracer.cjs' +import { getMemoizedKeyValueStoreBackedByRegionalBlobStore } from './storage/storage.cjs' const ALL_VARIATIONS = Symbol.for('ALL_VARIATIONS') interface NetlifyVaryValues { @@ -129,13 +128,11 @@ export const adjustDateHeader = async ({ headers, request, span, - tracer, requestContext, }: { headers: Headers request: Request span: Span - tracer: RuntimeTracer requestContext: RequestContext }) => { const key = new URL(request.url).pathname @@ -149,45 +146,29 @@ export const adjustDateHeader = async ({ // request context would contain lastModified value // this is not fatal as we have fallback, // but we want to know about it happening - span.recordException( + recordWarning( 
new Error('lastModified not found in requestContext, falling back to trying blobs'), + span, ) - span.setAttributes({ - severity: 'alert', - warning: true, - }) - const blobStore = getRegionalBlobStore({ consistency: 'strong' }) - const blobKey = await encodeBlobKey(key) - - // TODO: use metadata for this - lastModified = await tracer.withActiveSpan( + const cacheStore = getMemoizedKeyValueStoreBackedByRegionalBlobStore({ consistency: 'strong' }) + const cacheEntry = await cacheStore.get( + key, 'get cache to calculate date header', - async (getBlobForDateSpan) => { - getBlobForDateSpan.setAttributes({ - key, - blobKey, - }) - const blob = (await blobStore.get(blobKey, { type: 'json' })) ?? {} - - getBlobForDateSpan.addEvent(blob ? 'Cache hit' : 'Cache miss') - return blob.lastModified - }, ) + lastModified = cacheEntry?.lastModified } if (!lastModified) { // this should never happen as we only execute this code path for cached responses // and those should always have lastModified value - span.recordException( + recordWarning( new Error( 'lastModified not found in either requestContext or blobs, date header for cached response is not set', ), + span, ) - span.setAttributes({ - severity: 'alert', - warning: true, - }) + return } diff --git a/src/run/next.cts b/src/run/next.cts index 17859c4835..fc6b3fcbef 100644 --- a/src/run/next.cts +++ b/src/run/next.cts @@ -4,9 +4,11 @@ import { relative, resolve } from 'path' // @ts-expect-error no types installed import { patchFs } from 'fs-monkey' +import { HtmlBlob } from '../shared/blob-types.cjs' + import { getRequestContext } from './handlers/request-context.cjs' import { getTracer } from './handlers/tracer.cjs' -import { getRegionalBlobStore } from './regional-blob-store.cjs' +import { getMemoizedKeyValueStoreBackedByRegionalBlobStore } from './storage/storage.cjs' // https://github.com/vercel/next.js/pull/68193/files#diff-37243d614f1f5d3f7ea50bbf2af263f6b1a9a4f70e84427977781e07b02f57f1R49 // This import resulted in 
importing unbundled React which depending if NODE_ENV is `production` or not would use @@ -76,18 +78,11 @@ ResponseCache.prototype.get = function get(...getArgs: unknown[]) { type FS = typeof import('fs') -export type HtmlBlob = { - html: string - isFallback: boolean -} - export async function getMockedRequestHandler(...args: Parameters) { const tracer = getTracer() return tracer.withActiveSpan('mocked request handler', async () => { const ofs = { ...fs } - const { encodeBlobKey } = await import('../shared/blobkey.js') - async function readFileFallbackBlobStore(...fsargs: Parameters) { const [path, options] = fsargs try { @@ -97,11 +92,9 @@ export async function getMockedRequestHandler(...args: Parameters(relPath, 'staticHtml.get') if (file !== null) { if (!file.isFallback) { const requestContext = getRequestContext() diff --git a/src/run/regional-blob-store.cts b/src/run/storage/regional-blob-store.cts similarity index 100% rename from src/run/regional-blob-store.cts rename to src/run/storage/regional-blob-store.cts diff --git a/src/run/storage/request-scoped-in-memory-cache.cts b/src/run/storage/request-scoped-in-memory-cache.cts new file mode 100644 index 0000000000..8a046c3fe1 --- /dev/null +++ b/src/run/storage/request-scoped-in-memory-cache.cts @@ -0,0 +1,109 @@ +import { isPromise } from 'node:util/types' + +import { LRUCache } from 'lru-cache' + +import { type BlobType, isHtmlBlob, isTagManifest } from '../../shared/blob-types.cjs' +import { getRequestContext } from '../handlers/request-context.cjs' +import { recordWarning } from '../handlers/tracer.cjs' + +// lru-cache types don't like using `null` for values, so we use a symbol to represent it and do conversion +// so it doesn't leak outside +const NullValue = Symbol.for('null-value') +type BlobLRUCache = LRUCache> + +const IN_MEMORY_CACHE_MAX_SIZE = Symbol.for('nf-in-memory-cache-max-size') +const IN_MEMORY_LRU_CACHE = Symbol.for('nf-in-memory-lru-cache') +const extendedGlobalThis = globalThis as typeof 
globalThis & { + [IN_MEMORY_CACHE_MAX_SIZE]?: number + [IN_MEMORY_LRU_CACHE]?: BlobLRUCache | null +} + +const DEFAULT_FALLBACK_MAX_SIZE = 50 * 1024 * 1024 // 50MB, same as default Next.js config +export function setInMemoryCacheMaxSizeFromNextConfig(size: unknown) { + if (typeof size === 'number') { + extendedGlobalThis[IN_MEMORY_CACHE_MAX_SIZE] = size + } +} + +const estimateBlobSize = (valueToStore: BlobType | null | Promise): number => { + // very approximate size calculation to avoid expensive exact size calculation + // inspired by https://github.com/vercel/next.js/blob/ed10f7ed0246fcc763194197eb9beebcbd063162/packages/next/src/server/lib/incremental-cache/file-system-cache.ts#L60-L79 + if (valueToStore === null || isPromise(valueToStore) || isTagManifest(valueToStore)) { + return 25 + } + if (isHtmlBlob(valueToStore)) { + return valueToStore.html.length + } + let knownKindFailed = false + try { + if (valueToStore.value?.kind === 'FETCH') { + return valueToStore.value.data.body.length + } + if (valueToStore.value?.kind === 'APP_PAGE') { + return valueToStore.value.html.length + (valueToStore.value.rscData?.length ?? 0) + } + if (valueToStore.value?.kind === 'PAGE' || valueToStore.value?.kind === 'PAGES') { + return valueToStore.value.html.length + JSON.stringify(valueToStore.value.pageData).length + } + if (valueToStore.value?.kind === 'ROUTE' || valueToStore.value?.kind === 'APP_ROUTE') { + return valueToStore.value.body.length + } + } catch { + // size calculation rely on the shape of the value, so if it's not what we expect, we fallback to JSON.stringify + knownKindFailed = true + } + + // fallback for not known kinds or known kinds that did fail to calculate size + // we should also monitor cases when fallback is used because it's not the most efficient way to calculate/estimate size + // and might indicate need to make adjustments or additions to the size calculation + recordWarning( + new Error( + `Blob size calculation did fallback to JSON.stringify. 
Kind: KnownKindFailed: ${knownKindFailed}, ${valueToStore.value?.kind ?? 'undefined'}`, + ), + ) + + return JSON.stringify(valueToStore).length +} + +function getInMemoryLRUCache() { + if (typeof extendedGlobalThis[IN_MEMORY_LRU_CACHE] === 'undefined') { + const maxSize = + typeof extendedGlobalThis[IN_MEMORY_CACHE_MAX_SIZE] === 'number' + ? extendedGlobalThis[IN_MEMORY_CACHE_MAX_SIZE] + : DEFAULT_FALLBACK_MAX_SIZE + + extendedGlobalThis[IN_MEMORY_LRU_CACHE] = + maxSize === 0 + ? null // if user sets 0 in their config, we should honor that and not use in-memory cache + : new LRUCache>({ + max: 1000, + maxSize, + sizeCalculation: (valueToStore) => { + return estimateBlobSize(valueToStore === NullValue ? null : valueToStore) + }, + }) + } + return extendedGlobalThis[IN_MEMORY_LRU_CACHE] +} + +interface RequestScopedInMemoryCache { + get(key: string): BlobType | null | Promise | undefined + set(key: string, value: BlobType | null | Promise): void +} + +export const getRequestScopedInMemoryCache = (): RequestScopedInMemoryCache => { + const requestContext = getRequestContext() + const inMemoryLRUCache = getInMemoryLRUCache() + + return { + get(key) { + if (!requestContext) return + const value = inMemoryLRUCache?.get(`${requestContext.requestID}:${key}`) + return value === NullValue ? null : value + }, + set(key, value) { + if (!requestContext) return + inMemoryLRUCache?.set(`${requestContext?.requestID}:${key}`, value ?? NullValue) + }, + } +} diff --git a/src/run/storage/storage.cts b/src/run/storage/storage.cts new file mode 100644 index 0000000000..273c8822de --- /dev/null +++ b/src/run/storage/storage.cts @@ -0,0 +1,68 @@ +// This is storage module that rest of modules should interact with. +// Remaining modules in storage directory are implementation details +// and should not be used directly outside of this directory. +// There is eslint `no-restricted-imports` rule to enforce this. 
+ +import { type BlobType } from '../../shared/blob-types.cjs' +import { getTracer } from '../handlers/tracer.cjs' + +import { getRegionalBlobStore } from './regional-blob-store.cjs' +import { getRequestScopedInMemoryCache } from './request-scoped-in-memory-cache.cjs' + +const encodeBlobKey = async (key: string) => { + const { encodeBlobKey: encodeBlobKeyImpl } = await import('../../shared/blobkey.js') + return await encodeBlobKeyImpl(key) +} + +export const getMemoizedKeyValueStoreBackedByRegionalBlobStore = ( + ...args: Parameters +) => { + const store = getRegionalBlobStore(...args) + const tracer = getTracer() + + return { + async get(key: string, otelSpanTitle: string): Promise { + const inMemoryCache = getRequestScopedInMemoryCache() + + const memoizedValue = inMemoryCache.get(key) + if (typeof memoizedValue !== 'undefined') { + return memoizedValue as T | null | Promise + } + + const blobKey = await encodeBlobKey(key) + const getPromise = tracer.withActiveSpan(otelSpanTitle, async (span) => { + span.setAttributes({ key, blobKey }) + const blob = (await store.get(blobKey, { type: 'json' })) as T | null + inMemoryCache.set(key, blob) + span.addEvent(blob ? 'Hit' : 'Miss') + return blob + }) + inMemoryCache.set(key, getPromise) + return getPromise + }, + async set(key: string, value: BlobType, otelSpanTitle: string) { + const inMemoryCache = getRequestScopedInMemoryCache() + + inMemoryCache.set(key, value) + + const blobKey = await encodeBlobKey(key) + return tracer.withActiveSpan(otelSpanTitle, async (span) => { + span.setAttributes({ key, blobKey }) + return await store.setJSON(blobKey, value) + }) + }, + } +} + +/** + * Wrapper around Blobs Store that memoizes the cache entries within context of a request + * to avoid duplicate requests to the same key and also allowing to read its own writes from + * memory. 
+ */ +export type MemoizedKeyValueStoreBackedByRegionalBlobStore = ReturnType< + typeof getMemoizedKeyValueStoreBackedByRegionalBlobStore +> + +// make select methods public +export { setInMemoryCacheMaxSizeFromNextConfig } from './request-scoped-in-memory-cache.cjs' +export { setFetchBeforeNextPatchedIt } from './regional-blob-store.cjs' diff --git a/src/run/storage/storage.test.ts b/src/run/storage/storage.test.ts new file mode 100644 index 0000000000..e36e8e3f38 --- /dev/null +++ b/src/run/storage/storage.test.ts @@ -0,0 +1,205 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +import { decodeBlobKey } from '../../../tests/utils/helpers.ts' +import { BlobType } from '../../shared/cache-types.cts' +import { createRequestContext, runWithRequestContext } from '../handlers/request-context.cts' + +import { getMemoizedKeyValueStoreBackedByRegionalBlobStore } from './storage.cts' + +let mockBlobValues: Record = {} +const mockedStore = { + get: vi.fn((blobKey) => { + const key = decodeBlobKey(blobKey) + return Promise.resolve(mockBlobValues[key]) + }), + setJSON: vi.fn(async (blobKey, value) => { + const key = decodeBlobKey(blobKey) + mockBlobValues[key] = value + }), +} + +vi.mock('@netlify/blobs', () => { + return { + getDeployStore: vi.fn(() => mockedStore), + } +}) + +const OTEL_SPAN_TITLE = 'test' +const TEST_KEY = 'foo' +const TEST_DEFAULT_VALUE = { + revalidatedAt: 123, +} satisfies BlobType + +function generate30MBBlobTypeValue(id: string): BlobType { + return { + lastModified: Date.now(), + value: { + kind: 'ROUTE', + status: 200, + headers: {}, + body: `${id}:${'a'.repeat(30 * 1024 * 1024 - id.length - 1)}`, + }, + } +} + +beforeEach(() => { + mockBlobValues = { + [TEST_KEY]: TEST_DEFAULT_VALUE, + } +}) +describe('getMemoizedKeyValueStoreBackedByRegionalBlobStore', () => { + it('is not using in-memory lookups if not running in request context', async () => { + const store = getMemoizedKeyValueStoreBackedByRegionalBlobStore() + const get1 = 
await store.get(TEST_KEY, OTEL_SPAN_TITLE) + + expect(mockedStore.get, 'Blobs should be requested').toHaveBeenCalledTimes(1) + expect(get1, 'Expected blob should be returned').toBe(TEST_DEFAULT_VALUE) + + const get2 = await store.get(TEST_KEY, OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Blobs should be requested twice').toHaveBeenCalledTimes(2) + expect(get2, 'Expected second .get to return the same as first one').toBe(get1) + }) + + it('is using in-memory cache when running in request context', async () => { + await runWithRequestContext(createRequestContext(), async () => { + const store = getMemoizedKeyValueStoreBackedByRegionalBlobStore() + + const get1 = await store.get(TEST_KEY, OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Blobs should be requested').toHaveBeenCalledTimes(1) + expect(get1, 'Expected blob should be returned').toBe(TEST_DEFAULT_VALUE) + + const get2 = await store.get(TEST_KEY, OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Blobs should be requested just once').toHaveBeenCalledTimes(1) + expect(get2, 'Expected second .get to return the same as first one').toBe(get1) + }) + }) + + it('can read their own writes without checking blobs', async () => { + await runWithRequestContext(createRequestContext(), async () => { + const store = getMemoizedKeyValueStoreBackedByRegionalBlobStore() + + const writeValue = { + revalidatedAt: 456, + } satisfies BlobType + + await store.set(TEST_KEY, writeValue, OTEL_SPAN_TITLE) + + expect(mockedStore.setJSON, 'Blobs should be posted').toHaveBeenCalledTimes(1) + + const get = await store.get(TEST_KEY, OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) + expect(get, 'Value from memory should be correct').toBe(writeValue) + }) + }) + + it('is using separate in-memory caches when running in request contexts', async () => { + const store = getMemoizedKeyValueStoreBackedByRegionalBlobStore() + await runWithRequestContext(createRequestContext(), async () => { + await 
store.get(TEST_KEY, OTEL_SPAN_TITLE) + }) + + await runWithRequestContext(createRequestContext(), async () => { + await store.get(TEST_KEY, OTEL_SPAN_TITLE) + }) + + expect( + mockedStore.get, + 'Blobs should be requested separately for each request context', + ).toHaveBeenCalledTimes(2) + }) + + it('writing in one request context should not affect in-memory value in another request context', async () => { + const store = getMemoizedKeyValueStoreBackedByRegionalBlobStore() + + const requestContext1 = createRequestContext() + const requestContext2 = createRequestContext() + + const writeValue = { + revalidatedAt: 456, + } satisfies BlobType + + await runWithRequestContext(requestContext1, async () => { + const get = await store.get(TEST_KEY, OTEL_SPAN_TITLE) + expect(get, 'Value from memory should be the same as before').toBe(TEST_DEFAULT_VALUE) + expect(mockedStore.get, 'Blobs should be requested').toHaveBeenCalledTimes(1) + }) + + await runWithRequestContext(requestContext2, async () => { + mockedStore.get.mockClear() + await store.set(TEST_KEY, writeValue, OTEL_SPAN_TITLE) + const get = await store.get(TEST_KEY, OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) + expect(get, 'Value from memory should be correct').toBe(writeValue) + }) + + await runWithRequestContext(requestContext1, async () => { + mockedStore.get.mockClear() + const get = await store.get(TEST_KEY, OTEL_SPAN_TITLE) + expect( + get, + 'Value from memory should be the same as before and not affected by other request context', + ).toBe(TEST_DEFAULT_VALUE) + expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) + }) + }) + + it('in-memory caches share memory limit (~50MB)', async () => { + const store = getMemoizedKeyValueStoreBackedByRegionalBlobStore() + + const requestContext1 = createRequestContext() + const requestContext2 = createRequestContext() + + mockBlobValues = { + // very heavy values that in-memory 
caches can only hold one value at a time + 'heavy-route-1': generate30MBBlobTypeValue('1'), + 'heavy-route-2': generate30MBBlobTypeValue('2'), + } + + await runWithRequestContext(requestContext1, async () => { + await store.get('heavy-route-1', OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Value should be read from blobs').toHaveBeenCalledTimes(1) + mockedStore.get.mockClear() + + await store.get('heavy-route-1', OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) + mockedStore.get.mockClear() + + await store.get('heavy-route-2', OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Value should be read from blobs').toHaveBeenCalledTimes(1) + mockedStore.get.mockClear() + + // at this point we should exceed the memory limit and least recently used value should be evicted + await store.get('heavy-route-1', OTEL_SPAN_TITLE) + expect( + mockedStore.get, + 'Previously stored in-memory value should be evicted and fresh value should be read from blobs', + ).toHaveBeenCalledTimes(1) + mockedStore.get.mockClear() + + await store.get('heavy-route-1', OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Value should be read from memory again').toHaveBeenCalledTimes(0) + mockedStore.get.mockClear() + }) + + await runWithRequestContext(requestContext2, async () => { + await store.get('heavy-route-1', OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Value should be read from blobs').toHaveBeenCalledTimes(1) + mockedStore.get.mockClear() + + await store.get('heavy-route-1', OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) + mockedStore.get.mockClear() + }) + + await runWithRequestContext(requestContext1, async () => { + await store.get('heavy-route-1', OTEL_SPAN_TITLE) + // operations in requestContext2 should result in evicting value for requestContext1 + expect(mockedStore.get, 'Value should be read from blobs').toHaveBeenCalledTimes(1) + mockedStore.get.mockClear() + + await 
store.get('heavy-route-1', OTEL_SPAN_TITLE) + expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) + mockedStore.get.mockClear() + }) + }) +}) diff --git a/src/shared/blob-types.cts b/src/shared/blob-types.cts new file mode 100644 index 0000000000..976ffdb79c --- /dev/null +++ b/src/shared/blob-types.cts @@ -0,0 +1,32 @@ +import { type NetlifyCacheHandlerValue } from './cache-types.cjs' + +export type TagManifest = { revalidatedAt: number } + +export type HtmlBlob = { + html: string + isFallback: boolean +} + +export type BlobType = NetlifyCacheHandlerValue | TagManifest | HtmlBlob + +export const isTagManifest = (value: BlobType): value is TagManifest => { + return ( + typeof value === 'object' && + value !== null && + 'revalidatedAt' in value && + typeof value.revalidatedAt === 'number' && + Object.keys(value).length === 1 + ) +} + +export const isHtmlBlob = (value: BlobType): value is HtmlBlob => { + return ( + typeof value === 'object' && + value !== null && + 'html' in value && + 'isFallback' in value && + typeof value.html === 'string' && + typeof value.isFallback === 'boolean' && + Object.keys(value).length === 2 + ) +} diff --git a/src/shared/blob-types.test.ts b/src/shared/blob-types.test.ts new file mode 100644 index 0000000000..e41a992045 --- /dev/null +++ b/src/shared/blob-types.test.ts @@ -0,0 +1,27 @@ +import { describe, expect, it } from 'vitest' + +import { BlobType, HtmlBlob, isHtmlBlob, isTagManifest, TagManifest } from './blob-types.cjs' + +describe('isTagManifest', () => { + it(`returns true for TagManifest instance`, () => { + const value: TagManifest = { revalidatedAt: 0 } + expect(isTagManifest(value)).toBe(true) + }) + + it(`returns false for non-TagManifest instance`, () => { + const value: BlobType = { html: '', isFallback: false } + expect(isTagManifest(value)).toBe(false) + }) +}) + +describe('isHtmlBlob', () => { + it(`returns true for HtmlBlob instance`, () => { + const value: HtmlBlob = { html: '', 
isFallback: false } + expect(isHtmlBlob(value)).toBe(true) + }) + + it(`returns false for non-HtmlBlob instance`, () => { + const value: BlobType = { revalidatedAt: 0 } + expect(isHtmlBlob(value)).toBe(false) + }) +}) diff --git a/tests/fixtures/revalidate-fetch/app/same-fetch-multiple-times/[id]/page.js b/tests/fixtures/revalidate-fetch/app/same-fetch-multiple-times/[id]/page.js new file mode 100644 index 0000000000..4f6d48e133 --- /dev/null +++ b/tests/fixtures/revalidate-fetch/app/same-fetch-multiple-times/[id]/page.js @@ -0,0 +1,47 @@ +const API_BASE = process.env.API_BASE || 'https://api.tvmaze.com/shows/' + +async function doFetch(params) { + const res = await fetch(new URL(params.id, API_BASE).href, { + next: { revalidate: false }, + }) + return res.json() +} + +async function getData(params) { + // trigger fetches in parallel to ensure we only do one cache lookup for ongoing fetches + const [data1, data2] = await Promise.all([doFetch(params), doFetch(params)]) + // also trigger fetch after to make sure we reuse already fetched cache + const data3 = await doFetch(params) + + if (data1?.name !== data2?.name || data1?.name !== data3?.name) { + throw new Error( + `Should have 3 names that are the same, instead got [${data1?.name}, ${data2?.name}, ${data3?.name}]`, + ) + } + + return data1 +} + +export default async function Page({ params }) { + const data = await getData(params) + + return ( + <> +

Using same fetch multiple times

+
+
Show
+
{data.name}
+
Param
+
{params.id}
+
Time
+
{Date.now()}
+
Time from fetch response
+
{data.date ?? 'no-date-in-response'}
+
+ + ) +} + +// make page dynamic, but still use fetch cache +export const fetchCache = 'force-cache' +export const dynamic = 'force-dynamic' diff --git a/tests/integration/fetch-handler.test.ts b/tests/integration/fetch-handler.test.ts index b525dfd16e..c90bf44af3 100644 --- a/tests/integration/fetch-handler.test.ts +++ b/tests/integration/fetch-handler.test.ts @@ -6,9 +6,33 @@ import { v4 } from 'uuid' import { afterAll, beforeAll, beforeEach, expect, test, vi } from 'vitest' import { type FixtureTestContext } from '../utils/contexts.js' import { createFixture, invokeFunction, runPlugin, runPluginStep } from '../utils/fixture.js' -import { generateRandomObjectID, startMockBlobStore } from '../utils/helpers.js' +import { generateRandomObjectID, getBlobServerGets, startMockBlobStore } from '../utils/helpers.js' import { nextVersionSatisfies } from '../utils/next-version-helpers.mjs' +function isFetch(key: string) { + // exclude tag manifests (starting with `_N_T_`), pages (starting with `/`) and static html files (keys including `.html`) + return !key.startsWith('_N_T_') && !key.startsWith('/') && !key.includes('.html') +} + +expect.extend({ + toBeDistinct(received: string[]) { + const { isNot } = this + const pass = new Set(received).size === received.length + return { + pass, + message: () => `${received} is${isNot ? 
' not' : ''} array with distinct values`, + } + }, +}) + +interface CustomMatchers { + toBeDistinct(): R +} + +declare module 'vitest' { + interface Assertion extends CustomMatchers {} +} + // Disable the verbose logging of the lambda-local runtime getLogger().level = 'alert' @@ -304,3 +328,71 @@ test('if the fetch call is cached correctly (cached page res }), ) }) + +test('does not fetch same cached fetch data from blobs twice for the same request', async (ctx) => { + await createFixture('revalidate-fetch', ctx) + await runPluginStep(ctx, 'onPreBuild') + await runPlugin(ctx) + + handlerCalled = 0 + const request1 = await invokeFunction(ctx, { + url: 'same-fetch-multiple-times/99', + }) + + const request1FetchDate = load(request1.body)('[data-testid="date-from-response"]').text() + const request1Name = load(request1.body)('[data-testid="name"]').text() + + expect(request1.statusCode, 'Tested page should work').toBe(200) + expect(request1Name, 'Test setup should use test API mock').toBe('Fake response') + + expect( + handlerCalled, + 'Cache should be empty, and we should hit mock endpoint once to warm up the cache', + ).toBe(1) + + const request1FetchCacheKeys = getBlobServerGets(ctx, isFetch) + + expect( + request1FetchCacheKeys.length, + 'tested page should be doing 3 fetch calls to render single page - we should only try to get cached fetch data from blobs once', + ).toBe(1) + + const request1AllCacheKeys = getBlobServerGets(ctx) + expect( + request1AllCacheKeys, + 'expected blobs for all types of values to be retrieved at most once per key (including fetch data, tag manifests, static files)', + ).toBeDistinct() + + ctx.blobServerGetSpy.mockClear() + handlerCalled = 0 + const request2 = await invokeFunction(ctx, { + url: 'same-fetch-multiple-times/99', + }) + + const request2FetchDate = load(request2.body)('[data-testid="date-from-response"]').text() + const request2Name = load(request2.body)('[data-testid="name"]').text() + + expect(request2.statusCode, 'Tested 
page should work').toBe(200) + expect(request2Name, 'Test setup should use test API mock').toBe('Fake response') + expect(request2FetchDate, 'Cached fetch data should be used for second request').toBe( + request1FetchDate, + ) + + expect(handlerCalled, 'Cache should be warm, and we should not hit mock endpoint').toBe(0) + + const request2FetchCacheKeys = getBlobServerGets(ctx, isFetch) + + expect( + request2FetchCacheKeys.length, + 'We should not reuse in-memory cache from first request and have one fetch blob call for second request', + ).toBe(1) + expect(request2FetchCacheKeys, 'Same fetch keys should be used in both requests').toEqual( + request1FetchCacheKeys, + ) + + const request2AllCacheKeys = getBlobServerGets(ctx) + expect( + request2AllCacheKeys, + 'expected blobs for all types of values to be retrieved at most once per key (including fetch data, tag manifests, static files)', + ).toBeDistinct() +}) diff --git a/tools/build.js b/tools/build.js index c622a26d67..2c7f9fdecb 100644 --- a/tools/build.js +++ b/tools/build.js @@ -131,14 +131,17 @@ await Promise.all([ async function ensureNoRegionalBlobsModuleDuplicates() { const REGIONAL_BLOB_STORE_CONTENT_TO_FIND = 'fetchBeforeNextPatchedIt' + const EXPECTED_MODULE_TO_CONTAIN_FETCH_BEFORE_NEXT_PATCHED_IT = + 'run/storage/regional-blob-store.cjs' const filesToTest = await glob(`${OUT_DIR}/**/*.{js,cjs}`) const unexpectedModulesContainingFetchBeforeNextPatchedIt = [] let foundInExpectedModule = false + for (const fileToTest of filesToTest) { const content = await readFile(fileToTest, 'utf-8') if (content.includes(REGIONAL_BLOB_STORE_CONTENT_TO_FIND)) { - if (fileToTest.endsWith('run/regional-blob-store.cjs')) { + if (fileToTest.endsWith(EXPECTED_MODULE_TO_CONTAIN_FETCH_BEFORE_NEXT_PATCHED_IT)) { foundInExpectedModule = true } else { unexpectedModulesContainingFetchBeforeNextPatchedIt.push(fileToTest) @@ -147,7 +150,7 @@ async function ensureNoRegionalBlobsModuleDuplicates() { } if (!foundInExpectedModule) { 
throw new Error( - 'Expected to find "fetchBeforeNextPatchedIt" variable in "run/regional-blob-store.cjs", but it was not found. This might indicate a setup change that requires the bundling validation in "tools/build.js" to be adjusted.', + `Expected to find "fetchBeforeNextPatchedIt" variable in "${EXPECTED_MODULE_TO_CONTAIN_FETCH_BEFORE_NEXT_PATCHED_IT}", but it was not found. This might indicate a setup change that requires the bundling validation in "tools/build.js" to be adjusted.`, ) } if (unexpectedModulesContainingFetchBeforeNextPatchedIt.length !== 0) {