diff --git a/.changeset/slow-walls-allow.md b/.changeset/slow-walls-allow.md new file mode 100644 index 000000000..e0311b3db --- /dev/null +++ b/.changeset/slow-walls-allow.md @@ -0,0 +1,8 @@ +--- +"@opennextjs/aws": minor +--- + +Add an option to keep the data cache persistent between deployments. + +BREAKING CHANGE: Incremental cache keys are now an object of type `CacheKey` instead of a string. The new type includes properties like `baseKey`, `buildId`, and `cacheType`. The `buildId` is automatically provided according to the cache type and the `dangerous.persistentDataCache` option. It is up to the incremental cache implementation to use it as it sees fit. +**Custom incremental cache implementations will need to be updated** \ No newline at end of file diff --git a/packages/open-next/src/adapters/cache.ts b/packages/open-next/src/adapters/cache.ts index 6f0945b93..c399b04b7 100644 --- a/packages/open-next/src/adapters/cache.ts +++ b/packages/open-next/src/adapters/cache.ts @@ -3,7 +3,14 @@ import type { IncrementalCacheContext, IncrementalCacheValue, } from "types/cache"; -import { getTagsFromValue, hasBeenRevalidated, writeTags } from "utils/cache"; +import type { CacheKey } from "types/overrides"; +import { + createCacheKey, + createTagKey, + getTagsFromValue, + hasBeenRevalidated, + writeTags, +} from "utils/cache"; import { isBinaryContentType } from "../utils/binary"; import { debug, error, warn } from "./logger"; @@ -31,7 +38,7 @@ function isFetchCache( // We need to use globalThis client here as this class can be defined at load time in next 12 but client is not available at load time export default class Cache { public async get( - key: string, + baseKey: string, // fetchCache is for next 13.5 and above, kindHint is for next 14 and above and boolean is for earlier versions options?: | boolean @@ -50,14 +57,15 @@ export default class Cache { const softTags = typeof options === "object" ? options.softTags : []; const tags = typeof options === "object" ? 
options.tags : []; return isFetchCache(options) - ? this.getFetchCache(key, softTags, tags) - : this.getIncrementalCache(key); + ? this.getFetchCache(baseKey, softTags, tags) + : this.getIncrementalCache(baseKey); } - async getFetchCache(key: string, softTags?: string[], tags?: string[]) { - debug("get fetch cache", { key, softTags, tags }); + async getFetchCache(baseKey: string, softTags?: string[], tags?: string[]) { + debug("get fetch cache", { baseKey, softTags, tags }); try { - const cachedEntry = await globalThis.incrementalCache.get(key, "fetch"); + const key = createCacheKey({ key: baseKey, type: "fetch" }); + const cachedEntry = await globalThis.incrementalCache.get(key); if (cachedEntry?.value === undefined) return null; @@ -83,7 +91,7 @@ export default class Cache { ); if (path) { const hasPathBeenUpdated = await hasBeenRevalidated( - path.replace("_N_T_/", ""), + createCacheKey({ key: path.replace("_N_T_/", ""), type: "cache" }), [], cachedEntry, ); @@ -105,9 +113,15 @@ export default class Cache { } } - async getIncrementalCache(key: string): Promise { + async getIncrementalCache( + baseKey: string, + ): Promise { try { - const cachedEntry = await globalThis.incrementalCache.get(key, "cache"); + const key = createCacheKey({ + key: baseKey, + type: "cache", + }); + const cachedEntry = await globalThis.incrementalCache.get(key); if (!cachedEntry?.value) { return null; @@ -191,13 +205,18 @@ export default class Cache { } async set( - key: string, + baseKey: string, data?: IncrementalCacheValue, ctx?: IncrementalCacheContext, ): Promise { if (globalThis.openNextConfig.dangerous?.disableIncrementalCache) { return; } + const key = createCacheKey({ + key: baseKey, + type: data?.kind === "FETCH" ? 
"fetch" : "cache", + }); + debug("Setting cache", { key, data, ctx }); // This one might not even be necessary anymore // Better be safe than sorry const detachedPromise = globalThis.__openNextAls @@ -205,30 +224,27 @@ export default class Cache { ?.pendingPromiseRunner.withResolvers(); try { if (data === null || data === undefined) { - await globalThis.incrementalCache.delete(key); + // only case where we delete the cache is for ISR/SSG cache + await globalThis.incrementalCache.delete(key as CacheKey<"cache">); } else { const revalidate = this.extractRevalidateForSet(ctx); switch (data.kind) { case "ROUTE": case "APP_ROUTE": { const { body, status, headers } = data; - await globalThis.incrementalCache.set( - key, - { - type: "route", - body: body.toString( - isBinaryContentType(String(headers["content-type"])) - ? "base64" - : "utf8", - ), - meta: { - status, - headers, - }, - revalidate, + await globalThis.incrementalCache.set(key, { + type: "route", + body: body.toString( + isBinaryContentType(String(headers["content-type"])) + ? 
"base64" + : "utf8", + ), + meta: { + status, + headers, }, - "cache", - ); + revalidate, + }); break; } case "PAGE": @@ -236,65 +252,49 @@ export default class Cache { const { html, pageData, status, headers } = data; const isAppPath = typeof pageData === "string"; if (isAppPath) { - await globalThis.incrementalCache.set( - key, - { - type: "app", - html, - rsc: pageData, - meta: { - status, - headers, - }, - revalidate, - }, - "cache", - ); - } else { - await globalThis.incrementalCache.set( - key, - { - type: "page", - html, - json: pageData, - revalidate, - }, - "cache", - ); - } - break; - } - case "APP_PAGE": { - const { html, rscData, headers, status } = data; - await globalThis.incrementalCache.set( - key, - { + await globalThis.incrementalCache.set(key, { type: "app", html, - rsc: rscData.toString("utf8"), + rsc: pageData, meta: { status, headers, }, revalidate, + }); + } else { + await globalThis.incrementalCache.set(key, { + type: "page", + html, + json: pageData, + revalidate, + }); + } + break; + } + case "APP_PAGE": { + const { html, rscData, headers, status } = data; + await globalThis.incrementalCache.set(key, { + type: "app", + html, + rsc: rscData.toString("utf8"), + meta: { + status, + headers, }, - "cache", - ); + revalidate, + }); break; } case "FETCH": - await globalThis.incrementalCache.set(key, data, "fetch"); + await globalThis.incrementalCache.set(key, data); break; case "REDIRECT": - await globalThis.incrementalCache.set( - key, - { - type: "redirect", - props: data.props, - revalidate, - }, - "cache", - ); + await globalThis.incrementalCache.set(key, { + type: "redirect", + props: data.props, + revalidate, + }); break; case "IMAGE": // Not implemented @@ -302,7 +302,7 @@ export default class Cache { } } - await this.updateTagsOnSet(key, data, ctx); + await this.updateTagsOnSet(baseKey, data, ctx); debug("Finished setting cache"); } catch (e) { error("Failed to set cache", e); @@ -324,7 +324,10 @@ export default class Cache { try { if 
(globalThis.tagCache.mode === "nextMode") { - const paths = (await globalThis.tagCache.getPathsByTags?.(_tags)) ?? []; + const paths = + (await globalThis.tagCache.getPathsByTags?.( + _tags.map(createTagKey), + )) ?? []; await writeTags(_tags); if (paths.length > 0) { @@ -350,7 +353,7 @@ export default class Cache { for (const tag of _tags) { debug("revalidateTag", tag); // Find all keys with the given tag - const paths = await globalThis.tagCache.getByTag(tag); + const paths = await globalThis.tagCache.getByTag(createTagKey(tag)); debug("Items", paths); const toInsert = paths.map((path) => ({ path, @@ -361,11 +364,15 @@ export default class Cache { if (tag.startsWith("_N_T_/")) { for (const path of paths) { // We need to find all hard tags for a given path - const _tags = await globalThis.tagCache.getByPath(path); + const _tags = await globalThis.tagCache.getByPath( + createTagKey(path), + ); const hardTags = _tags.filter((t) => !t.startsWith("_N_T_/")); // For every hard tag, we need to find all paths and revalidate them for (const hardTag of hardTags) { - const _paths = await globalThis.tagCache.getByTag(hardTag); + const _paths = await globalThis.tagCache.getByTag( + createTagKey(hardTag), + ); debug({ hardTag, _paths }); toInsert.push( ..._paths.map((path) => ({ @@ -378,7 +385,12 @@ export default class Cache { } // Update all keys with the given tag with revalidatedAt set to now - await writeTags(toInsert); + await writeTags( + toInsert.map((t) => ({ + path: createTagKey(t.path), + tag: createTagKey(t.tag), + })), + ); // We can now invalidate all paths in the CDN // This only applies to `revalidateTag`, not to `res.revalidate()` @@ -439,13 +451,13 @@ export default class Cache { // Get all tags stored in dynamodb for the given key // If any of the derived tags are not stored in dynamodb for the given key, write them - const storedTags = await globalThis.tagCache.getByPath(key); + const storedTags = await globalThis.tagCache.getByPath(createTagKey(key)); 
const tagsToWrite = derivedTags.filter((tag) => !storedTags.includes(tag)); if (tagsToWrite.length > 0) { await writeTags( tagsToWrite.map((tag) => ({ - path: key, - tag: tag, + path: createTagKey(key), + tag: createTagKey(tag), // In case the tags are not there we just need to create them // but we don't want them to return from `getLastModified` as they are not stale revalidatedAt: 1, diff --git a/packages/open-next/src/adapters/composable-cache.ts b/packages/open-next/src/adapters/composable-cache.ts index 0c4fecdc2..89aebe459 100644 --- a/packages/open-next/src/adapters/composable-cache.ts +++ b/packages/open-next/src/adapters/composable-cache.ts @@ -1,22 +1,21 @@ import type { ComposableCacheEntry, ComposableCacheHandler } from "types/cache"; -import { writeTags } from "utils/cache"; +import type { TagKey } from "types/overrides"; +import { createCacheKey, createTagKey, writeTags } from "utils/cache"; import { fromReadableStream, toReadableStream } from "utils/stream"; import { debug } from "./logger"; const pendingWritePromiseMap = new Map>(); export default { - async get(cacheKey: string) { + async get(key: string) { try { + const cacheKey = createCacheKey({ key, type: "composable" }); // We first check if we have a pending write for this cache key // If we do, we return the pending promise instead of fetching the cache - if (pendingWritePromiseMap.has(cacheKey)) { - return pendingWritePromiseMap.get(cacheKey); + if (pendingWritePromiseMap.has(cacheKey.baseKey)) { + return pendingWritePromiseMap.get(cacheKey.baseKey); } - const result = await globalThis.incrementalCache.get( - cacheKey, - "composable", - ); + const result = await globalThis.incrementalCache.get(cacheKey); if (!result?.value?.value) { return undefined; } @@ -29,7 +28,7 @@ export default { result.value.tags.length > 0 ) { const hasBeenRevalidated = await globalThis.tagCache.hasBeenRevalidated( - result.value.tags, + result.value.tags.map(createTagKey), result.lastModified, ); if 
(hasBeenRevalidated) return undefined; @@ -55,25 +54,27 @@ export default { } }, - async set(cacheKey: string, pendingEntry: Promise) { - pendingWritePromiseMap.set(cacheKey, pendingEntry); + async set(key: string, pendingEntry: Promise) { + const cacheKey = createCacheKey({ key, type: "composable" }); + pendingWritePromiseMap.set(cacheKey.baseKey, pendingEntry); const entry = await pendingEntry.finally(() => { - pendingWritePromiseMap.delete(cacheKey); + pendingWritePromiseMap.delete(cacheKey.baseKey); }); const valueToStore = await fromReadableStream(entry.value); - await globalThis.incrementalCache.set( - cacheKey, - { - ...entry, - value: valueToStore, - }, - "composable", - ); + await globalThis.incrementalCache.set(cacheKey, { + ...entry, + value: valueToStore, + }); if (globalThis.tagCache.mode === "original") { const storedTags = await globalThis.tagCache.getByPath(cacheKey); const tagsToWrite = entry.tags.filter((tag) => !storedTags.includes(tag)); if (tagsToWrite.length > 0) { - await writeTags(tagsToWrite.map((tag) => ({ tag, path: cacheKey }))); + await writeTags( + tagsToWrite.map((tag) => ({ + tag: createTagKey(tag), + path: createTagKey(cacheKey.baseKey), + })), + ); } } }, @@ -84,7 +85,7 @@ export default { }, async getExpiration(...tags: string[]) { if (globalThis.tagCache.mode === "nextMode") { - return globalThis.tagCache.getLastRevalidated(tags); + return globalThis.tagCache.getLastRevalidated(tags.map(createTagKey)); } // We always return 0 here, original tag cache are handled directly in the get part // TODO: We need to test this more, i'm not entirely sure that this is working as expected @@ -100,16 +101,20 @@ export default { // We need to find all paths linked to to these tags const pathsToUpdate = await Promise.all( tags.map(async (tag) => { - const paths = await tagCache.getByTag(tag); + const paths = await tagCache.getByTag(createTagKey(tag)); return paths.map((path) => ({ - path, - tag, + path: createTagKey(path), + tag: 
createTagKey(tag), revalidatedAt, })); }), ); // We need to deduplicate paths, we use a set for that - const setToWrite = new Set<{ path: string; tag: string }>(); + const setToWrite = new Set<{ + path: TagKey; + tag: TagKey; + revalidatedAt: number; + }>(); for (const entry of pathsToUpdate.flat()) { setToWrite.add(entry); } diff --git a/packages/open-next/src/adapters/dynamo-provider.ts b/packages/open-next/src/adapters/dynamo-provider.ts index 6aacd7159..7958920cb 100644 --- a/packages/open-next/src/adapters/dynamo-provider.ts +++ b/packages/open-next/src/adapters/dynamo-provider.ts @@ -1,5 +1,6 @@ import { readFileSync } from "node:fs"; +import { createTagKey } from "utils/cache.js"; import { createGenericHandler } from "../core/createGenericHandler.js"; import { resolveTagCache } from "../core/resolve.js"; @@ -60,8 +61,8 @@ async function insert( const data: DataType[] = JSON.parse(file); const parsedData = data.map((item) => ({ - tag: item.tag.S, - path: item.path.S, + tag: createTagKey(item.tag.S), + path: createTagKey(item.path.S), revalidatedAt: Number.parseInt(item.revalidatedAt.N), })); diff --git a/packages/open-next/src/core/routing/cacheInterceptor.ts b/packages/open-next/src/core/routing/cacheInterceptor.ts index 70dd083f8..67d256ff3 100644 --- a/packages/open-next/src/core/routing/cacheInterceptor.ts +++ b/packages/open-next/src/core/routing/cacheInterceptor.ts @@ -5,7 +5,11 @@ import type { InternalEvent, InternalResult } from "types/open-next"; import type { CacheValue } from "types/overrides"; import { emptyReadableStream, toReadableStream } from "utils/stream"; -import { getTagsFromValue, hasBeenRevalidated } from "utils/cache"; +import { + createCacheKey, + getTagsFromValue, + hasBeenRevalidated, +} from "utils/cache"; import { debug } from "../../adapters/logger"; import { localizePath } from "./i18n"; import { generateMessageGroupId } from "./queue"; @@ -208,7 +212,10 @@ export async function cacheInterceptor( if (isISR) { try { const 
cachedData = await globalThis.incrementalCache.get( - localizedPath ?? "/index", + createCacheKey({ + key: localizedPath ?? "/index", + type: "cache", + }), ); debug("cached data in interceptor", cachedData); @@ -219,7 +226,7 @@ export async function cacheInterceptor( if (cachedData.value?.type === "app") { const tags = getTagsFromValue(cachedData.value); const _hasBeenRevalidated = await hasBeenRevalidated( - localizedPath, + createCacheKey({ key: localizedPath, type: "cache" }), tags, cachedData, ); diff --git a/packages/open-next/src/overrides/incrementalCache/fs-dev.ts b/packages/open-next/src/overrides/incrementalCache/fs-dev.ts index 6fe772f24..3f1643105 100644 --- a/packages/open-next/src/overrides/incrementalCache/fs-dev.ts +++ b/packages/open-next/src/overrides/incrementalCache/fs-dev.ts @@ -1,11 +1,10 @@ import fs from "node:fs/promises"; import path from "node:path"; -import type { IncrementalCache } from "types/overrides.js"; +import type { CacheKey, IncrementalCache } from "types/overrides.js"; import { getMonorepoRelativePath } from "utils/normalize-path"; -const buildId = process.env.NEXT_BUILD_ID; -const basePath = path.join(getMonorepoRelativePath(), `cache/${buildId}`); +const basePath = path.join(getMonorepoRelativePath(), "cache"); const getCacheKey = (key: string) => { return path.join(basePath, `${key}.cache`); @@ -13,24 +12,26 @@ const getCacheKey = (key: string) => { const cache: IncrementalCache = { name: "fs-dev", - get: async (key: string) => { - const fileData = await fs.readFile(getCacheKey(key), "utf-8"); + get: async (cacheKey: CacheKey) => { + // This cache is always shared across build (the build id is not used) + const { baseKey } = cacheKey; + const fileData = await fs.readFile(getCacheKey(baseKey), "utf-8"); const data = JSON.parse(fileData); - const { mtime } = await fs.stat(getCacheKey(key)); + const { mtime } = await fs.stat(getCacheKey(baseKey)); return { value: data, lastModified: mtime.getTime(), }; }, - set: async (key, 
value, isFetch) => { + set: async ({ baseKey }, value) => { const data = JSON.stringify(value); - const cacheKey = getCacheKey(key); + const cacheKey = getCacheKey(baseKey); // We need to create the directory before writing the file await fs.mkdir(path.dirname(cacheKey), { recursive: true }); await fs.writeFile(cacheKey, data); }, - delete: async (key) => { - await fs.rm(getCacheKey(key)); + delete: async ({ baseKey }) => { + await fs.rm(getCacheKey(baseKey)); }, }; diff --git a/packages/open-next/src/overrides/incrementalCache/multi-tier-ddb-s3.ts b/packages/open-next/src/overrides/incrementalCache/multi-tier-ddb-s3.ts index 5ee8bcc32..8ba127cbf 100644 --- a/packages/open-next/src/overrides/incrementalCache/multi-tier-ddb-s3.ts +++ b/packages/open-next/src/overrides/incrementalCache/multi-tier-ddb-s3.ts @@ -1,5 +1,6 @@ import type { CacheEntryType, + CacheKey, CacheValue, IncrementalCache, } from "types/overrides"; @@ -40,9 +41,8 @@ const awsFetch = (body: RequestInit["body"], type: "get" | "set" = "get") => { ); }; -const buildDynamoKey = (key: string) => { - const { NEXT_BUILD_ID } = process.env; - return `__meta_${NEXT_BUILD_ID}_${key}`; +const buildDynamoKey = (key: CacheKey) => { + return `__meta_${key.buildId ?? 
""}_${key.baseKey}`; }; /** @@ -55,11 +55,10 @@ const buildDynamoKey = (key: string) => { const multiTierCache: IncrementalCache = { name: "multi-tier-ddb-s3", async get( - key: string, - isFetch?: CacheType, + key: CacheKey, ) { // First we check the local cache - const localCacheEntry = localCache.get(key) as + const localCacheEntry = localCache.get(key.baseKey) as | { value: CacheValue; lastModified: number; @@ -87,7 +86,7 @@ const multiTierCache: IncrementalCache = { const data = await result.json(); const hasBeenDeleted = data.Item?.deleted?.BOOL; if (hasBeenDeleted) { - localCache.delete(key); + localCache.delete(key.baseKey); return { value: undefined, lastModified: 0 }; } // If the metadata is older than the local cache, we can use the local cache @@ -104,9 +103,9 @@ const multiTierCache: IncrementalCache = { debug("Failed to get metadata from ddb", e); } } - const result = await S3Cache.get(key, isFetch); + const result = await S3Cache.get(key); if (result?.value) { - localCache.set(key, { + localCache.set(key.baseKey, { value: result.value, lastModified: result.lastModified ?? Date.now(), }); @@ -117,9 +116,9 @@ const multiTierCache: IncrementalCache = { // Both for set and delete we choose to do the write to S3 first and then to DynamoDB // Which means that if it fails in DynamoDB, instance that don't have local cache will work as expected. // But instance that have local cache will have a stale cache until the next working set or delete. 
- async set(key, value, isFetch) { + async set(key, value) { const revalidatedAt = Date.now(); - await S3Cache.set(key, value, isFetch); + await S3Cache.set(key, value); await awsFetch( JSON.stringify({ TableName: process.env.CACHE_DYNAMO_TABLE, @@ -131,7 +130,7 @@ const multiTierCache: IncrementalCache = { }), "set", ); - localCache.set(key, { + localCache.set(key.baseKey, { value, lastModified: revalidatedAt, }); @@ -149,7 +148,7 @@ const multiTierCache: IncrementalCache = { }), "set", ); - localCache.delete(key); + localCache.delete(key.baseKey); }, }; diff --git a/packages/open-next/src/overrides/incrementalCache/s3-lite.ts b/packages/open-next/src/overrides/incrementalCache/s3-lite.ts index e2355be34..6dc0a3648 100644 --- a/packages/open-next/src/overrides/incrementalCache/s3-lite.ts +++ b/packages/open-next/src/overrides/incrementalCache/s3-lite.ts @@ -2,8 +2,7 @@ import path from "node:path"; import { AwsClient } from "aws4fetch"; -import type { Extension } from "types/cache"; -import type { IncrementalCache } from "types/overrides"; +import type { CacheKey, IncrementalCache } from "types/overrides"; import { IgnorableError, RecoverableError } from "utils/error"; import { customFetchClient } from "utils/fetch"; @@ -33,19 +32,19 @@ const awsFetch = async (key: string, options: RequestInit) => { return customFetchClient(client)(url, options); }; -function buildS3Key(key: string, extension: Extension) { - const { CACHE_BUCKET_KEY_PREFIX, NEXT_BUILD_ID } = process.env; +function buildS3Key(key: CacheKey) { return path.posix.join( - CACHE_BUCKET_KEY_PREFIX ?? "", - extension === "fetch" ? "__fetch" : "", - NEXT_BUILD_ID ?? "", - extension === "fetch" ? key : `${key}.${extension}`, + process.env.CACHE_BUCKET_KEY_PREFIX ?? "", + key.cacheType === "fetch" ? "__fetch" : "", + key.cacheType === "fetch" + ? key.baseKey + : `${key.buildId ? 
`${key.buildId}/` : ""}${key.baseKey}.${key.cacheType}`, ); } const incrementalCache: IncrementalCache = { - async get(key, cacheType) { - const result = await awsFetch(buildS3Key(key, cacheType ?? "cache"), { + async get(key) { + const result = await awsFetch(buildS3Key(key), { method: "GET", }); @@ -63,8 +62,8 @@ const incrementalCache: IncrementalCache = { ).getTime(), }; }, - async set(key, value, cacheType): Promise { - const response = await awsFetch(buildS3Key(key, cacheType ?? "cache"), { + async set(key, value): Promise { + const response = await awsFetch(buildS3Key(key), { method: "PUT", body: JSON.stringify(value), }); @@ -73,7 +72,7 @@ const incrementalCache: IncrementalCache = { } }, async delete(key): Promise { - const response = await awsFetch(buildS3Key(key, "cache"), { + const response = await awsFetch(buildS3Key(key), { method: "DELETE", }); if (response.status !== 204) { diff --git a/packages/open-next/src/overrides/incrementalCache/s3.ts b/packages/open-next/src/overrides/incrementalCache/s3.ts index 371499209..6bf6a7c91 100644 --- a/packages/open-next/src/overrides/incrementalCache/s3.ts +++ b/packages/open-next/src/overrides/incrementalCache/s3.ts @@ -7,18 +7,13 @@ import { PutObjectCommand, S3Client, } from "@aws-sdk/client-s3"; -import type { Extension } from "types/cache"; -import type { IncrementalCache } from "types/overrides"; +import type { CacheKey, IncrementalCache } from "types/overrides"; import { awsLogger } from "../../adapters/logger"; import { parseNumberFromEnv } from "../../adapters/util"; -const { - CACHE_BUCKET_REGION, - CACHE_BUCKET_KEY_PREFIX, - NEXT_BUILD_ID, - CACHE_BUCKET_NAME, -} = process.env; +const { CACHE_BUCKET_REGION, CACHE_BUCKET_KEY_PREFIX, CACHE_BUCKET_NAME } = + process.env; function parseS3ClientConfigFromEnv(): S3ClientConfig { return { @@ -30,21 +25,20 @@ function parseS3ClientConfigFromEnv(): S3ClientConfig { const s3Client = new S3Client(parseS3ClientConfigFromEnv()); -function buildS3Key(key: string, 
extension: Extension) { +function buildS3Key(key: CacheKey) { return path.posix.join( CACHE_BUCKET_KEY_PREFIX ?? "", - extension === "fetch" ? "__fetch" : "", - NEXT_BUILD_ID ?? "", - extension === "fetch" ? key : `${key}.${extension}`, + key.cacheType === "fetch" ? "__fetch" : "", + `${key.buildId ? `${key.buildId}/` : ""}${key.baseKey}.${key.cacheType}`, ); } const incrementalCache: IncrementalCache = { - async get(key, cacheType) { + async get(key) { const result = await s3Client.send( new GetObjectCommand({ Bucket: CACHE_BUCKET_NAME, - Key: buildS3Key(key, cacheType ?? "cache"), + Key: buildS3Key(key), }), ); @@ -56,11 +50,11 @@ const incrementalCache: IncrementalCache = { lastModified: result.LastModified?.getTime(), }; }, - async set(key, value, cacheType): Promise { + async set(key, value): Promise { await s3Client.send( new PutObjectCommand({ Bucket: CACHE_BUCKET_NAME, - Key: buildS3Key(key, cacheType ?? "cache"), + Key: buildS3Key(key), Body: JSON.stringify(value), }), ); @@ -69,7 +63,7 @@ const incrementalCache: IncrementalCache = { await s3Client.send( new DeleteObjectCommand({ Bucket: CACHE_BUCKET_NAME, - Key: buildS3Key(key, "cache"), + Key: buildS3Key(key), }), ); }, diff --git a/packages/open-next/src/overrides/tagCache/dummy.ts b/packages/open-next/src/overrides/tagCache/dummy.ts index fc21e862c..fbd21d00f 100644 --- a/packages/open-next/src/overrides/tagCache/dummy.ts +++ b/packages/open-next/src/overrides/tagCache/dummy.ts @@ -1,4 +1,4 @@ -import type { TagCache } from "types/overrides"; +import type { TagCache, TagKey } from "types/overrides"; // We don't want to throw error on this one because we might use it when we don't need tag cache const dummyTagCache: TagCache = { @@ -10,7 +10,7 @@ const dummyTagCache: TagCache = { getByTag: async () => { return []; }, - getLastModified: async (_: string, lastModified) => { + getLastModified: async (_: TagKey, lastModified) => { return lastModified ?? 
Date.now(); }, writeTags: async () => { diff --git a/packages/open-next/src/overrides/tagCache/dynamodb-lite.ts b/packages/open-next/src/overrides/tagCache/dynamodb-lite.ts index 490646afb..b5e197c06 100644 --- a/packages/open-next/src/overrides/tagCache/dynamodb-lite.ts +++ b/packages/open-next/src/overrides/tagCache/dynamodb-lite.ts @@ -1,8 +1,5 @@ -/* eslint-disable @typescript-eslint/no-non-null-assertion */ -import path from "node:path"; - import { AwsClient } from "aws4fetch"; -import type { TagCache } from "types/overrides"; +import type { TagCache, TagKey } from "types/overrides"; import { RecoverableError } from "utils/error"; import { customFetchClient } from "utils/fetch"; @@ -50,17 +47,14 @@ const awsFetch = ( ); }; -function buildDynamoKey(key: string) { - const { NEXT_BUILD_ID } = process.env; - // FIXME: We should probably use something else than path.join here - // this could transform some fetch cache key into a valid path - return path.posix.join(NEXT_BUILD_ID ?? "", key); +function buildDynamoKey(key: TagKey) { + return `${key.buildId ?? ""}_${key.baseKey}`; } -function buildDynamoObject(path: string, tags: string, revalidatedAt?: number) { +function buildDynamoObject(path: TagKey, tag: TagKey, revalidatedAt?: number) { return { path: { S: buildDynamoKey(path) }, - tag: { S: buildDynamoKey(tags) }, + tag: { S: buildDynamoKey(tag) }, revalidatedAt: { N: `${revalidatedAt ?? 
Date.now()}` }, }; } diff --git a/packages/open-next/src/overrides/tagCache/dynamodb-nextMode.ts b/packages/open-next/src/overrides/tagCache/dynamodb-nextMode.ts index 1d4a5da38..4ec36b3ac 100644 --- a/packages/open-next/src/overrides/tagCache/dynamodb-nextMode.ts +++ b/packages/open-next/src/overrides/tagCache/dynamodb-nextMode.ts @@ -1,10 +1,8 @@ -import type { NextModeTagCache } from "types/overrides"; +import type { NextModeTagCache, TagKey } from "types/overrides"; import { AwsClient } from "aws4fetch"; import { RecoverableError } from "utils/error"; import { customFetchClient } from "utils/fetch"; - -import path from "node:path"; import { debug, error } from "../../adapters/logger"; import { chunk, parseNumberFromEnv } from "../../adapters/util"; import { @@ -49,17 +47,14 @@ const awsFetch = ( ); }; -function buildDynamoKey(key: string) { - const { NEXT_BUILD_ID } = process.env; - // FIXME: We should probably use something else than path.join here - // this could transform some fetch cache key into a valid path - return path.posix.join(NEXT_BUILD_ID ?? "", "_tag", key); +function buildDynamoKey(key: TagKey) { + return `${key.buildId ?? 
""}_${key.baseKey}`; } // We use the same key for both path and tag // That's mostly for compatibility reason so that it's easier to use this with existing infra // FIXME: Allow a simpler object without an unnecessary path key -function buildDynamoObject(tag: string, revalidatedAt?: number) { +function buildDynamoObject(tag: TagKey, revalidatedAt?: number) { return { path: { S: buildDynamoKey(tag) }, tag: { S: buildDynamoKey(tag) }, @@ -71,11 +66,11 @@ function buildDynamoObject(tag: string, revalidatedAt?: number) { export default { name: "ddb-nextMode", mode: "nextMode", - getLastRevalidated: async (tags: string[]) => { + getLastRevalidated: async (tags: TagKey[]) => { // Not supported for now return 0; }, - hasBeenRevalidated: async (tags: string[], lastModified?: number) => { + hasBeenRevalidated: async (tags: TagKey[], lastModified?: number) => { if (globalThis.openNextConfig.dangerous?.disableTagCache) { return false; } @@ -117,7 +112,7 @@ export default { debug("retrieved tags", revalidatedTags); return revalidatedTags.length > 0; }, - writeTags: async (tags: string[]) => { + writeTags: async (tags: TagKey[]) => { try { const { CACHE_DYNAMO_TABLE } = process.env; if (globalThis.openNextConfig.dangerous?.disableTagCache) { diff --git a/packages/open-next/src/overrides/tagCache/dynamodb.ts b/packages/open-next/src/overrides/tagCache/dynamodb.ts index 8b9406f87..9bf2fb150 100644 --- a/packages/open-next/src/overrides/tagCache/dynamodb.ts +++ b/packages/open-next/src/overrides/tagCache/dynamodb.ts @@ -1,12 +1,10 @@ -import path from "node:path"; - import type { DynamoDBClientConfig } from "@aws-sdk/client-dynamodb"; import { BatchWriteItemCommand, DynamoDBClient, QueryCommand, } from "@aws-sdk/client-dynamodb"; -import type { TagCache } from "types/overrides"; +import type { TagCache, TagKey } from "types/overrides"; import { awsLogger, debug, error } from "../../adapters/logger"; import { chunk, parseNumberFromEnv } from "../../adapters/util"; @@ -27,16 +25,14 
@@ function parseDynamoClientConfigFromEnv(): DynamoDBClientConfig { const dynamoClient = new DynamoDBClient(parseDynamoClientConfigFromEnv()); -function buildDynamoKey(key: string) { - // FIXME: We should probably use something else than path.join here - // this could transform some fetch cache key into a valid path - return path.posix.join(NEXT_BUILD_ID ?? "", key); +function buildDynamoKey(key: TagKey) { + return `${key.buildId ?? ""}_${key.baseKey}`; } -function buildDynamoObject(path: string, tags: string, revalidatedAt?: number) { +function buildDynamoObject(path: TagKey, tag: TagKey, revalidatedAt?: number) { return { path: { S: buildDynamoKey(path) }, - tag: { S: buildDynamoKey(tags) }, + tag: { S: buildDynamoKey(tag) }, revalidatedAt: { N: `${revalidatedAt ?? Date.now()}` }, }; } diff --git a/packages/open-next/src/overrides/tagCache/fs-dev.ts b/packages/open-next/src/overrides/tagCache/fs-dev.ts index d5706ccec..38179dc44 100644 --- a/packages/open-next/src/overrides/tagCache/fs-dev.ts +++ b/packages/open-next/src/overrides/tagCache/fs-dev.ts @@ -1,7 +1,7 @@ import fs from "node:fs"; import path from "node:path"; -import type { TagCache } from "types/overrides"; +import type { TagCache, TagKey } from "types/overrides"; import { getMonorepoRelativePath } from "utils/normalize-path"; const tagFile = path.join( @@ -19,20 +19,20 @@ let tags = JSON.parse(tagContent) as { const tagCache: TagCache = { name: "fs-dev", mode: "original", - getByPath: async (path: string) => { + getByPath: async (path: TagKey) => { return tags - .filter((tagPathMapping) => tagPathMapping.path.S === path) + .filter((tagPathMapping) => tagPathMapping.path.S === path.baseKey) .map((tag) => tag.tag.S); }, - getByTag: async (tag: string) => { + getByTag: async (tag: TagKey) => { return tags - .filter((tagPathMapping) => tagPathMapping.tag.S === tag) + .filter((tagPathMapping) => tagPathMapping.tag.S === tag.baseKey) .map((tag) => tag.path.S); }, - getLastModified: async (path: string, 
lastModified?: number) => { + getLastModified: async (path: TagKey, lastModified?: number) => { const revalidatedTags = tags.filter( (tagPathMapping) => - tagPathMapping.path.S === path && + tagPathMapping.path.S === path.baseKey && Number.parseInt(tagPathMapping.revalidatedAt.N) > (lastModified ?? 0), ); return revalidatedTags.length > 0 ? -1 : (lastModified ?? Date.now()); @@ -46,8 +46,8 @@ const tagCache: TagCache = { ); tags = unchangedTags.concat( newTags.map((tag) => ({ - tag: { S: tag.tag }, - path: { S: tag.path }, + tag: { S: tag.tag.baseKey }, + path: { S: tag.path.baseKey }, revalidatedAt: { N: String(tag.revalidatedAt ?? 1) }, })), ); diff --git a/packages/open-next/src/types/open-next.ts b/packages/open-next/src/types/open-next.ts index 210803468..6cec7d7f2 100644 --- a/packages/open-next/src/types/open-next.ts +++ b/packages/open-next/src/types/open-next.ts @@ -78,6 +78,16 @@ export interface DangerousOptions { headersAndCookiesPriority?: ( event: InternalEvent, ) => "middleware" | "handler"; + + /** + * Persist data cache between deployments. + * Next.js claims that the data cache is persistent (not true for `use cache` and it depends on how you build/deploy otherwise). + * By default, every entry will be prepended with the BUILD_ID, when enabled it will not. + * This means that the data cache will be persistent between deployments. 
+ * This is useful in a lot of cases, but be aware that it could cause issues, especially with `use cache` or `unstable_cache` (Some external change may not be reflected in the key, leading to stale data) + * @default false + */ + persistentDataCache?: boolean; } export type BaseOverride = { diff --git a/packages/open-next/src/types/overrides.ts b/packages/open-next/src/types/overrides.ts index e6ea87120..a797ccf10 100644 --- a/packages/open-next/src/types/overrides.ts +++ b/packages/open-next/src/types/overrides.ts @@ -109,17 +109,35 @@ export type CacheValue = revalidate?: number | false; }; +export type TagKey = { + baseKey: string; + buildId?: string; +}; + +/** + * Represents a cache key used in the incremental cache. + * Depending on the `dangerous.persistentDataCache` setting, the key may include the build ID. + * If `persistentDataCache` is enabled, the key will not include the build ID for data cache entries + */ +export type CacheKey = { + cacheType: CacheType; + buildId: CacheType extends "cache" ? string : string | undefined; + /** + * The base key is the main identifier for the cache entry. + * It never depends on the build ID, and is used to identify the cache entry. 
+ */ + baseKey: string; +}; + export type IncrementalCache = { get( - key: string, - cacheType?: CacheType, + key: CacheKey, ): Promise> | null>; set( - key: string, + key: CacheKey, value: CacheValue, - isFetch?: CacheType, ): Promise; - delete(key: string): Promise; + delete(key: CacheKey<"cache">): Promise; name: string; }; @@ -153,17 +171,17 @@ Cons : export type NextModeTagCache = BaseTagCache & { mode: "nextMode"; // Necessary for the composable cache - getLastRevalidated(tags: string[]): Promise; - hasBeenRevalidated(tags: string[], lastModified?: number): Promise; - writeTags(tags: string[]): Promise; + getLastRevalidated(tags: TagKey[]): Promise; + hasBeenRevalidated(tags: TagKey[], lastModified?: number): Promise; + writeTags(tags: TagKey[]): Promise; // Optional method to get paths by tags // It is used to automatically invalidate paths in the CDN - getPathsByTags?: (tags: string[]) => Promise; + getPathsByTags?: (tags: TagKey[]) => Promise; }; export interface OriginalTagCacheWriteInput { - tag: string; - path: string; + tag: TagKey; + path: TagKey; revalidatedAt?: number; } @@ -188,9 +206,9 @@ Cons : */ export type OriginalTagCache = BaseTagCache & { mode?: "original"; - getByTag(tag: string): Promise; - getByPath(path: string): Promise; - getLastModified(path: string, lastModified?: number): Promise; + getByTag(tag: TagKey): Promise; + getByPath(path: TagKey): Promise; + getLastModified(path: TagKey, lastModified?: number): Promise; writeTags(tags: OriginalTagCacheWriteInput[]): Promise; }; diff --git a/packages/open-next/src/utils/cache.ts b/packages/open-next/src/utils/cache.ts index bfc2fd781..3a48efc10 100644 --- a/packages/open-next/src/utils/cache.ts +++ b/packages/open-next/src/utils/cache.ts @@ -1,12 +1,15 @@ import type { + CacheEntryType, + CacheKey, CacheValue, OriginalTagCacheWriteInput, + TagKey, WithLastModified, } from "types/overrides"; -import { debug } from "../adapters/logger"; +import { debug, warn } from "../adapters/logger"; 
export async function hasBeenRevalidated( - key: string, + key: CacheKey, tags: string[], cacheEntry: WithLastModified>, ): Promise { @@ -23,7 +26,13 @@ export async function hasBeenRevalidated( } const lastModified = cacheEntry.lastModified ?? Date.now(); if (globalThis.tagCache.mode === "nextMode") { - return await globalThis.tagCache.hasBeenRevalidated(tags, lastModified); + return await globalThis.tagCache.hasBeenRevalidated( + tags.map((t) => ({ + baseKey: t, + buildId: key.buildId, + })), + lastModified, + ); } // TODO: refactor this, we should introduce a new method in the tagCache interface so that both implementations use hasBeenRevalidated const _lastModified = await globalThis.tagCache.getLastModified( @@ -80,3 +89,57 @@ export async function writeTags( // Here we know that we have the correct type await globalThis.tagCache.writeTags(tagsToWrite as any); } + +export function createCacheKey({ + key, + type, +}: { key: string; type: CacheType }): CacheKey { + // We always provide the build ID to the cache key for ISR/SSG cache entry + // For data cache, we only provide the build ID if the persistentDataCache is not enabled + const shouldProvideBuildId = + globalThis.openNextConfig.dangerous?.persistentDataCache !== true; + const buildId = process.env.NEXT_BUILD_ID ?? 
"undefined-build-id"; + // ISR/SSG cache entry should always have a build ID + if (type === "cache") { + return { + cacheType: "cache", + buildId, + baseKey: key, + } as CacheKey; + } + let baseKey = key; + if (type === "composable") { + try { + const [_buildId, ...rest] = JSON.parse(key); + baseKey = JSON.stringify(rest); + } catch (e) { + warn("Error while parsing composable cache key", e); + // If we fail to parse the key, we just return it as is + // This is not ideal, but we don't want to crash the application + baseKey = key; + } + } + if (shouldProvideBuildId) { + return { + cacheType: type, + buildId, + baseKey: key, + }; + } + return { + cacheType: type, + buildId: undefined, + baseKey: key, + } as CacheKey; +} + +export function createTagKey(tag: string): TagKey { + const shouldProvideBuildId = + globalThis.openNextConfig.dangerous?.persistentDataCache !== true; + // We always prepend the build ID to the tag key + const buildId = process.env.NEXT_BUILD_ID ?? "undefined-build-id"; + return { + buildId: shouldProvideBuildId ? 
buildId : undefined, + baseKey: tag, + }; +} diff --git a/packages/tests-unit/tests/adapters/cache.test.ts b/packages/tests-unit/tests/adapters/cache.test.ts index af507200c..ddcc361a3 100644 --- a/packages/tests-unit/tests/adapters/cache.test.ts +++ b/packages/tests-unit/tests/adapters/cache.test.ts @@ -4,7 +4,11 @@ import { vi } from "vitest"; declare global { var openNextConfig: { - dangerous: { disableIncrementalCache?: boolean; disableTagCache?: boolean }; + dangerous: { + disableIncrementalCache?: boolean; + disableTagCache?: boolean; + persistentDataCache?: boolean; + }; }; var isNextAfter15: boolean; } @@ -360,9 +364,12 @@ describe("CacheHandler", () => { }); expect(incrementalCache.set).toHaveBeenCalledWith( - "key", + { + baseKey: "key", + cacheType: "cache", + buildId: "undefined-build-id", + }, { type: "route", body: "{}", meta: { status: 200, headers: {} } }, - "cache", ); }); @@ -377,13 +384,16 @@ describe("CacheHandler", () => { }); expect(incrementalCache.set).toHaveBeenCalledWith( - "key", + { + baseKey: "key", + cacheType: "cache", + buildId: "undefined-build-id", + }, { type: "route", body: Buffer.from("{}").toString("base64"), meta: { status: 200, headers: { "content-type": "image/png" } }, }, - "cache", ); }); @@ -397,13 +407,16 @@ describe("CacheHandler", () => { }); expect(incrementalCache.set).toHaveBeenCalledWith( - "key", + { + baseKey: "key", + cacheType: "cache", + buildId: "undefined-build-id", + }, { type: "page", html: "", json: {}, }, - "cache", ); }); @@ -417,14 +430,17 @@ describe("CacheHandler", () => { }); expect(incrementalCache.set).toHaveBeenCalledWith( - "key", + { + baseKey: "key", + cacheType: "cache", + buildId: "undefined-build-id", + }, { type: "app", html: "", rsc: "rsc", meta: { status: 200, headers: {} }, }, - "cache", ); }); @@ -438,14 +454,17 @@ describe("CacheHandler", () => { }); expect(incrementalCache.set).toHaveBeenCalledWith( - "key", + { + baseKey: "key", + cacheType: "cache", + buildId: "undefined-build-id", 
+ }, { type: "app", html: "", rsc: "rsc", meta: { status: 200, headers: {} }, }, - "cache", ); }); @@ -463,7 +482,11 @@ describe("CacheHandler", () => { }); expect(incrementalCache.set).toHaveBeenCalledWith( - "key", + { + baseKey: "key", + cacheType: "fetch", + buildId: "undefined-build-id", + }, { kind: "FETCH", data: { @@ -475,7 +498,6 @@ describe("CacheHandler", () => { }, revalidate: 60, }, - "fetch", ); }); @@ -483,12 +505,15 @@ describe("CacheHandler", () => { await cache.set("key", { kind: "REDIRECT", props: {} }); expect(incrementalCache.set).toHaveBeenCalledWith( - "key", + { + baseKey: "key", + cacheType: "cache", + buildId: "undefined-build-id", + }, { type: "redirect", props: {}, }, - "cache", ); }); @@ -537,13 +562,22 @@ describe("CacheHandler", () => { globalThis.tagCache.getByTag.mockResolvedValueOnce(["/path"]); await cache.revalidateTag("tag"); - expect(globalThis.tagCache.getByTag).toHaveBeenCalledWith("tag"); + expect(globalThis.tagCache.getByTag).toHaveBeenCalledWith({ + baseKey: "tag", + buildId: "undefined-build-id", + }); expect(tagCache.writeTags).toHaveBeenCalledTimes(1); expect(tagCache.writeTags).toHaveBeenCalledWith([ { - path: "/path", - tag: "tag", + path: { + baseKey: "/path", + buildId: "undefined-build-id", + }, + tag: { + baseKey: "tag", + buildId: "undefined-build-id", + }, }, ]); }); @@ -556,8 +590,14 @@ describe("CacheHandler", () => { expect(tagCache.writeTags).toHaveBeenCalledTimes(1); expect(tagCache.writeTags).toHaveBeenCalledWith([ { - path: "/path", - tag: "_N_T_/path", + path: { + baseKey: "/path", + buildId: "undefined-build-id", + }, + tag: { + baseKey: "_N_T_/path", + buildId: "undefined-build-id", + }, }, ]); @@ -571,8 +611,14 @@ describe("CacheHandler", () => { expect(tagCache.writeTags).toHaveBeenCalledTimes(1); expect(tagCache.writeTags).toHaveBeenCalledWith([ { - path: "123456", - tag: "tag", + path: { + baseKey: "123456", + buildId: "undefined-build-id", + }, + tag: { + baseKey: "tag", + buildId: 
"undefined-build-id", + }, }, ]); diff --git a/packages/tests-unit/tests/utils/cache.test.ts b/packages/tests-unit/tests/utils/cache.test.ts new file mode 100644 index 000000000..cc5681fd3 --- /dev/null +++ b/packages/tests-unit/tests/utils/cache.test.ts @@ -0,0 +1,62 @@ +import { createCacheKey } from "@opennextjs/aws/utils/cache.js"; +import { afterEach, beforeEach, describe, expect, test, vi } from "vitest"; + +describe("createCacheKey", () => { + const originalEnv = process.env; + const originalGlobalThis = globalThis as any; + + beforeEach(() => { + vi.resetModules(); + process.env = { ...originalEnv }; + + // Mock globalThis.openNextConfig + if (!globalThis.openNextConfig) { + globalThis.openNextConfig = { + dangerous: {}, + }; + } + }); + + afterEach(() => { + process.env = originalEnv; + globalThis.openNextConfig = originalGlobalThis.openNextConfig; + }); + + test("have a defined build id for non-data cache entries", () => { + process.env.NEXT_BUILD_ID = "test-build-id"; + const key = "test-key"; + + const result = createCacheKey({ key, type: "cache" }); + + expect(result.buildId).toBe("test-build-id"); + }); + + test("have a defined build id for data cache when persistentDataCache is not enabled", () => { + process.env.NEXT_BUILD_ID = "test-build-id"; + globalThis.openNextConfig.dangerous.persistentDataCache = false; + const key = "test-key"; + + const result = createCacheKey({ key, type: "fetch" }); + + expect(result.buildId).toBe("test-build-id"); + }); + + test("does not prepend build ID for data cache when persistentDataCache is enabled", () => { + process.env.NEXT_BUILD_ID = "test-build-id"; + globalThis.openNextConfig.dangerous.persistentDataCache = true; + const key = "test-key"; + + const result = createCacheKey({ key, type: "fetch" }); + + expect(result.buildId).toBeUndefined(); + }); + + test("handles missing build ID", () => { + process.env.NEXT_BUILD_ID = undefined; + const key = "test-key"; + + const result = createCacheKey({ key, type: 
"fetch" }); + + expect(result.buildId).toBeUndefined(); + }); +});