diff --git a/bun.lockb b/bun.lockb
index 69b2fb2..932f37e 100755
Binary files a/bun.lockb and b/bun.lockb differ
diff --git a/package.json b/package.json
index 27ad92d..bf87f6f 100644
--- a/package.json
+++ b/package.json
@@ -55,7 +55,6 @@
     "jose": "^6.1.3",
     "mermaid": "^11.12.2",
     "motion": "^12.23.26",
-    "redis": "^5.10.0",
     "solid-js": "^1.9.5",
     "solid-tiptap": "^0.8.0",
     "ua-parser-js": "^2.0.7",
diff --git a/src/components/Bars.tsx b/src/components/Bars.tsx
index 1a2bb91..d1f0e16 100644
--- a/src/components/Bars.tsx
+++ b/src/components/Bars.tsx
@@ -46,6 +46,45 @@ interface ContributionDay {
   count: number;
 }
 
+interface GitActivityData {
+  githubCommits: GitCommit[];
+  giteaCommits: GitCommit[];
+  githubActivity: ContributionDay[];
+  giteaActivity: ContributionDay[];
+}
+
+// Shared fetch promise — whichever instance mounts first starts the fetch;
+// the second instance awaits the same Promise instead of firing its own requests.
+let gitActivityPromise: Promise<GitActivityData> | null = null;
+
+function fetchGitActivity(): Promise<GitActivityData> {
+  if (gitActivityPromise) return gitActivityPromise;
+
+  gitActivityPromise = (async () => {
+    const [ghCommits, gtCommits, ghActivity, gtActivity] = await Promise.all([
+      api.gitActivity.getGitHubCommits.query({ limit: 6 }).catch(() => []),
+      api.gitActivity.getGiteaCommits.query({ limit: 6 }).catch(() => []),
+      api.gitActivity.getGitHubActivity.query().catch(() => []),
+      api.gitActivity.getGiteaActivity.query().catch(() => [])
+    ]);
+
+    const displayedGithubCommits = ghCommits.slice(0, 3);
+    const githubShas = new Set(displayedGithubCommits.map((c) => c.sha));
+    const uniqueGiteaCommits = gtCommits
+      .filter((commit) => !githubShas.has(commit.sha))
+      .slice(0, 3);
+
+    return {
+      githubCommits: displayedGithubCommits,
+      giteaCommits: uniqueGiteaCommits,
+      githubActivity: ghActivity,
+      giteaActivity: gtActivity
+    };
+  })();
+
+  return gitActivityPromise;
+}
+
 export function RightBarContent() {
   const { setLeftBarVisible } = useBars();
   const [githubCommits, setGithubCommits] = createSignal<GitCommit[]>([]);
@@ -66,41 +105,20 @@
   };
 
   onMount(() => {
-    const fetchData = async () => {
-      try {
-        // Fetch more commits to account for deduplication
-        const [ghCommits, gtCommits, ghActivity, gtActivity] =
-          await Promise.all([
-            api.gitActivity.getGitHubCommits
-              .query({ limit: 6 })
-              .catch(() => []),
-            api.gitActivity.getGiteaCommits.query({ limit: 6 }).catch(() => []),
-            api.gitActivity.getGitHubActivity.query().catch(() => []),
-            api.gitActivity.getGiteaActivity.query().catch(() => [])
-          ]);
-
-        // Take first 3 from GitHub
-        const displayedGithubCommits = ghCommits.slice(0, 3);
-
-        // Deduplicate Gitea commits - only against the 3 shown in GitHub section
-        const githubShas = new Set(displayedGithubCommits.map((c) => c.sha));
-        const uniqueGiteaCommits = gtCommits.filter(
-          (commit) => !githubShas.has(commit.sha)
-        );
-
-        setGithubCommits(displayedGithubCommits);
-        setGiteaCommits(uniqueGiteaCommits.slice(0, 3));
-        setGithubActivity(ghActivity);
-        setGiteaActivity(gtActivity);
-      } catch (error) {
-        console.error("Failed to fetch git activity:", error);
-      } finally {
-        setLoading(false);
-      }
-    };
-    setTimeout(() => {
-      fetchData();
+    fetchGitActivity()
+      .then((data) => {
+        setGithubCommits(data.githubCommits);
+        setGiteaCommits(data.giteaCommits);
+        setGithubActivity(data.githubActivity);
+        setGiteaActivity(data.giteaActivity);
+      })
+      .catch((error) => {
+        console.error("Failed to fetch git activity:", error);
+      })
+      .finally(() => {
+        setLoading(false);
+      });
-    }, 0);
   });
 
diff --git a/src/env/client.ts b/src/env/client.ts
index 68090f9..7081d74 100644
--- a/src/env/client.ts
+++ b/src/env/client.ts
@@ -1,80 +1,46 @@
-import { z } from "zod";
+export interface ClientEnv {
+  VITE_DOMAIN: string;
+  VITE_AWS_BUCKET_STRING: string;
+  VITE_DOWNLOAD_BUCKET_STRING: string;
+  VITE_GOOGLE_CLIENT_ID: string;
+  VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE: string;
+  VITE_GITHUB_CLIENT_ID: string;
+  VITE_WEBSOCKET: string;
+  VITE_INFILL_ENDPOINT: string;
+}
 
-const clientEnvSchema = z.object({
-  VITE_DOMAIN: z.string().min(1),
-  VITE_AWS_BUCKET_STRING: z.string().min(1),
-  VITE_DOWNLOAD_BUCKET_STRING: z.string().min(1),
-  VITE_GOOGLE_CLIENT_ID: z.string().min(1),
-  VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE: z.string().min(1),
-  VITE_GITHUB_CLIENT_ID: z.string().min(1),
-  VITE_WEBSOCKET: z.string().min(1),
-  VITE_INFILL_ENDPOINT: z.string().min(1)
-});
-
-export type ClientEnv = z.infer<typeof clientEnvSchema>;
+const requiredKeys: (keyof ClientEnv)[] = [
+  "VITE_DOMAIN",
+  "VITE_AWS_BUCKET_STRING",
+  "VITE_DOWNLOAD_BUCKET_STRING",
+  "VITE_GOOGLE_CLIENT_ID",
+  "VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE",
+  "VITE_GITHUB_CLIENT_ID",
+  "VITE_WEBSOCKET",
+  "VITE_INFILL_ENDPOINT"
+];
 
 export const validateClientEnv = (
   envVars: Record<string, string | undefined>
 ): ClientEnv => {
-  try {
-    return clientEnvSchema.parse(envVars);
-  } catch (error) {
-    if (error instanceof z.ZodError) {
-      const formattedErrors = error.format();
-      const missingVars = Object.entries(formattedErrors)
-        .filter(
-          ([key, value]) =>
-            key !== "_errors" &&
-            typeof value === "object" &&
-            value._errors?.length > 0 &&
-            value._errors[0] === "Required"
-        )
-        .map(([key, _]) => key);
+  const missing = requiredKeys.filter(
+    (key) => !envVars[key] || envVars[key]!.trim() === ""
+  );
 
-      const invalidVars = Object.entries(formattedErrors)
-        .filter(
-          ([key, value]) =>
-            key !== "_errors" &&
-            typeof value === "object" &&
-            value._errors?.length > 0 &&
-            value._errors[0] !== "Required"
-        )
-        .map(([key, value]) => ({
-          key,
-          error: value._errors[0]
-        }));
-
-      let errorMessage = "Client environment validation failed:\n";
-
-      if (missingVars.length > 0) {
-        errorMessage += `Missing required variables: ${missingVars.join(", ")}\n`;
-      }
-
-      if (invalidVars.length > 0) {
-        errorMessage += "Invalid values:\n";
-        invalidVars.forEach(({ key, error }) => {
-          errorMessage += `  ${key}: ${error}\n`;
-        });
-      }
-
-      console.error(errorMessage);
-      throw new Error(errorMessage);
-    }
-    console.error(
-      "Client environment validation failed with unknown error:",
-      error
-    );
-    throw new Error("Client environment validation failed with unknown error");
+  if (missing.length > 0) {
+    const message = `Client environment validation failed:\nMissing required variables: ${missing.join(", ")}`;
+    console.error(message);
+    throw new Error(message);
   }
+
+  return envVars as unknown as ClientEnv;
 };
 
 const validateAndExportEnv = (): ClientEnv => {
   try {
     const validated = validateClientEnv(import.meta.env);
-    console.log("✅ Client environment validation successful");
     return validated;
   } catch (error) {
-    console.error("❌ Client environment validation failed:", error);
     throw error;
   }
 };
@@ -86,14 +52,5 @@
 };
 
 export const getMissingEnvVars = (): string[] => {
-  const requiredClientVars = [
-    "VITE_DOMAIN",
-    "VITE_AWS_BUCKET_STRING",
-    "VITE_GOOGLE_CLIENT_ID",
-    "VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE",
-    "VITE_GITHUB_CLIENT_ID",
-    "VITE_WEBSOCKET"
-  ];
-
-  return requiredClientVars.filter((varName) => isMissingEnvVar(varName));
+  return requiredKeys.filter((varName) => isMissingEnvVar(varName));
 };
diff --git a/src/lib/performance-tracking.ts b/src/lib/performance-tracking.ts
index 36a0e97..e2ee102 100644
--- a/src/lib/performance-tracking.ts
+++ b/src/lib/performance-tracking.ts
@@ -24,66 +24,76 @@ export function initPerformanceTracking() {
     return;
   }
 
+  const supported = new Set(PerformanceObserver.supportedEntryTypes ?? []);
+
   // Observe LCP
-  try {
-    const lcpObserver = new PerformanceObserver((entryList) => {
-      const entries = entryList.getEntries();
-      const lastEntry = entries[entries.length - 1] as any;
-      metrics.lcp = lastEntry.renderTime || lastEntry.loadTime;
-    });
-    lcpObserver.observe({ type: "largest-contentful-paint", buffered: true });
-  } catch (e) {
-    console.debug("LCP not supported");
+  if (supported.has("largest-contentful-paint")) {
+    try {
+      const lcpObserver = new PerformanceObserver((entryList) => {
+        const entries = entryList.getEntries();
+        const lastEntry = entries[entries.length - 1] as any;
+        metrics.lcp = lastEntry.renderTime || lastEntry.loadTime;
+      });
+      lcpObserver.observe({ type: "largest-contentful-paint", buffered: true });
+    } catch (e) {
+      console.debug("LCP observer failed");
+    }
   }
 
   // Observe CLS
-  try {
-    const clsObserver = new PerformanceObserver((entryList) => {
-      for (const entry of entryList.getEntries()) {
-        const layoutShift = entry as any;
-        if (!layoutShift.hadRecentInput) {
-          clsValue += layoutShift.value;
-          clsEntries.push(layoutShift.value);
+  if (supported.has("layout-shift")) {
+    try {
+      const clsObserver = new PerformanceObserver((entryList) => {
+        for (const entry of entryList.getEntries()) {
+          const layoutShift = entry as any;
+          if (!layoutShift.hadRecentInput) {
+            clsValue += layoutShift.value;
+            clsEntries.push(layoutShift.value);
+          }
         }
-      }
-      metrics.cls = clsValue;
-    });
-    clsObserver.observe({ type: "layout-shift", buffered: true });
-  } catch (e) {
-    console.debug("CLS not supported");
+        metrics.cls = clsValue;
+      });
+      clsObserver.observe({ type: "layout-shift", buffered: true });
+    } catch (e) {
+      console.debug("CLS observer failed");
+    }
   }
 
   // Observe FID
-  try {
-    const fidObserver = new PerformanceObserver((entryList) => {
-      const firstInput = entryList.getEntries()[0] as any;
-      if (firstInput) {
-        metrics.fid = firstInput.processingStart - firstInput.startTime;
-      }
-    });
-    fidObserver.observe({ type: "first-input", buffered: true });
-  } catch (e) {
-    console.debug("FID not supported");
+  if (supported.has("first-input")) {
+    try {
+      const fidObserver = new PerformanceObserver((entryList) => {
+        const firstInput = entryList.getEntries()[0] as any;
+        if (firstInput) {
+          metrics.fid = firstInput.processingStart - firstInput.startTime;
+        }
+      });
+      fidObserver.observe({ type: "first-input", buffered: true });
+    } catch (e) {
+      console.debug("FID observer failed");
+    }
  }
 
   // Observe INP (event timing)
-  try {
-    const interactions: number[] = [];
-    const inpObserver = new PerformanceObserver((entryList) => {
-      for (const entry of entryList.getEntries()) {
-        const eventEntry = entry as any;
-        if (eventEntry.interactionId) {
-          interactions.push(eventEntry.duration);
-          const sorted = [...interactions].sort((a, b) => b - a);
-          const p98Index = Math.floor(sorted.length * 0.02);
-          inpValue = sorted[p98Index] || sorted[0] || 0;
-          metrics.inp = inpValue;
+  if (supported.has("event")) {
+    try {
+      const interactions: number[] = [];
+      const inpObserver = new PerformanceObserver((entryList) => {
+        for (const entry of entryList.getEntries()) {
+          const eventEntry = entry as any;
+          if (eventEntry.interactionId) {
+            interactions.push(eventEntry.duration);
+            const sorted = [...interactions].sort((a, b) => b - a);
+            const p98Index = Math.floor(sorted.length * 0.02);
+            inpValue = sorted[p98Index] || sorted[0] || 0;
+            metrics.inp = inpValue;
+          }
         }
-      }
-    });
-    inpObserver.observe({ type: "event", buffered: true });
-  } catch (e) {
-    console.debug("INP not supported");
+      });
+      inpObserver.observe({ type: "event", buffered: true });
+    } catch (e) {
+      console.debug("INP observer failed");
+    }
   }
 
   // Get navigation timing metrics
diff --git a/src/server/api/routers/git-activity.ts b/src/server/api/routers/git-activity.ts
index d47169d..eedc0a5 100644
--- a/src/server/api/routers/git-activity.ts
+++ b/src/server/api/routers/git-activity.ts
@@ -49,73 +49,29 @@
        const events = await eventsResponse.json();
        const allCommits: GitCommit[] = [];
 
-        // Extract push events and fetch commit details
+        // Extract commits directly from PushEvent payload — no per-commit API calls needed
        for (const event of events) {
          if (event.type !== "PushEvent") continue;
-          if (allCommits.length >= input.limit * 5) break; // Get extra to ensure we have enough
+          if (allCommits.length >= input.limit) break;
 
          const repoName = event.repo.name;
-          const commitSha = event.payload.head;
+          const payloadCommits: any[] = event.payload.commits || [];
 
-          try {
-            // Fetch the actual commit details to get the message
-            const commitResponse = await fetchWithTimeout(
-              `https://api.github.com/repos/${repoName}/commits/${commitSha}`,
-              {
-                headers: {
-                  Authorization: `Bearer ${env.GITHUB_API_TOKEN}`,
-                  Accept: "application/vnd.github.v3+json"
-                },
-                timeout: 5000
-              }
-            );
-
-            if (commitResponse.ok) {
-              const commit = await commitResponse.json();
-
-              // Filter for your commits
-              if (
-                commit.author?.login === "MikeFreno" ||
-                commit.author?.login === "mikefreno" ||
-                commit.commit?.author?.email?.includes("mike")
-              ) {
-                allCommits.push({
-                  sha: commit.sha?.substring(0, 7) || "unknown",
-                  message:
-                    commit.commit?.message?.split("\n")[0] || "No message",
-                  author:
-                    commit.commit?.author?.name ||
-                    commit.author?.login ||
-                    "Unknown",
-                  date:
-                    commit.commit?.author?.date || new Date().toISOString(),
-                  repo: repoName,
-                  url: `https://github.com/${repoName}/commit/${commit.sha}`
-                });
-              }
-            }
-          } catch (error) {
-            if (
-              error instanceof NetworkError ||
-              error instanceof TimeoutError
-            ) {
-              console.warn(
-                `Network error fetching commit ${commitSha} for ${repoName}, skipping`
-              );
-            } else {
-              console.error(
-                `Error fetching commit ${commitSha} for ${repoName}:`,
-                error
-              );
-            }
+          for (const payloadCommit of payloadCommits) {
+            if (allCommits.length >= input.limit) break;
+            allCommits.push({
+              sha: payloadCommit.sha?.substring(0, 7) || "unknown",
+              message: payloadCommit.message?.split("\n")[0] || "No message",
+              author: payloadCommit.author?.name || "Unknown",
+              // event.created_at is the push timestamp — close enough to commit date
+              date: event.created_at || new Date().toISOString(),
+              repo: repoName,
+              url: `https://github.com/${repoName}/commit/${payloadCommit.sha}`
+            });
          }
        }
 
-        // Already sorted by event date, but sort again by commit date to be precise
-        allCommits.sort(
-          (a, b) => new Date(b.date).getTime() - new Date(a.date).getTime()
-        );
-
+        // Events are already in reverse-chronological order
        return allCommits.slice(0, input.limit);
      },
      { maxStaleMs: CACHE_CONFIG.GIT_ACTIVITY_MAX_STALE_MS }
@@ -155,13 +111,11 @@
        await checkResponse(reposResponse);
        const repos = await reposResponse.json();
 
-        const allCommits: GitCommit[] = [];
-        for (const repo of repos) {
-          if (allCommits.length >= input.limit * 3) break; // Get extra to sort later
-
-          try {
-            const commitsResponse = await fetchWithTimeout(
+        // Fetch commits for all repos in parallel instead of serially
+        const commitResults = await Promise.allSettled(
+          repos.map((repo: any) =>
+            fetchWithTimeout(
              `${env.GITEA_URL}/api/v1/repos/Mike/${repo.name}/commits?limit=5`,
              {
                headers: {
@@ -170,46 +124,36 @@
                },
                timeout: 10000
              }
-            );
+            )
+              .then((res) => (res.ok ? res.json() : []))
+              .catch(() => [])
+          )
+        );
 
-            if (commitsResponse.ok) {
-              const commits = await commitsResponse.json();
-              for (const commit of commits) {
-                if (
-                  (commit.commit?.author?.email &&
-                    commit.commit.author.email.includes(
-                      "michael@freno.me"
-                    )) ||
-                  commit.commit.author.email.includes(
-                    "michaelt.freno@gmail.com"
-                  ) // Filter for your commits
-                ) {
-                  allCommits.push({
-                    sha: commit.sha?.substring(0, 7) || "unknown",
-                    message:
-                      commit.commit?.message?.split("\n")[0] || "No message",
-                    author: commit.commit?.author?.name || repo.owner.login,
-                    date:
-                      commit.commit?.author?.date || new Date().toISOString(),
-                    repo: repo.full_name,
-                    url: `${env.GITEA_URL}/${repo.full_name}/commit/${commit.sha}`
-                  });
-                }
-              }
-            }
-          } catch (error) {
+        const allCommits: GitCommit[] = [];
+        for (let i = 0; i < commitResults.length; i++) {
+          const result = commitResults[i];
+          if (result.status === "rejected") continue;
+          const repo = repos[i];
+          const commits: any[] = result.value;
+          for (const commit of commits) {
+            const email: string = commit.commit?.author?.email ?? "";
            if (
-              error instanceof NetworkError ||
-              error instanceof TimeoutError
+              email.includes("michael@freno.me") ||
+              email.includes("michaelt.freno@gmail.com")
            ) {
-              console.warn(
-                `Network error fetching commits for ${repo.name}, skipping`
-              );
-            } else {
-              console.error(
-                `Error fetching commits for ${repo.name}:`,
-                error
-              );
+              allCommits.push({
+                sha: commit.sha?.substring(0, 7) || "unknown",
+                message:
+                  commit.commit?.message?.split("\n")[0] || "No message",
+                author:
+                  commit.commit?.author?.name ||
+                  repo.owner?.login ||
+                  "Unknown",
+                date: commit.commit?.author?.date || new Date().toISOString(),
+                repo: repo.full_name,
+                url: `${env.GITEA_URL}/${repo.full_name}/commit/${commit.sha}`
+              });
            }
          }
        }
@@ -336,11 +280,13 @@
        const threeMonthsAgo = new Date();
        threeMonthsAgo.setMonth(threeMonthsAgo.getMonth() - 3);
+        const sinceParam = threeMonthsAgo.toISOString();
 
-        for (const repo of repos) {
-          try {
-            const commitsResponse = await fetchWithTimeout(
-              `${env.GITEA_URL}/api/v1/repos/${repo.owner.login}/${repo.name}/commits?limit=100`,
+        // Fetch commits for all repos in parallel, scoped to the 3-month window
+        const commitResults = await Promise.allSettled(
+          repos.map((repo: any) =>
+            fetchWithTimeout(
+              `${env.GITEA_URL}/api/v1/repos/${repo.owner.login}/${repo.name}/commits?limit=100&since=${sinceParam}`,
              {
                headers: {
                  Authorization: `token ${env.GITEA_TOKEN}`,
@@ -348,31 +294,23 @@
                },
                timeout: 10000
              }
-            );
+            )
+              .then((res) => (res.ok ? res.json() : []))
+              .catch(() => [])
+          )
+        );
 
-            if (commitsResponse.ok) {
-              const commits = await commitsResponse.json();
-              for (const commit of commits) {
-                const date = new Date(commit.commit.author.date)
-                  .toISOString()
-                  .split("T")[0];
-                contributionsByDay.set(
-                  date,
-                  (contributionsByDay.get(date) || 0) + 1
-                );
-              }
-            }
-          } catch (error) {
-            if (
-              error instanceof NetworkError ||
-              error instanceof TimeoutError
-            ) {
-              console.warn(
-                `Network error fetching commits for ${repo.name}, skipping`
-              );
-            } else {
-              console.error(`Error fetching commits for ${repo.name}:`, error);
-            }
+        for (const result of commitResults) {
+          if (result.status === "rejected") continue;
+          const commits: any[] = result.value;
+          for (const commit of commits) {
+            const date = new Date(commit.commit.author.date)
+              .toISOString()
+              .split("T")[0];
+            contributionsByDay.set(
+              date,
+              (contributionsByDay.get(date) || 0) + 1
+            );
          }
        }
 
diff --git a/src/server/cache.ts b/src/server/cache.ts
index 6e35268..87aa3c9 100644
--- a/src/server/cache.ts
+++ b/src/server/cache.ts
@@ -1,167 +1,89 @@
 /**
- * Redis-backed Cache for Serverless
+ * In-memory cache with TTL
  *
- * Uses Redis for persistent caching across serverless invocations.
- * Redis provides:
- * - Fast in-memory storage
- * - Built-in TTL expiration (automatic cleanup)
- * - Persistence across function invocations
- * - Native support in Vercel and other platforms
+ * Redis was replaced because on a low-traffic site the cache TTL almost always
+ * expires between visits, so every request paid Redis connection + round-trip
+ * overhead with no benefit. A module-level Map has zero network latency:
+ * cache hits are a single dictionary lookup, misses fall through immediately.
 */
-import { createClient } from "redis";
-import { env } from "~/env/server";
 import { CACHE_CONFIG } from "~/config";
 
-let redisClient: ReturnType<typeof createClient> | null = null;
-let isConnecting = false;
-let connectionError: Error | null = null;
-
-/**
- * Get or create Redis client (singleton pattern)
- */
-async function getRedisClient() {
-  if (redisClient && redisClient.isOpen) {
-    return redisClient;
-  }
-
-  if (isConnecting) {
-    // Wait for existing connection attempt
-    await new Promise((resolve) => setTimeout(resolve, 100));
-    return getRedisClient();
-  }
-
-  if (connectionError) {
-    throw connectionError;
-  }
-
-  try {
-    isConnecting = true;
-    redisClient = createClient({ url: env.REDIS_URL });
-
-    redisClient.on("error", (err) => {
-      console.error("Redis Client Error:", err);
-      connectionError = err;
-    });
-
-    await redisClient.connect();
-    isConnecting = false;
-    connectionError = null;
-    return redisClient;
-  } catch (error) {
-    isConnecting = false;
-    connectionError = error as Error;
-    console.error("Failed to connect to Redis:", error);
-    throw error;
-  }
+interface CacheEntry<T> {
+  data: T;
+  /** Absolute timestamp (ms) after which this entry is considered stale */
+  expiresAt: number;
+  /** Absolute timestamp (ms) after which stale fallback is also discarded */
+  staleExpiresAt: number;
 }
 
-/**
- * Redis-backed cache interface
- */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+const store = new Map<string, CacheEntry<any>>();
+
 export const cache = {
-  async get<T>(key: string): Promise<T | null> {
-    try {
-      const client = await getRedisClient();
-      const value = await client.get(key);
+  get<T>(key: string): T | null {
+    const entry = store.get(key) as CacheEntry<T> | undefined;
+    if (!entry) return null;
+    if (Date.now() > entry.expiresAt) return null;
+    return entry.data;
+  },
 
-      if (!value) {
-        return null;
-      }
+  set<T>(key: string, data: T, ttlMs: number): void {
+    const existing = store.get(key);
+    store.set(key, {
+      data,
+      expiresAt: Date.now() + ttlMs,
+      // Preserve an existing stale expiry if present, otherwise default
+      staleExpiresAt:
+        existing?.staleExpiresAt ?? Date.now() + CACHE_CONFIG.MAX_STALE_DATA_MS
+    });
+  },
 
-      return JSON.parse(value) as T;
-    } catch (error) {
-      console.error(`Cache get error for key "${key}":`, error);
-      return null;
+  delete(key: string): void {
+    store.delete(key);
+  },
+
+  deleteByPrefix(prefix: string): void {
+    for (const key of store.keys()) {
+      if (key.startsWith(prefix)) store.delete(key);
    }
  },
 
-  async set<T>(key: string, data: T, ttlMs: number): Promise<void> {
-    try {
-      const client = await getRedisClient();
-      const value = JSON.stringify(data);
-
-      // Redis SET with EX (expiry in seconds)
-      await client.set(key, value, {
-        EX: Math.ceil(ttlMs / 1000)
-      });
-    } catch (error) {
-      console.error(`Cache set error for key "${key}":`, error);
-    }
+  clear(): void {
+    store.clear();
  },
 
-  async delete(key: string): Promise<void> {
-    try {
-      const client = await getRedisClient();
-      await client.del(key);
-    } catch (error) {
-      console.error(`Cache delete error for key "${key}":`, error);
-    }
-  },
-
-  async deleteByPrefix(prefix: string): Promise<void> {
-    try {
-      const client = await getRedisClient();
-      const keys = await client.keys(`${prefix}*`);
-
-      if (keys.length > 0) {
-        await client.del(keys);
-      }
-    } catch (error) {
-      console.error(
-        `Cache deleteByPrefix error for prefix "${prefix}":`,
-        error
-      );
-    }
-  },
-
-  async clear(): Promise<void> {
-    try {
-      const client = await getRedisClient();
-      await client.flushDb();
-    } catch (error) {
-      console.error("Cache clear error:", error);
-    }
-  },
-
-  async has(key: string): Promise<boolean> {
-    try {
-      const client = await getRedisClient();
-      const exists = await client.exists(key);
-      return exists === 1;
-    } catch (error) {
-      console.error(`Cache has error for key "${key}":`, error);
-      return false;
-    }
+  has(key: string): boolean {
+    const entry = store.get(key);
+    if (!entry) return false;
+    return Date.now() <= entry.expiresAt;
  }
 };
 
 /**
- * Execute function with Redis caching
+ * Execute function with in-memory caching.
 */
export async function withCache<T>(
  key: string,
  ttlMs: number,
  fn: () => Promise<T>
): Promise<T> {
-  const cached = await cache.get<T>(key);
-  if (cached !== null) {
-    return cached;
-  }
+  const cached = cache.get<T>(key);
+  if (cached !== null) return cached;
 
  const result = await fn();
-  await cache.set(key, result, ttlMs);
+  cache.set(key, result, ttlMs);
  return result;
}
 
 /**
- * Execute function with Redis caching and stale data fallback
+ * Execute function with caching and stale-data fallback.
  *
  * Strategy:
- * 1. Try to get fresh cached data (within TTL)
- * 2. If not found, execute function
- * 3. If function fails, try to get stale data (ignore TTL)
- * 4. Store result with TTL for future requests
+ * 1. Return data if fresh (within TTL).
+ * 2. Otherwise run fn().
+ * 3. If fn() throws, return stale data if still within maxStaleMs.
+ * 4. Store fresh result for future requests.
 */
export async function withCacheAndStale<T>(
  key: string,
@@ -175,34 +97,29 @@
  const { maxStaleMs = CACHE_CONFIG.MAX_STALE_DATA_MS, logErrors = true } = options;
 
-  // Try fresh cache
-  const cached = await cache.get<T>(key);
-  if (cached !== null) {
-    return cached;
-  }
+  const now = Date.now();
+  const entry = store.get(key) as CacheEntry<T> | undefined;
+
+  // Fresh hit
+  if (entry && entry.expiresAt > now) return entry.data;
 
  try {
-    // Execute function
    const result = await fn();
-    await cache.set(key, result, ttlMs);
-    // Also store with longer TTL for stale fallback
-    const staleKey = `${key}:stale`;
-    await cache.set(staleKey, result, maxStaleMs);
+    store.set(key, {
+      data: result,
+      expiresAt: now + ttlMs,
+      staleExpiresAt: now + maxStaleMs
+    });
    return result;
  } catch (error) {
    if (logErrors) {
      console.error(`Error fetching data for cache key "${key}":`, error);
    }
 
-    // Try stale cache with longer TTL key
-    const staleKey = `${key}:stale`;
-    const staleData = await cache.get<T>(staleKey);
-
-    if (staleData !== null) {
-      if (logErrors) {
-        console.log(`Serving stale data for cache key "${key}"`);
-      }
-      return staleData;
+    // Stale fallback
+    if (entry && entry.staleExpiresAt > now) {
+      if (logErrors) console.log(`Serving stale data for cache key "${key}"`);
+      return entry.data;
    }
 
    throw error;
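
For reference, a minimal usage sketch (not part of the diff) of how a caller consumes the rewritten withCacheAndStale helper: the call shape is unchanged from the Redis-backed version, so call sites only lose the awaits on cache primitives. withCacheAndStale and CACHE_CONFIG.GIT_ACTIVITY_MAX_STALE_MS are the names used in the diff above; the cache key, TTL value, Commit shape, and fetchCommits function are illustrative assumptions.

import { withCacheAndStale } from "~/server/cache";
import { CACHE_CONFIG } from "~/config";

// Stand-in for the GitCommit shape defined in git-activity.ts
interface Commit {
  sha: string;
  message: string;
}

// Hypothetical fetcher standing in for the GitHub/Gitea requests made in the router
async function fetchCommits(): Promise<Commit[]> {
  const res = await fetch("https://api.github.com/users/MikeFreno/events/public");
  if (!res.ok) throw new Error(`GitHub responded ${res.status}`);
  return (await res.json()) as Commit[];
}

export function getCommitsCached(): Promise<Commit[]> {
  return withCacheAndStale<Commit[]>(
    "git-activity:github-commits", // assumed cache key
    5 * 60 * 1000,                 // illustrative TTL: fresh for 5 minutes
    fetchCommits,
    { maxStaleMs: CACHE_CONFIG.GIT_ACTIVITY_MAX_STALE_MS }
  );
}

If fetchCommits throws while a previous result is still within maxStaleMs, the stale entry is returned instead of propagating the error; once staleExpiresAt has also passed, the error surfaces to the caller.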