(sidebar) forget redis, better parallelization

2026-04-06 13:32:46 -04:00
parent 67bf77815e
commit 4dbd0ac965
7 changed files with 274 additions and 435 deletions

bun.lockb (binary file not shown)

package.json

@@ -55,7 +55,6 @@
   "jose": "^6.1.3",
   "mermaid": "^11.12.2",
   "motion": "^12.23.26",
-  "redis": "^5.10.0",
   "solid-js": "^1.9.5",
   "solid-tiptap": "^0.8.0",
   "ua-parser-js": "^2.0.7",

View File

@@ -46,6 +46,45 @@ interface ContributionDay {
   count: number;
 }

+interface GitActivityData {
+  githubCommits: GitCommit[];
+  giteaCommits: GitCommit[];
+  githubActivity: ContributionDay[];
+  giteaActivity: ContributionDay[];
+}
+
+// Shared fetch promise — whichever instance mounts first starts the fetch;
+// the second instance awaits the same Promise instead of firing its own requests.
+let gitActivityPromise: Promise<GitActivityData> | null = null;
+
+function fetchGitActivity(): Promise<GitActivityData> {
+  if (gitActivityPromise) return gitActivityPromise;
+
+  gitActivityPromise = (async () => {
+    const [ghCommits, gtCommits, ghActivity, gtActivity] = await Promise.all([
+      api.gitActivity.getGitHubCommits.query({ limit: 6 }).catch(() => []),
+      api.gitActivity.getGiteaCommits.query({ limit: 6 }).catch(() => []),
+      api.gitActivity.getGitHubActivity.query().catch(() => []),
+      api.gitActivity.getGiteaActivity.query().catch(() => [])
+    ]);
+
+    const displayedGithubCommits = ghCommits.slice(0, 3);
+    const githubShas = new Set(displayedGithubCommits.map((c) => c.sha));
+    const uniqueGiteaCommits = gtCommits
+      .filter((commit) => !githubShas.has(commit.sha))
+      .slice(0, 3);
+
+    return {
+      githubCommits: displayedGithubCommits,
+      giteaCommits: uniqueGiteaCommits,
+      githubActivity: ghActivity,
+      giteaActivity: gtActivity
+    };
+  })();
+
+  return gitActivityPromise;
+}
+
 export function RightBarContent() {
   const { setLeftBarVisible } = useBars();
   const [githubCommits, setGithubCommits] = createSignal<GitCommit[]>([]);
@@ -66,41 +105,20 @@ export function RightBarContent() {
   };

   onMount(() => {
-    const fetchData = async () => {
-      try {
-        // Fetch more commits to account for deduplication
-        const [ghCommits, gtCommits, ghActivity, gtActivity] =
-          await Promise.all([
-            api.gitActivity.getGitHubCommits
-              .query({ limit: 6 })
-              .catch(() => []),
-            api.gitActivity.getGiteaCommits.query({ limit: 6 }).catch(() => []),
-            api.gitActivity.getGitHubActivity.query().catch(() => []),
-            api.gitActivity.getGiteaActivity.query().catch(() => [])
-          ]);
-
-        // Take first 3 from GitHub
-        const displayedGithubCommits = ghCommits.slice(0, 3);
-
-        // Deduplicate Gitea commits - only against the 3 shown in GitHub section
-        const githubShas = new Set(displayedGithubCommits.map((c) => c.sha));
-        const uniqueGiteaCommits = gtCommits.filter(
-          (commit) => !githubShas.has(commit.sha)
-        );
-
-        setGithubCommits(displayedGithubCommits);
-        setGiteaCommits(uniqueGiteaCommits.slice(0, 3));
-        setGithubActivity(ghActivity);
-        setGiteaActivity(gtActivity);
-      } catch (error) {
-        console.error("Failed to fetch git activity:", error);
-      } finally {
-        setLoading(false);
-      }
-    };
-
     setTimeout(() => {
-      fetchData();
+      fetchGitActivity()
+        .then((data) => {
+          setGithubCommits(data.githubCommits);
+          setGiteaCommits(data.giteaCommits);
+          setGithubActivity(data.githubActivity);
+          setGiteaActivity(data.giteaActivity);
+        })
+        .catch((error) => {
+          console.error("Failed to fetch git activity:", error);
+        })
+        .finally(() => {
+          setLoading(false);
+        });
     }, 0);
   });
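
Note on the pattern: the module-level promise above is a generic request-deduplication trick, not Solid-specific. A minimal sketch under assumed names (loadOnce, /api/data):

let inflight: Promise<string[]> | null = null;

function loadOnce(): Promise<string[]> {
  // First caller starts the request; concurrent callers reuse the in-flight promise.
  if (!inflight) {
    inflight = fetch("/api/data").then((res) => res.json() as Promise<string[]>);
  }
  return inflight;
}

One caveat of this shape: a rejected promise stays cached forever, so failures are never retried. It doesn't bite here because every query above already maps errors to [] via .catch, but a general version would reset inflight to null on rejection.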

src/env/client.ts vendored (103 lines changed)

@@ -1,80 +1,46 @@
import { z } from "zod"; export interface ClientEnv {
VITE_DOMAIN: string;
VITE_AWS_BUCKET_STRING: string;
VITE_DOWNLOAD_BUCKET_STRING: string;
VITE_GOOGLE_CLIENT_ID: string;
VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE: string;
VITE_GITHUB_CLIENT_ID: string;
VITE_WEBSOCKET: string;
VITE_INFILL_ENDPOINT: string;
}
const clientEnvSchema = z.object({ const requiredKeys: (keyof ClientEnv)[] = [
VITE_DOMAIN: z.string().min(1), "VITE_DOMAIN",
VITE_AWS_BUCKET_STRING: z.string().min(1), "VITE_AWS_BUCKET_STRING",
VITE_DOWNLOAD_BUCKET_STRING: z.string().min(1), "VITE_DOWNLOAD_BUCKET_STRING",
VITE_GOOGLE_CLIENT_ID: z.string().min(1), "VITE_GOOGLE_CLIENT_ID",
VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE: z.string().min(1), "VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE",
VITE_GITHUB_CLIENT_ID: z.string().min(1), "VITE_GITHUB_CLIENT_ID",
VITE_WEBSOCKET: z.string().min(1), "VITE_WEBSOCKET",
VITE_INFILL_ENDPOINT: z.string().min(1) "VITE_INFILL_ENDPOINT"
}); ];
export type ClientEnv = z.infer<typeof clientEnvSchema>;
export const validateClientEnv = ( export const validateClientEnv = (
envVars: Record<string, string | undefined> envVars: Record<string, string | undefined>
): ClientEnv => { ): ClientEnv => {
try { const missing = requiredKeys.filter(
return clientEnvSchema.parse(envVars); (key) => !envVars[key] || envVars[key]!.trim() === ""
} catch (error) { );
if (error instanceof z.ZodError) {
const formattedErrors = error.format();
const missingVars = Object.entries(formattedErrors)
.filter(
([key, value]) =>
key !== "_errors" &&
typeof value === "object" &&
value._errors?.length > 0 &&
value._errors[0] === "Required"
)
.map(([key, _]) => key);
const invalidVars = Object.entries(formattedErrors) if (missing.length > 0) {
.filter( const message = `Client environment validation failed:\nMissing required variables: ${missing.join(", ")}`;
([key, value]) => console.error(message);
key !== "_errors" && throw new Error(message);
typeof value === "object" &&
value._errors?.length > 0 &&
value._errors[0] !== "Required"
)
.map(([key, value]) => ({
key,
error: value._errors[0]
}));
let errorMessage = "Client environment validation failed:\n";
if (missingVars.length > 0) {
errorMessage += `Missing required variables: ${missingVars.join(", ")}\n`;
}
if (invalidVars.length > 0) {
errorMessage += "Invalid values:\n";
invalidVars.forEach(({ key, error }) => {
errorMessage += ` ${key}: ${error}\n`;
});
}
console.error(errorMessage);
throw new Error(errorMessage);
}
console.error(
"Client environment validation failed with unknown error:",
error
);
throw new Error("Client environment validation failed with unknown error");
} }
return envVars as unknown as ClientEnv;
}; };
const validateAndExportEnv = (): ClientEnv => { const validateAndExportEnv = (): ClientEnv => {
try { try {
const validated = validateClientEnv(import.meta.env); const validated = validateClientEnv(import.meta.env);
console.log("✅ Client environment validation successful");
return validated; return validated;
} catch (error) { } catch (error) {
console.error("❌ Client environment validation failed:", error);
throw error; throw error;
} }
}; };
@@ -86,14 +52,5 @@ export const isMissingEnvVar = (varName: string): boolean => {
 };

 export const getMissingEnvVars = (): string[] => {
-  const requiredClientVars = [
-    "VITE_DOMAIN",
-    "VITE_AWS_BUCKET_STRING",
-    "VITE_GOOGLE_CLIENT_ID",
-    "VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE",
-    "VITE_GITHUB_CLIENT_ID",
-    "VITE_WEBSOCKET"
-  ];
-  return requiredClientVars.filter((varName) => isMissingEnvVar(varName));
+  return requiredKeys.filter((varName) => isMissingEnvVar(varName));
 };


@@ -24,66 +24,76 @@ export function initPerformanceTracking() {
     return;
   }

+  const supported = new Set(PerformanceObserver.supportedEntryTypes ?? []);
+
   // Observe LCP
-  try {
-    const lcpObserver = new PerformanceObserver((entryList) => {
-      const entries = entryList.getEntries();
-      const lastEntry = entries[entries.length - 1] as any;
-      metrics.lcp = lastEntry.renderTime || lastEntry.loadTime;
-    });
-    lcpObserver.observe({ type: "largest-contentful-paint", buffered: true });
-  } catch (e) {
-    console.debug("LCP not supported");
+  if (supported.has("largest-contentful-paint")) {
+    try {
+      const lcpObserver = new PerformanceObserver((entryList) => {
+        const entries = entryList.getEntries();
+        const lastEntry = entries[entries.length - 1] as any;
+        metrics.lcp = lastEntry.renderTime || lastEntry.loadTime;
+      });
+      lcpObserver.observe({ type: "largest-contentful-paint", buffered: true });
+    } catch (e) {
+      console.debug("LCP observer failed");
+    }
   }

   // Observe CLS
-  try {
-    const clsObserver = new PerformanceObserver((entryList) => {
-      for (const entry of entryList.getEntries()) {
-        const layoutShift = entry as any;
-        if (!layoutShift.hadRecentInput) {
-          clsValue += layoutShift.value;
-          clsEntries.push(layoutShift.value);
-        }
-      }
-      metrics.cls = clsValue;
-    });
-    clsObserver.observe({ type: "layout-shift", buffered: true });
-  } catch (e) {
-    console.debug("CLS not supported");
+  if (supported.has("layout-shift")) {
+    try {
+      const clsObserver = new PerformanceObserver((entryList) => {
+        for (const entry of entryList.getEntries()) {
+          const layoutShift = entry as any;
+          if (!layoutShift.hadRecentInput) {
+            clsValue += layoutShift.value;
+            clsEntries.push(layoutShift.value);
+          }
+        }
+        metrics.cls = clsValue;
+      });
+      clsObserver.observe({ type: "layout-shift", buffered: true });
+    } catch (e) {
+      console.debug("CLS observer failed");
+    }
   }

   // Observe FID
-  try {
-    const fidObserver = new PerformanceObserver((entryList) => {
-      const firstInput = entryList.getEntries()[0] as any;
-      if (firstInput) {
-        metrics.fid = firstInput.processingStart - firstInput.startTime;
-      }
-    });
-    fidObserver.observe({ type: "first-input", buffered: true });
-  } catch (e) {
-    console.debug("FID not supported");
+  if (supported.has("first-input")) {
+    try {
+      const fidObserver = new PerformanceObserver((entryList) => {
+        const firstInput = entryList.getEntries()[0] as any;
+        if (firstInput) {
+          metrics.fid = firstInput.processingStart - firstInput.startTime;
+        }
+      });
+      fidObserver.observe({ type: "first-input", buffered: true });
+    } catch (e) {
+      console.debug("FID observer failed");
+    }
   }

   // Observe INP (event timing)
-  try {
-    const interactions: number[] = [];
-    const inpObserver = new PerformanceObserver((entryList) => {
-      for (const entry of entryList.getEntries()) {
-        const eventEntry = entry as any;
-        if (eventEntry.interactionId) {
-          interactions.push(eventEntry.duration);
-          const sorted = [...interactions].sort((a, b) => b - a);
-          const p98Index = Math.floor(sorted.length * 0.02);
-          inpValue = sorted[p98Index] || sorted[0] || 0;
-          metrics.inp = inpValue;
-        }
-      }
-    });
-    inpObserver.observe({ type: "event", buffered: true });
-  } catch (e) {
-    console.debug("INP not supported");
+  if (supported.has("event")) {
+    try {
+      const interactions: number[] = [];
+      const inpObserver = new PerformanceObserver((entryList) => {
+        for (const entry of entryList.getEntries()) {
+          const eventEntry = entry as any;
+          if (eventEntry.interactionId) {
+            interactions.push(eventEntry.duration);
+            const sorted = [...interactions].sort((a, b) => b - a);
+            const p98Index = Math.floor(sorted.length * 0.02);
+            inpValue = sorted[p98Index] || sorted[0] || 0;
+            metrics.inp = inpValue;
+          }
+        }
+      });
+      inpObserver.observe({ type: "event", buffered: true });
+    } catch (e) {
+      console.debug("INP observer failed");
+    }
   }

   // Get navigation timing metrics
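
The guard added above, in isolation, as a simplified sketch rather than the file's code. PerformanceObserver.supportedEntryTypes is a static property; the inner try/catch stays because observe() can still throw on partial implementations:

const supported = new Set(PerformanceObserver.supportedEntryTypes ?? []);

if (supported.has("largest-contentful-paint")) {
  try {
    const po = new PerformanceObserver((list) => {
      const entries = list.getEntries();
      const last = entries[entries.length - 1] as any;
      // LCP entries expose renderTime (same-origin) or loadTime (cross-origin fallback)
      console.debug("LCP:", last.renderTime || last.loadTime);
    });
    po.observe({ type: "largest-contentful-paint", buffered: true });
  } catch {
    // Some browsers reject the type/buffered options even when the entry type is listed
  }
}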


@@ -49,73 +49,29 @@ export const gitActivityRouter = createTRPCRouter({
       const events = await eventsResponse.json();
       const allCommits: GitCommit[] = [];

-      // Extract push events and fetch commit details
+      // Extract commits directly from PushEvent payload — no per-commit API calls needed
       for (const event of events) {
         if (event.type !== "PushEvent") continue;
-        if (allCommits.length >= input.limit * 5) break; // Get extra to ensure we have enough
+        if (allCommits.length >= input.limit) break;

         const repoName = event.repo.name;
-        const commitSha = event.payload.head;
+        const payloadCommits: any[] = event.payload.commits || [];

-        try {
-          // Fetch the actual commit details to get the message
-          const commitResponse = await fetchWithTimeout(
-            `https://api.github.com/repos/${repoName}/commits/${commitSha}`,
-            {
-              headers: {
-                Authorization: `Bearer ${env.GITHUB_API_TOKEN}`,
-                Accept: "application/vnd.github.v3+json"
-              },
-              timeout: 5000
-            }
-          );
-          if (commitResponse.ok) {
-            const commit = await commitResponse.json();
-            // Filter for your commits
-            if (
-              commit.author?.login === "MikeFreno" ||
-              commit.author?.login === "mikefreno" ||
-              commit.commit?.author?.email?.includes("mike")
-            ) {
-              allCommits.push({
-                sha: commit.sha?.substring(0, 7) || "unknown",
-                message:
-                  commit.commit?.message?.split("\n")[0] || "No message",
-                author:
-                  commit.commit?.author?.name ||
-                  commit.author?.login ||
-                  "Unknown",
-                date:
-                  commit.commit?.author?.date || new Date().toISOString(),
-                repo: repoName,
-                url: `https://github.com/${repoName}/commit/${commit.sha}`
-              });
-            }
-          }
-        } catch (error) {
-          if (
-            error instanceof NetworkError ||
-            error instanceof TimeoutError
-          ) {
-            console.warn(
-              `Network error fetching commit ${commitSha} for ${repoName}, skipping`
-            );
-          } else {
-            console.error(
-              `Error fetching commit ${commitSha} for ${repoName}:`,
-              error
-            );
-          }
+        for (const payloadCommit of payloadCommits) {
+          if (allCommits.length >= input.limit) break;
+          allCommits.push({
+            sha: payloadCommit.sha?.substring(0, 7) || "unknown",
+            message: payloadCommit.message?.split("\n")[0] || "No message",
+            author: payloadCommit.author?.name || "Unknown",
+            // event.created_at is the push timestamp — close enough to commit date
+            date: event.created_at || new Date().toISOString(),
+            repo: repoName,
+            url: `https://github.com/${repoName}/commit/${payloadCommit.sha}`
+          });
         }
       }

-      // Already sorted by event date, but sort again by commit date to be precise
-      allCommits.sort(
-        (a, b) => new Date(b.date).getTime() - new Date(a.date).getTime()
-      );
+      // Events are already in reverse-chronological order
       return allCommits.slice(0, input.limit);
     },
     { maxStaleMs: CACHE_CONFIG.GIT_ACTIVITY_MAX_STALE_MS }
@@ -155,13 +111,11 @@ export const gitActivityRouter = createTRPCRouter({
       await checkResponse(reposResponse);
       const repos = await reposResponse.json();

-      const allCommits: GitCommit[] = [];
-
-      for (const repo of repos) {
-        if (allCommits.length >= input.limit * 3) break; // Get extra to sort later
-
-        try {
-          const commitsResponse = await fetchWithTimeout(
+      // Fetch commits for all repos in parallel instead of serially
+      const commitResults = await Promise.allSettled(
+        repos.map((repo: any) =>
+          fetchWithTimeout(
             `${env.GITEA_URL}/api/v1/repos/Mike/${repo.name}/commits?limit=5`,
             {
               headers: {
@@ -170,46 +124,36 @@ export const gitActivityRouter = createTRPCRouter({
               },
               timeout: 10000
             }
-          );
+          )
+            .then((res) => (res.ok ? res.json() : []))
+            .catch(() => [])
+        )
+      );

-          if (commitsResponse.ok) {
-            const commits = await commitsResponse.json();
-            for (const commit of commits) {
-              if (
-                (commit.commit?.author?.email &&
-                  commit.commit.author.email.includes(
-                    "michael@freno.me"
-                  )) ||
-                commit.commit.author.email.includes(
-                  "michaelt.freno@gmail.com"
-                )
-              ) {
-                allCommits.push({
-                  sha: commit.sha?.substring(0, 7) || "unknown",
-                  message:
-                    commit.commit?.message?.split("\n")[0] || "No message",
-                  author: commit.commit?.author?.name || repo.owner.login,
-                  date:
-                    commit.commit?.author?.date || new Date().toISOString(),
-                  repo: repo.full_name,
-                  url: `${env.GITEA_URL}/${repo.full_name}/commit/${commit.sha}`
-                });
-              }
-            }
-          }
-        } catch (error) {
-          if (
-            error instanceof NetworkError ||
-            error instanceof TimeoutError
-          ) {
-            console.warn(
-              `Network error fetching commits for ${repo.name}, skipping`
-            );
-          } else {
-            console.error(
-              `Error fetching commits for ${repo.name}:`,
-              error
-            );
+      const allCommits: GitCommit[] = [];
+      for (let i = 0; i < commitResults.length; i++) {
+        const result = commitResults[i];
+        if (result.status === "rejected") continue;
+        const repo = repos[i];
+        const commits: any[] = result.value;
+
+        for (const commit of commits) {
+          const email: string = commit.commit?.author?.email ?? "";
+
+          // Filter for your commits
+          if (
+            email.includes("michael@freno.me") ||
+            email.includes("michaelt.freno@gmail.com")
+          ) {
+            allCommits.push({
+              sha: commit.sha?.substring(0, 7) || "unknown",
+              message:
+                commit.commit?.message?.split("\n")[0] || "No message",
+              author:
+                commit.commit?.author?.name ||
+                repo.owner?.login ||
+                "Unknown",
+              date: commit.commit?.author?.date || new Date().toISOString(),
+              repo: repo.full_name,
+              url: `${env.GITEA_URL}/${repo.full_name}/commit/${commit.sha}`
+            });
           }
         }
       }
@@ -336,11 +280,13 @@ export const gitActivityRouter = createTRPCRouter({
       const threeMonthsAgo = new Date();
       threeMonthsAgo.setMonth(threeMonthsAgo.getMonth() - 3);
+      const sinceParam = threeMonthsAgo.toISOString();

-      for (const repo of repos) {
-        try {
-          const commitsResponse = await fetchWithTimeout(
-            `${env.GITEA_URL}/api/v1/repos/${repo.owner.login}/${repo.name}/commits?limit=100`,
+      // Fetch commits for all repos in parallel, scoped to the 3-month window
+      const commitResults = await Promise.allSettled(
+        repos.map((repo: any) =>
+          fetchWithTimeout(
+            `${env.GITEA_URL}/api/v1/repos/${repo.owner.login}/${repo.name}/commits?limit=100&since=${sinceParam}`,
             {
               headers: {
                 Authorization: `token ${env.GITEA_TOKEN}`,
@@ -348,31 +294,23 @@ export const gitActivityRouter = createTRPCRouter({
               },
               timeout: 10000
             }
-          );
+          )
+            .then((res) => (res.ok ? res.json() : []))
+            .catch(() => [])
+        )
+      );

-          if (commitsResponse.ok) {
-            const commits = await commitsResponse.json();
-            for (const commit of commits) {
-              const date = new Date(commit.commit.author.date)
-                .toISOString()
-                .split("T")[0];
-              contributionsByDay.set(
-                date,
-                (contributionsByDay.get(date) || 0) + 1
-              );
-            }
-          }
-        } catch (error) {
-          if (
-            error instanceof NetworkError ||
-            error instanceof TimeoutError
-          ) {
-            console.warn(
-              `Network error fetching commits for ${repo.name}, skipping`
-            );
-          } else {
-            console.error(`Error fetching commits for ${repo.name}:`, error);
-          }
+      for (const result of commitResults) {
+        if (result.status === "rejected") continue;
+        const commits: any[] = result.value;
+        for (const commit of commits) {
+          const date = new Date(commit.commit.author.date)
+            .toISOString()
+            .split("T")[0];
+          contributionsByDay.set(
+            date,
+            (contributionsByDay.get(date) || 0) + 1
+          );
         }
       }
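
Both Gitea loops above follow the same serial-to-parallel rewrite. Its skeleton, as a sketch with placeholder names (fetchJsonAll, urls):

async function fetchJsonAll(urls: string[]): Promise<unknown[][]> {
  const results = await Promise.allSettled(
    urls.map((url) =>
      fetch(url)
        .then((res) => (res.ok ? (res.json() as Promise<unknown[]>) : []))
        .catch(() => [] as unknown[])
    )
  );
  // The per-promise .catch already maps failures to [], so every entry arrives
  // "fulfilled"; the status check is defensive. Index i still lines up with urls[i].
  return results.map((r) => (r.status === "fulfilled" ? r.value : []));
}

Wall-clock time drops from the sum of all requests to roughly the slowest single one, at the cost of burstier load on the Gitea server.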


@@ -1,167 +1,89 @@
 /**
- * Redis-backed Cache for Serverless
+ * In-memory cache with TTL
  *
- * Uses Redis for persistent caching across serverless invocations.
- * Redis provides:
- * - Fast in-memory storage
- * - Built-in TTL expiration (automatic cleanup)
- * - Persistence across function invocations
- * - Native support in Vercel and other platforms
+ * Redis was replaced because on a low-traffic site the cache TTL almost always
+ * expires between visits, so every request paid Redis connection + round-trip
+ * overhead with no benefit. A module-level Map has zero network latency:
+ * cache hits are a single dictionary lookup, misses fall through immediately.
  */
-import { createClient } from "redis";
-import { env } from "~/env/server";
 import { CACHE_CONFIG } from "~/config";

-let redisClient: ReturnType<typeof createClient> | null = null;
-let isConnecting = false;
-let connectionError: Error | null = null;
-
-/**
- * Get or create Redis client (singleton pattern)
- */
-async function getRedisClient() {
-  if (redisClient && redisClient.isOpen) {
-    return redisClient;
-  }
-  if (isConnecting) {
-    // Wait for existing connection attempt
-    await new Promise((resolve) => setTimeout(resolve, 100));
-    return getRedisClient();
-  }
-  if (connectionError) {
-    throw connectionError;
-  }
-  try {
-    isConnecting = true;
-    redisClient = createClient({ url: env.REDIS_URL });
-    redisClient.on("error", (err) => {
-      console.error("Redis Client Error:", err);
-      connectionError = err;
-    });
-    await redisClient.connect();
-    isConnecting = false;
-    connectionError = null;
-    return redisClient;
-  } catch (error) {
-    isConnecting = false;
-    connectionError = error as Error;
-    console.error("Failed to connect to Redis:", error);
-    throw error;
-  }
-}
+interface CacheEntry<T> {
+  data: T;
+  /** Absolute timestamp (ms) after which this entry is considered stale */
+  expiresAt: number;
+  /** Absolute timestamp (ms) after which stale fallback is also discarded */
+  staleExpiresAt: number;
+}

-/**
- * Redis-backed cache interface
- */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+const store = new Map<string, CacheEntry<any>>();
+
 export const cache = {
-  async get<T>(key: string): Promise<T | null> {
-    try {
-      const client = await getRedisClient();
-      const value = await client.get(key);
-      if (!value) {
-        return null;
-      }
-      return JSON.parse(value) as T;
-    } catch (error) {
-      console.error(`Cache get error for key "${key}":`, error);
-      return null;
-    }
+  get<T>(key: string): T | null {
+    const entry = store.get(key) as CacheEntry<T> | undefined;
+    if (!entry) return null;
+    if (Date.now() > entry.expiresAt) return null;
+    return entry.data;
   },

-  async set<T>(key: string, data: T, ttlMs: number): Promise<void> {
-    try {
-      const client = await getRedisClient();
-      const value = JSON.stringify(data);
-      // Redis SET with EX (expiry in seconds)
-      await client.set(key, value, {
-        EX: Math.ceil(ttlMs / 1000)
-      });
-    } catch (error) {
-      console.error(`Cache set error for key "${key}":`, error);
-    }
+  set<T>(key: string, data: T, ttlMs: number): void {
+    const existing = store.get(key);
+    store.set(key, {
+      data,
+      expiresAt: Date.now() + ttlMs,
+      // Preserve an existing stale expiry if it's longer, otherwise default
+      staleExpiresAt:
+        existing?.staleExpiresAt ?? Date.now() + CACHE_CONFIG.MAX_STALE_DATA_MS
+    });
   },

-  async delete(key: string): Promise<void> {
-    try {
-      const client = await getRedisClient();
-      await client.del(key);
-    } catch (error) {
-      console.error(`Cache delete error for key "${key}":`, error);
-    }
+  delete(key: string): void {
+    store.delete(key);
   },

-  async deleteByPrefix(prefix: string): Promise<void> {
-    try {
-      const client = await getRedisClient();
-      const keys = await client.keys(`${prefix}*`);
-      if (keys.length > 0) {
-        await client.del(keys);
-      }
-    } catch (error) {
-      console.error(
-        `Cache deleteByPrefix error for prefix "${prefix}":`,
-        error
-      );
-    }
+  deleteByPrefix(prefix: string): void {
+    for (const key of store.keys()) {
+      if (key.startsWith(prefix)) store.delete(key);
+    }
   },

-  async clear(): Promise<void> {
-    try {
-      const client = await getRedisClient();
-      await client.flushDb();
-    } catch (error) {
-      console.error("Cache clear error:", error);
-    }
+  clear(): void {
+    store.clear();
   },

-  async has(key: string): Promise<boolean> {
-    try {
-      const client = await getRedisClient();
-      const exists = await client.exists(key);
-      return exists === 1;
-    } catch (error) {
-      console.error(`Cache has error for key "${key}":`, error);
-      return false;
-    }
+  has(key: string): boolean {
+    const entry = store.get(key);
+    if (!entry) return false;
+    return Date.now() <= entry.expiresAt;
   }
 };

 /**
- * Execute function with Redis caching
+ * Execute function with in-memory caching.
  */
 export async function withCache<T>(
   key: string,
   ttlMs: number,
   fn: () => Promise<T>
 ): Promise<T> {
-  const cached = await cache.get<T>(key);
-  if (cached !== null) {
-    return cached;
-  }
+  const cached = cache.get<T>(key);
+  if (cached !== null) return cached;
   const result = await fn();
-  await cache.set(key, result, ttlMs);
+  cache.set(key, result, ttlMs);
   return result;
 }

 /**
- * Execute function with Redis caching and stale data fallback
+ * Execute function with caching and stale-data fallback.
  *
  * Strategy:
- * 1. Try to get fresh cached data (within TTL)
- * 2. If not found, execute function
- * 3. If function fails, try to get stale data (ignore TTL)
- * 4. Store result with TTL for future requests
+ * 1. Return data if fresh (within TTL).
+ * 2. Otherwise run fn().
+ * 3. If fn() throws, return stale data if still within maxStaleMs.
+ * 4. Store fresh result for future requests.
  */
 export async function withCacheAndStale<T>(
   key: string,
@@ -175,34 +97,29 @@ export async function withCacheAndStale<T>(
   const { maxStaleMs = CACHE_CONFIG.MAX_STALE_DATA_MS, logErrors = true } =
     options;

-  // Try fresh cache
-  const cached = await cache.get<T>(key);
-  if (cached !== null) {
-    return cached; // Fresh hit
-  }
+  const now = Date.now();
+  const entry = store.get(key) as CacheEntry<T> | undefined;
+
+  if (entry && entry.expiresAt > now) return entry.data;

   try {
-    // Execute function
     const result = await fn();
-    await cache.set(key, result, ttlMs);
-    // Also store with longer TTL for stale fallback
-    const staleKey = `${key}:stale`;
-    await cache.set(staleKey, result, maxStaleMs);
+    store.set(key, {
+      data: result,
+      expiresAt: now + ttlMs,
+      staleExpiresAt: now + maxStaleMs
+    });
     return result;
   } catch (error) {
     if (logErrors) {
       console.error(`Error fetching data for cache key "${key}":`, error);
     }

-    // Try stale cache with longer TTL key
-    const staleKey = `${key}:stale`;
-    const staleData = await cache.get<T>(staleKey);
-
-    if (staleData !== null) {
-      if (logErrors) {
-        console.log(`Serving stale data for cache key "${key}"`);
-      }
-      return staleData;
-    }
+    // Stale fallback
+    if (entry && entry.staleExpiresAt > now) {
+      if (logErrors) console.log(`Serving stale data for cache key "${key}"`);
+      return entry.data;
+    }

     throw error;
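
A usage sketch of withCacheAndStale as defined above (the key, TTLs, and fetcher are illustrative, assuming the options object is the fourth parameter as the destructuring suggests):

const commits = await withCacheAndStale(
  "gitea:commits", // cache key
  5 * 60 * 1000, // fresh TTL: 5 minutes
  () => fetchGiteaCommits(), // hypothetical fetcher that may throw
  { maxStaleMs: 24 * 60 * 60 * 1000 } // serve up to day-old data if the fetch fails
);

Since both expiry timestamps now live on one Map entry, the old `${key}:stale` shadow key disappears: a stale read is simply the same entry past expiresAt but before staleExpiresAt.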