oof
@@ -39,6 +39,7 @@ import {
checkAccountLockout,
recordFailedLogin,
resetFailedAttempts,
resetLoginRateLimits,
createPasswordResetToken,
validatePasswordResetToken,
markPasswordResetTokenUsed
@@ -51,7 +52,8 @@ import {
NETWORK_CONFIG,
COOLDOWN_TIMERS,
getAccessTokenExpiry,
getAccessCookieMaxAge
getAccessCookieMaxAge,
getRefreshCookieMaxAge
} from "~/config";
import { randomBytes, createHash, timingSafeEqual } from "crypto";
@@ -177,6 +179,7 @@ async function validateRefreshToken(
*/
async function invalidateSession(sessionId: string): Promise<void> {
const conn = ConnectionFactory();
console.log(`[Session] Invalidating session ${sessionId}`);
await conn.execute({
sql: "UPDATE Session SET revoked = 1 WHERE id = ?",
args: [sessionId]
@@ -202,6 +205,9 @@ async function revokeTokenFamily(
});

// Revoke all sessions in family
console.log(
`[Token Family] Revoking entire family ${tokenFamily} (reason: ${reason}). Sessions affected: ${sessions.rows.length}`
);
await conn.execute({
sql: "UPDATE Session SET revoked = 1 WHERE token_family = ?",
args: [tokenFamily]
@@ -255,14 +261,14 @@ async function detectTokenReuse(sessionId: string): Promise<boolean> {
// Grace period for race conditions (e.g., slow network, retries)
if (timeSinceRotation < AUTH_CONFIG.REFRESH_TOKEN_REUSE_WINDOW_MS) {
console.warn(
`Token reuse within grace period (${timeSinceRotation}ms), allowing`
`[Token Reuse] Within grace period (${timeSinceRotation}ms < ${AUTH_CONFIG.REFRESH_TOKEN_REUSE_WINDOW_MS}ms), allowing for session ${sessionId}`
);
return false;
}

// Reuse detected outside grace period - this is a breach!
console.error(
`Token reuse detected! Session ${sessionId} rotated ${timeSinceRotation}ms ago`
`[Token Reuse] BREACH DETECTED! Session ${sessionId} rotated ${timeSinceRotation}ms ago (grace period: ${AUTH_CONFIG.REFRESH_TOKEN_REUSE_WINDOW_MS}ms). Child session: ${childSession.id}`
);

// Get token family and revoke entire family
@@ -316,28 +322,49 @@ async function rotateRefreshToken(
refreshToken: string;
sessionId: string;
} | null> {
console.log(`[Token Rotation] Starting rotation for session ${oldSessionId}`);

// Step 1: Validate old refresh token
const oldSession = await validateRefreshToken(oldRefreshToken, oldSessionId);

if (!oldSession) {
console.warn("Invalid refresh token during rotation");
console.warn(
`[Token Rotation] Invalid refresh token during rotation for session ${oldSessionId}`
);
return null;
}

console.log(
`[Token Rotation] Refresh token validated for session ${oldSessionId}`
);

// Step 2: Detect token reuse (breach detection)
const reuseDetected = await detectTokenReuse(oldSessionId);
if (reuseDetected) {
console.error(
`[Token Rotation] Token reuse detected for session ${oldSessionId}`
);
// Token family already revoked by detectTokenReuse
return null;
}

console.log(
`[Token Rotation] No token reuse detected for session ${oldSessionId}`
);

// Step 3: Check rotation limit
if (oldSession.rotation_count >= AUTH_CONFIG.MAX_ROTATION_COUNT) {
console.warn(`Max rotation count reached for session ${oldSessionId}`);
console.warn(
`[Token Rotation] Max rotation count reached for session ${oldSessionId}`
);
await invalidateSession(oldSessionId);
return null;
}

console.log(
`[Token Rotation] Rotation count OK (${oldSession.rotation_count}/${AUTH_CONFIG.MAX_ROTATION_COUNT})`
);

// Step 4: Generate new tokens
const newRefreshToken = generateRefreshToken();
const refreshExpiry = rememberMe
@@ -549,28 +576,42 @@ function setAuthCookies(
rememberMe: boolean = false
) {
// Access token cookie (short-lived, always same duration)
const accessMaxAge = getAccessCookieMaxAge();

setCookie(event, ACCESS_TOKEN_COOKIE_NAME, accessToken, {
maxAge: accessMaxAge,
// Session cookies (no maxAge) vs persistent cookies (with maxAge)
const accessCookieOptions: any = {
path: "/",
httpOnly: true,
secure: env.NODE_ENV === "production",
sameSite: "strict"
});
};

// Refresh token cookie (long-lived, varies based on rememberMe)
const refreshMaxAge = rememberMe
? AUTH_CONFIG.REFRESH_COOKIE_MAX_AGE_LONG
: AUTH_CONFIG.REFRESH_COOKIE_MAX_AGE_SHORT;
if (rememberMe) {
// Persistent cookie - survives browser restart
accessCookieOptions.maxAge = getAccessCookieMaxAge();
}
// else: session cookie - expires when browser closes (no maxAge)

setCookie(event, REFRESH_TOKEN_COOKIE_NAME, refreshToken, {
maxAge: refreshMaxAge,
setCookie(event, ACCESS_TOKEN_COOKIE_NAME, accessToken, accessCookieOptions);

// Refresh token cookie (varies based on rememberMe)
const refreshCookieOptions: any = {
path: "/",
httpOnly: true,
secure: env.NODE_ENV === "production",
sameSite: "strict"
});
};

if (rememberMe) {
// Persistent cookie - long-lived (90 days)
refreshCookieOptions.maxAge = getRefreshCookieMaxAge(true);
}
// else: session cookie - expires when browser closes (no maxAge)

setCookie(
event,
REFRESH_TOKEN_COOKIE_NAME,
refreshToken,
refreshCookieOptions
);

// CSRF token for authenticated session
setCSRFToken(event);
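For orientation, a minimal sketch of the cookie-option shape this change produces: with rememberMe the options carry a maxAge (persistent cookie), without it maxAge is omitted (session cookie). The helper name buildCookieOptions and the interface are illustrative assumptions, not code from this commit.

// Illustrative sketch only; buildCookieOptions is a hypothetical helper.
interface CookieOptions {
  path: string;
  httpOnly: boolean;
  secure: boolean;
  sameSite: "strict";
  maxAge?: number; // omitted => session cookie, set => persistent cookie
}

function buildCookieOptions(rememberMe: boolean, maxAge: number): CookieOptions {
  const options: CookieOptions = {
    path: "/",
    httpOnly: true,
    secure: process.env.NODE_ENV === "production",
    sameSite: "strict"
  };
  if (rememberMe) {
    options.maxAge = maxAge; // persistent: survives browser restart
  }
  return options;
}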
@@ -613,6 +654,119 @@ async function sendEmail(to: string, subject: string, htmlContent: string) {
);
}

/**
* Attempt server-side token refresh for SSR
* Called from getUserState() when access token is expired but refresh token exists
* @param event - H3Event from SSR
* @param refreshToken - Refresh token from httpOnly cookie
* @returns true if refresh succeeded, false otherwise
*/
export async function attemptTokenRefresh(
event: H3Event,
refreshToken: string
): Promise<boolean> {
try {
// Step 1: Find session by refresh token hash
// (Access token may not exist if user closed browser and returned later)
const conn = ConnectionFactory();
const tokenHash = hashRefreshToken(refreshToken);

const sessionResult = await conn.execute({
sql: `SELECT id, user_id, expires_at, revoked
FROM Session
WHERE refresh_token_hash = ?
AND revoked = 0`,
args: [tokenHash]
});

if (sessionResult.rows.length === 0) {
console.warn(
"[Token Refresh SSR] No valid session found for refresh token"
);
return false;
}

const session = sessionResult.rows[0];
const sessionId = session.id as string;

// Check if session is expired
const expiresAt = new Date(session.expires_at as string);
if (expiresAt < new Date()) {
console.warn("[Token Refresh SSR] Session expired");
return false;
}

// Step 2: Determine rememberMe from existing session
const now = new Date();
const daysUntilExpiry =
(expiresAt.getTime() - now.getTime()) / (1000 * 60 * 60 * 24);
// If expires in > 30 days, assume rememberMe was true
const rememberMe = daysUntilExpiry > 30;

// Step 3: Get client info
const clientIP = getClientIP(event);
const userAgent = getUserAgent(event);

// Step 4: Rotate tokens
console.log(`[Token Refresh SSR] Rotating tokens for session ${sessionId}`);
const rotated = await rotateRefreshToken(
refreshToken,
sessionId,
rememberMe,
clientIP,
userAgent
);

if (!rotated) {
console.warn("[Token Refresh SSR] Token rotation failed");
return false;
}

// Step 5: Set new cookies
const accessCookieOptions: any = {
path: "/",
httpOnly: true,
secure: env.NODE_ENV === "production",
sameSite: "strict"
};

if (rememberMe) {
accessCookieOptions.maxAge = getAccessCookieMaxAge();
}

setCookie(
event,
ACCESS_TOKEN_COOKIE_NAME,
rotated.accessToken,
accessCookieOptions
);

const refreshCookieOptions: any = {
path: "/",
httpOnly: true,
secure: env.NODE_ENV === "production",
sameSite: "strict"
};

if (rememberMe) {
refreshCookieOptions.maxAge = getRefreshCookieMaxAge(true);
}

setCookie(
event,
REFRESH_TOKEN_COOKIE_NAME,
rotated.refreshToken,
refreshCookieOptions
);

console.log("[Token Refresh SSR] Token refresh successful");
return true;
} catch (error) {
console.error("[Token Refresh SSR] Error:", error);
return false;
}
}
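The doc comment above says attemptTokenRefresh is called from getUserState() during SSR; that caller is not part of this diff, so here is a minimal sketch of what such a call site might look like. The cookie names, getUserState shape, and verifyAccessToken helper are assumptions for illustration only.

// Hypothetical SSR call site; only attemptTokenRefresh comes from this commit.
import { getCookie, type H3Event } from "h3";

declare function verifyAccessToken(token: string): Promise<boolean>; // assumed helper
declare function attemptTokenRefresh(event: H3Event, refreshToken: string): Promise<boolean>;

async function getUserState(event: H3Event): Promise<{ authenticated: boolean }> {
  const accessToken = getCookie(event, "access-token"); // cookie names assumed
  const refreshToken = getCookie(event, "refresh-token");

  // Access token still valid: nothing to refresh.
  if (accessToken && (await verifyAccessToken(accessToken))) {
    return { authenticated: true };
  }

  // Access token missing or expired, but a refresh token exists:
  // try the server-side rotation path added in this commit.
  if (refreshToken) {
    return { authenticated: await attemptTokenRefresh(event, refreshToken) };
  }

  return { authenticated: false };
}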

export const authRouter = createTRPCRouter({
githubCallback: publicProcedure
.input(z.object({ code: z.string() }))
@@ -1405,6 +1559,9 @@ export const authRouter = createTRPCRouter({
// Reset failed attempts on successful login
await resetFailedAttempts(user.id);

// Reset rate limits on successful login
await resetLoginRateLimits(email, clientIP);

// Determine token expiry based on rememberMe
const accessExpiry = getAccessTokenExpiry(); // Always 15m
const refreshExpiry = rememberMe
@@ -2098,37 +2255,46 @@ export const authRouter = createTRPCRouter({
}

// Step 6: Set new access token cookie
const accessCookieMaxAge = getAccessCookieMaxAge();
// Session cookies (no maxAge) vs persistent cookies (with maxAge)
const accessCookieOptions: any = {
path: "/",
httpOnly: true,
secure: env.NODE_ENV === "production",
sameSite: "strict"
};

if (rememberMe) {
// Persistent cookie - survives browser restart
accessCookieOptions.maxAge = getAccessCookieMaxAge();
}
// else: session cookie - expires when browser closes (no maxAge)

setCookie(
getH3Event(ctx),
ACCESS_TOKEN_COOKIE_NAME,
rotated.accessToken,
{
maxAge: accessCookieMaxAge,
path: "/",
httpOnly: true,
secure: env.NODE_ENV === "production",
sameSite: "strict"
}
accessCookieOptions
);

// Step 7: Set new refresh token cookie
const refreshCookieMaxAge = rememberMe
? AUTH_CONFIG.REFRESH_COOKIE_MAX_AGE_LONG
: AUTH_CONFIG.REFRESH_COOKIE_MAX_AGE_SHORT;
const refreshCookieOptions: any = {
path: "/",
httpOnly: true,
secure: env.NODE_ENV === "production",
sameSite: "strict"
};

if (rememberMe) {
// Persistent cookie - long-lived (90 days)
refreshCookieOptions.maxAge = getRefreshCookieMaxAge(true);
}
// else: session cookie - expires when browser closes (no maxAge)

setCookie(
getH3Event(ctx),
REFRESH_TOKEN_COOKIE_NAME,
rotated.refreshToken,
{
maxAge: refreshCookieMaxAge,
path: "/",
httpOnly: true,
secure: env.NODE_ENV === "production",
sameSite: "strict"
}
refreshCookieOptions
);

// Step 8: Refresh CSRF token
@@ -2245,6 +2411,10 @@ export const authRouter = createTRPCRouter({
maxAge: 0,
path: "/"
});
setCookie(getH3Event(ctx), "csrf-token", "", {
maxAge: 0,
path: "/"
});

// Step 4: Log signout event
if (userId) {
@@ -413,7 +413,7 @@ export const databaseRouter = createTRPCRouter({
await conn.execute(tagQuery);
}

cache.deleteByPrefix("blog-");
await cache.deleteByPrefix("blog-");

return { data: results.lastInsertRowid };
} catch (error) {
@@ -529,7 +529,7 @@ export const databaseRouter = createTRPCRouter({
await conn.execute(tagQuery);
}

cache.deleteByPrefix("blog-");
await cache.deleteByPrefix("blog-");

return { data: results.lastInsertRowid };
} catch (error) {
@@ -565,7 +565,7 @@ export const databaseRouter = createTRPCRouter({
args: [input.id]
});

cache.deleteByPrefix("blog-");
await cache.deleteByPrefix("blog-");

return { success: true };
} catch (error) {
@@ -1,77 +1,166 @@
import { CACHE_CONFIG } from "~/config";
/**
* Redis-backed Cache for Serverless
*
* Uses Redis for persistent caching across serverless invocations.
* Redis provides:
* - Fast in-memory storage
* - Built-in TTL expiration (automatic cleanup)
* - Persistence across function invocations
* - Native support in Vercel and other platforms
*/

interface CacheEntry<T> {
data: T;
timestamp: number;
import { createClient } from "redis";
import { env } from "~/env/server";

let redisClient: ReturnType<typeof createClient> | null = null;
let isConnecting = false;
let connectionError: Error | null = null;

/**
* Get or create Redis client (singleton pattern)
*/
async function getRedisClient() {
if (redisClient && redisClient.isOpen) {
return redisClient;
}

if (isConnecting) {
// Wait for existing connection attempt
await new Promise((resolve) => setTimeout(resolve, 100));
return getRedisClient();
}

if (connectionError) {
throw connectionError;
}

try {
isConnecting = true;
redisClient = createClient({ url: env.REDIS_URL });

redisClient.on("error", (err) => {
console.error("Redis Client Error:", err);
connectionError = err;
});

await redisClient.connect();
isConnecting = false;
connectionError = null;
return redisClient;
} catch (error) {
isConnecting = false;
connectionError = error as Error;
console.error("Failed to connect to Redis:", error);
throw error;
}
}

class SimpleCache {
private cache: Map<string, CacheEntry<any>> = new Map();
/**
* Redis-backed cache interface
*/
export const cache = {
async get<T>(key: string): Promise<T | null> {
try {
const client = await getRedisClient();
const value = await client.get(key);

get<T>(key: string, ttlMs: number): T | null {
const entry = this.cache.get(key);
if (!entry) return null;
if (!value) {
return null;
}

const now = Date.now();
if (now - entry.timestamp > ttlMs) {
this.cache.delete(key);
return JSON.parse(value) as T;
} catch (error) {
console.error(`Cache get error for key "${key}":`, error);
return null;
}
},

return entry.data as T;
}
async set<T>(key: string, data: T, ttlMs: number): Promise<void> {
try {
const client = await getRedisClient();
const value = JSON.stringify(data);

getStale<T>(key: string): T | null {
const entry = this.cache.get(key);
return entry ? (entry.data as T) : null;
}
// Redis SET with EX (expiry in seconds)
await client.set(key, value, {
EX: Math.ceil(ttlMs / 1000)
});
} catch (error) {
console.error(`Cache set error for key "${key}":`, error);
}
},

has(key: string): boolean {
return this.cache.has(key);
}
async delete(key: string): Promise<void> {
try {
const client = await getRedisClient();
await client.del(key);
} catch (error) {
console.error(`Cache delete error for key "${key}":`, error);
}
},

set<T>(key: string, data: T): void {
this.cache.set(key, {
data,
timestamp: Date.now()
});
}
async deleteByPrefix(prefix: string): Promise<void> {
try {
const client = await getRedisClient();
const keys = await client.keys(`${prefix}*`);

clear(): void {
this.cache.clear();
}

delete(key: string): void {
this.cache.delete(key);
}

deleteByPrefix(prefix: string): void {
for (const key of this.cache.keys()) {
if (key.startsWith(prefix)) {
this.cache.delete(key);
if (keys.length > 0) {
await client.del(keys);
}
} catch (error) {
console.error(
`Cache deleteByPrefix error for prefix "${prefix}":`,
error
);
}
},

async clear(): Promise<void> {
try {
const client = await getRedisClient();
await client.flushDb();
} catch (error) {
console.error("Cache clear error:", error);
}
},

async has(key: string): Promise<boolean> {
try {
const client = await getRedisClient();
const exists = await client.exists(key);
return exists === 1;
} catch (error) {
console.error(`Cache has error for key "${key}":`, error);
return false;
}
}
}
}
};

export const cache = new SimpleCache();
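As a quick orientation to the new interface, a minimal usage sketch follows. The key names and the BlogPost shape are illustrative assumptions, not code from this commit; it assumes the cache object exported above is in scope (or imported from wherever this module lives).

// Illustrative usage of the Redis-backed cache object (assumptions noted above).
interface BlogPost {
  id: number;
  title: string;
}

async function cacheExample(): Promise<void> {
  const posts: BlogPost[] = [{ id: 1, title: "Hello" }];

  // set() now takes an explicit TTL in milliseconds; Redis EX handles expiry.
  await cache.set("blog-posts:recent", posts, 60_000);

  // get() is async and returns null on a miss or on a Redis error.
  const cached = await cache.get<BlogPost[]>("blog-posts:recent");

  // deleteByPrefix() removes every key starting with the prefix, which is why
  // the databaseRouter calls above now await it.
  await cache.deleteByPrefix("blog-");

  console.log(cached?.length ?? 0);
}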
/**
* Execute function with Redis caching
*/
export async function withCache<T>(
key: string,
ttlMs: number,
fn: () => Promise<T>
): Promise<T> {
const cached = cache.get<T>(key, ttlMs);
const cached = await cache.get<T>(key);
if (cached !== null) {
return cached;
}

const result = await fn();
cache.set(key, result);
await cache.set(key, result, ttlMs);
return result;
}
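A minimal caller sketch for withCache under its new signature; fetchPostsFromDb and the key are assumptions for illustration, and withCache is assumed to be in scope.

// Hypothetical caller of withCache.
declare function fetchPostsFromDb(): Promise<{ id: number; title: string }[]>;

async function getPosts() {
  // Fresh value is cached for 5 minutes; on a cache hit the DB is not touched.
  return withCache("blog-posts:list", 5 * 60 * 1000, () => fetchPostsFromDb());
}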

/**
* Returns stale data if fetch fails, with optional stale time limit
* Execute function with Redis caching and stale data fallback
*
* Strategy:
* 1. Try to get fresh cached data (within TTL)
* 2. If not found, execute function
* 3. If function fails, try to get stale data (ignore TTL)
* 4. Store result with TTL for future requests
*/
export async function withCacheAndStale<T>(
key: string,
@@ -82,36 +171,36 @@ export async function withCacheAndStale<T>(
logErrors?: boolean;
} = {}
): Promise<T> {
const { maxStaleMs = CACHE_CONFIG.MAX_STALE_DATA_MS, logErrors = true } =
options;
const { maxStaleMs = 7 * 24 * 60 * 60 * 1000, logErrors = true } = options;

const cached = cache.get<T>(key, ttlMs);
// Try fresh cache
const cached = await cache.get<T>(key);
if (cached !== null) {
return cached;
}

try {
// Execute function
const result = await fn();
cache.set(key, result);
await cache.set(key, result, ttlMs);
// Also store with longer TTL for stale fallback
const staleKey = `${key}:stale`;
await cache.set(staleKey, result, maxStaleMs);
return result;
} catch (error) {
if (logErrors) {
console.error(`Error fetching data for cache key "${key}":`, error);
}

const stale = cache.getStale<T>(key);
if (stale !== null) {
const entry = (cache as any).cache.get(key);
const age = Date.now() - entry.timestamp;
// Try stale cache with longer TTL key
const staleKey = `${key}:stale`;
const staleData = await cache.get<T>(staleKey);

if (age <= maxStaleMs) {
if (logErrors) {
console.log(
`Serving stale data for cache key "${key}" (age: ${Math.round(age / 1000 / 60)}m)`
);
}
return stale;
if (staleData !== null) {
if (logErrors) {
console.log(`Serving stale data for cache key "${key}"`);
}
return staleData;
}

throw error;

@@ -200,9 +200,11 @@ export async function clearRateLimitStore(): Promise<void> {
}

/**
* Cleanup expired rate limit entries every 5 minutes
* Opportunistic cleanup of expired rate limit entries
* Called probabilistically during rate limit checks (serverless-friendly)
* Note: setInterval is not reliable in serverless environments
*/
setInterval(async () => {
async function cleanupExpiredRateLimits(): Promise<void> {
try {
const { ConnectionFactory } = await import("./database");
const conn = ConnectionFactory();
@@ -212,9 +214,10 @@ setInterval(async () => {
args: [now]
});
} catch (error) {
// Silent fail - cleanup is opportunistic
console.error("Failed to cleanup expired rate limits:", error);
}
}, RATE_LIMIT_CLEANUP_INTERVAL_MS);
}

/**
* Get client IP address from request headers
@@ -274,6 +277,11 @@ export async function checkRateLimit(
const now = Date.now();
const resetAt = new Date(now + windowMs);

// Opportunistic cleanup (10% chance) - serverless-friendly
if (Math.random() < 0.1) {
cleanupExpiredRateLimits().catch(() => {}); // Fire and forget
}

const result = await conn.execute({
sql: "SELECT id, count, reset_at FROM RateLimit WHERE identifier = ?",
args: [identifier]
@@ -506,6 +514,22 @@ export async function resetFailedAttempts(userId: string): Promise<void> {
});
}

/**
* Reset login rate limits on successful login
*/
export async function resetLoginRateLimits(
email: string,
clientIP: string
): Promise<void> {
const { ConnectionFactory } = await import("./database");
const conn = ConnectionFactory();

await conn.execute({
sql: "DELETE FROM RateLimit WHERE identifier IN (?, ?)",
args: [`login:ip:${clientIP}`, `login:email:${email}`]
});
}

export const PASSWORD_RESET_CONFIG = CONFIG_PASSWORD_RESET;

/**