Compare commits
3 Commits
67bf77815e...master

| Author | SHA1 | Date |
|---|---|---|
| | 8b6551330f | |
| | 3635133994 | |
| | 4dbd0ac965 | |
@@ -55,7 +55,6 @@
|
||||
"jose": "^6.1.3",
|
||||
"mermaid": "^11.12.2",
|
||||
"motion": "^12.23.26",
|
||||
"redis": "^5.10.0",
|
||||
"solid-js": "^1.9.5",
|
||||
"solid-tiptap": "^0.8.0",
|
||||
"ua-parser-js": "^2.0.7",
|
||||
|
||||
@@ -46,6 +46,34 @@ interface ContributionDay {
|
||||
count: number;
|
||||
}
|
||||
|
||||
// Four independent cached promises — first RightBarContent instance to mount
|
||||
// starts each fetch; the second gets the already-in-flight promise.
|
||||
let ghCommitsPromise: Promise<GitCommit[]> | null = null;
|
||||
let gtCommitsPromise: Promise<GitCommit[]> | null = null;
|
||||
let ghActivityPromise: Promise<ContributionDay[]> | null = null;
|
||||
let gtActivityPromise: Promise<ContributionDay[]> | null = null;
|
||||
|
||||
function getGhCommitsPromise(): Promise<GitCommit[]> {
|
||||
return (ghCommitsPromise ??= api.gitActivity.getGitHubCommits
|
||||
.query({ limit: 6 })
|
||||
.catch(() => []));
|
||||
}
|
||||
function getGtCommitsPromise(): Promise<GitCommit[]> {
|
||||
return (gtCommitsPromise ??= api.gitActivity.getGiteaCommits
|
||||
.query({ limit: 6 })
|
||||
.catch(() => []));
|
||||
}
|
||||
function getGhActivityPromise(): Promise<ContributionDay[]> {
|
||||
return (ghActivityPromise ??= api.gitActivity.getGitHubActivity
|
||||
.query()
|
||||
.catch(() => []));
|
||||
}
|
||||
function getGtActivityPromise(): Promise<ContributionDay[]> {
|
||||
return (gtActivityPromise ??= api.gitActivity.getGiteaActivity
|
||||
.query()
|
||||
.catch(() => []));
|
||||
}
|
||||
|
||||
export function RightBarContent() {
|
||||
const { setLeftBarVisible } = useBars();
|
||||
const [githubCommits, setGithubCommits] = createSignal<GitCommit[]>([]);
|
||||
@@ -54,7 +82,8 @@ export function RightBarContent() {
|
||||
[]
|
||||
);
|
||||
const [giteaActivity, setGiteaActivity] = createSignal<ContributionDay[]>([]);
|
||||
const [loading, setLoading] = createSignal(true);
|
||||
const [githubCommitsLoading, setGithubCommitsLoading] = createSignal(true);
|
||||
const [giteaCommitsLoading, setGiteaCommitsLoading] = createSignal(true);
|
||||
|
||||
const handleLinkClick = () => {
|
||||
if (
|
||||
@@ -66,41 +95,23 @@ export function RightBarContent() {
|
||||
};
|
||||
|
||||
onMount(() => {
|
||||
const fetchData = async () => {
|
||||
try {
|
||||
// Fetch more commits to account for deduplication
|
||||
const [ghCommits, gtCommits, ghActivity, gtActivity] =
|
||||
await Promise.all([
|
||||
api.gitActivity.getGitHubCommits
|
||||
.query({ limit: 6 })
|
||||
.catch(() => []),
|
||||
api.gitActivity.getGiteaCommits.query({ limit: 6 }).catch(() => []),
|
||||
api.gitActivity.getGitHubActivity.query().catch(() => []),
|
||||
api.gitActivity.getGiteaActivity.query().catch(() => [])
|
||||
]);
|
||||
|
||||
// Take first 3 from GitHub
|
||||
const displayedGithubCommits = ghCommits.slice(0, 3);
|
||||
|
||||
// Deduplicate Gitea commits - only against the 3 shown in GitHub section
|
||||
const githubShas = new Set(displayedGithubCommits.map((c) => c.sha));
|
||||
const uniqueGiteaCommits = gtCommits.filter(
|
||||
(commit) => !githubShas.has(commit.sha)
|
||||
);
|
||||
|
||||
setGithubCommits(displayedGithubCommits);
|
||||
setGiteaCommits(uniqueGiteaCommits.slice(0, 3));
|
||||
setGithubActivity(ghActivity);
|
||||
setGiteaActivity(gtActivity);
|
||||
} catch (error) {
|
||||
console.error("Failed to fetch git activity:", error);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
setTimeout(() => {
|
||||
fetchData();
|
||||
getGhCommitsPromise().then((commits) => {
|
||||
setGithubCommits(commits.slice(0, 3));
|
||||
setGithubCommitsLoading(false);
|
||||
});
|
||||
|
||||
// Deduplicate Gitea against whatever GitHub has resolved by the time this lands
|
||||
getGtCommitsPromise().then((gtCommits) => {
|
||||
const ghShas = new Set(githubCommits().map((c) => c.sha));
|
||||
setGiteaCommits(
|
||||
gtCommits.filter((c) => !ghShas.has(c.sha)).slice(0, 3)
|
||||
);
|
||||
setGiteaCommitsLoading(false);
|
||||
});
|
||||
|
||||
getGhActivityPromise().then((activity) => setGithubActivity(activity));
|
||||
getGtActivityPromise().then((activity) => setGiteaActivity(activity));
|
||||
}, 0);
|
||||
});
|
||||
|
||||
@@ -190,7 +201,7 @@ export function RightBarContent() {
|
||||
<RecentCommits
|
||||
commits={giteaCommits()}
|
||||
title="Recent Gitea Commits"
|
||||
loading={loading()}
|
||||
loading={giteaCommitsLoading()}
|
||||
/>
|
||||
<ActivityHeatmap
|
||||
contributions={giteaActivity()}
|
||||
@@ -199,7 +210,7 @@ export function RightBarContent() {
|
||||
<RecentCommits
|
||||
commits={githubCommits()}
|
||||
title="Recent GitHub Commits"
|
||||
loading={loading()}
|
||||
loading={githubCommitsLoading()}
|
||||
/>
|
||||
<ActivityHeatmap
|
||||
contributions={githubActivity()}
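Because of the ??= assignment above, every later call to these getters returns the promise created by the first call, and the trailing .catch(() => []) means the shared promise settles to an empty array rather than rejecting. A minimal sketch of that behavior (illustrative, not part of the diff):

const first = getGhCommitsPromise();
const second = getGhCommitsPromise();
// Same in-flight promise, so mounting RightBarContent twice costs one request.
console.assert(first === second);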
103 src/env/client.ts vendored
@@ -1,80 +1,46 @@
|
||||
import { z } from "zod";
|
||||
export interface ClientEnv {
|
||||
VITE_DOMAIN: string;
|
||||
VITE_AWS_BUCKET_STRING: string;
|
||||
VITE_DOWNLOAD_BUCKET_STRING: string;
|
||||
VITE_GOOGLE_CLIENT_ID: string;
|
||||
VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE: string;
|
||||
VITE_GITHUB_CLIENT_ID: string;
|
||||
VITE_WEBSOCKET: string;
|
||||
VITE_INFILL_ENDPOINT: string;
|
||||
}
|
||||
|
||||
const clientEnvSchema = z.object({
|
||||
VITE_DOMAIN: z.string().min(1),
|
||||
VITE_AWS_BUCKET_STRING: z.string().min(1),
|
||||
VITE_DOWNLOAD_BUCKET_STRING: z.string().min(1),
|
||||
VITE_GOOGLE_CLIENT_ID: z.string().min(1),
|
||||
VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE: z.string().min(1),
|
||||
VITE_GITHUB_CLIENT_ID: z.string().min(1),
|
||||
VITE_WEBSOCKET: z.string().min(1),
|
||||
VITE_INFILL_ENDPOINT: z.string().min(1)
|
||||
});
|
||||
|
||||
export type ClientEnv = z.infer<typeof clientEnvSchema>;
|
||||
const requiredKeys: (keyof ClientEnv)[] = [
|
||||
"VITE_DOMAIN",
|
||||
"VITE_AWS_BUCKET_STRING",
|
||||
"VITE_DOWNLOAD_BUCKET_STRING",
|
||||
"VITE_GOOGLE_CLIENT_ID",
|
||||
"VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE",
|
||||
"VITE_GITHUB_CLIENT_ID",
|
||||
"VITE_WEBSOCKET",
|
||||
"VITE_INFILL_ENDPOINT"
|
||||
];
|
||||
|
||||
export const validateClientEnv = (
|
||||
envVars: Record<string, string | undefined>
|
||||
): ClientEnv => {
|
||||
try {
|
||||
return clientEnvSchema.parse(envVars);
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
const formattedErrors = error.format();
|
||||
const missingVars = Object.entries(formattedErrors)
|
||||
.filter(
|
||||
([key, value]) =>
|
||||
key !== "_errors" &&
|
||||
typeof value === "object" &&
|
||||
value._errors?.length > 0 &&
|
||||
value._errors[0] === "Required"
|
||||
)
|
||||
.map(([key, _]) => key);
|
||||
const missing = requiredKeys.filter(
|
||||
(key) => !envVars[key] || envVars[key]!.trim() === ""
|
||||
);
|
||||
|
||||
const invalidVars = Object.entries(formattedErrors)
|
||||
.filter(
|
||||
([key, value]) =>
|
||||
key !== "_errors" &&
|
||||
typeof value === "object" &&
|
||||
value._errors?.length > 0 &&
|
||||
value._errors[0] !== "Required"
|
||||
)
|
||||
.map(([key, value]) => ({
|
||||
key,
|
||||
error: value._errors[0]
|
||||
}));
|
||||
|
||||
let errorMessage = "Client environment validation failed:\n";
|
||||
|
||||
if (missingVars.length > 0) {
|
||||
errorMessage += `Missing required variables: ${missingVars.join(", ")}\n`;
|
||||
}
|
||||
|
||||
if (invalidVars.length > 0) {
|
||||
errorMessage += "Invalid values:\n";
|
||||
invalidVars.forEach(({ key, error }) => {
|
||||
errorMessage += ` ${key}: ${error}\n`;
|
||||
});
|
||||
}
|
||||
|
||||
console.error(errorMessage);
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
console.error(
|
||||
"Client environment validation failed with unknown error:",
|
||||
error
|
||||
);
|
||||
throw new Error("Client environment validation failed with unknown error");
|
||||
if (missing.length > 0) {
|
||||
const message = `Client environment validation failed:\nMissing required variables: ${missing.join(", ")}`;
|
||||
console.error(message);
|
||||
throw new Error(message);
|
||||
}
|
||||
|
||||
return envVars as unknown as ClientEnv;
|
||||
};
|
||||
|
||||
const validateAndExportEnv = (): ClientEnv => {
|
||||
try {
|
||||
const validated = validateClientEnv(import.meta.env);
|
||||
console.log("✅ Client environment validation successful");
|
||||
return validated;
|
||||
} catch (error) {
|
||||
console.error("❌ Client environment validation failed:", error);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
@@ -86,14 +52,5 @@ export const isMissingEnvVar = (varName: string): boolean => {
|
||||
};
|
||||
|
||||
export const getMissingEnvVars = (): string[] => {
|
||||
const requiredClientVars = [
|
||||
"VITE_DOMAIN",
|
||||
"VITE_AWS_BUCKET_STRING",
|
||||
"VITE_GOOGLE_CLIENT_ID",
|
||||
"VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE",
|
||||
"VITE_GITHUB_CLIENT_ID",
|
||||
"VITE_WEBSOCKET"
|
||||
];
|
||||
|
||||
return requiredClientVars.filter((varName) => isMissingEnvVar(varName));
|
||||
return requiredKeys.filter((varName) => isMissingEnvVar(varName));
|
||||
};
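A hedged usage sketch of the simplified validator above; the values are invented for illustration and are not from the diff. Every required key must be present and non-blank, otherwise a single error listing the missing names is thrown.

// Illustrative values only: blanking or omitting any key below makes validateClientEnv
// throw "Client environment validation failed: Missing required variables: ...".
const clientEnv = validateClientEnv({
  VITE_DOMAIN: "example.com",
  VITE_AWS_BUCKET_STRING: "bucket",
  VITE_DOWNLOAD_BUCKET_STRING: "downloads-bucket",
  VITE_GOOGLE_CLIENT_ID: "google-client-id",
  VITE_GOOGLE_CLIENT_ID_MAGIC_DELVE: "google-client-id-2",
  VITE_GITHUB_CLIENT_ID: "github-client-id",
  VITE_WEBSOCKET: "wss://example.com/ws",
  VITE_INFILL_ENDPOINT: "https://example.com/infill"
});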
|
||||
|
||||
@@ -24,66 +24,76 @@ export function initPerformanceTracking() {
|
||||
return;
|
||||
}
|
||||
|
||||
const supported = new Set(PerformanceObserver.supportedEntryTypes ?? []);
|
||||
|
||||
// Observe LCP
|
||||
try {
|
||||
const lcpObserver = new PerformanceObserver((entryList) => {
|
||||
const entries = entryList.getEntries();
|
||||
const lastEntry = entries[entries.length - 1] as any;
|
||||
metrics.lcp = lastEntry.renderTime || lastEntry.loadTime;
|
||||
});
|
||||
lcpObserver.observe({ type: "largest-contentful-paint", buffered: true });
|
||||
} catch (e) {
|
||||
console.debug("LCP not supported");
|
||||
if (supported.has("largest-contentful-paint")) {
|
||||
try {
|
||||
const lcpObserver = new PerformanceObserver((entryList) => {
|
||||
const entries = entryList.getEntries();
|
||||
const lastEntry = entries[entries.length - 1] as any;
|
||||
metrics.lcp = lastEntry.renderTime || lastEntry.loadTime;
|
||||
});
|
||||
lcpObserver.observe({ type: "largest-contentful-paint", buffered: true });
|
||||
} catch (e) {
|
||||
console.debug("LCP observer failed");
|
||||
}
|
||||
}
|
||||
|
||||
// Observe CLS
|
||||
try {
|
||||
const clsObserver = new PerformanceObserver((entryList) => {
|
||||
for (const entry of entryList.getEntries()) {
|
||||
const layoutShift = entry as any;
|
||||
if (!layoutShift.hadRecentInput) {
|
||||
clsValue += layoutShift.value;
|
||||
clsEntries.push(layoutShift.value);
|
||||
if (supported.has("layout-shift")) {
|
||||
try {
|
||||
const clsObserver = new PerformanceObserver((entryList) => {
|
||||
for (const entry of entryList.getEntries()) {
|
||||
const layoutShift = entry as any;
|
||||
if (!layoutShift.hadRecentInput) {
|
||||
clsValue += layoutShift.value;
|
||||
clsEntries.push(layoutShift.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
metrics.cls = clsValue;
|
||||
});
|
||||
clsObserver.observe({ type: "layout-shift", buffered: true });
|
||||
} catch (e) {
|
||||
console.debug("CLS not supported");
|
||||
metrics.cls = clsValue;
|
||||
});
|
||||
clsObserver.observe({ type: "layout-shift", buffered: true });
|
||||
} catch (e) {
|
||||
console.debug("CLS observer failed");
|
||||
}
|
||||
}
|
||||
|
||||
// Observe FID
|
||||
try {
|
||||
const fidObserver = new PerformanceObserver((entryList) => {
|
||||
const firstInput = entryList.getEntries()[0] as any;
|
||||
if (firstInput) {
|
||||
metrics.fid = firstInput.processingStart - firstInput.startTime;
|
||||
}
|
||||
});
|
||||
fidObserver.observe({ type: "first-input", buffered: true });
|
||||
} catch (e) {
|
||||
console.debug("FID not supported");
|
||||
if (supported.has("first-input")) {
|
||||
try {
|
||||
const fidObserver = new PerformanceObserver((entryList) => {
|
||||
const firstInput = entryList.getEntries()[0] as any;
|
||||
if (firstInput) {
|
||||
metrics.fid = firstInput.processingStart - firstInput.startTime;
|
||||
}
|
||||
});
|
||||
fidObserver.observe({ type: "first-input", buffered: true });
|
||||
} catch (e) {
|
||||
console.debug("FID observer failed");
|
||||
}
|
||||
}
|
||||
|
||||
// Observe INP (event timing)
|
||||
try {
|
||||
const interactions: number[] = [];
|
||||
const inpObserver = new PerformanceObserver((entryList) => {
|
||||
for (const entry of entryList.getEntries()) {
|
||||
const eventEntry = entry as any;
|
||||
if (eventEntry.interactionId) {
|
||||
interactions.push(eventEntry.duration);
|
||||
const sorted = [...interactions].sort((a, b) => b - a);
|
||||
const p98Index = Math.floor(sorted.length * 0.02);
|
||||
inpValue = sorted[p98Index] || sorted[0] || 0;
|
||||
metrics.inp = inpValue;
|
||||
if (supported.has("event")) {
|
||||
try {
|
||||
const interactions: number[] = [];
|
||||
const inpObserver = new PerformanceObserver((entryList) => {
|
||||
for (const entry of entryList.getEntries()) {
|
||||
const eventEntry = entry as any;
|
||||
if (eventEntry.interactionId) {
|
||||
interactions.push(eventEntry.duration);
|
||||
const sorted = [...interactions].sort((a, b) => b - a);
|
||||
const p98Index = Math.floor(sorted.length * 0.02);
|
||||
inpValue = sorted[p98Index] || sorted[0] || 0;
|
||||
metrics.inp = inpValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
inpObserver.observe({ type: "event", buffered: true });
|
||||
} catch (e) {
|
||||
console.debug("INP not supported");
|
||||
});
|
||||
inpObserver.observe({ type: "event", buffered: true });
|
||||
} catch (e) {
|
||||
console.debug("INP observer failed");
|
||||
}
|
||||
}
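For a sense of the percentile arithmetic above: with 100 recorded interactions sorted longest-first, Math.floor(100 * 0.02) = 2 selects the third-longest duration, so inpValue approximates the 98th-percentile interaction latency; with fewer than 50 samples the index is 0 and the single slowest interaction is reported.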
|
||||
|
||||
// Get navigation timing metrics
62 src/routes/api/InputHalo/appcast.xml.ts Normal file
@@ -0,0 +1,62 @@
|
||||
import type { APIEvent } from "@solidjs/start/server";
|
||||
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
|
||||
import { env } from "~/env/server";
|
||||
|
||||
/**
|
||||
* Serves the InputHalo appcast.xml file from S3
|
||||
* This endpoint is used by Sparkle updater to check for new versions
|
||||
*
|
||||
* URL: https://freno.me/api/InputHalo/appcast.xml
|
||||
*/
|
||||
export async function GET(event: APIEvent) {
|
||||
const bucket = env.VITE_DOWNLOAD_BUCKET_STRING;
|
||||
const key = "api/InputHalo/appcast.xml";
|
||||
|
||||
const credentials = {
|
||||
accessKeyId: env.MY_AWS_ACCESS_KEY,
|
||||
secretAccessKey: env.MY_AWS_SECRET_KEY
|
||||
};
|
||||
|
||||
try {
|
||||
const client = new S3Client({
|
||||
region: env.AWS_REGION,
|
||||
credentials: credentials
|
||||
});
|
||||
|
||||
const command = new GetObjectCommand({
|
||||
Bucket: bucket,
|
||||
Key: key
|
||||
});
|
||||
|
||||
const response = await client.send(command);
|
||||
|
||||
if (!response.Body) {
|
||||
return new Response("Appcast not found", {
|
||||
status: 404,
|
||||
headers: {
|
||||
"Content-Type": "text/plain"
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Stream the XML content from S3
|
||||
const body = await response.Body.transformToString();
|
||||
|
||||
return new Response(body, {
|
||||
status: 200,
|
||||
headers: {
|
||||
"Content-Type": "application/xml; charset=utf-8",
|
||||
"Cache-Control": "public, max-age=300", // Cache for 5 minutes
|
||||
"Access-Control-Allow-Origin": "*" // Allow CORS for appcast
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Failed to fetch appcast:", error);
|
||||
return new Response("Internal Server Error", {
|
||||
status: 500,
|
||||
headers: {
|
||||
"Content-Type": "text/plain"
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -3,12 +3,12 @@ import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
|
||||
import { env } from "~/env/server";
|
||||
|
||||
/**
|
||||
* Serves Gaze DMG files and delta updates from S3
|
||||
* Serves macOS app DMG files and delta updates from S3
|
||||
* This endpoint is used by Sparkle updater to download updates
|
||||
*
|
||||
* Handles:
|
||||
* - Full DMG files: /api/downloads/Gaze-0.2.2.dmg
|
||||
* - Delta updates: /api/downloads/Gaze3-2.delta
|
||||
* - Full DMG files: /api/downloads/Gaze-0.2.2.dmg, /api/downloads/InputHalo-0.1.0.dmg
|
||||
* - Delta updates: /api/downloads/Gaze3-2.delta, /api/downloads/InputHalo3-2.delta
|
||||
*
|
||||
* URL: https://freno.me/api/downloads/[filename]
|
||||
*/
|
||||
@@ -24,9 +24,11 @@ export async function GET(event: APIEvent) {
|
||||
});
|
||||
}
|
||||
|
||||
// Validate filename format (only allow Gaze files)
|
||||
// Validate filename format (only allow Gaze or InputHalo files)
|
||||
const validPrefixes = ["Gaze", "InputHalo"];
|
||||
const isValidPrefix = validPrefixes.some((prefix) => filename.startsWith(prefix));
|
||||
if (
|
||||
!filename.startsWith("Gaze") ||
|
||||
!isValidPrefix ||
|
||||
(!filename.endsWith(".dmg") && !filename.endsWith(".delta"))
|
||||
) {
|
||||
return new Response("Invalid file format", {
|
||||
|
||||
@@ -10,6 +10,7 @@ export default function DownloadsPage() {
|
||||
const [SwAText, setSwAText] = createSignal("Shapes with Abigail!");
|
||||
const [corkText, setCorkText] = createSignal("Cork");
|
||||
const [gazeText, setGazeText] = createSignal("Gaze");
|
||||
const [inputHaloText, setInputHaloText] = createSignal("InputHalo");
|
||||
|
||||
// Track loading states for each download button
|
||||
const [loadingState, setLoadingState] = createSignal<Record<string, boolean>>(
|
||||
@@ -17,7 +18,8 @@ export default function DownloadsPage() {
|
||||
lineage: false,
|
||||
cork: false,
|
||||
gaze: false,
|
||||
"shapes-with-abigail": false
|
||||
"shapes-with-abigail": false,
|
||||
inputhalo: false
|
||||
}
|
||||
);
|
||||
|
||||
@@ -53,12 +55,14 @@ export default function DownloadsPage() {
|
||||
const swaInterval = glitchText(SwAText(), setSwAText);
|
||||
const corkInterval = glitchText(corkText(), setCorkText);
|
||||
const gazeInterval = glitchText(gazeText(), setGazeText);
|
||||
const inputHaloInterval = glitchText(inputHaloText(), setInputHaloText);
|
||||
|
||||
onCleanup(() => {
|
||||
clearInterval(lalInterval);
|
||||
clearInterval(swaInterval);
|
||||
clearInterval(corkInterval);
|
||||
clearInterval(gazeInterval);
|
||||
clearInterval(inputHaloInterval);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -66,7 +70,7 @@ export default function DownloadsPage() {
|
||||
<>
|
||||
<PageHead
|
||||
title="Downloads"
|
||||
description="Download Life and Lineage, Shapes with Abigail, and Cork for macOS. Available on iOS, Android, and macOS."
|
||||
description="Download InputHalo, Gaze, Life and Lineage, Shapes with Abigail, and Cork. Available on iOS, Android, and macOS."
|
||||
/>
|
||||
|
||||
<div class="bg-base relative min-h-screen overflow-hidden px-4 pt-[15vh] pb-12 md:px-8">
|
||||
@@ -86,6 +90,52 @@ export default function DownloadsPage() {
|
||||
Ordered by date of initial release
|
||||
</div>
|
||||
<div class="mx-auto max-w-5xl space-y-16">
|
||||
{/* InputHalo */}
|
||||
<div class="border-overlay0 rounded-lg border p-6 md:p-8">
|
||||
<h2 class="text-text mb-6 font-mono text-2xl">
|
||||
<span class="text-yellow">{">"}</span> {inputHaloText()}
|
||||
</h2>
|
||||
|
||||
<div class="flex flex-col gap-8 lg:flex-row lg:justify-around">
|
||||
<div class="flex flex-col items-center gap-3">
|
||||
<span class="text-subtext0 font-mono text-sm">
|
||||
platform: macOS (14.6+)
|
||||
</span>
|
||||
<Button
|
||||
variant="download"
|
||||
size="lg"
|
||||
loading={loadingState()["inputhalo"]}
|
||||
onClick={() => download("inputhalo")}
|
||||
>
|
||||
download.dmg
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-col items-center gap-3">
|
||||
<span class="text-subtext0 font-mono text-sm">
|
||||
variant: paid (coming soon)
|
||||
</span>
|
||||
<A
|
||||
class="transition-all duration-200 ease-out hover:scale-105 active:scale-95"
|
||||
href="https://apps.apple.com/us/app/inputhalo/"
|
||||
>
|
||||
<DownloadOnAppStore size={50} />
|
||||
</A>
|
||||
</div>
|
||||
<div class="flex flex-col items-center gap-3">
|
||||
<span class="text-subtext0 font-mono text-sm">
|
||||
variant: free (coming soon)
|
||||
</span>
|
||||
<A
|
||||
class="transition-all duration-200 ease-out hover:scale-105 active:scale-95"
|
||||
href=""
|
||||
>
|
||||
<DownloadOnAppStore size={50} />
|
||||
</A>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Gaze */}
|
||||
<div class="border-overlay0 rounded-lg border p-6 md:p-8">
|
||||
<h2 class="text-text mb-6 font-mono text-2xl">
|
||||
|
||||
@@ -16,23 +16,24 @@ const assets: Record<string, string> = {
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the latest Gaze DMG from S3 by finding the most recent file in downloads/ folder
|
||||
* Get the latest DMG from S3 by finding the most recent file with the given prefix
|
||||
*/
|
||||
async function getLatestGazeDMG(
|
||||
async function getLatestDMG(
|
||||
client: S3Client,
|
||||
bucket: string
|
||||
bucket: string,
|
||||
prefix: string
|
||||
): Promise<string> {
|
||||
try {
|
||||
const listCommand = new ListObjectsV2Command({
|
||||
Bucket: bucket,
|
||||
Prefix: "downloads/Gaze-",
|
||||
Prefix: prefix,
|
||||
MaxKeys: 100
|
||||
});
|
||||
|
||||
const response = await client.send(listCommand);
|
||||
|
||||
if (!response.Contents || response.Contents.length === 0) {
|
||||
throw new Error("No Gaze DMG files found in S3");
|
||||
throw new Error(`No DMG files found in S3 with prefix ${prefix}`);
|
||||
}
|
||||
|
||||
// Filter for .dmg files only and sort by LastModified (newest first)
|
||||
@@ -45,18 +46,38 @@ async function getLatestGazeDMG(
|
||||
});
|
||||
|
||||
if (dmgFiles.length === 0) {
|
||||
throw new Error("No .dmg files found in downloads/Gaze-* prefix");
|
||||
throw new Error(`No .dmg files found in ${prefix} prefix`);
|
||||
}
|
||||
|
||||
const latestFile = dmgFiles[0].Key!;
|
||||
console.log(`Latest Gaze DMG: ${latestFile}`);
|
||||
console.log(`Latest DMG: ${latestFile}`);
|
||||
return latestFile;
|
||||
} catch (error) {
|
||||
console.error("Error finding latest Gaze DMG:", error);
|
||||
console.error(`Error finding latest DMG for ${prefix}:`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the latest Gaze DMG from S3
|
||||
*/
|
||||
async function getLatestGazeDMG(
|
||||
client: S3Client,
|
||||
bucket: string
|
||||
): Promise<string> {
|
||||
return getLatestDMG(client, bucket, "downloads/Gaze-");
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the latest InputHalo DMG from S3
|
||||
*/
|
||||
async function getLatestInputHaloDMG(
|
||||
client: S3Client,
|
||||
bucket: string
|
||||
): Promise<string> {
|
||||
return getLatestDMG(client, bucket, "downloads/InputHalo-");
|
||||
}
|
||||
|
||||
export const downloadsRouter = createTRPCRouter({
|
||||
getDownloadUrl: publicProcedure
|
||||
.input(z.object({ asset_name: z.string() }))
|
||||
@@ -76,9 +97,11 @@ export const downloadsRouter = createTRPCRouter({
|
||||
try {
|
||||
let fileKey: string;
|
||||
|
||||
// Special handling for Gaze - find latest version automatically
|
||||
// Special handling for macOS apps - find latest version automatically
|
||||
if (input.asset_name === "gaze") {
|
||||
fileKey = await getLatestGazeDMG(client, bucket);
|
||||
} else if (input.asset_name === "inputhalo") {
|
||||
fileKey = await getLatestInputHaloDMG(client, bucket);
|
||||
} else {
|
||||
// Use static mapping for other assets
|
||||
fileKey = assets[input.asset_name];
|
||||
|
||||
@@ -33,7 +33,7 @@ export const gitActivityRouter = createTRPCRouter({
|
||||
`github-commits-${input.limit}`,
|
||||
CACHE_CONFIG.GIT_ACTIVITY_CACHE_TTL_MS,
|
||||
async () => {
|
||||
// Use Events API to get recent push events - much more efficient
|
||||
// Use Events API to get recent push events
|
||||
const eventsResponse = await fetchWithTimeout(
|
||||
`https://api.github.com/users/MikeFreno/events/public?per_page=100`,
|
||||
{
|
||||
@@ -47,20 +47,23 @@ export const gitActivityRouter = createTRPCRouter({
|
||||
|
||||
await checkResponse(eventsResponse);
|
||||
const events = await eventsResponse.json();
|
||||
const allCommits: GitCommit[] = [];
|
||||
|
||||
// Extract push events and fetch commit details
|
||||
// Collect (repo, sha) pairs from push events up front
|
||||
const toFetch: { repoName: string; sha: string }[] = [];
|
||||
for (const event of events) {
|
||||
if (event.type !== "PushEvent") continue;
|
||||
if (allCommits.length >= input.limit * 5) break; // Get extra to ensure we have enough
|
||||
if (toFetch.length >= input.limit * 5) break;
|
||||
toFetch.push({
|
||||
repoName: event.repo.name,
|
||||
sha: event.payload.head
|
||||
});
|
||||
}
|
||||
|
||||
const repoName = event.repo.name;
|
||||
const commitSha = event.payload.head;
|
||||
|
||||
try {
|
||||
// Fetch the actual commit details to get the message
|
||||
const commitResponse = await fetchWithTimeout(
|
||||
`https://api.github.com/repos/${repoName}/commits/${commitSha}`,
|
||||
// Fetch all commits in parallel instead of serially
|
||||
const results = await Promise.allSettled(
|
||||
toFetch.map(({ repoName, sha }) =>
|
||||
fetchWithTimeout(
|
||||
`https://api.github.com/repos/${repoName}/commits/${sha}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${env.GITHUB_API_TOKEN}`,
|
||||
@@ -68,50 +71,38 @@ export const gitActivityRouter = createTRPCRouter({
|
||||
},
|
||||
timeout: 5000
|
||||
}
|
||||
);
|
||||
)
|
||||
.then((res) => (res.ok ? res.json() : null))
|
||||
.catch(() => null)
|
||||
)
|
||||
);
|
||||
|
||||
if (commitResponse.ok) {
|
||||
const commit = await commitResponse.json();
|
||||
const allCommits: GitCommit[] = [];
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
const result = results[i];
|
||||
if (result.status === "rejected" || !result.value) continue;
|
||||
const commit = result.value;
|
||||
const { repoName } = toFetch[i];
|
||||
|
||||
// Filter for your commits
|
||||
if (
|
||||
commit.author?.login === "MikeFreno" ||
|
||||
commit.author?.login === "mikefreno" ||
|
||||
commit.commit?.author?.email?.includes("mike")
|
||||
) {
|
||||
allCommits.push({
|
||||
sha: commit.sha?.substring(0, 7) || "unknown",
|
||||
message:
|
||||
commit.commit?.message?.split("\n")[0] || "No message",
|
||||
author:
|
||||
commit.commit?.author?.name ||
|
||||
commit.author?.login ||
|
||||
"Unknown",
|
||||
date:
|
||||
commit.commit?.author?.date || new Date().toISOString(),
|
||||
repo: repoName,
|
||||
url: `https://github.com/${repoName}/commit/${commit.sha}`
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (
|
||||
error instanceof NetworkError ||
|
||||
error instanceof TimeoutError
|
||||
) {
|
||||
console.warn(
|
||||
`Network error fetching commit ${commitSha} for ${repoName}, skipping`
|
||||
);
|
||||
} else {
|
||||
console.error(
|
||||
`Error fetching commit ${commitSha} for ${repoName}:`,
|
||||
error
|
||||
);
|
||||
}
|
||||
if (
|
||||
commit.author?.login === "MikeFreno" ||
|
||||
commit.author?.login === "mikefreno" ||
|
||||
commit.commit?.author?.email?.includes("mike")
|
||||
) {
|
||||
allCommits.push({
|
||||
sha: commit.sha?.substring(0, 7) || "unknown",
|
||||
message: commit.commit?.message?.split("\n")[0] || "No message",
|
||||
author:
|
||||
commit.commit?.author?.name ||
|
||||
commit.author?.login ||
|
||||
"Unknown",
|
||||
date: commit.commit?.author?.date || new Date().toISOString(),
|
||||
repo: repoName,
|
||||
url: `https://github.com/${repoName}/commit/${commit.sha}`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Already sorted by event date, but sort again by commit date to be precise
|
||||
allCommits.sort(
|
||||
(a, b) => new Date(b.date).getTime() - new Date(a.date).getTime()
|
||||
);
|
||||
@@ -155,13 +146,11 @@ export const gitActivityRouter = createTRPCRouter({
|
||||
|
||||
await checkResponse(reposResponse);
|
||||
const repos = await reposResponse.json();
|
||||
const allCommits: GitCommit[] = [];
|
||||
|
||||
for (const repo of repos) {
|
||||
if (allCommits.length >= input.limit * 3) break; // Get extra to sort later
|
||||
|
||||
try {
|
||||
const commitsResponse = await fetchWithTimeout(
|
||||
// Fetch commits for all repos in parallel instead of serially
|
||||
const commitResults = await Promise.allSettled(
|
||||
repos.map((repo: any) =>
|
||||
fetchWithTimeout(
|
||||
`${env.GITEA_URL}/api/v1/repos/Mike/${repo.name}/commits?limit=5`,
|
||||
{
|
||||
headers: {
|
||||
@@ -170,46 +159,36 @@ export const gitActivityRouter = createTRPCRouter({
|
||||
},
|
||||
timeout: 10000
|
||||
}
|
||||
);
|
||||
)
|
||||
.then((res) => (res.ok ? res.json() : []))
|
||||
.catch(() => [])
|
||||
)
|
||||
);
|
||||
|
||||
if (commitsResponse.ok) {
|
||||
const commits = await commitsResponse.json();
|
||||
for (const commit of commits) {
|
||||
if (
|
||||
(commit.commit?.author?.email &&
|
||||
commit.commit.author.email.includes(
|
||||
"michael@freno.me"
|
||||
)) ||
|
||||
commit.commit.author.email.includes(
|
||||
"michaelt.freno@gmail.com"
|
||||
) // Filter for your commits
|
||||
) {
|
||||
allCommits.push({
|
||||
sha: commit.sha?.substring(0, 7) || "unknown",
|
||||
message:
|
||||
commit.commit?.message?.split("\n")[0] || "No message",
|
||||
author: commit.commit?.author?.name || repo.owner.login,
|
||||
date:
|
||||
commit.commit?.author?.date || new Date().toISOString(),
|
||||
repo: repo.full_name,
|
||||
url: `${env.GITEA_URL}/${repo.full_name}/commit/${commit.sha}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const allCommits: GitCommit[] = [];
|
||||
for (let i = 0; i < commitResults.length; i++) {
|
||||
const result = commitResults[i];
|
||||
if (result.status === "rejected") continue;
|
||||
const repo = repos[i];
|
||||
const commits: any[] = result.value;
|
||||
for (const commit of commits) {
|
||||
const email: string = commit.commit?.author?.email ?? "";
|
||||
if (
|
||||
error instanceof NetworkError ||
|
||||
error instanceof TimeoutError
|
||||
email.includes("michael@freno.me") ||
|
||||
email.includes("michaelt.freno@gmail.com")
|
||||
) {
|
||||
console.warn(
|
||||
`Network error fetching commits for ${repo.name}, skipping`
|
||||
);
|
||||
} else {
|
||||
console.error(
|
||||
`Error fetching commits for ${repo.name}:`,
|
||||
error
|
||||
);
|
||||
allCommits.push({
|
||||
sha: commit.sha?.substring(0, 7) || "unknown",
|
||||
message:
|
||||
commit.commit?.message?.split("\n")[0] || "No message",
|
||||
author:
|
||||
commit.commit?.author?.name ||
|
||||
repo.owner?.login ||
|
||||
"Unknown",
|
||||
date: commit.commit?.author?.date || new Date().toISOString(),
|
||||
repo: repo.full_name,
|
||||
url: `${env.GITEA_URL}/${repo.full_name}/commit/${commit.sha}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -336,11 +315,13 @@ export const gitActivityRouter = createTRPCRouter({
|
||||
|
||||
const threeMonthsAgo = new Date();
|
||||
threeMonthsAgo.setMonth(threeMonthsAgo.getMonth() - 3);
|
||||
const sinceParam = threeMonthsAgo.toISOString();
|
||||
|
||||
for (const repo of repos) {
|
||||
try {
|
||||
const commitsResponse = await fetchWithTimeout(
|
||||
`${env.GITEA_URL}/api/v1/repos/${repo.owner.login}/${repo.name}/commits?limit=100`,
|
||||
// Fetch commits for all repos in parallel, scoped to the 3-month window
|
||||
const commitResults = await Promise.allSettled(
|
||||
repos.map((repo: any) =>
|
||||
fetchWithTimeout(
|
||||
`${env.GITEA_URL}/api/v1/repos/${repo.owner.login}/${repo.name}/commits?limit=100&since=${sinceParam}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `token ${env.GITEA_TOKEN}`,
|
||||
@@ -348,31 +329,23 @@ export const gitActivityRouter = createTRPCRouter({
|
||||
},
|
||||
timeout: 10000
|
||||
}
|
||||
);
|
||||
)
|
||||
.then((res) => (res.ok ? res.json() : []))
|
||||
.catch(() => [])
|
||||
)
|
||||
);
|
||||
|
||||
if (commitsResponse.ok) {
|
||||
const commits = await commitsResponse.json();
|
||||
for (const commit of commits) {
|
||||
const date = new Date(commit.commit.author.date)
|
||||
.toISOString()
|
||||
.split("T")[0];
|
||||
contributionsByDay.set(
|
||||
date,
|
||||
(contributionsByDay.get(date) || 0) + 1
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (
|
||||
error instanceof NetworkError ||
|
||||
error instanceof TimeoutError
|
||||
) {
|
||||
console.warn(
|
||||
`Network error fetching commits for ${repo.name}, skipping`
|
||||
);
|
||||
} else {
|
||||
console.error(`Error fetching commits for ${repo.name}:`, error);
|
||||
}
|
||||
for (const result of commitResults) {
|
||||
if (result.status === "rejected") continue;
|
||||
const commits: any[] = result.value;
|
||||
for (const commit of commits) {
|
||||
const date = new Date(commit.commit.author.date)
|
||||
.toISOString()
|
||||
.split("T")[0];
|
||||
contributionsByDay.set(
|
||||
date,
|
||||
(contributionsByDay.get(date) || 0) + 1
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,167 +1,89 @@
|
||||
/**
|
||||
* Redis-backed Cache for Serverless
|
||||
* In-memory cache with TTL
|
||||
*
|
||||
* Uses Redis for persistent caching across serverless invocations.
|
||||
* Redis provides:
|
||||
* - Fast in-memory storage
|
||||
* - Built-in TTL expiration (automatic cleanup)
|
||||
* - Persistence across function invocations
|
||||
* - Native support in Vercel and other platforms
|
||||
* Redis was replaced because on a low-traffic site the cache TTL almost always
|
||||
* expires between visits, so every request paid Redis connection + round-trip
|
||||
* overhead with no benefit. A module-level Map has zero network latency:
|
||||
* cache hits are a single dictionary lookup, misses fall through immediately.
|
||||
*/
|
||||
|
||||
import { createClient } from "redis";
|
||||
import { env } from "~/env/server";
|
||||
import { CACHE_CONFIG } from "~/config";
|
||||
|
||||
let redisClient: ReturnType<typeof createClient> | null = null;
|
||||
let isConnecting = false;
|
||||
let connectionError: Error | null = null;
|
||||
|
||||
/**
|
||||
* Get or create Redis client (singleton pattern)
|
||||
*/
|
||||
async function getRedisClient() {
|
||||
if (redisClient && redisClient.isOpen) {
|
||||
return redisClient;
|
||||
}
|
||||
|
||||
if (isConnecting) {
|
||||
// Wait for existing connection attempt
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
return getRedisClient();
|
||||
}
|
||||
|
||||
if (connectionError) {
|
||||
throw connectionError;
|
||||
}
|
||||
|
||||
try {
|
||||
isConnecting = true;
|
||||
redisClient = createClient({ url: env.REDIS_URL });
|
||||
|
||||
redisClient.on("error", (err) => {
|
||||
console.error("Redis Client Error:", err);
|
||||
connectionError = err;
|
||||
});
|
||||
|
||||
await redisClient.connect();
|
||||
isConnecting = false;
|
||||
connectionError = null;
|
||||
return redisClient;
|
||||
} catch (error) {
|
||||
isConnecting = false;
|
||||
connectionError = error as Error;
|
||||
console.error("Failed to connect to Redis:", error);
|
||||
throw error;
|
||||
}
|
||||
interface CacheEntry<T> {
|
||||
data: T;
|
||||
/** Absolute timestamp (ms) after which this entry is considered stale */
|
||||
expiresAt: number;
|
||||
/** Absolute timestamp (ms) after which stale fallback is also discarded */
|
||||
staleExpiresAt: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Redis-backed cache interface
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const store = new Map<string, CacheEntry<any>>();
|
||||
|
||||
export const cache = {
|
||||
async get<T>(key: string): Promise<T | null> {
|
||||
try {
|
||||
const client = await getRedisClient();
|
||||
const value = await client.get(key);
|
||||
get<T>(key: string): T | null {
|
||||
const entry = store.get(key) as CacheEntry<T> | undefined;
|
||||
if (!entry) return null;
|
||||
if (Date.now() > entry.expiresAt) return null;
|
||||
return entry.data;
|
||||
},
|
||||
|
||||
if (!value) {
|
||||
return null;
|
||||
}
|
||||
set<T>(key: string, data: T, ttlMs: number): void {
|
||||
const existing = store.get(key);
|
||||
store.set(key, {
|
||||
data,
|
||||
expiresAt: Date.now() + ttlMs,
|
||||
// Preserve an existing stale expiry if present, otherwise default
|
||||
staleExpiresAt:
|
||||
existing?.staleExpiresAt ?? Date.now() + CACHE_CONFIG.MAX_STALE_DATA_MS
|
||||
});
|
||||
},
|
||||
|
||||
return JSON.parse(value) as T;
|
||||
} catch (error) {
|
||||
console.error(`Cache get error for key "${key}":`, error);
|
||||
return null;
|
||||
delete(key: string): void {
|
||||
store.delete(key);
|
||||
},
|
||||
|
||||
deleteByPrefix(prefix: string): void {
|
||||
for (const key of store.keys()) {
|
||||
if (key.startsWith(prefix)) store.delete(key);
|
||||
}
|
||||
},
|
||||
|
||||
async set<T>(key: string, data: T, ttlMs: number): Promise<void> {
|
||||
try {
|
||||
const client = await getRedisClient();
|
||||
const value = JSON.stringify(data);
|
||||
|
||||
// Redis SET with EX (expiry in seconds)
|
||||
await client.set(key, value, {
|
||||
EX: Math.ceil(ttlMs / 1000)
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`Cache set error for key "${key}":`, error);
|
||||
}
|
||||
clear(): void {
|
||||
store.clear();
|
||||
},
|
||||
|
||||
async delete(key: string): Promise<void> {
|
||||
try {
|
||||
const client = await getRedisClient();
|
||||
await client.del(key);
|
||||
} catch (error) {
|
||||
console.error(`Cache delete error for key "${key}":`, error);
|
||||
}
|
||||
},
|
||||
|
||||
async deleteByPrefix(prefix: string): Promise<void> {
|
||||
try {
|
||||
const client = await getRedisClient();
|
||||
const keys = await client.keys(`${prefix}*`);
|
||||
|
||||
if (keys.length > 0) {
|
||||
await client.del(keys);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Cache deleteByPrefix error for prefix "${prefix}":`,
|
||||
error
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
async clear(): Promise<void> {
|
||||
try {
|
||||
const client = await getRedisClient();
|
||||
await client.flushDb();
|
||||
} catch (error) {
|
||||
console.error("Cache clear error:", error);
|
||||
}
|
||||
},
|
||||
|
||||
async has(key: string): Promise<boolean> {
|
||||
try {
|
||||
const client = await getRedisClient();
|
||||
const exists = await client.exists(key);
|
||||
return exists === 1;
|
||||
} catch (error) {
|
||||
console.error(`Cache has error for key "${key}":`, error);
|
||||
return false;
|
||||
}
|
||||
has(key: string): boolean {
|
||||
const entry = store.get(key);
|
||||
if (!entry) return false;
|
||||
return Date.now() <= entry.expiresAt;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute function with Redis caching
|
||||
* Execute function with in-memory caching.
|
||||
*/
|
||||
export async function withCache<T>(
|
||||
key: string,
|
||||
ttlMs: number,
|
||||
fn: () => Promise<T>
|
||||
): Promise<T> {
|
||||
const cached = await cache.get<T>(key);
|
||||
if (cached !== null) {
|
||||
return cached;
|
||||
}
|
||||
const cached = cache.get<T>(key);
|
||||
if (cached !== null) return cached;
|
||||
|
||||
const result = await fn();
|
||||
await cache.set(key, result, ttlMs);
|
||||
cache.set(key, result, ttlMs);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute function with Redis caching and stale data fallback
|
||||
* Execute function with caching and stale-data fallback.
|
||||
*
|
||||
* Strategy:
|
||||
* 1. Try to get fresh cached data (within TTL)
|
||||
* 2. If not found, execute function
|
||||
* 3. If function fails, try to get stale data (ignore TTL)
|
||||
* 4. Store result with TTL for future requests
|
||||
* 1. Return data if fresh (within TTL).
|
||||
* 2. Otherwise run fn().
|
||||
* 3. If fn() throws, return stale data if still within maxStaleMs.
|
||||
* 4. Store fresh result for future requests.
|
||||
*/
|
||||
export async function withCacheAndStale<T>(
|
||||
key: string,
|
||||
@@ -175,34 +97,29 @@ export async function withCacheAndStale<T>(
|
||||
const { maxStaleMs = CACHE_CONFIG.MAX_STALE_DATA_MS, logErrors = true } =
|
||||
options;
|
||||
|
||||
// Try fresh cache
|
||||
const cached = await cache.get<T>(key);
|
||||
if (cached !== null) {
|
||||
return cached;
|
||||
}
|
||||
const now = Date.now();
|
||||
const entry = store.get(key) as CacheEntry<T> | undefined;
|
||||
|
||||
// Fresh hit
|
||||
if (entry && entry.expiresAt > now) return entry.data;
|
||||
|
||||
try {
|
||||
// Execute function
|
||||
const result = await fn();
|
||||
await cache.set(key, result, ttlMs);
|
||||
// Also store with longer TTL for stale fallback
|
||||
const staleKey = `${key}:stale`;
|
||||
await cache.set(staleKey, result, maxStaleMs);
|
||||
store.set(key, {
|
||||
data: result,
|
||||
expiresAt: now + ttlMs,
|
||||
staleExpiresAt: now + maxStaleMs
|
||||
});
|
||||
return result;
|
||||
} catch (error) {
|
||||
if (logErrors) {
|
||||
console.error(`Error fetching data for cache key "${key}":`, error);
|
||||
}
|
||||
|
||||
// Try stale cache with longer TTL key
|
||||
const staleKey = `${key}:stale`;
|
||||
const staleData = await cache.get<T>(staleKey);
|
||||
|
||||
if (staleData !== null) {
|
||||
if (logErrors) {
|
||||
console.log(`Serving stale data for cache key "${key}"`);
|
||||
}
|
||||
return staleData;
|
||||
// Stale fallback
|
||||
if (entry && entry.staleExpiresAt > now) {
|
||||
if (logErrors) console.log(`Serving stale data for cache key "${key}"`);
|
||||
return entry.data;
|
||||
}
|
||||
|
||||
throw error;
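A hedged usage sketch of the rewritten cache helpers (not part of the diff). The import path and the fetcher are assumptions, and the parameter order follows withCache as shown above with an options object appended.

import { withCacheAndStale } from "./cache"; // path assumed for illustration

// Hypothetical upstream call standing in for a real fetch.
declare function fetchRemoteCommits(): Promise<string[]>;

export function getCommitsCached(): Promise<string[]> {
  // Fresh for five minutes; if fetchRemoteCommits throws, data cached within
  // the last hour is returned instead, otherwise the error propagates.
  return withCacheAndStale(
    "remote-commits",
    5 * 60 * 1000,
    () => fetchRemoteCommits(),
    { maxStaleMs: 60 * 60 * 1000 }
  );
}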