FRE-4471: Scaffold DarkWatch MVP — monorepo, schema, services, API routes, tests

- Turborepo monorepo structure (packages: api, db, types, jobs; services: darkwatch)
- Prisma schema: User, WatchListItem, Exposure, Alert, ScanJob models
- WatchListService: CRUD with normalization, dedup, tier-based limits
- HIBPService: API integration with severity scoring
- MatchingEngine: exact-match with content hash dedup
- AlertPipeline: dedup window, email notifications
- ScanService: orchestrates watch list -> HIBP -> match -> alert flow
- BullMQ job workers for scan and alert processing
- Fastify API routes: watchlist, exposures, alerts, scan
- Docker Compose: PostgreSQL 16 + Redis 7
- 15 unit tests passing
- Implementation plan document uploaded
This commit is contained in:
Senior Engineer
2026-04-29 09:47:45 -04:00
committed by Michael Freno
parent f8f90502fa
commit 218de3b03b
40 changed files with 5225 additions and 0 deletions

View File

@@ -0,0 +1,18 @@
{
"name": "@shieldai/jobs",
"version": "0.1.0",
"scripts": {
"dev": "tsx watch src/index.ts",
"build": "tsc",
"start": "node dist/index.js",
"test": "vitest run",
"lint": "eslint src/"
},
"dependencies": {
"bullmq": "^5.24.0",
"@shieldai/db": "0.1.0",
"@shieldai/types": "0.1.0",
"@shieldai/darkwatch": "0.1.0",
"ioredis": "^5.4.0"
}
}

View File

@@ -0,0 +1,69 @@
/**
 * DarkWatch background job workers (module entry point).
 *
 * On load, this module:
 *  - opens one shared Redis connection,
 *  - starts a "darkwatch-scans" Worker (runs a user scan via ScanService),
 *  - starts a "darkwatch-alerts" Worker (flushes pending alerts via AlertPipeline),
 * and exports helpers to enqueue scan jobs and register the recurring
 * alert-processing job.
 */
import { Queue, Worker } from "bullmq";
import { Redis } from "ioredis";
import { ScanService, AlertPipeline } from "@shieldai/darkwatch";

const redisUrl = process.env.REDIS_URL || "redis://localhost:6379";
// BullMQ Workers use blocking Redis commands and require
// maxRetriesPerRequest: null on the connection — the v5 Worker
// constructor throws if this is not set.
const connection = new Redis(redisUrl, { maxRetriesPerRequest: null });

const scanQueue = new Queue("darkwatch-scans", { connection });
const alertQueue = new Queue("darkwatch-alerts", { connection });

// Scan worker: up to 3 scans in parallel. Job data: { userId, source? }.
const scanWorker = new Worker(
  "darkwatch-scans",
  async (job) => {
    const { userId, source } = job.data;
    const scanService = new ScanService();
    const resultCount = await scanService.runScan(userId, source);
    return { resultCount, completedAt: new Date().toISOString() };
  },
  { connection, concurrency: 3 }
);

// Alert worker: concurrency 1 so alert processing runs serially.
const alertWorker = new Worker(
  "darkwatch-alerts",
  async () => {
    const pipeline = new AlertPipeline();
    const sent = await pipeline.sendPendingAlerts();
    return { sent, processedAt: new Date().toISOString() };
  },
  { connection, concurrency: 1 }
);

// NOTE(review): the original instantiated `new QueueScheduler("darkwatch-alerts")`
// here. QueueScheduler was removed in BullMQ v5 (this package depends on
// bullmq ^5.24.0) — delayed and repeatable jobs are handled by Queue/Worker
// directly, so no scheduler instance is needed (and the import would fail).

scanWorker.on("completed", (job) => {
  console.log(`[Scan] Job ${job.id} completed: ${JSON.stringify(job.returnvalue)}`);
});
scanWorker.on("failed", (job, err) => {
  // `job` is undefined when the failure is not tied to a specific job.
  console.error(`[Scan] Job ${job?.id ?? "unknown"} failed: ${err.message}`);
});
alertWorker.on("completed", (job) => {
  console.log(`[Alert] Job ${job.id} completed: ${JSON.stringify(job.returnvalue)}`);
});
alertWorker.on("failed", (job, err) => {
  console.error(`[Alert] Job ${job?.id ?? "unknown"} failed: ${err.message}`);
});

/**
 * Enqueue a scan for one user.
 *
 * Retries up to 3 times with exponential backoff starting at 5s. The
 * timestamped jobId makes each enqueue unique, so repeated calls are NOT
 * deduplicated against each other.
 */
export async function addScanJob(userId: string, source?: string) {
  return scanQueue.add(
    "scan",
    { userId, source },
    {
      attempts: 3,
      backoff: { type: "exponential", delay: 5000 },
      jobId: `scan-${userId}-${Date.now()}`,
    }
  );
}

/**
 * Register the recurring alert-processing job (cron: every 5 minutes).
 *
 * NOTE(review): BullMQ keys repeatable jobs by their repeat options; confirm
 * the fixed jobId interacts with repeat registration as intended in v5.
 */
export async function scheduleAlertProcessing() {
  return alertQueue.add(
    "process-alerts",
    {},
    {
      repeat: { pattern: "*/5 * * * *" },
      jobId: "alert-processor-recurring",
    }
  );
}

// "ready" fires once a Worker's Redis connection is established. The original
// listened for "waiting", which is not a Worker readiness event.
scanWorker.on("ready", () => console.log("[Worker] Scan worker ready"));
alertWorker.on("ready", () => console.log("[Worker] Alert worker ready"));
console.log("Job workers started");

View File

@@ -0,0 +1,8 @@
{
// Package-level TypeScript config for @shieldai/jobs.
// Inherits compiler settings (e.g. strictness, target) from the monorepo
// root tsconfig two levels up.
"extends": "../../tsconfig.json",
"compilerOptions": {
// Emit compiled JS to ./dist — matches the package's
// "start": "node dist/index.js" script.
"outDir": "./dist",
// Root emit at ./src so dist mirrors the src layout without nesting.
"rootDir": "./src"
},
"include": ["src/**/*.ts"]
}