FRE-4471: Scaffold DarkWatch MVP — monorepo, schema, services, API routes, tests

- Turborepo monorepo structure (packages: api, db, types, jobs; services: darkwatch)
- Prisma schema: User, WatchListItem, Exposure, Alert, ScanJob models
- WatchListService: CRUD with normalization, dedup, tier-based limits
- HIBPService: API integration with severity scoring
- MatchingEngine: exact-match with content hash dedup
- AlertPipeline: dedup window, email notifications
- ScanService: orchestrates watch list -> HIBP -> match -> alert flow
- BullMQ job workers for scan and alert processing
- Fastify API routes: watchlist, exposures, alerts, scan
- Docker Compose: PostgreSQL 16 + Redis 7
- 15 unit tests passing
- Implementation plan document uploaded
This commit is contained in:
Senior Engineer
2026-04-29 09:47:45 -04:00
committed by Michael Freno
parent f8f90502fa
commit 218de3b03b
40 changed files with 5225 additions and 0 deletions

21
packages/api/package.json Normal file
View File

@@ -0,0 +1,21 @@
{
"name": "@shieldai/api",
"version": "0.1.0",
"scripts": {
"dev": "tsx watch src/server.ts",
"build": "tsc",
"start": "node dist/server.js",
"test": "vitest run",
"lint": "eslint src/"
},
"dependencies": {
"@fastify/cors": "^10.0.1",
"@fastify/helmet": "^13.0.1",
"@fastify/rate-limit": "^9.0.0",
"@fastify/sensible": "^6.0.1",
"@shieldai/darkwatch": "0.1.0",
"@shieldai/db": "0.1.0",
"@shieldai/types": "0.1.0",
"fastify": "^5.2.0"
}
}

View File

@@ -0,0 +1,30 @@
import { FastifyInstance } from "fastify";
import { AlertPipeline } from "@shieldai/darkwatch";
/**
 * Alert routes (mounted under /api/v1/darkwatch/alerts):
 *  GET  /          — paginated list of the caller's alerts
 *  PATCH /:id/read — mark one alert read, scoped to the caller
 */
export function alertRoutes(fastify: FastifyInstance) {
  const pipeline = new AlertPipeline();

  // Type the querystring so request.query is not `unknown` under strict mode.
  fastify.get<{ Querystring: { limit?: string; offset?: string } }>(
    "/",
    async (request, reply) => {
      const userId = (request.user as { id: string } | undefined)?.id;
      if (!userId) {
        return reply.code(401).send({ error: "User not authenticated" });
      }
      // parseInt("" / undefined, 10) is NaN, so `||` falls back to the defaults;
      // clamp to keep a hostile `?limit=1e9` from turning into a table scan.
      const limit = Math.min(Math.max(parseInt(request.query.limit ?? "", 10) || 50, 1), 200);
      const offset = Math.max(parseInt(request.query.offset ?? "", 10) || 0, 0);
      const alerts = await pipeline.getUserAlerts(userId, limit, offset);
      return reply.send(alerts);
    }
  );

  fastify.patch<{ Params: { id: string } }>("/:id/read", async (request, reply) => {
    const userId = (request.user as { id: string } | undefined)?.id;
    if (!userId) {
      return reply.code(401).send({ error: "User not authenticated" });
    }
    // markRead takes the userId so one user cannot mark another user's alert.
    await pipeline.markRead(request.params.id, userId);
    return reply.send({ read: true });
  });
}

View File

@@ -0,0 +1,27 @@
import { FastifyInstance } from "fastify";
import { MatchingEngine } from "@shieldai/darkwatch";
/**
 * Exposure routes (mounted under /api/v1/darkwatch/exposures):
 *  GET /    — all exposures matched to the caller's watch list
 *  GET /:id — a single exposure by id
 */
export function exposureRoutes(fastify: FastifyInstance) {
  const engine = new MatchingEngine();

  fastify.get("/", async (request, reply) => {
    const userId = (request.user as { id: string } | undefined)?.id;
    if (!userId) {
      return reply.code(401).send({ error: "User not authenticated" });
    }
    const exposures = await engine.getExposuresForUser(userId);
    return reply.send(exposures);
  });

  fastify.get<{ Params: { id: string } }>("/:id", async (request, reply) => {
    // Previously this handler skipped authentication entirely, letting any
    // unauthenticated caller fetch any exposure by id.
    const userId = (request.user as { id: string } | undefined)?.id;
    if (!userId) {
      return reply.code(401).send({ error: "User not authenticated" });
    }
    const exposure = await engine.getExposureById(request.params.id);
    if (!exposure) {
      return reply.code(404).send({ error: "Exposure not found" });
    }
    // TODO(review): getExposureById is not scoped to the caller — verify the
    // exposure's watch-list item belongs to userId before returning it (IDOR).
    return reply.send(exposure);
  });
}

View File

@@ -0,0 +1,15 @@
import { FastifyInstance } from "fastify";
/**
 * Registers every DarkWatch sub-router under /api/v1/darkwatch.
 * Sub-routers are lazy-loaded; since the four imports are independent they
 * are awaited in parallel instead of sequentially.
 */
export function darkwatchRoutes(fastify: FastifyInstance) {
  fastify.register(
    async (root) => {
      const [{ watchlistRoutes }, { exposureRoutes }, { alertRoutes }, { scanRoutes }] =
        await Promise.all([
          import("./watchlist.routes"),
          import("./exposure.routes"),
          import("./alert.routes"),
          import("./scan.routes"),
        ]);
      root.register(watchlistRoutes, { prefix: "/watchlist" });
      root.register(exposureRoutes, { prefix: "/exposures" });
      root.register(alertRoutes, { prefix: "/alerts" });
      root.register(scanRoutes, { prefix: "/scan" });
    },
    { prefix: "/api/v1/darkwatch" }
  );
}

View File

@@ -0,0 +1,32 @@
import { FastifyInstance } from "fastify";
import { ScanService } from "@shieldai/darkwatch";
import { DataSource } from "@shieldai/types";
/**
 * Scan routes (mounted under /api/v1/darkwatch/scan):
 *  POST /        — run a scan for the caller, optionally limited to one source
 *  GET  /history — the caller's past scan jobs
 */
export function scanRoutes(fastify: FastifyInstance) {
  const scanService = new ScanService();

  fastify.post("/", async (request, reply) => {
    const userId = (request.user as { id: string } | undefined)?.id;
    if (!userId) {
      return reply.code(401).send({ error: "User not authenticated" });
    }
    // request.body is null when the client sends no body at all; guard before
    // reading properties (previously this threw a TypeError -> 500).
    const body = (request.body ?? {}) as { source?: string };
    let source: DataSource | undefined;
    if (body.source !== undefined) {
      // Validate instead of blindly casting an arbitrary string to DataSource.
      if (!(Object.values(DataSource) as string[]).includes(body.source)) {
        return reply.code(400).send({ error: `Unknown source: ${body.source}` });
      }
      source = body.source as DataSource;
    }
    const resultCount = await scanService.runScan(userId, source);
    return reply.code(200).send({ scanned: true, resultCount });
  });

  fastify.get("/history", async (request, reply) => {
    const userId = (request.user as { id: string } | undefined)?.id;
    if (!userId) {
      return reply.code(401).send({ error: "User not authenticated" });
    }
    const history = await scanService.getScanHistory(userId);
    return reply.send(history);
  });
}

View File

@@ -0,0 +1,41 @@
import { FastifyInstance } from "fastify";
import { WatchListService } from "@shieldai/darkwatch";
import { IdentifierType } from "@shieldai/types";
export function watchlistRoutes(fastify: FastifyInstance) {
const service = new WatchListService();
fastify.post("/", async (request, reply) => {
const body = request.body as { identifierType: string; identifierValue: string };
const userId = (request.user as { id: string })?.id;
if (!userId) {
return reply.code(401).send({ error: "User not authenticated" });
}
const item = await service.addItem(userId, body.identifierType as IdentifierType, body.identifierValue);
return reply.code(201).send(item);
});
fastify.get("/", async (request, reply) => {
const userId = (request.user as { id: string })?.id;
if (!userId) {
return reply.code(401).send({ error: "User not authenticated" });
}
const items = await service.listItems(userId);
return reply.send(items);
});
fastify.delete("/:id", async (request, reply) => {
const userId = (request.user as { id: string })?.id;
if (!userId) {
return reply.code(401).send({ error: "User not authenticated" });
}
const result = await service.removeItem(userId, request.params.id);
return reply.send({ count: result.count });
});
}

View File

@@ -0,0 +1,31 @@
import Fastify from "fastify";
import cors from "@fastify/cors";
import helmet from "@fastify/helmet";
import sensible from "@fastify/sensible";
import { darkwatchRoutes } from "./routes";
// Single Fastify instance for the process; log level is tunable via env.
const app = Fastify({
  logger: {
    level: process.env.LOG_LEVEL || "info",
  },
});

// Resolve the port once so the listen() call and the log line cannot disagree.
const PORT = parseInt(process.env.PORT || "3000", 10);

/**
 * Registers middleware and routes, then starts listening.
 * Exits the process on any startup failure.
 */
async function bootstrap() {
  try {
    // Previously the register() calls sat outside the try block, so a plugin
    // failure became an unhandled promise rejection instead of a clean exit.
    await app.register(cors, { origin: true });
    await app.register(helmet);
    await app.register(sensible);
    await app.register(darkwatchRoutes);
    app.get("/health", async () => ({ status: "ok", timestamp: new Date().toISOString() }));
    await app.listen({ port: PORT, host: "0.0.0.0" });
    app.log.info(`Server listening on port ${PORT}`);
  } catch (err) {
    app.log.error(err);
    process.exit(1);
  }
}

// `void` marks the fire-and-forget intent; bootstrap() handles its own errors.
void bootstrap();

View File

@@ -0,0 +1,8 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*.ts"]
}

23
packages/db/package.json Normal file
View File

@@ -0,0 +1,23 @@
{
"name": "@shieldai/db",
"version": "0.1.0",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"scripts": {
"build": "prisma generate && tsc",
"db:migrate": "prisma migrate dev",
"db:seed": "tsx prisma/seed.ts",
"db:studio": "prisma studio",
"generate": "prisma generate"
},
"dependencies": {
"@prisma/client": "^6.2.0"
},
"devDependencies": {
"prisma": "^6.2.0",
"tsx": "^4.19.0"
},
"exports": {
".": "./src/index.ts"
}
}

View File

@@ -0,0 +1,152 @@
-- DarkWatch initial migration: enums, core tables (User, WatchListItem,
-- Exposure, Alert, ScanJob), indexes, and cascading foreign keys.
-- Generated by Prisma — keep in sync with packages/db/prisma/schema.prisma;
-- fix schema issues in the .prisma file and regenerate rather than editing here.
-- CreateEnum
CREATE TYPE "SubscriptionTier" AS ENUM ('BASIC', 'PLUS', 'PREMIUM');
-- CreateEnum
CREATE TYPE "IdentifierType" AS ENUM ('EMAIL', 'PHONE', 'SSN');
-- CreateEnum
CREATE TYPE "WatchListStatus" AS ENUM ('ACTIVE', 'PAUSED');
-- CreateEnum
CREATE TYPE "Severity" AS ENUM ('INFO', 'WARNING', 'CRITICAL');
-- CreateEnum
CREATE TYPE "AlertChannel" AS ENUM ('EMAIL', 'PUSH', 'SMS');
-- CreateEnum
CREATE TYPE "AlertStatus" AS ENUM ('PENDING', 'SENT', 'READ');
-- CreateEnum
CREATE TYPE "ScanJobStatus" AS ENUM ('PENDING', 'RUNNING', 'COMPLETED', 'FAILED');
-- CreateEnum
CREATE TYPE "DataSource" AS ENUM ('HIBP', 'SECURITY_TRAILS', 'CENSYS', 'SHODAN', 'HONEYPOT');
-- CreateTable
CREATE TABLE "User" (
"id" TEXT NOT NULL,
"email" TEXT NOT NULL,
"name" TEXT,
"subscriptionTier" "SubscriptionTier" NOT NULL DEFAULT 'BASIC',
"familyGroupId" TEXT,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "User_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- One watched identifier (email/phone/SSN) per row; raw value kept alongside
-- its hash (identifierHash is what scans/dedup match on).
CREATE TABLE "WatchListItem" (
"id" TEXT NOT NULL,
"userId" TEXT NOT NULL,
"identifierType" "IdentifierType" NOT NULL,
"identifierValue" TEXT NOT NULL,
"identifierHash" TEXT NOT NULL,
"status" "WatchListStatus" NOT NULL DEFAULT 'ACTIVE',
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "WatchListItem_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- A breach hit matched to a watch-list item; contentHash dedups repeat finds.
CREATE TABLE "Exposure" (
"id" TEXT NOT NULL,
"watchListItemId" TEXT NOT NULL,
"dataSource" "DataSource" NOT NULL,
"breachName" TEXT NOT NULL,
"exposedAt" TIMESTAMP(3) NOT NULL,
"dataType" TEXT[],
"severity" "Severity" NOT NULL,
"details" TEXT,
"contentHash" TEXT NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "Exposure_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "Alert" (
"id" TEXT NOT NULL,
"userId" TEXT NOT NULL,
"exposureId" TEXT NOT NULL,
"severity" "Severity" NOT NULL,
"channel" "AlertChannel" NOT NULL,
"status" "AlertStatus" NOT NULL DEFAULT 'PENDING',
"dedupKey" TEXT NOT NULL,
"sentAt" TIMESTAMP(3),
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "Alert_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "ScanJob" (
"id" TEXT NOT NULL,
"userId" TEXT NOT NULL,
"status" "ScanJobStatus" NOT NULL DEFAULT 'PENDING',
"source" "DataSource",
"resultCount" INTEGER NOT NULL DEFAULT 0,
"errorMessage" TEXT,
"completedAt" TIMESTAMP(3),
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "ScanJob_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
-- CreateIndex
-- NOTE(review): redundant — the unique index above already covers ("email").
CREATE INDEX "User_email_idx" ON "User"("email");
-- CreateIndex
-- NOTE(review): this unique index is GLOBAL, so two different users can never
-- watch the same identifier. Presumably the intent was
-- UNIQUE ("userId", "identifierHash") — confirm against WatchListService.
CREATE UNIQUE INDEX "WatchListItem_identifierHash_key" ON "WatchListItem"("identifierHash");
-- CreateIndex
CREATE INDEX "WatchListItem_userId_idx" ON "WatchListItem"("userId");
-- CreateIndex
-- NOTE(review): redundant — covered by the unique index on the same column.
CREATE INDEX "WatchListItem_identifierHash_idx" ON "WatchListItem"("identifierHash");
-- CreateIndex
-- NOTE(review): global uniqueness assumes contentHash incorporates the
-- watch-list item; otherwise the same breach row found for two users
-- collides — verify how MatchingEngine computes contentHash.
CREATE UNIQUE INDEX "Exposure_contentHash_key" ON "Exposure"("contentHash");
-- CreateIndex
CREATE INDEX "Exposure_watchListItemId_idx" ON "Exposure"("watchListItemId");
-- CreateIndex
-- NOTE(review): redundant — covered by the unique index on the same column.
CREATE INDEX "Exposure_contentHash_idx" ON "Exposure"("contentHash");
-- CreateIndex
CREATE INDEX "Exposure_dataSource_idx" ON "Exposure"("dataSource");
-- CreateIndex
CREATE UNIQUE INDEX "Alert_exposureId_key" ON "Alert"("exposureId");
-- CreateIndex
CREATE INDEX "Alert_userId_status_idx" ON "Alert"("userId", "status");
-- CreateIndex
CREATE INDEX "Alert_dedupKey_idx" ON "Alert"("dedupKey");
-- CreateIndex
CREATE INDEX "ScanJob_userId_status_idx" ON "ScanJob"("userId", "status");
-- CreateIndex
CREATE INDEX "ScanJob_createdAt_idx" ON "ScanJob"("createdAt");
-- AddForeignKey
-- Deleting a user cascades through watch-list items, alerts, and scan jobs;
-- deleting a watch-list item cascades through its exposures.
ALTER TABLE "WatchListItem" ADD CONSTRAINT "WatchListItem_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Exposure" ADD CONSTRAINT "Exposure_watchListItemId_fkey" FOREIGN KEY ("watchListItemId") REFERENCES "WatchListItem"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Alert" ADD CONSTRAINT "Alert_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Alert" ADD CONSTRAINT "Alert_exposureId_fkey" FOREIGN KEY ("exposureId") REFERENCES "Exposure"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "ScanJob" ADD CONSTRAINT "ScanJob_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (e.g., Git)
provider = "postgresql"

View File

@@ -0,0 +1,140 @@
// DarkWatch data model. Changing any attribute here requires a new migration
// (`prisma migrate dev`); do not hand-edit the generated SQL.
generator client {
provider = "prisma-client-js"
}
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
enum SubscriptionTier {
BASIC
PLUS
PREMIUM
}
// Kinds of identifiers a user can put on their watch list.
enum IdentifierType {
EMAIL
PHONE
SSN
}
enum WatchListStatus {
ACTIVE
PAUSED
}
enum Severity {
INFO
WARNING
CRITICAL
}
enum AlertChannel {
EMAIL
PUSH
SMS
}
enum AlertStatus {
PENDING
SENT
READ
}
enum ScanJobStatus {
PENDING
RUNNING
COMPLETED
FAILED
}
// External feeds a scan can pull from.
enum DataSource {
HIBP
SECURITY_TRAILS
CENSYS
SHODAN
HONEYPOT
}
model User {
id String @id @default(uuid())
email String @unique
name String?
subscriptionTier SubscriptionTier @default(BASIC)
familyGroupId String?
watchListItems WatchListItem[]
alerts Alert[]
scanJobs ScanJob[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
// NOTE(review): redundant — @unique on email already creates this index.
@@index([email])
}
// One watched identifier per row; identifierHash is the scan/match key.
model WatchListItem {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
identifierType IdentifierType
identifierValue String
// NOTE(review): @unique here is GLOBAL — two different users can never watch
// the same identifier. Presumably this should be @@unique([userId, identifierHash]);
// confirm against WatchListService before shipping (changing it also changes
// the generated client's findUnique selector).
identifierHash String @unique
status WatchListStatus @default(ACTIVE)
exposures Exposure[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([userId])
// NOTE(review): redundant — covered by the @unique index on the same column.
@@index([identifierHash])
}
// A breach hit matched to a watch-list item; contentHash dedups repeat finds.
model Exposure {
id String @id @default(uuid())
watchListItemId String
watchListItem WatchListItem @relation(fields: [watchListItemId], references: [id], onDelete: Cascade)
dataSource DataSource
breachName String
exposedAt DateTime
dataType String[]
severity Severity
details String?
// NOTE(review): global uniqueness assumes contentHash incorporates the
// watch-list item — otherwise the same breach row for two users collides.
// Verify how MatchingEngine computes contentHash.
contentHash String @unique
alert Alert?
createdAt DateTime @default(now())
@@index([watchListItemId])
// NOTE(review): redundant — covered by the @unique index on the same column.
@@index([contentHash])
@@index([dataSource])
}
// At most one alert per exposure (exposureId is unique); dedupKey feeds the
// AlertPipeline's dedup window.
model Alert {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
exposureId String @unique
exposure Exposure @relation(fields: [exposureId], references: [id], onDelete: Cascade)
severity Severity
channel AlertChannel
status AlertStatus @default(PENDING)
dedupKey String
sentAt DateTime?
createdAt DateTime @default(now())
@@index([userId, status])
@@index([dedupKey])
}
// Audit record for a scan run; source is null when all sources were scanned.
model ScanJob {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
status ScanJobStatus @default(PENDING)
source DataSource?
resultCount Int @default(0)
errorMessage String?
completedAt DateTime?
createdAt DateTime @default(now())
@@index([userId, status])
@@index([createdAt])
}

View File

@@ -0,0 +1,24 @@
import prisma from "../src";

/** Seeds a deterministic development user so local API runs have data. */
async function main() {
  // upsert keeps the script idempotent — rerunning it never duplicates the row.
  const { email } = await prisma.user.upsert({
    where: { email: "dev@shieldai.local" },
    update: {},
    create: {
      email: "dev@shieldai.local",
      name: "Dev User",
      subscriptionTier: "PREMIUM",
    },
  });
  console.log("Seeded user:", email);
}

main()
  .catch((err) => {
    console.error(err);
    process.exit(1);
  })
  .finally(() => prisma.$disconnect());

4
packages/db/src/index.ts Normal file
View File

@@ -0,0 +1,4 @@
import { PrismaClient } from "@prisma/client";

// Cache the client on globalThis so module reloads (tsx watch, test runners)
// reuse one PrismaClient instead of exhausting the database connection pool.
const globalForPrisma = globalThis as typeof globalThis & { prisma?: PrismaClient };

/** Shared PrismaClient for the whole process. */
export const prisma = globalForPrisma.prisma ?? new PrismaClient();

// In production each process is started fresh, so no caching is needed there.
if (process.env.NODE_ENV !== "production") {
  globalForPrisma.prisma = prisma;
}

export default prisma;

View File

@@ -0,0 +1,8 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*.ts"]
}

View File

@@ -0,0 +1,18 @@
{
"name": "@shieldai/jobs",
"version": "0.1.0",
"scripts": {
"dev": "tsx watch src/index.ts",
"build": "tsc",
"start": "node dist/index.js",
"test": "vitest run",
"lint": "eslint src/"
},
"dependencies": {
"@shieldai/darkwatch": "0.1.0",
"@shieldai/db": "0.1.0",
"@shieldai/types": "0.1.0",
"bullmq": "^5.24.0",
"ioredis": "^5.4.0"
}
}

View File

@@ -0,0 +1,69 @@
// QueueScheduler was removed from BullMQ (v2+); in v5 delayed/repeatable jobs
// are handled by the Workers themselves, so importing it fails at compile time.
import { Queue, Worker } from "bullmq";
import { Redis } from "ioredis";
import { AlertPipeline, ScanService } from "@shieldai/darkwatch";

const redisUrl = process.env.REDIS_URL || "redis://localhost:6379";
// BullMQ v5 requires maxRetriesPerRequest: null on connections used by
// Workers (blocking commands); ioredis' default makes BullMQ throw at startup.
const connection = new Redis(redisUrl, { maxRetriesPerRequest: null });

const scanQueue = new Queue("darkwatch-scans", { connection });
const alertQueue = new Queue("darkwatch-alerts", { connection });

// Runs a full scan for one user; up to 3 scans in flight at once.
const scanWorker = new Worker(
  "darkwatch-scans",
  async (job) => {
    const { userId, source } = job.data;
    const scanService = new ScanService();
    const resultCount = await scanService.runScan(userId, source);
    return { resultCount, completedAt: new Date().toISOString() };
  },
  { connection, concurrency: 3 }
);

// Drains pending alerts; concurrency 1 so runs never overlap.
const alertWorker = new Worker(
  "darkwatch-alerts",
  async () => {
    const pipeline = new AlertPipeline();
    const sent = await pipeline.sendPendingAlerts();
    return { sent, processedAt: new Date().toISOString() };
  },
  { connection, concurrency: 1 }
);

scanWorker.on("completed", (job) => {
  console.log(`[Scan] Job ${job.id} completed: ${JSON.stringify(job.returnvalue)}`);
});
scanWorker.on("failed", (job, err) => {
  // `job` is undefined when the failure is not tied to a specific job.
  console.error(`[Scan] Job ${job?.id} failed: ${err.message}`);
});
alertWorker.on("completed", (job) => {
  console.log(`[Alert] Job ${job.id} completed: ${JSON.stringify(job.returnvalue)}`);
});
alertWorker.on("failed", (job, err) => {
  console.error(`[Alert] Job ${job?.id} failed: ${err.message}`);
});

/** Enqueue a scan for one user; retried up to 3 times with exponential backoff. */
export async function addScanJob(userId: string, source?: string) {
  return scanQueue.add("scan", { userId, source }, {
    attempts: 3,
    backoff: { type: "exponential", delay: 5000 },
    jobId: `scan-${userId}-${Date.now()}`,
  });
}

/**
 * Registers the recurring alert-processing job (every 5 minutes).
 * Note: BullMQ rejects custom jobIds on repeatable jobs — the repeat options
 * themselves dedupe the schedule, so no jobId is passed here.
 */
export async function scheduleAlertProcessing() {
  return alertQueue.add("process-alerts", {}, {
    repeat: { pattern: "*/5 * * * *" },
  });
}

// "ready" fires once the worker's Redis connection is established;
// "waiting" is a Queue event, not a Worker event, and never fired here.
scanWorker.on("ready", () => console.log("[Worker] Scan worker ready"));
alertWorker.on("ready", () => console.log("[Worker] Alert worker ready"));
console.log("Job workers started");

View File

@@ -0,0 +1,8 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*.ts"]
}

View File

@@ -0,0 +1,13 @@
{
"name": "@shieldai/types",
"version": "0.1.0",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"scripts": {
"build": "tsc",
"lint": "eslint src/"
},
"exports": {
".": "./src/index.ts"
}
}

View File

@@ -0,0 +1,84 @@
/**
 * Shared DarkWatch vocabulary: each enum is a frozen const object plus a
 * literal union type of the same name, derived from the object so the two
 * can never drift apart. Values mirror the Prisma schema enums.
 */

/** Kind of identifier a user can watch. */
export const IdentifierType = { EMAIL: "EMAIL", PHONE: "PHONE", SSN: "SSN" } as const;
export type IdentifierType = (typeof IdentifierType)[keyof typeof IdentifierType];

/** Product tier a user is subscribed to. */
export const SubscriptionTier = { BASIC: "BASIC", PLUS: "PLUS", PREMIUM: "PREMIUM" } as const;
export type SubscriptionTier = (typeof SubscriptionTier)[keyof typeof SubscriptionTier];

/** How serious an exposure or alert is. */
export const Severity = { INFO: "INFO", WARNING: "WARNING", CRITICAL: "CRITICAL" } as const;
export type Severity = (typeof Severity)[keyof typeof Severity];

/** Delivery channel for an alert. */
export const AlertChannel = { EMAIL: "EMAIL", PUSH: "PUSH", SMS: "SMS" } as const;
export type AlertChannel = (typeof AlertChannel)[keyof typeof AlertChannel];

/** Lifecycle state of an alert. */
export const AlertStatus = { PENDING: "PENDING", SENT: "SENT", READ: "READ" } as const;
export type AlertStatus = (typeof AlertStatus)[keyof typeof AlertStatus];

/** Lifecycle state of a scan job. */
export const ScanJobStatus = {
  PENDING: "PENDING",
  RUNNING: "RUNNING",
  COMPLETED: "COMPLETED",
  FAILED: "FAILED",
} as const;
export type ScanJobStatus = (typeof ScanJobStatus)[keyof typeof ScanJobStatus];

/** Whether a watch-list item is actively scanned. */
export const WatchListStatus = { ACTIVE: "ACTIVE", PAUSED: "PAUSED" } as const;
export type WatchListStatus = (typeof WatchListStatus)[keyof typeof WatchListStatus];

/** External feed a scan can pull from. */
export const DataSource = {
  HIBP: "HIBP",
  SECURITY_TRAILS: "SECURITY_TRAILS",
  CENSYS: "CENSYS",
  SHODAN: "SHODAN",
  HONEYPOT: "HONEYPOT",
} as const;
export type DataSource = (typeof DataSource)[keyof typeof DataSource];

/** Payload to add one identifier to a user's watch list. */
export interface WatchListItemInput {
  identifierType: IdentifierType;
  identifierValue: string;
}

/** Request to start a scan, optionally limited to one source. */
export interface ScanTriggerInput {
  userId: string;
  source?: DataSource;
}

/** One breach hit as reported by a data-source integration. */
export interface ExposureResult {
  dataSource: DataSource;
  breachName: string;
  exposedAt: Date;
  dataType: string[];
  severity: Severity;
  details: string;
}

/** Everything needed to create one alert for one exposure. */
export interface AlertInput {
  userId: string;
  exposureId: string;
  severity: Severity;
  channel: AlertChannel;
  dedupKey: string;
}

View File

@@ -0,0 +1,8 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*.ts"]
}