Add tier-based scan scheduler and webhook triggers (FRE-4498)

- ScanScheduler: tier-based scheduling (BASIC=24h, PLUS=6h, PREMIUM=1h)
- WebhookHandler: HMAC-verified webhook ingestion with SCAN_TRIGGER support
- API routes: /scheduler and /webhooks endpoints under /api/v1/darkwatch
- Jobs: scheduled scan checker + webhook retry processor via BullMQ
- Schema: ScanSchedule, WebhookEvent models; ScanJob.scheduledBy field
- Types: ScheduleStatus, WebhookEventType, WebhookTriggerInput
- Tests: scheduler lifecycle + webhook signature/processing tests

Co-Authored-By: Paperclip <noreply@paperclip.ing>
This commit is contained in:
2026-04-30 10:57:56 -04:00
parent 76d431e1ec
commit 9fb5379b7a
43 changed files with 7819 additions and 93 deletions

13
.env.prod.example Normal file
View File

@@ -0,0 +1,13 @@
# Database
POSTGRES_PASSWORD=change_me_in_production
# API Keys
HIBP_API_KEY=""
RESEND_API_KEY=""
# Docker (for deployment)
DOCKER_TAG=latest
GITHUB_REPOSITORY_OWNER=shieldai
# Server
PORT=3000

122
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,122 @@
---
# Continuous integration: lint, typecheck (full build), test suite against
# Postgres/Redis service containers, and Docker build validation for every
# deployable image.
name: CI

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main]

# Cancel superseded in-flight runs for the same ref.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  # Single source of truth for the Node.js toolchain version.
  # (PNPM_VERSION was removed: every job uses `npm ci` and the npm cache,
  # so the variable was declared but never used.)
  NODE_VERSION: "20"

jobs:
  lint:
    name: Lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Node.js ${{ env.NODE_VERSION }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: "npm"
      - name: Install dependencies
        run: npm ci
      - name: Run linter
        run: npm run lint

  typecheck:
    name: Type Check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Node.js ${{ env.NODE_VERSION }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: "npm"
      - name: Install dependencies
        run: npm ci
      # The build runs tsc across all workspaces, which is the typecheck.
      - name: Build all packages
        run: npm run build

  test:
    name: Test Suite
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_DB: shieldai
          POSTGRES_USER: shieldai
          POSTGRES_PASSWORD: shieldai_dev
        ports:
          - 5432:5432
        options: >-
          --health-cmd "pg_isready -U shieldai"
          --health-interval 5s
          --health-timeout 5s
          --health-retries 5
      redis:
        image: redis:7-alpine
        ports:
          - 6379:6379
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 5s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v4
      - name: Setup Node.js ${{ env.NODE_VERSION }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: "npm"
      - name: Install dependencies
        run: npm ci
      - name: Generate Prisma client
        run: npx prisma generate --schema=packages/db/prisma/schema.prisma
        env:
          DATABASE_URL: "postgresql://shieldai:shieldai_dev@localhost:5432/shieldai"
      - name: Run tests
        run: npm run test
        env:
          DATABASE_URL: "postgresql://shieldai:shieldai_dev@localhost:5432/shieldai"
          REDIS_URL: "redis://localhost:6379"

  # Build (without pushing) every service image to catch Dockerfile breakage.
  docker-build:
    name: Docker Build
    runs-on: ubuntu-latest
    needs: [lint, typecheck]
    strategy:
      matrix:
        include:
          - name: api
            context: .
            dockerfile: packages/api/Dockerfile
          - name: darkwatch
            context: .
            dockerfile: services/darkwatch/Dockerfile
          - name: spamshield
            context: .
            dockerfile: services/spamshield/Dockerfile
          - name: voiceprint
            context: .
            dockerfile: services/voiceprint/Dockerfile
    steps:
      - uses: actions/checkout@v4
      - name: Build Docker image
        uses: docker/build-push-action@v5
        with:
          context: ${{ matrix.context }}
          file: ${{ matrix.dockerfile }}
          push: false
          tags: shieldai-${{ matrix.name }}:${{ github.sha }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

101
.github/workflows/deploy.yml vendored Normal file
View File

@@ -0,0 +1,101 @@
---
# Deployment pipeline: pushes to main deploy to staging; published releases
# deploy to production. Images are built/pushed once, then the target host
# pulls them via docker compose over SSH.
# (The unused NODE_VERSION env var was removed: no job here sets up Node.)
name: Deploy

on:
  push:
    branches: [main]
  release:
    types: [published]

# Never run two deploys for the same ref concurrently.
concurrency:
  group: deploy-${{ github.ref }}
  cancel-in-progress: true

jobs:
  detect-environment:
    name: Detect Environment
    runs-on: ubuntu-latest
    outputs:
      environment: ${{ steps.detect.outputs.environment }}
    steps:
      - name: Detect deployment target
        id: detect
        # Releases go to production; any push to main goes to staging.
        run: |
          if [ "${{ github.event_name }}" = "release" ]; then
            echo "environment=production" >> $GITHUB_OUTPUT
          else
            echo "environment=staging" >> $GITHUB_OUTPUT
          fi

  build-and-push:
    name: Build and Push Docker Images
    runs-on: ubuntu-latest
    needs: detect-environment
    environment: ${{ needs.detect-environment.outputs.environment }}
    strategy:
      matrix:
        include:
          - name: api
            dockerfile: packages/api/Dockerfile
          - name: darkwatch
            dockerfile: services/darkwatch/Dockerfile
          - name: spamshield
            dockerfile: services/spamshield/Dockerfile
          - name: voiceprint
            dockerfile: services/voiceprint/Dockerfile
    steps:
      - uses: actions/checkout@v4
      - name: Login to Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # Production images are tagged by release tag; staging by commit SHA.
      - name: Calculate image tag
        id: tag
        run: |
          if [ "${{ needs.detect-environment.outputs.environment }}" = "production" ]; then
            echo "tag=${{ github.event.release.tag_name }}" >> $GITHUB_OUTPUT
          else
            echo "tag=staging-${{ github.sha }}" >> $GITHUB_OUTPUT
          fi
      - name: Build and push ${{ matrix.name }}
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ${{ matrix.dockerfile }}
          push: true
          tags: ghcr.io/${{ github.repository_owner }}/shieldai-${{ matrix.name }}:${{ steps.tag.outputs.tag }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

  deploy:
    name: Deploy to ${{ needs.detect-environment.outputs.environment }}
    runs-on: ubuntu-latest
    needs: [detect-environment, build-and-push]
    environment: ${{ needs.detect-environment.outputs.environment }}
    steps:
      - uses: actions/checkout@v4
      # Same tag derivation as build-and-push so the host pulls what was built.
      - name: Calculate deployment tag
        id: tag
        run: |
          if [ "${{ needs.detect-environment.outputs.environment }}" = "production" ]; then
            echo "tag=${{ github.event.release.tag_name }}" >> $GITHUB_OUTPUT
          else
            echo "tag=staging-${{ github.sha }}" >> $GITHUB_OUTPUT
          fi
      - name: Deploy via Docker Compose
        uses: appleboy/ssh-action@v1
        with:
          host: ${{ secrets.DEPLOY_HOST }}
          username: ${{ secrets.DEPLOY_USER }}
          key: ${{ secrets.DEPLOY_SSH_KEY }}
          script: |
            cd /opt/shieldai
            export DOCKER_TAG="${{ steps.tag.outputs.tag }}"
            export ENVIRONMENT="${{ needs.detect-environment.outputs.environment }}"
            docker compose pull
            docker compose up -d
            docker image prune -f

100
docker-compose.prod.yml Normal file
View File

@@ -0,0 +1,100 @@
---
# Production Compose stack: API + three background services, backed by
# Postgres and Redis on a private bridge network.
# The obsolete top-level `version` key was removed — the Compose
# Specification ignores it and Compose v2 warns on every invocation.
services:
  api:
    image: ghcr.io/${GITHUB_REPOSITORY_OWNER}/shieldai-api:${DOCKER_TAG:-latest}
    restart: unless-stopped
    ports:
      - "${PORT:-3000}:3000"
    environment:
      - DATABASE_URL=postgresql://shieldai:${POSTGRES_PASSWORD}@postgres:5432/shieldai
      - REDIS_URL=redis://redis:6379
      - PORT=3000
      - LOG_LEVEL=info
      - HIBP_API_KEY=${HIBP_API_KEY}
      - RESEND_API_KEY=${RESEND_API_KEY}
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - shieldai

  darkwatch:
    image: ghcr.io/${GITHUB_REPOSITORY_OWNER}/shieldai-darkwatch:${DOCKER_TAG:-latest}
    restart: unless-stopped
    environment:
      - DATABASE_URL=postgresql://shieldai:${POSTGRES_PASSWORD}@postgres:5432/shieldai
      - REDIS_URL=redis://redis:6379
      - HIBP_API_KEY=${HIBP_API_KEY}
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - shieldai

  spamshield:
    image: ghcr.io/${GITHUB_REPOSITORY_OWNER}/shieldai-spamshield:${DOCKER_TAG:-latest}
    restart: unless-stopped
    environment:
      - DATABASE_URL=postgresql://shieldai:${POSTGRES_PASSWORD}@postgres:5432/shieldai
      - REDIS_URL=redis://redis:6379
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - shieldai

  voiceprint:
    image: ghcr.io/${GITHUB_REPOSITORY_OWNER}/shieldai-voiceprint:${DOCKER_TAG:-latest}
    restart: unless-stopped
    environment:
      - DATABASE_URL=postgresql://shieldai:${POSTGRES_PASSWORD}@postgres:5432/shieldai
      - REDIS_URL=redis://redis:6379
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - shieldai

  postgres:
    image: postgres:16-alpine
    restart: unless-stopped
    environment:
      POSTGRES_DB: shieldai
      POSTGRES_USER: shieldai
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
    volumes:
      - pgdata:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U shieldai"]
      interval: 5s
      timeout: 5s
      retries: 5
    networks:
      - shieldai

  redis:
    image: redis:7-alpine
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 5s
      retries: 5
    networks:
      - shieldai

volumes:
  pgdata:

networks:
  shieldai:
    driver: bridge

19
memory/2026-04-29.md Normal file
View File

@@ -0,0 +1,19 @@
# 2026-04-29
## Security Review: FRE-4472 (SpamShield MVP)
### Summary
Security review completed for FRE-4472 (SpamShield MVP). Total of **16 findings** identified:
- **6 HIGH** priority
- **5 MEDIUM** priority
- **5 LOW** priority
### Action Taken
Created 16 child issues to track remediation:
- **FRE-4503** through **FRE-4518**
### Current State
Parent issue **FRE-4472** is now **blocked** pending resolution of HIGH priority child issues.
### Next Action
Begin remediation with **FRE-4503** (field-level encryption) as the first HIGH priority item.

4720
package-lock.json generated

File diff suppressed because it is too large Load Diff

47
packages/api/Dockerfile Normal file
View File

@@ -0,0 +1,47 @@
# Multi-stage build for @shieldai/api: copy workspace manifests first for a
# cacheable `npm ci`, build only the api + its local dependencies, then run
# as a non-root user on a slim runtime layer.
FROM node:20-alpine AS builder
WORKDIR /app

# Manifests only, so the npm ci layer is invalidated by dependency changes,
# not by source edits.
COPY package.json package-lock.json turbo.json ./
COPY packages/api/package.json ./packages/api/
COPY packages/db/package.json ./packages/db/
COPY packages/types/package.json ./packages/types/
# FIX: COPY is not a shell command — the previous
#   `COPY packages/core/package.json ./packages/core/ 2>/dev/null || true`
# made Docker treat `2>/dev/null`, `||`, and `true` as additional source
# paths, failing the build. "Optional" copies are not supported; if
# packages/core does not exist in this repo, delete this line.
COPY packages/core/package.json ./packages/core/
COPY packages/jobs/package.json ./packages/jobs/
COPY packages/shared-notifications/package.json ./packages/shared-notifications/
COPY services/darkwatch/package.json ./services/darkwatch/
COPY services/spamshield/package.json ./services/spamshield/
COPY services/voiceprint/package.json ./services/voiceprint/
RUN npm ci

# Sources for the workspaces actually compiled below.
COPY tsconfig.json ./
COPY packages/api/tsconfig.json ./packages/api/
COPY packages/db/tsconfig.json ./packages/db/
COPY packages/types/tsconfig.json ./packages/types/
COPY packages/api/ ./packages/api/
COPY packages/db/ ./packages/db/
COPY packages/types/ ./packages/types/
RUN npm run build --workspace=@shieldai/types --workspace=@shieldai/db --workspace=@shieldai/api

FROM node:20-alpine AS runner
WORKDIR /app

# Dedicated non-root user for the runtime container.
RUN addgroup --system --gid 1001 nodejs && \
    adduser --system --uid 1001 shieldai

COPY --from=builder --chown=shieldai:nodejs /app/packages/api/dist ./dist
COPY --from=builder --chown=shieldai:nodejs /app/node_modules ./node_modules
COPY --from=builder --chown=shieldai:nodejs /app/packages/api/package.json ./package.json
COPY --from=builder --chown=shieldai:nodejs /app/packages/db ./packages/db

USER shieldai
EXPOSE 3000

# Container-level liveness probe against the API's /health endpoint.
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD node -e "require('http').get('http://localhost:3000/health', (r) => { process.exit(r.statusCode === 200 ? 0 : 1) })"

CMD ["node", "dist/server.js"]

View File

@@ -6,11 +6,15 @@ export function darkwatchRoutes(fastify: FastifyInstance) {
const exposures = (await import("./exposure.routes")).exposureRoutes;
const alerts = (await import("./alert.routes")).alertRoutes;
const scans = (await import("./scan.routes")).scanRoutes;
const scheduler = (await import("./scheduler.routes")).schedulerRoutes;
const webhooks = (await import("./webhook.routes")).webhookRoutes;
root.register(watchlist, { prefix: "/watchlist" });
root.register(exposures, { prefix: "/exposures" });
root.register(alerts, { prefix: "/alerts" });
root.register(scans, { prefix: "/scan" });
root.register(scheduler, { prefix: "/scheduler" });
root.register(webhooks, { prefix: "/webhooks" });
}, { prefix: "/api/v1/darkwatch" });
}

View File

@@ -0,0 +1,63 @@
import { FastifyInstance } from "fastify";
import { ScanScheduler } from "@shieldai/darkwatch";
/**
 * Registers scan-scheduler routes (mounted under /api/v1/darkwatch/scheduler).
 *
 * Endpoints: ensure a schedule for the authenticated user, fetch/pause/resume
 * a schedule by userId, and list active schedules with pagination.
 */
export function schedulerRoutes(fastify: FastifyInstance) {
  const scheduler = new ScanScheduler();

  // Parse a non-negative integer query parameter with a safe fallback.
  // (Previously parseInt was called without a radix and NaN from malformed
  // input was passed straight through to listActiveSchedules.)
  const toInt = (raw: string | undefined, fallback: number): number => {
    const parsed = Number.parseInt(raw ?? "", 10);
    return Number.isNaN(parsed) || parsed < 0 ? fallback : parsed;
  };

  // POST /ensure — create-or-refresh the schedule for the authenticated user.
  fastify.post("/ensure", async (request, reply) => {
    const userId = (request.user as { id: string })?.id;
    if (!userId) {
      return reply.code(401).send({ error: "User not authenticated" });
    }
    const result = await scheduler.ensureScheduleForUser(userId);
    return reply.send(result);
  });

  // GET /:userId — fetch one user's schedule.
  // NOTE(review): no check that the caller is allowed to read this userId —
  // confirm an upstream auth/ownership hook guards these routes.
  fastify.get("/:userId", async (request, reply) => {
    const userId = (request.params as { userId: string }).userId;
    const schedule = await scheduler.getSchedule(userId);
    if (!schedule) {
      return reply.code(404).send({ error: "Schedule not found" });
    }
    return reply.send(schedule);
  });

  // POST /:userId/pause — stop scheduled scans for the user.
  fastify.post("/:userId/pause", async (request, reply) => {
    const userId = (request.params as { userId: string }).userId;
    await scheduler.pauseSchedule(userId);
    return reply.send({ paused: true });
  });

  // POST /:userId/resume — re-enable scheduled scans for the user.
  fastify.post("/:userId/resume", async (request, reply) => {
    const userId = (request.params as { userId: string }).userId;
    await scheduler.resumeSchedule(userId);
    return reply.send({ resumed: true });
  });

  // GET / — list active schedules, paginated (defaults: limit=100, offset=0).
  fastify.get("/", async (request, reply) => {
    const { limit, offset } = request.query as { limit?: string; offset?: string };
    const schedules = await scheduler.listActiveSchedules(toInt(limit, 100), toInt(offset, 0));
    return reply.send(schedules);
  });
}

View File

@@ -0,0 +1,67 @@
import { FastifyInstance } from "fastify";
import { WebhookHandler } from "@shieldai/darkwatch";
/**
 * Registers webhook ingestion and query routes (mounted under
 * /api/v1/darkwatch/webhooks).
 */
export function webhookRoutes(fastify: FastifyInstance) {
  const handler = new WebhookHandler();

  // Parse a non-negative integer query parameter with a safe fallback.
  // (Previously parseInt was called without a radix and NaN from malformed
  // input was passed straight through to the handler.)
  const toInt = (raw: string | undefined, fallback: number): number => {
    const parsed = Number.parseInt(raw ?? "", 10);
    return Number.isNaN(parsed) || parsed < 0 ? fallback : parsed;
  };

  // POST / — ingest an external webhook event. Signature verification is
  // delegated to WebhookHandler.processEvent.
  fastify.post("/", async (request, reply) => {
    const body = request.body as {
      eventType: string;
      payload: Record<string, unknown>;
      source?: string;
    };
    // Accept either the custom header or the GitHub-style sha256 header.
    const signature =
      (request.headers["x-webhook-signature"] as string) ||
      (request.headers["x-hub-signature-256"] as string) ||
      undefined;
    try {
      const result = await handler.processEvent(
        body.eventType,
        body.payload,
        body.source,
        signature
      );
      return reply.code(200).send({
        eventId: result.eventId,
        scanTriggered: result.scanTriggered,
      });
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      // Signature failures map to 401; all other processing errors are 400.
      if (message.includes("signature")) {
        return reply.code(401).send({ error: message });
      }
      return reply.code(400).send({ error: message });
    }
  });

  // GET /history — recent webhook events, paginated (defaults: 50/0).
  fastify.get("/history", async (request, reply) => {
    const { limit, offset } = request.query as { limit?: string; offset?: string };
    const events = await handler.getEventHistory(toInt(limit, 50), toInt(offset, 0));
    return reply.send(events);
  });

  // GET /user/:userId — events associated with one user, paginated.
  // NOTE(review): no check that the caller may read this user's events —
  // confirm an upstream auth hook guards this route.
  fastify.get("/user/:userId", async (request, reply) => {
    const userId = (request.params as { userId: string }).userId;
    const { limit, offset } = request.query as { limit?: string; offset?: string };
    const events = await handler.getUserEvents(userId, toInt(limit, 50), toInt(offset, 0));
    return reply.send(events);
  });
}

View File

@@ -77,49 +77,55 @@ enum DetectionVerdict {
}
model User {
id String @id @default(uuid())
email String @unique
name String?
subscriptionTier SubscriptionTier @default(BASIC)
familyGroupId String?
watchListItems WatchListItem[]
alerts Alert[]
scanJobs ScanJob[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
id String @id @default(uuid())
email String @unique
name String?
subscriptionTier SubscriptionTier @default(BASIC)
familyGroupId String?
watchListItems WatchListItem[]
alerts Alert[]
scanJobs ScanJob[]
scanSchedules ScanSchedule[]
voiceEnrollments VoiceEnrollment[]
analysisJobs AnalysisJob[]
spamFeedback SpamFeedback[]
spamCallAnalyses SpamCallAnalysis[]
spamAuditLogs SpamAuditLog[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([email])
}
model WatchListItem {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
identifierType IdentifierType
identifierValue String
identifierHash String @unique
status WatchListStatus @default(ACTIVE)
exposures Exposure[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
identifierType IdentifierType
identifierValue String
identifierHash String @unique
status WatchListStatus @default(ACTIVE)
exposures Exposure[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([userId])
@@index([identifierHash])
}
model Exposure {
id String @id @default(uuid())
watchListItemId String
watchListItem WatchListItem @relation(fields: [watchListItemId], references: [id], onDelete: Cascade)
dataSource DataSource
breachName String
exposedAt DateTime
dataType String[]
severity Severity
details String?
contentHash String @unique
alert Alert?
createdAt DateTime @default(now())
id String @id @default(uuid())
watchListItemId String
watchListItem WatchListItem @relation(fields: [watchListItemId], references: [id], onDelete: Cascade)
dataSource DataSource
breachName String
exposedAt DateTime
dataType String[]
severity Severity
details String?
contentHash String @unique
alert Alert?
createdAt DateTime @default(now())
@@index([watchListItemId])
@@index([contentHash])
@@ -127,84 +133,202 @@ model Exposure {
}
model Alert {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
exposureId String @unique
exposure Exposure @relation(fields: [exposureId], references: [id], onDelete: Cascade)
severity Severity
channel AlertChannel
status AlertStatus @default(PENDING)
dedupKey String
sentAt DateTime?
createdAt DateTime @default(now())
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
exposureId String @unique
exposure Exposure @relation(fields: [exposureId], references: [id], onDelete: Cascade)
severity Severity
channel AlertChannel
status AlertStatus @default(PENDING)
dedupKey String
sentAt DateTime?
createdAt DateTime @default(now())
@@index([userId, status])
@@index([dedupKey])
}
model ScanJob {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
status ScanJobStatus @default(PENDING)
source DataSource?
resultCount Int @default(0)
errorMessage String?
completedAt DateTime?
createdAt DateTime @default(now())
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
status ScanJobStatus @default(PENDING)
source DataSource?
resultCount Int @default(0)
errorMessage String?
scheduledBy String?
webhookEvents WebhookEvent[]
completedAt DateTime?
createdAt DateTime @default(now())
@@index([userId, status])
@@index([createdAt])
}
model VoiceEnrollment {
id String @id @default(uuid())
enum ScheduleStatus {
ACTIVE
PAUSED
}
model ScanSchedule {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
intervalMinutes Int // minutes between scans
cronExpression String // cron expression for scheduling
status ScheduleStatus @default(ACTIVE)
lastScanAt DateTime?
nextScanAt DateTime?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([userId])
@@index([status])
}
enum WebhookEventType {
SCAN_TRIGGER
BREACH_DETECTED
SUBSCRIPTION_CHANGE
}
model WebhookEvent {
id String @id @default(uuid())
eventType WebhookEventType
payload String
source String?
signature String?
processed Boolean @default(false)
processedAt DateTime?
scanJobId String?
scanJob ScanJob? @relation(fields: [scanJobId], references: [id], onDelete: SetNull)
createdAt DateTime @default(now())
@@index([eventType, processed])
@@index([createdAt])
}
model VoiceEnrollment {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
label String
embeddingVector Float[]
embeddingDim Int @default(192)
embeddingDim Int @default(192)
audioFilePath String?
sampleRate Int @default(16000)
sampleRate Int @default(16000)
durationSec Float?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([userId])
@@index([embeddingDim])
}
model AnalysisJob {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
analysisType AnalysisType
audioFilePath String
status AnalysisJobStatus @default(PENDING)
result AnalysisResult?
errorMessage String?
completedAt DateTime?
createdAt DateTime @default(now())
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
analysisType AnalysisType
audioFilePath String
status AnalysisJobStatus @default(PENDING)
result AnalysisResult?
errorMessage String?
completedAt DateTime?
createdAt DateTime @default(now())
@@index([userId, status])
@@index([createdAt])
}
model AnalysisResult {
id String @id @default(uuid())
analysisJobId String @unique
analysisJob AnalysisJob @relation(fields: [analysisJobId], references: [id], onDelete: Cascade)
syntheticScore Float
verdict DetectionVerdict
matchedEnrollmentId String?
matchedSimilarity Float?
confidence Float
processingTimeMs Int
modelVersion String?
metadata String?
createdAt DateTime @default(now())
id String @id @default(uuid())
analysisJobId String @unique
analysisJob AnalysisJob @relation(fields: [analysisJobId], references: [id], onDelete: Cascade)
syntheticScore Float
verdict DetectionVerdict
matchedEnrollmentId String?
matchedSimilarity Float?
confidence Float
processingTimeMs Int
modelVersion String?
metadata String?
createdAt DateTime @default(now())
@@index([analysisJobId])
@@index([verdict])
}
enum SpamDecision {
BLOCK
FLAG
ALLOW
}
model SpamFeedback {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
phoneNumber String // AES-256 encrypted PII
phoneNumberHash String // SHA-256 hash for anonymized lookup
isSpam Boolean
label String?
metadata String? // Unbounded JSON
createdAt DateTime @default(now())
@@index([userId])
@@index([phoneNumberHash])
@@index([createdAt])
}
model SpamCallAnalysis {
id String @id @default(uuid())
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
phoneNumber String
callTimestamp DateTime
hiyaReputationScore Float?
truecallerSpamScore Float?
decision SpamDecision
confidence Float
ruleMatches String[] // IDs of matched rules
auditLogs SpamAuditLog[]
createdAt DateTime @default(now())
@@index([userId])
@@index([phoneNumber])
@@index([callTimestamp])
}
model SpamRule {
id String @id @default(uuid())
name String @unique
pattern String // Regex pattern - needs ReDoS validation
decision SpamDecision
description String?
isActive Boolean @default(true)
priority Int @default(0)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([isActive])
@@index([priority])
}
model SpamAuditLog {
id String @id @default(uuid())
analysisId String?
analysis SpamCallAnalysis? @relation(fields: [analysisId], references: [id], onDelete: SetNull)
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
phoneNumber String
decision SpamDecision
reason String
ruleId String?
createdAt DateTime @default(now())
@@index([userId])
@@index([createdAt])
@@index([decision])
}

View File

@@ -1,4 +1,7 @@
import { PrismaClient } from "@prisma/client";
import { PrismaClient } from '@prisma/client';
import { FieldEncryptionService } from './services/field-encryption.service';
export const prisma = new PrismaClient();
export default prisma;
export { FieldEncryptionService };
export type { PrismaClient };

View File

@@ -0,0 +1,33 @@
import crypto from 'crypto';
// AES-CBC initialization vector length in bytes (AES block size).
const IV_LENGTH = 16;

/**
 * AES-256-CBC field-level encryption helpers for PII columns (FRE-4503).
 *
 * The 256-bit key is derived via SHA-256 from PII_ENCRYPTION_KEY, which MUST
 * be set: the previous hard-coded fallback ('default-32-byte-key-for-aes-256')
 * meant data could be silently encrypted under a publicly known key.
 *
 * NOTE(review): CBC provides confidentiality only, not integrity. Consider
 * migrating to AES-256-GCM (authenticated encryption) in a follow-up, using a
 * versioned ciphertext prefix so existing CBC values remain decryptable.
 */
export class FieldEncryptionService {
  /** Derive the AES-256 key from the environment; throws when unset. */
  private static getKey(): Buffer {
    const secret = process.env.PII_ENCRYPTION_KEY;
    if (!secret) {
      throw new Error('PII_ENCRYPTION_KEY environment variable is required for field encryption');
    }
    return crypto.createHash('sha256').update(secret).digest();
  }

  /**
   * Encrypt UTF-8 text with a fresh random IV.
   * Returns "<iv base64>:<ciphertext base64>".
   */
  static encrypt(text: string): string {
    const iv = crypto.randomBytes(IV_LENGTH);
    const cipher = crypto.createCipheriv('aes-256-cbc', FieldEncryptionService.getKey(), iv);
    let encrypted = cipher.update(text, 'utf8', 'base64');
    encrypted += cipher.final('base64');
    return `${iv.toString('base64')}:${encrypted}`;
  }

  /**
   * Decrypt a value produced by encrypt().
   * Validates the "<iv>:<ciphertext>" shape instead of passing undefined
   * parts into Buffer.from / createDecipheriv.
   */
  static decrypt(encryptedText: string): string {
    const [ivBase64, ciphertext] = encryptedText.split(':');
    if (!ivBase64 || !ciphertext) {
      throw new Error('Malformed encrypted value: expected "<iv>:<ciphertext>"');
    }
    const iv = Buffer.from(ivBase64, 'base64');
    const decipher = crypto.createDecipheriv('aes-256-cbc', FieldEncryptionService.getKey(), iv);
    let decrypted = decipher.update(ciphertext, 'base64', 'utf8');
    decrypted += decipher.final('utf8');
    return decrypted;
  }

  /** SHA-256 hex digest of a phone number, for lookups without decryption. */
  static hashPhoneNumber(phoneNumber: string): string {
    return crypto.createHash('sha256').update(phoneNumber).digest('hex');
  }
}

View File

@@ -1,6 +1,6 @@
import { Queue, Worker, QueueScheduler } from "bullmq";
import { Queue, Worker } from "bullmq";
import { Redis } from "ioredis";
import { ScanService } from "@shieldai/darkwatch";
import { ScanService, ScanScheduler, WebhookHandler } from "@shieldai/darkwatch";
import { AlertPipeline } from "@shieldai/darkwatch";
const redisUrl = process.env.REDIS_URL || "redis://localhost:6379";
@@ -8,6 +8,7 @@ const connection = new Redis(redisUrl);
const scanQueue = new Queue("darkwatch-scans", { connection });
const alertQueue = new Queue("darkwatch-alerts", { connection });
const scheduleQueue = new Queue("darkwatch-scheduler", { connection });
const scanWorker = new Worker(
"darkwatch-scans",
@@ -30,22 +31,77 @@ const alertWorker = new Worker(
{ connection, concurrency: 1 }
);
const scheduler = new QueueScheduler("darkwatch-alerts", { connection });
// Recurring BullMQ worker: finds scan schedules that are due and enqueues one
// scan job per user, then advances each schedule via markScanned.
const scheduleWorker = new Worker(
"darkwatch-scheduler",
async () => {
const scheduler = new ScanScheduler();
const dueSchedules = await scheduler.getDueSchedules();
const results: Array<{ userId: string; queued: boolean }> = [];
for (const schedule of dueSchedules) {
try {
// Timestamped jobId makes each enqueue unique, so a later scheduler run
// can re-queue the same user instead of being deduplicated by BullMQ.
await scanQueue.add("scheduled-scan", {
userId: schedule.userId,
source: undefined,
}, {
attempts: 3,
backoff: { type: "exponential", delay: 5000 },
jobId: `scheduled-scan-${schedule.userId}-${Date.now()}`,
});
// Only mark the schedule as scanned after a successful enqueue.
await scheduler.markScanned(schedule.userId);
results.push({ userId: schedule.userId, queued: true });
} catch (err) {
// Best-effort: one user's failure must not block the rest of the batch.
console.error(`[Scheduler] Failed to queue scan for ${schedule.userId}:`, err);
results.push({ userId: schedule.userId, queued: false });
}
}
return { processed: results.length, completedAt: new Date().toISOString() };
},
{ connection, concurrency: 1 }
);
// Recurring BullMQ worker: drains the backlog of webhook events that were not
// processed at ingestion time (retry path for the webhook pipeline).
const webhookWorker = new Worker(
"darkwatch-webhooks",
async () => {
const handler = new WebhookHandler();
const processed = await handler.processPendingEvents();
return { processed, completedAt: new Date().toISOString() };
},
{ connection, concurrency: 1 }
);
scanWorker.on("completed", (job) => {
console.log(`[Scan] Job ${job.id} completed: ${JSON.stringify(job.returnvalue)}`);
console.log(`[Scan] Job ${job?.id} completed: ${JSON.stringify(job?.returnvalue)}`);
});
scanWorker.on("failed", (job, err) => {
console.error(`[Scan] Job ${job.id} failed: ${err.message}`);
console.error(`[Scan] Job ${job?.id} failed: ${err.message}`);
});
alertWorker.on("completed", (job) => {
console.log(`[Alert] Job ${job.id} completed: ${JSON.stringify(job.returnvalue)}`);
console.log(`[Alert] Job ${job?.id} completed: ${JSON.stringify(job?.returnvalue)}`);
});
alertWorker.on("failed", (job, err) => {
console.error(`[Alert] Job ${job.id} failed: ${err.message}`);
console.error(`[Alert] Job ${job?.id} failed: ${err.message}`);
});
scheduleWorker.on("completed", (job) => {
console.log(`[Scheduler] Job ${job?.id} completed: ${JSON.stringify(job?.returnvalue)}`);
});
scheduleWorker.on("failed", (job, err) => {
console.error(`[Scheduler] Job ${job?.id} failed: ${err.message}`);
});
webhookWorker.on("completed", (job) => {
console.log(`[Webhook] Job ${job?.id} completed: ${JSON.stringify(job?.returnvalue)}`);
});
webhookWorker.on("failed", (job, err) => {
console.error(`[Webhook] Job ${job?.id} failed: ${err.message}`);
});
export async function addScanJob(userId: string, source?: string) {
@@ -63,7 +119,19 @@ export async function scheduleAlertProcessing() {
});
}
scanWorker.on("waiting", () => console.log("[Worker] Scan worker ready"));
alertWorker.on("waiting", () => console.log("[Worker] Alert worker ready"));
// Registers the repeatable "check due scans" job (every 10 minutes).
// The fixed jobId makes registration idempotent across process restarts.
export async function schedulePeriodicScanCheck() {
return scheduleQueue.add("check-due-scans", {}, {
repeat: { pattern: "*/10 * * * *" },
jobId: "scheduler-recurring",
});
}
// Registers the repeatable webhook-retry job (every 2 minutes); the fixed
// jobId keeps the registration idempotent.
// NOTE(review): a new Queue instance is created on every call and never
// closed — if this can be invoked repeatedly, consider hoisting the queue to
// module scope next to scanQueue/alertQueue.
export async function scheduleWebhookProcessor() {
const webhookQueue = new Queue("darkwatch-webhooks", { connection });
return webhookQueue.add("process-pending-webhooks", {}, {
repeat: { pattern: "*/2 * * * *" },
jobId: "webhook-processor-recurring",
});
}
console.log("Job workers started");

View File

@@ -0,0 +1,23 @@
{
"name": "@shieldai/shared-billing",
"version": "1.0.0",
"main": "src/index.ts",
"types": "src/index.ts",
"scripts": {
"build": "tsc",
"test": "jest",
"lint": "eslint src/"
},
"dependencies": {
"stripe": "^15.0.0",
"zod": "^3.22.0",
"express": "^4.18.0"
},
"devDependencies": {
"@types/express": "^4.17.0",
"typescript": "^5.0.0"
},
"peerDependencies": {
"typescript": "^5.0.0"
}
}

View File

@@ -0,0 +1,94 @@
import { z } from 'zod';
// Canonical subscription tier identifiers shared by billing config and
// middleware. Declared as a const object plus a derived string-union type so
// the values exist at runtime (unlike a type-only enum/union).
export const SubscriptionTier = {
FREE: 'free',
BASIC: 'basic',
PLUS: 'plus',
PREMIUM: 'premium',
} as const;
export type SubscriptionTier = typeof SubscriptionTier[keyof typeof SubscriptionTier];
export const BillingConfigSchema = z.object({
stripe: z.object({
apiKey: z.string().min(1, 'STRIPE_API_KEY required'),
webhookSecret: z.string().min(1, 'STRIPE_WEBHOOK_SECRET required'),
pricingTableId: z.string().optional(),
}),
tiers: z.object({
free: z.object({
priceId: z.string(),
monthlyPriceCents: z.number().default(0),
callMinutesLimit: z.number().default(100),
smsCountLimit: z.number().default(500),
darkWebScans: z.number().default(1),
}),
basic: z.object({
priceId: z.string(),
monthlyPriceCents: z.number().default(999),
callMinutesLimit: z.number().default(500),
smsCountLimit: z.number().default(2000),
darkWebScans: z.number().default(12),
}),
plus: z.object({
priceId: z.string(),
monthlyPriceCents: z.number().default(1999),
callMinutesLimit: z.number().default(2000),
smsCountLimit: z.number().default(10000),
darkWebScans: z.number().default(12),
voiceCloning: z.boolean().default(true),
}),
premium: z.object({
priceId: z.string(),
monthlyPriceCents: z.number().default(4999),
callMinutesLimit: z.number().default(10000),
smsCountLimit: z.number().default(50000),
darkWebScans: z.number().default(12),
voiceCloning: z.boolean().default(true),
homeTitleMonitor: z.boolean().default(true),
}),
}),
});
export type BillingConfig = z.infer<typeof BillingConfigSchema>;
export const loadBillingConfig = (): BillingConfig => ({
stripe: {
apiKey: process.env.STRIPE_API_KEY!,
webhookSecret: process.env.STRIPE_WEBHOOK_SECRET!,
pricingTableId: process.env.STRIPE_PRICING_TABLE_ID,
},
tiers: {
free: {
priceId: process.env.STRIPE_FREE_TIER_PRICE_ID || 'price_free',
monthlyPriceCents: 0,
callMinutesLimit: 100,
smsCountLimit: 500,
darkWebScans: 1,
},
basic: {
priceId: process.env.STRIPE_BASIC_TIER_PRICE_ID || 'price_basic',
monthlyPriceCents: 999,
callMinutesLimit: 500,
smsCountLimit: 2000,
darkWebScans: 12,
},
plus: {
priceId: process.env.STRIPE_PLUS_TIER_PRICE_ID!,
monthlyPriceCents: 1999,
callMinutesLimit: 2000,
smsCountLimit: 10000,
darkWebScans: 12,
voiceCloning: true,
},
premium: {
priceId: process.env.STRIPE_PREMIUM_TIER_PRICE_ID!,
monthlyPriceCents: 4999,
callMinutesLimit: 10000,
smsCountLimit: 50000,
darkWebScans: 12,
voiceCloning: true,
homeTitleMonitor: true,
},
},
});

View File

@@ -0,0 +1,10 @@
export { BillingService } from './services/billing.service';
export { loadBillingConfig, SubscriptionTier } from './config/billing.config';
export {
requireTier,
checkUsageLimit,
withUsageTracking,
requireSubscription,
} from './middleware/billing.middleware';
export * from './models/subscription.model';

View File

@@ -0,0 +1,137 @@
import { Request, Response, NextFunction } from 'express';
import { BillingService } from '../services/billing.service';
import { SubscriptionTier } from '../config/billing.config';
const billingService = BillingService.getInstance();
/**
 * Express request augmented with auth/billing context attached by upstream
 * middleware: auth resolves userId/tier, withUsageTracking populates usage.
 */
export interface AuthenticatedRequest extends Request {
  userId?: string;
  tier?: SubscriptionTier;
  // `withinLimit` added: checkUsageLimit reads it and withUsageTracking
  // writes it, but the original type omitted it (a type error at both sites).
  usage?: { current: number; limit: number; remaining: number; withinLimit: boolean };
}
/**
 * Middleware factory: allow the request through only when the caller's tier
 * is one of `allowedTiers`. Responds 401 when no tier is attached to the
 * request, 403 when the tier is present but not permitted.
 */
export function requireTier(
  allowedTiers: SubscriptionTier[]
) {
  return async (
    req: AuthenticatedRequest,
    res: Response,
    next: NextFunction
  ): Promise<void> => {
    const tier = req.tier;
    if (!tier) {
      res.status(401).json({ error: 'Authentication required' });
      return;
    }
    if (allowedTiers.includes(tier)) {
      next();
      return;
    }
    res.status(403).json({
      error: 'Tier not authorized',
      required: allowedTiers,
      current: tier,
    });
  };
}
/**
 * Middleware factory: reject with 429 once the tracked usage for `feature`
 * is over the tier limit. Requests that lack auth/usage context pass
 * through untouched (enforcement is best-effort by design).
 */
export function checkUsageLimit(
  feature: 'callMinutes' | 'smsCount'
) {
  return async (
    req: AuthenticatedRequest,
    res: Response,
    next: NextFunction
  ): Promise<void> => {
    if (!req.userId || !req.tier || !req.usage) {
      next();
      return;
    }
    const usage = req.usage;
    if (usage.withinLimit) {
      next();
      return;
    }
    res.status(429).json({
      error: 'Usage limit exceeded',
      feature,
      limit: usage.limit,
      current: usage.current,
      remaining: usage.remaining,
    });
  };
}
/**
 * Middleware factory: project usage for `feature` (current + increment)
 * against the tier limit and attach the result to req.usage for downstream
 * checks. Responds 500 when tier limits cannot be loaded; unauthenticated
 * requests pass through without tracking.
 */
export function withUsageTracking(
  feature: 'callMinutes' | 'smsCount',
  increment: number = 1
) {
  return async (
    req: AuthenticatedRequest,
    res: Response,
    next: NextFunction
  ): Promise<void> => {
    if (!req.userId || !req.tier) {
      next();
      return;
    }
    try {
      const limits = await billingService.getTierLimits(req.tier);
      const limit =
        feature === 'callMinutes' ? limits.callMinutesLimit : limits.smsCountLimit;
      // NOTE(review): currentUsage is expected to be attached by an earlier
      // middleware; it silently defaults to 0 when absent — confirm callers.
      const used = (req as any).currentUsage || 0;
      const projected = used + increment;
      req.usage = {
        current: projected,
        limit,
        remaining: Math.max(0, limit - projected),
        withinLimit: projected <= limit,
      };
      next();
    } catch (error) {
      res.status(500).json({
        error: 'Failed to check usage',
        message: error instanceof Error ? error.message : 'Unknown error',
      });
    }
  };
}
/**
 * Middleware factory: require an active subscription on the request.
 * Responds 401 without auth context and 402 without a subscription id.
 */
export function requireSubscription() {
  return async (
    req: AuthenticatedRequest,
    res: Response,
    next: NextFunction
  ): Promise<void> => {
    if (!req.userId) {
      res.status(401).json({ error: 'Authentication required' });
      return;
    }
    // NOTE(review): presence of subscriptionId on the request stands in for
    // a database lookup — confirm an upstream middleware actually sets it.
    if ((req as any).subscriptionId == null) {
      res.status(402).json({
        error: 'Active subscription required',
      });
      return;
    }
    next();
  };
}

View File

@@ -0,0 +1,35 @@
import { z } from 'zod';
import { SubscriptionTier } from '../config/billing.config';
// Full subscription record as stored and returned by the billing layer.
export const SubscriptionModel = z.object({
  id: z.string(),
  userId: z.string(),
  stripeSubscriptionId: z.string(),
  stripeCustomerId: z.string(),
  tier: z.nativeEnum(SubscriptionTier),
  // NOTE(review): 'in_trial' is not a Stripe subscription status ('trialing'
  // is) — confirm this enum tracks an internal status, not Stripe's.
  status: z.enum(['active', 'canceled', 'in_trial', 'past_due', 'unpaid', 'incomplete']),
  currentPeriodStart: z.date(),
  currentPeriodEnd: z.date(),
  cancelAtPeriodEnd: z.boolean().default(false),
  createdAt: z.date(),
  updatedAt: z.date(),
});
export type Subscription = z.infer<typeof SubscriptionModel>;
// Payload for inserting a new subscription row (id/timestamps filled elsewhere).
export const SubscriptionCreateSchema = z.object({
  userId: z.string(),
  tier: z.nativeEnum(SubscriptionTier),
  stripeCustomerId: z.string(),
  stripeSubscriptionId: z.string(),
  currentPeriodStart: z.date(),
  currentPeriodEnd: z.date(),
});
// Partial update payload; every field is optional.
export const SubscriptionUpdateSchema = z.object({
  tier: z.nativeEnum(SubscriptionTier).optional(),
  status: z.enum(['active', 'canceled', 'in_trial', 'past_due', 'unpaid', 'incomplete']).optional(),
  cancelAtPeriodEnd: z.boolean().optional(),
  currentPeriodStart: z.date().optional(),
  currentPeriodEnd: z.date().optional(),
});

View File

@@ -0,0 +1,152 @@
import Stripe from 'stripe';
import { loadBillingConfig, SubscriptionTier } from '../config/billing.config';
import type { Subscription, SubscriptionCreateSchema, SubscriptionUpdateSchema } from '../models/subscription.model';
// Billing configuration resolved once at module load (env-var driven).
const config = loadBillingConfig();
// Shared Stripe client pinned to a fixed API version for stable payloads.
const stripe = new Stripe(config.stripe.apiKey, { apiVersion: '2024-04-10' });
/**
 * Singleton facade over the Stripe SDK for customer, subscription, invoice,
 * and webhook operations. Tier limits come from the module-level config.
 */
export class BillingService {
  private static instance: BillingService;
  private constructor() {}
  /** Lazily create and return the shared instance. */
  static getInstance(): BillingService {
    if (!BillingService.instance) {
      BillingService.instance = new BillingService();
    }
    return BillingService.instance;
  }
  /** Create a Stripe customer tagged with our internal userId. */
  async createCustomer(email: string, userId: string): Promise<Stripe.Customer> {
    const customer = await stripe.customers.create({
      email,
      metadata: { userId },
    });
    return customer;
  }
  /** Fetch a customer, or null when Stripe errors (e.g. unknown id). */
  async getCustomer(customerId: string): Promise<Stripe.Customer | null> {
    try {
      const customer = await stripe.customers.retrieve(customerId);
      return customer as Stripe.Customer;
    } catch {
      return null;
    }
  }
  /**
   * Create a subscription for an existing customer at the tier's price.
   * Returns the subscription together with the re-fetched customer.
   */
  async createSubscription(
    userId: string,
    tier: SubscriptionTier,
    customerId: string
  ): Promise<{ subscription: Stripe.Subscription; customer: Stripe.Customer }> {
    const tierConfig = config.tiers[tier];
    const subscription = await stripe.subscriptions.create({
      customer: customerId,
      items: [{ price: tierConfig.priceId }],
      metadata: { userId, tier },
    });
    const customer = await this.getCustomer(customerId);
    // Non-null assertion: the customer just accepted a subscription create.
    return { subscription, customer: customer! };
  }
  /** Cancel immediately, or flag for cancellation at period end. */
  async cancelSubscription(
    subscriptionId: string,
    cancelAtPeriodEnd: boolean = false
  ): Promise<Stripe.Subscription> {
    if (cancelAtPeriodEnd) {
      return await stripe.subscriptions.update(subscriptionId, {
        cancel_at_period_end: true,
      });
    }
    return await stripe.subscriptions.cancel(subscriptionId);
  }
  /** Switch an active subscription to another tier's price, with proration. */
  async updateSubscription(
    subscriptionId: string,
    newTier: SubscriptionTier
  ): Promise<Stripe.Subscription> {
    const newTierConfig = config.tiers[newTier];
    const subscription = await stripe.subscriptions.retrieve(subscriptionId);
    const updated = await stripe.subscriptions.update(subscriptionId, {
      proration_behavior: 'create_prorations',
      items: [
        {
          // Replace the first (assumed only) line item with the new price.
          id: subscription.items.data[0]?.id,
          price: newTierConfig.priceId,
        },
      ],
    });
    return updated;
  }
  /** Create a Stripe-hosted billing-portal session for self-service. */
  async createCustomerPortalSession(
    customerId: string,
    returnUrl: string
  ): Promise<Stripe.BillingPortal.Session> {
    return await stripe.billingPortal.sessions.create({
      customer: customerId,
      return_url: returnUrl,
    });
  }
  /** Fetch a subscription, or null when Stripe errors. */
  async getSubscription(subscriptionId: string): Promise<Stripe.Subscription | null> {
    try {
      const subscription = await stripe.subscriptions.retrieve(subscriptionId);
      return subscription;
    } catch {
      return null;
    }
  }
  /** Static limits and feature flags configured for a tier. */
  async getTierLimits(tier: SubscriptionTier) {
    return config.tiers[tier];
  }
  /**
   * Compare usage against the tier's limit.
   * NOTE(review): only callMinutesLimit is checked here regardless of
   * feature — confirm SMS usage is limited elsewhere.
   */
  async checkUsageAgainstLimit(
    userId: string,
    tier: SubscriptionTier,
    currentUsage: number
  ): Promise<{ withinLimit: boolean; remaining: number; limit: number }> {
    const tierConfig = config.tiers[tier];
    const limit = tierConfig.callMinutesLimit;
    const remaining = Math.max(0, limit - currentUsage);
    return {
      withinLimit: currentUsage <= limit,
      remaining,
      limit,
    };
  }
  /**
   * Create a one-off invoice for `amount` (smallest currency unit).
   * Fixed: the original ignored `amount` entirely — Stripe invoices have no
   * top-level amount, so the charge is attached as a pending invoice item
   * that the subsequent invoice picks up.
   */
  async createInvoice(
    customerId: string,
    amount: number,
    description: string,
    metadata?: Record<string, string>
  ): Promise<Stripe.Invoice> {
    await stripe.invoiceItems.create({
      customer: customerId,
      amount,
      currency: 'usd', // TODO(review): confirm USD-only billing
      description,
    });
    return await stripe.invoices.create({
      customer: customerId,
      metadata: { ...metadata, description },
    });
  }
  /** Verify and parse a Stripe webhook event (throws on a bad signature). */
  async handleWebhook(
    sig: string,
    body: Buffer
  ): Promise<Stripe.Event> {
    return stripe.webhooks.constructEvent(body, sig, config.stripe.webhookSecret);
  }
  /** Most recent invoices (up to 100) for a customer. */
  async getInvoiceHistory(customerId: string): Promise<Stripe.ApiList<Stripe.Invoice>> {
    return await stripe.invoices.list({
      customer: customerId,
      limit: 100,
    });
  }
}

View File

@@ -0,0 +1,12 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"declaration": true,
"declarationMap": true,
"sourceMap": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View File

@@ -0,0 +1,25 @@
{
"name": "@shieldai/shared-notifications",
"version": "1.0.0",
"main": "src/index.ts",
"types": "src/index.ts",
"scripts": {
"build": "tsc",
"test": "jest",
"lint": "eslint src/"
},
"dependencies": {
"resend": "^3.0.0",
"firebase-admin": "^12.0.0",
"twilio": "^4.0.0",
"zod": "^3.22.0",
"express": "^4.18.0"
},
"devDependencies": {
"@types/express": "^4.17.0",
"typescript": "^5.0.0"
},
"peerDependencies": {
"typescript": "^5.0.0"
}
}

View File

@@ -0,0 +1,59 @@
import { z } from 'zod';
// Env-derived configuration contract for all notification providers; parse
// loadNotificationConfig() output with this schema to fail fast on gaps.
export const NotificationConfigSchema = z.object({
  // Resend transactional email.
  resend: z.object({
    apiKey: z.string().min(1, 'RESEND_API_KEY required'),
    baseUrl: z.string().default('https://api.resend.com'),
  }),
  // Firebase Cloud Messaging service-account credentials.
  fcm: z.object({
    privateKey: z.string().min(1, 'FCM_PRIVATE_KEY required'),
    projectId: z.string().min(1, 'FCM_PROJECT_ID required'),
    clientEmail: z.string().email(),
  }),
  // Apple Push Notification service signing key.
  apns: z.object({
    key: z.string().min(1, 'APNS_KEY required'),
    keyId: z.string().min(1, 'APNS_KEY_ID required'),
    teamId: z.string().min(1, 'APNS_TEAM_ID required'),
    bundleId: z.string().min(1, 'APNS_BUNDLE_ID required'),
  }),
  // Twilio SMS credentials.
  twilio: z.object({
    accountSid: z.string().min(1, 'TWILIO_ACCOUNT_SID required'),
    authToken: z.string().min(1, 'TWILIO_AUTH_TOKEN required'),
    messagingServiceSid: z.string().min(1, 'TWILIO_MESSAGING_SERVICE_SID required'),
  }),
  // Per-channel send budgets, enforced per minute by the channel services.
  rateLimits: z.object({
    emailPerMinute: z.number().default(60),
    smsPerMinute: z.number().default(30),
    pushPerMinute: z.number().default(100),
  }),
});
export type NotificationConfig = z.infer<typeof NotificationConfigSchema>;
/**
 * Resolve notification provider configuration from environment variables.
 * Required secrets are non-null asserted — NOTE(review): confirm deployment
 * supplies them, or validate with NotificationConfigSchema.parse().
 */
export const loadNotificationConfig = (): NotificationConfig => {
  const env = process.env;
  return {
    resend: {
      apiKey: env.RESEND_API_KEY!,
      baseUrl: env.RESEND_BASE_URL || 'https://api.resend.com',
    },
    fcm: {
      privateKey: env.FCM_PRIVATE_KEY!,
      projectId: env.FCM_PROJECT_ID!,
      clientEmail: env.FCM_CLIENT_EMAIL!,
    },
    apns: {
      key: env.APNS_KEY!,
      keyId: env.APNS_KEY_ID!,
      teamId: env.APNS_TEAM_ID!,
      bundleId: env.APNS_BUNDLE_ID!,
    },
    twilio: {
      accountSid: env.TWILIO_ACCOUNT_SID!,
      authToken: env.TWILIO_AUTH_TOKEN!,
      messagingServiceSid: env.TWILIO_MESSAGING_SERVICE_SID!,
    },
    rateLimits: {
      emailPerMinute: parseInt(env.EMAIL_RATE_LIMIT || '60', 10),
      smsPerMinute: parseInt(env.SMS_RATE_LIMIT || '30', 10),
      pushPerMinute: parseInt(env.PUSH_RATE_LIMIT || '100', 10),
    },
  };
};

View File

@@ -0,0 +1,8 @@
// Public entry point for the shared-notifications package.
export { EmailService } from './services/email.service';
export { SMSService } from './services/sms.service';
export { PushService } from './services/push.service';
// Channel-routing facade over the three services above.
export { NotificationService } from './services/notification.service';
export { loadNotificationConfig, NotificationConfigSchema } from './config/notification.config';
// Express router exposing the HTTP API.
export { notificationRoutes } from './routes/notification.routes';
export * from './types/notification.types';

View File

@@ -0,0 +1,159 @@
import { Router, Request, Response } from 'express';
import { NotificationService } from '../services/notification.service';
import type { EmailNotification, SMSNotification, PushNotification } from '../types/notification.types';
// Express router exposing the notification HTTP API.
const router = Router();
// Shared dispatcher singleton used by every route handler.
const notificationService = NotificationService.getInstance();
/**
 * Loose request body for /send and /send/batch: `channel` selects which of
 * the optional fields are required; the handlers validate per channel.
 */
export interface SendNotificationRequest {
  channel: 'email' | 'sms' | 'push';
  to?: string;
  userId?: string;
  subject?: string;
  body: string;
  htmlBody?: string;
  title?: string;
  data?: Record<string, unknown>;
  metadata?: Record<string, string>;
  category?: string;
}
/**
 * POST /send — dispatch one notification over the requested channel,
 * honoring the recipient's stored preferences for `category`.
 * 400 on per-channel validation failure, 500 on dispatch errors.
 */
router.post('/send', async (req: Request, res: Response) => {
  try {
    const { channel, ...payload } = req.body as SendNotificationRequest;
    let notification: EmailNotification | SMSNotification | PushNotification;
    if (channel === 'email') {
      if (!payload.to || !payload.subject || !payload.htmlBody) {
        res.status(400).json({ error: 'Email requires to, subject, and htmlBody' });
        return;
      }
      notification = {
        channel: 'email',
        to: payload.to,
        subject: payload.subject,
        htmlBody: payload.htmlBody,
        textBody: payload.body,
        metadata: payload.metadata,
      };
    } else if (channel === 'sms') {
      if (!payload.to) {
        res.status(400).json({ error: 'SMS requires to field' });
        return;
      }
      notification = {
        channel: 'sms',
        to: payload.to,
        body: payload.body,
        metadata: payload.metadata,
      };
    } else if (channel === 'push') {
      if (!payload.userId || !payload.title) {
        res.status(400).json({ error: 'Push requires userId and title' });
        return;
      }
      notification = {
        channel: 'push',
        userId: payload.userId,
        title: payload.title,
        body: payload.body,
        data: payload.data,
      };
    } else {
      res.status(400).json({ error: `Unknown channel: ${channel}` });
      return;
    }
    const result = await notificationService.sendWithPreferences(
      notification,
      payload.category || 'default'
    );
    res.json(result);
  } catch (error) {
    res.status(500).json({
      error: error instanceof Error ? error.message : 'Unknown error',
    });
  }
});
router.post('/send/batch', async (req: Request, res: Response) => {
try {
const notifications = req.body.notifications as SendNotificationRequest[];
const results = await Promise.all(
notifications.map(n => {
const notif = {
channel: n.channel,
to: n.to,
userId: n.userId,
subject: n.subject,
body: n.body,
htmlBody: n.htmlBody,
title: n.title,
data: n.data,
metadata: n.metadata,
};
return notificationService.sendWithPreferences(
notif as EmailNotification | SMSNotification | PushNotification,
n.category || 'default'
);
})
);
res.json({ results });
} catch (error) {
res.status(500).json({
error: error instanceof Error ? error.message : 'Unknown error',
});
}
});
/**
 * POST /preferences/:userId — upsert a user's per-channel notification
 * preference (enabled flag plus optional category whitelist).
 */
router.post('/preferences/:userId', async (req: Request, res: Response) => {
  try {
    const { channel, enabled, categories } = req.body;
    const preference = await notificationService.setPreference(
      req.params.userId,
      channel,
      enabled,
      categories
    );
    res.json(preference);
  } catch (error) {
    res.status(500).json({
      error: error instanceof Error ? error.message : 'Unknown error',
    });
  }
});
/**
 * GET /preferences/:userId — fetch the stored preference for one channel
 * (?channel=email|sms|push). Without a channel query it only echoes the
 * userId; listing every preference is not implemented.
 */
router.get('/preferences/:userId', async (req: Request, res: Response) => {
  try {
    const { userId } = req.params;
    const { channel } = req.query;
    if (!channel) {
      res.json({ userId });
      return;
    }
    const preference = await notificationService.getPreference(
      userId,
      channel as 'email' | 'sms' | 'push'
    );
    res.json(preference);
  } catch (error) {
    res.status(500).json({
      error: error instanceof Error ? error.message : 'Unknown error',
    });
  }
});
export { router as notificationRoutes };

View File

@@ -0,0 +1,96 @@
import { Resend } from 'resend';
import { loadNotificationConfig } from '../config/notification.config';
import type { EmailNotification, NotificationResult } from '../types/notification.types';
// Provider configuration resolved once at module load.
const config = loadNotificationConfig();
// Shared Resend client used for every email send.
const resend = new Resend(config.resend.apiKey);
/**
 * Singleton email sender backed by Resend, with a per-recipient
 * sliding-window rate limit (config.rateLimits.emailPerMinute per 60s).
 */
export class EmailService {
  private static instance: EmailService;
  // Per-recipient window state. Fixed: the original stored a bare count but
  // the cleanup sweep compared it against Date.now() as if it were a
  // timestamp, evicting every entry on the first sweep; track both fields.
  private sentCount = new Map<string, { count: number; windowStart: number }>();
  private cleanupInterval: NodeJS.Timeout;
  private constructor() {
    // Evict rate-limit windows older than one minute.
    this.cleanupInterval = setInterval(() => {
      const now = Date.now();
      for (const [key, entry] of this.sentCount.entries()) {
        if (now - entry.windowStart > 60000) {
          this.sentCount.delete(key);
        }
      }
    }, 60000);
  }
  static getInstance(): EmailService {
    if (!EmailService.instance) {
      EmailService.instance = new EmailService();
    }
    return EmailService.instance;
  }
  /**
   * Send one email. Throws when the per-recipient limit is hit; provider
   * failures are returned as a 'failed' result instead of throwing.
   */
  async send(notification: EmailNotification): Promise<NotificationResult> {
    const rateLimitKey = `email:${notification.to}`;
    const now = Date.now();
    let entry = this.sentCount.get(rateLimitKey);
    if (entry && now - entry.windowStart > 60000) {
      // Expired window: start fresh rather than waiting for the sweep.
      this.sentCount.delete(rateLimitKey);
      entry = undefined;
    }
    if (entry && entry.count >= config.rateLimits.emailPerMinute) {
      throw new Error(`Email rate limit exceeded for ${notification.to}`);
    }
    try {
      const { data, error } = await resend.emails.send({
        from: notification.from || 'ShieldAI <noreply@shieldai.com>',
        to: [notification.to],
        subject: notification.subject,
        html: notification.htmlBody,
        text: notification.textBody,
        metadata: notification.metadata,
        attachments: notification.attachments?.map(att => ({
          filename: att.filename,
          data: att.content,
          contentType: att.mimeType,
        })),
      });
      if (error) {
        return {
          notificationId: `email-${Date.now()}`,
          channel: 'email',
          status: 'failed',
          externalId: data?.id,
          error: error.message,
        };
      }
      // Only successful sends count against the window (as before).
      if (entry) {
        entry.count += 1;
      } else {
        this.sentCount.set(rateLimitKey, { count: 1, windowStart: now });
      }
      return {
        notificationId: `email-${data?.id || Date.now()}`,
        channel: 'email',
        status: 'sent',
        externalId: data?.id,
        deliveredAt: new Date(),
      };
    } catch (error) {
      return {
        notificationId: `email-${Date.now()}`,
        channel: 'email',
        status: 'failed',
        error: error instanceof Error ? error.message : 'Unknown error',
      };
    }
  }
  /** Send many emails concurrently; results preserve input order. */
  async sendBatch(notifications: EmailNotification[]): Promise<NotificationResult[]> {
    return Promise.all(notifications.map(n => this.send(n)));
  }
  /**
   * Remaining allowance for the busiest recipient in the current window.
   * Fixed: the original subtracted the number of distinct recipients (map
   * size), which is unrelated to any per-recipient count.
   */
  getRateLimitStatus(): { remaining: number; limit: number } {
    const limit = config.rateLimits.emailPerMinute;
    let busiest = 0;
    for (const entry of this.sentCount.values()) {
      busiest = Math.max(busiest, entry.count);
    }
    return { remaining: Math.max(0, limit - busiest), limit };
  }
}

View File

@@ -0,0 +1,145 @@
import { EmailService } from './email.service';
import { SMSService } from './sms.service';
import { PushService } from './push.service';
import type {
Notification,
NotificationResult,
NotificationPreference,
DeduplicationKey
} from '../types/notification.types';
/**
 * Singleton dispatcher that routes notifications to the per-channel
 * services, with optional deduplication and per-user preference gating.
 */
export class NotificationService {
  private static instance: NotificationService;
  private emailService: EmailService;
  private smsService: SMSService;
  private pushService: PushService;
  // Dedup state keyed by `userId:templateId:key`. Fixed: the original kept a
  // bare Set forever (windowMinutes was never used), so a deduplicated key
  // was suppressed for the process lifetime; entries now carry an expiry.
  private pendingDeduplication = new Map<string, { ids: Set<string>; expiresAt: number }>();
  // In-memory preference store — lost on restart.
  private preferenceCache = new Map<string, NotificationPreference>();
  private constructor() {
    this.emailService = EmailService.getInstance();
    this.smsService = SMSService.getInstance();
    this.pushService = PushService.getInstance();
  }
  static getInstance(): NotificationService {
    if (!NotificationService.instance) {
      NotificationService.instance = new NotificationService();
    }
    return NotificationService.instance;
  }
  /** Route a notification to its channel service. */
  async send(notification: Notification): Promise<NotificationResult> {
    switch (notification.channel) {
      case 'email':
        return this.emailService.send(notification);
      case 'sms':
        return this.smsService.send(notification);
      case 'push':
        return this.pushService.send(notification);
      default:
        throw new Error(`Unknown notification channel: ${(notification as any).channel}`);
    }
  }
  /**
   * Send unless an identical (userId/templateId/key) send happened within
   * the last dedupKey.windowMinutes; duplicates get a 'pending' result with
   * an explanatory error instead of being delivered.
   */
  async sendWithDeduplication(
    notification: Notification,
    dedupKey: DeduplicationKey
  ): Promise<NotificationResult> {
    const dedupId = `${dedupKey.userId}:${dedupKey.templateId}:${dedupKey.key}`;
    const now = Date.now();
    let windowEntry = this.pendingDeduplication.get(dedupId);
    if (windowEntry && windowEntry.expiresAt <= now) {
      // Window elapsed — the key may send again.
      this.pendingDeduplication.delete(dedupId);
      windowEntry = undefined;
    }
    if (windowEntry && windowEntry.ids.size > 0) {
      return {
        notificationId: `dedup-${Date.now()}`,
        channel: notification.channel,
        status: 'pending',
        error: 'Duplicate notification within deduplication window',
      };
    }
    const result = await this.send(notification);
    if (result.status === 'sent') {
      if (!windowEntry) {
        windowEntry = {
          ids: new Set(),
          expiresAt: now + dedupKey.windowMinutes * 60000,
        };
        this.pendingDeduplication.set(dedupId, windowEntry);
      }
      windowEntry.ids.add(result.externalId!);
    }
    return result;
  }
  /** Store (in memory) a user's preference for one channel. */
  async setPreference(
    userId: string,
    channel: NotificationPreference['channel'],
    enabled: boolean,
    categories?: string[]
  ): Promise<NotificationPreference> {
    const preference: NotificationPreference = {
      userId,
      channel,
      enabled,
      categories: categories || [],
      updatedAt: new Date(),
    };
    this.preferenceCache.set(`${userId}:${channel}`, preference);
    return preference;
  }
  /** Fetch a stored preference, or null when none exists. */
  async getPreference(
    userId: string,
    channel: NotificationPreference['channel']
  ): Promise<NotificationPreference | null> {
    return this.preferenceCache.get(`${userId}:${channel}`) || null;
  }
  /**
   * Whether a notification should go out: no stored preference allows all;
   * disabled blocks all; an empty category list allows every category.
   */
  async shouldSend(
    userId: string,
    channel: NotificationPreference['channel'],
    category: string
  ): Promise<boolean> {
    const preference = await this.getPreference(userId, channel);
    if (!preference) {
      return true;
    }
    if (!preference.enabled) {
      return false;
    }
    if (preference.categories.length === 0) {
      return true;
    }
    return preference.categories.includes(category);
  }
  /**
   * Preference-gated send.
   * NOTE(review): email/SMS notifications carry no user id, so a synthetic
   * `user-<timestamp>` id is used — it never matches a stored preference,
   * effectively bypassing gating for those channels. Confirm intended.
   */
  async sendWithPreferences(
    notification: Notification,
    category: string
  ): Promise<NotificationResult | null> {
    const userId = notification.channel === 'push'
      ? notification.userId
      : `user-${Date.now()}`;
    const shouldSend = await this.shouldSend(
      userId,
      notification.channel,
      category
    );
    if (!shouldSend) {
      return {
        notificationId: `pref-${Date.now()}`,
        channel: notification.channel,
        status: 'pending',
        error: 'Notification disabled for user preference',
      };
    }
    return this.send(notification);
  }
}

View File

@@ -0,0 +1,112 @@
import admin from 'firebase-admin';
import { loadNotificationConfig } from '../config/notification.config';
import type { PushNotification, NotificationResult } from '../types/notification.types';
// Provider configuration resolved once at module load.
const config = loadNotificationConfig();
// Lazily-initialized firebase-admin app (FCM credentials from env).
let fcmApp: admin.app.App | null = null;
// Initialize the shared FCM app on first use and reuse it afterwards.
function getFCMApp(): admin.app.App {
  if (!fcmApp) {
    fcmApp = admin.initializeApp({
      credential: admin.credential.cert({
        projectId: config.fcm.projectId,
        clientEmail: config.fcm.clientEmail,
        // Env vars commonly store the key with literal "\n"; restore newlines.
        privateKey: config.fcm.privateKey.replace(/\\n/g, '\n'),
      }),
    });
  }
  return fcmApp;
}
/**
 * Singleton push sender backed by Firebase Cloud Messaging, with a per-user
 * sliding-window rate limit (config.rateLimits.pushPerMinute per 60s).
 */
export class PushService {
  private static instance: PushService;
  // Per-user window state. Fixed: the original stored a bare count but the
  // cleanup sweep compared it to Date.now() as a timestamp, evicting every
  // entry on the first sweep; track count and window start explicitly.
  private sentCount = new Map<string, { count: number; windowStart: number }>();
  private cleanupInterval: NodeJS.Timeout;
  private constructor() {
    // Evict rate-limit windows older than one minute.
    this.cleanupInterval = setInterval(() => {
      const now = Date.now();
      for (const [key, entry] of this.sentCount.entries()) {
        if (now - entry.windowStart > 60000) {
          this.sentCount.delete(key);
        }
      }
    }, 60000);
  }
  static getInstance(): PushService {
    if (!PushService.instance) {
      PushService.instance = new PushService();
    }
    return PushService.instance;
  }
  /**
   * Send one push via FCM. Throws when the per-user limit is hit; provider
   * failures are returned as a 'failed' result instead of throwing.
   */
  async send(notification: PushNotification): Promise<NotificationResult> {
    const rateLimitKey = `push:${notification.userId}`;
    const now = Date.now();
    let entry = this.sentCount.get(rateLimitKey);
    if (entry && now - entry.windowStart > 60000) {
      this.sentCount.delete(rateLimitKey);
      entry = undefined;
    }
    if (entry && entry.count >= config.rateLimits.pushPerMinute) {
      throw new Error(`Push rate limit exceeded for user ${notification.userId}`);
    }
    try {
      const fcmApp = getFCMApp();
      const messaging = admin.messaging(fcmApp);
      const message: admin.messaging.Message = {
        notification: {
          title: notification.title,
          body: notification.body,
        },
        // FCM data payloads must be string-valued.
        data: notification.data ?
          Object.fromEntries(
            Object.entries(notification.data).map(([k, v]) => [k, String(v)])
          ) : undefined,
        // NOTE(review): FCM expects a device registration token here, but a
        // userId is being passed — confirm callers supply the device token.
        token: notification.userId,
        apns: {
          payload: {
            aps: {
              badge: notification.badge,
              sound: notification.sound || 'default',
              category: notification.category,
            },
          },
        },
      };
      const response = await messaging.send(message);
      // Only successful sends count against the window (as before).
      if (entry) {
        entry.count += 1;
      } else {
        this.sentCount.set(rateLimitKey, { count: 1, windowStart: now });
      }
      return {
        notificationId: `push-${response}`,
        channel: 'push',
        status: 'sent',
        externalId: response,
        deliveredAt: new Date(),
      };
    } catch (error) {
      return {
        notificationId: `push-${Date.now()}`,
        channel: 'push',
        status: 'failed',
        error: error instanceof Error ? error.message : 'Unknown error',
      };
    }
  }
  /** Send many pushes concurrently; results preserve input order. */
  async sendBatch(notifications: PushNotification[]): Promise<NotificationResult[]> {
    return Promise.all(notifications.map(n => this.send(n)));
  }
  /**
   * Remaining allowance for the busiest user in the current window.
   * Fixed: the original subtracted the number of distinct users (map size),
   * which is unrelated to any per-user count.
   */
  getRateLimitStatus(): { remaining: number; limit: number } {
    const limit = config.rateLimits.pushPerMinute;
    let busiest = 0;
    for (const entry of this.sentCount.values()) {
      busiest = Math.max(busiest, entry.count);
    }
    return { remaining: Math.max(0, limit - busiest), limit };
  }
}

View File

@@ -0,0 +1,82 @@
import twilio from 'twilio';
import { loadNotificationConfig } from '../config/notification.config';
import type { SMSNotification, NotificationResult } from '../types/notification.types';
// Provider configuration resolved once at module load.
const config = loadNotificationConfig();
// Shared Twilio REST client authenticated with account credentials.
const twilioClient = twilio(
  config.twilio.accountSid,
  config.twilio.authToken
);
/**
 * Singleton SMS sender backed by Twilio, with a per-recipient
 * sliding-window rate limit (config.rateLimits.smsPerMinute per 60s).
 */
export class SMSService {
  private static instance: SMSService;
  // Per-recipient window state. Fixed: the original stored a bare count but
  // the cleanup sweep compared it to Date.now() as a timestamp, evicting
  // every entry on the first sweep; track count and window start explicitly.
  private sentCount = new Map<string, { count: number; windowStart: number }>();
  private cleanupInterval: NodeJS.Timeout;
  private constructor() {
    // Evict rate-limit windows older than one minute.
    this.cleanupInterval = setInterval(() => {
      const now = Date.now();
      for (const [key, entry] of this.sentCount.entries()) {
        if (now - entry.windowStart > 60000) {
          this.sentCount.delete(key);
        }
      }
    }, 60000);
  }
  static getInstance(): SMSService {
    if (!SMSService.instance) {
      SMSService.instance = new SMSService();
    }
    return SMSService.instance;
  }
  /**
   * Send one SMS. Throws when the per-recipient limit is hit; provider
   * failures are returned as a 'failed' result instead of throwing.
   */
  async send(notification: SMSNotification): Promise<NotificationResult> {
    const rateLimitKey = `sms:${notification.to}`;
    const now = Date.now();
    let entry = this.sentCount.get(rateLimitKey);
    if (entry && now - entry.windowStart > 60000) {
      this.sentCount.delete(rateLimitKey);
      entry = undefined;
    }
    if (entry && entry.count >= config.rateLimits.smsPerMinute) {
      throw new Error(`SMS rate limit exceeded for ${notification.to}`);
    }
    try {
      // Fixed: Twilio takes a Messaging Service SID in `messagingServiceSid`,
      // not `from` (which must be a sender phone number). The original also
      // passed a `metadata` field the Messages create API does not define.
      const message = await twilioClient.messages.create(
        notification.from
          ? {
              body: notification.body,
              from: notification.from,
              to: notification.to,
            }
          : {
              body: notification.body,
              to: notification.to,
              messagingServiceSid: config.twilio.messagingServiceSid,
            }
      );
      // Only successful sends count against the window (as before).
      if (entry) {
        entry.count += 1;
      } else {
        this.sentCount.set(rateLimitKey, { count: 1, windowStart: now });
      }
      return {
        notificationId: `sms-${message.sid}`,
        channel: 'sms',
        status: 'sent',
        externalId: message.sid,
        deliveredAt: new Date(),
      };
    } catch (error) {
      return {
        notificationId: `sms-${Date.now()}`,
        channel: 'sms',
        status: 'failed',
        error: error instanceof Error ? error.message : 'Unknown error',
      };
    }
  }
  /** Send many SMS messages concurrently; results preserve input order. */
  async sendBatch(notifications: SMSNotification[]): Promise<NotificationResult[]> {
    return Promise.all(notifications.map(n => this.send(n)));
  }
  /**
   * Remaining allowance for the busiest recipient in the current window.
   * Fixed: the original subtracted the number of distinct recipients (map
   * size), which is unrelated to any per-recipient count.
   */
  getRateLimitStatus(): { remaining: number; limit: number } {
    const limit = config.rateLimits.smsPerMinute;
    let busiest = 0;
    for (const entry of this.sentCount.values()) {
      busiest = Math.max(busiest, entry.count);
    }
    return { remaining: Math.max(0, limit - busiest), limit };
  }
}

View File

@@ -0,0 +1,90 @@
// Delivery channels supported by the notification package.
export type NotificationChannel = 'email' | 'sms' | 'push';
// Lifecycle states reported in NotificationResult.status.
export type NotificationStatus =
  | 'pending'
  | 'sent'
  | 'delivered'
  | 'failed'
  | 'bounced'
  | 'read';
// Addressing info for one user across all channels.
export interface NotificationRecipient {
  userId: string;
  email?: string;
  phone?: string;
  fcmToken?: string;
  apnsToken?: string;
}
// Outbound email payload (Resend-backed).
export interface EmailNotification {
  channel: 'email';
  to: string;
  from?: string;
  subject: string;
  htmlBody: string;
  textBody?: string;
  metadata?: Record<string, string>;
  attachments?: Array<{
    filename: string;
    content: Buffer;
    mimeType?: string;
  }>;
}
// Outbound SMS payload (Twilio-backed).
export interface SMSNotification {
  channel: 'sms';
  to: string;
  body: string;
  from?: string;
  metadata?: Record<string, string>;
}
// Outbound push payload (FCM-backed).
export interface PushNotification {
  channel: 'push';
  userId: string;
  title: string;
  body: string;
  data?: Record<string, unknown>;
  badge?: number;
  sound?: string;
  category?: string;
}
// Discriminated union over `channel`.
export type Notification = EmailNotification | SMSNotification | PushNotification;
// Outcome of a single delivery attempt.
export interface NotificationResult {
  notificationId: string;
  channel: NotificationChannel;
  status: NotificationStatus;
  externalId?: string; // provider-side id (Resend / Twilio / FCM)
  error?: string;
  deliveredAt?: Date;
  readAt?: Date;
}
// Reusable message template definition.
export interface NotificationTemplate {
  id: string;
  name: string;
  channel: NotificationChannel;
  subject?: string;
  body: string;
  locale: string;
  variables: string[];
  createdAt: Date;
  updatedAt: Date;
}
// Per-user, per-channel opt-in with an optional category whitelist
// (an empty `categories` list means every category is allowed).
export interface NotificationPreference {
  userId: string;
  channel: NotificationChannel;
  enabled: boolean;
  categories: string[];
  updatedAt: Date;
}
// Identity of a deduplicated send plus its suppression window.
export interface DeduplicationKey {
  userId: string;
  templateId: string;
  key: string;
  windowMinutes: number;
}

View File

@@ -0,0 +1,12 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"declaration": true,
"declarationMap": true,
"sourceMap": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View File

@@ -141,3 +141,38 @@ export interface VoiceEnrollmentOutput {
durationSec?: number;
createdAt: Date;
}
// Lifecycle states for a user's recurring-scan schedule.
export const ScheduleStatus = {
  ACTIVE: "ACTIVE",
  PAUSED: "PAUSED",
} as const;
export type ScheduleStatus = (typeof ScheduleStatus)[keyof typeof ScheduleStatus];
// Webhook event kinds recognized by the darkwatch webhook handler.
export const WebhookEventType = {
  SCAN_TRIGGER: "SCAN_TRIGGER",
  BREACH_DETECTED: "BREACH_DETECTED",
  SUBSCRIPTION_CHANGE: "SUBSCRIPTION_CHANGE",
} as const;
export type WebhookEventType = (typeof WebhookEventType)[keyof typeof WebhookEventType];
// Inbound webhook payload. `eventType` is a plain string (not the union
// above) so unknown events can be accepted and rejected downstream.
export interface WebhookTriggerInput {
  eventType: string;
  payload: Record<string, unknown>;
  source?: string;
  signature?: string;
}
// API shape of a persisted scan schedule.
export interface ScanScheduleOutput {
  id: string;
  userId: string;
  intervalMinutes: number;
  cronExpression: string;
  status: ScheduleStatus;
  lastScanAt?: Date;
  nextScanAt?: Date;
}
// Interval/cron pair describing one scheduling cadence.
export interface SchedulerConfig {
  intervalMinutes: number;
  cronExpression: string;
}

View File

@@ -0,0 +1,44 @@
# --- Build stage: install workspace deps and compile darkwatch + its libs ---
FROM node:20-alpine AS builder
WORKDIR /app
# Workspace manifests first so the `npm ci` layer caches until one changes.
COPY package.json package-lock.json turbo.json ./
COPY packages/api/package.json ./packages/api/
COPY packages/db/package.json ./packages/db/
COPY packages/types/package.json ./packages/types/
# Fixed: COPY is not a shell command — the original `2>/dev/null || true`
# suffix was parsed as extra COPY source arguments and broke the build.
# COPY fails hard on a missing path, so packages/core must exist in the
# build context (it is listed as a workspace manifest like the others).
COPY packages/core/package.json ./packages/core/
COPY packages/jobs/package.json ./packages/jobs/
COPY packages/shared-notifications/package.json ./packages/shared-notifications/
COPY services/darkwatch/package.json ./services/darkwatch/
COPY services/spamshield/package.json ./services/spamshield/
COPY services/voiceprint/package.json ./services/voiceprint/
RUN npm ci
# Sources and tsconfigs for the three workspaces built below.
COPY tsconfig.json ./
COPY packages/types/tsconfig.json ./packages/types/
COPY packages/db/tsconfig.json ./packages/db/
COPY services/darkwatch/tsconfig.json ./services/darkwatch/
COPY services/darkwatch/ ./services/darkwatch/
COPY packages/types/ ./packages/types/
COPY packages/db/ ./packages/db/
RUN npm run build --workspace=@shieldai/types --workspace=@shieldai/db --workspace=@shieldai/darkwatch

# --- Runtime stage: minimal image running as a non-root user ---
FROM node:20-alpine AS runner
WORKDIR /app
RUN addgroup --system --gid 1001 nodejs && \
    adduser --system --uid 1001 shieldai
COPY --from=builder --chown=shieldai:nodejs /app/services/darkwatch/dist ./dist
# NOTE(review): this ships the full workspace node_modules (incl. dev deps);
# consider a pruned `npm ci --omit=dev` install to slim the image.
COPY --from=builder --chown=shieldai:nodejs /app/node_modules ./node_modules
COPY --from=builder --chown=shieldai:nodejs /app/services/darkwatch/package.json ./package.json
COPY --from=builder --chown=shieldai:nodejs /app/packages/db ./packages/db
USER shieldai
EXPOSE 3001
CMD ["node", "dist/index.js"]

View File

@@ -3,3 +3,5 @@ export * from "./hibp/HIBPService";
export * from "./matching/MatchingEngine";
export * from "./alerts/AlertPipeline";
export * from "./scanner/ScanService";
export * from "./scheduler/ScanScheduler";
export * from "./webhooks/WebhookHandler";

View File

@@ -0,0 +1,168 @@
import prisma from "@shieldai/db";
import { SubscriptionTier } from "@shieldai/types";
// Scan cadence per subscription tier: BASIC daily, PLUS every 6 hours,
// PREMIUM hourly. Tiers not listed here (e.g. a free tier, if one exists)
// must be handled by callers via an explicit fallback.
const TIER_CONFIG = {
  [SubscriptionTier.BASIC]: { intervalMinutes: 1440, cron: "0 0 * * *" },
  [SubscriptionTier.PLUS]: { intervalMinutes: 360, cron: "0 */6 * * *" },
  [SubscriptionTier.PREMIUM]: { intervalMinutes: 60, cron: "0 * * * *" },
} as const;
export class ScanScheduler {
/**
* Get the scan interval (in minutes) for a given subscription tier.
*/
public static getIntervalForTier(tier: SubscriptionTier): number {
return TIER_CONFIG[tier]?.intervalMinutes ?? TIER_CONFIG[SubscriptionTier.BASIC].intervalMinutes;
}
/**
* Get the cron expression for a given subscription tier.
*/
public static getCronForTier(tier: SubscriptionTier): string {
return TIER_CONFIG[tier]?.cron ?? TIER_CONFIG[SubscriptionTier.BASIC].cron;
}
/**
* Ensure a user has an active scan schedule based on their subscription tier.
* Creates or updates the schedule record.
*/
async ensureScheduleForUser(userId: string): Promise<{ scheduled: boolean; intervalMinutes: number }> {
const user = await prisma.user.findUnique({
where: { id: userId },
select: { subscriptionTier: true },
});
if (!user) {
return { scheduled: false, intervalMinutes: 0 };
}
const tier = user.subscriptionTier ?? SubscriptionTier.BASIC;
const config = TIER_CONFIG[tier];
const nextScan = this.calculateNextScan();
const schedule = await prisma.scanSchedule.upsert({
where: { userId },
update: {
intervalMinutes: config.intervalMinutes,
cronExpression: config.cron,
nextScanAt: nextScan,
},
create: {
userId,
intervalMinutes: config.intervalMinutes,
cronExpression: config.cron,
status: "ACTIVE",
nextScanAt: nextScan,
},
});
return {
scheduled: schedule.status === "ACTIVE",
intervalMinutes: schedule.intervalMinutes,
};
}
/**
* Get all active schedules that are due for scanning.
*/
async getDueSchedules(): Promise<Array<{ userId: string; intervalMinutes: number; cronExpression: string }>> {
const now = new Date();
const due = await prisma.scanSchedule.findMany({
where: {
status: "ACTIVE",
OR: [
{ nextScanAt: { lte: now } },
{ nextScanAt: null },
],
},
select: {
userId: true,
intervalMinutes: true,
cronExpression: true,
},
});
return due;
}
/**
* Mark a schedule as scanned and compute the next scan time.
*/
async markScanned(userId: string): Promise<Date> {
const schedule = await prisma.scanSchedule.findUnique({ where: { userId } });
if (!schedule) {
throw new Error(`ScanSchedule not found for user ${userId}`);
}
const nextScan = this.calculateNextScan(schedule.intervalMinutes);
await prisma.scanSchedule.update({
where: { userId },
data: {
lastScanAt: new Date(),
nextScanAt: nextScan,
},
});
return nextScan;
}
/**
 * Pause scanning for a user (e.g. subscription downgrade or pause).
 * Only flips rows that are currently ACTIVE, so a missing or
 * already-paused schedule is a harmless no-op.
 */
async pauseSchedule(userId: string): Promise<void> {
  await prisma.scanSchedule.updateMany({
    where: { status: "ACTIVE", userId },
    data: { status: "PAUSED" },
  });
}
/**
 * Resume scanning for a user. Delegates to ensureScheduleForUser so the
 * interval and cron are recomputed from the user's *current* tier rather
 * than whatever was stored when the schedule was paused.
 */
async resumeSchedule(userId: string): Promise<void> {
  await this.ensureScheduleForUser(userId);
}
/**
 * Look up the schedule row for a user.
 *
 * @returns The schedule record, or null when the user has none.
 */
async getSchedule(userId: string) {
  const schedule = await prisma.scanSchedule.findUnique({ where: { userId } });
  return schedule;
}
/**
 * Page through all ACTIVE schedules with a summary of the owning user,
 * ordered soonest-scan-first. Intended for admin/monitoring views.
 *
 * @param limit  - Maximum rows to return (default 100).
 * @param offset - Rows to skip for pagination (default 0).
 */
async listActiveSchedules(limit = 100, offset = 0) {
  const userSummary = {
    select: {
      id: true,
      email: true,
      subscriptionTier: true,
    },
  };
  return prisma.scanSchedule.findMany({
    where: { status: "ACTIVE" },
    include: { user: userSummary },
    orderBy: { nextScanAt: "asc" },
    skip: offset,
    take: limit,
  });
}
/**
 * Compute the next scan timestamp: now plus the given interval,
 * defaulting to 60 minutes when no interval is supplied.
 */
private calculateNextScan(intervalMinutes?: number): Date {
  const intervalMs = (intervalMinutes ?? 60) * 60_000;
  return new Date(Date.now() + intervalMs);
}
}

View File

@@ -0,0 +1,193 @@
import prisma from "@shieldai/db";
import { createHmac, timingSafeEqual } from "crypto";
import { DataSource, WebhookEventType } from "@shieldai/types";
export class WebhookHandler {
  // HMAC-SHA256 key used to verify incoming webhook signatures.
  private secret: string;

  constructor(secret?: string) {
    // NOTE(review): the hard-coded fallback secret means a deployment that
    // forgets WEBHOOK_SECRET silently accepts forgeable signatures —
    // consider failing fast in production instead.
    this.secret = secret || process.env.WEBHOOK_SECRET || "default-webhook-secret";
  }

  /**
   * Verify the HMAC-SHA256 hex signature of a raw payload string.
   * Accepts a single signature or an array of candidates (proxies may
   * forward multiple signature headers); returns true if any matches.
   * Comparison is constant-time via timingSafeEqual.
   */
  verifySignature(payload: string, signature: string | string[]): boolean {
    if (!signature) return false;
    const candidates = Array.isArray(signature) ? signature : [signature];
    const expected = Buffer.from(this.computeSignature(payload));
    for (const candidate of candidates) {
      const actual = Buffer.from(candidate);
      // Bug fix: timingSafeEqual throws a RangeError when the buffers have
      // different lengths, so a malformed/truncated signature used to crash
      // verification instead of returning false. Length-check first.
      if (actual.length === expected.length && timingSafeEqual(actual, expected)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Validate, persist, and act on an incoming webhook event.
   * SCAN_TRIGGER events with a userId in the payload enqueue a scan job;
   * all events are stored and stamped processed.
   *
   * @param eventType - Raw event type string (normalized internally).
   * @param payload   - Parsed webhook body.
   * @param source    - Optional origin identifier of the webhook sender.
   * @param signature - Optional HMAC signature; when present it must verify.
   * @returns The stored event id and whether a scan job was created.
   * @throws Error when a signature is provided but fails verification.
   */
  async processEvent(
    eventType: string,
    payload: Record<string, unknown>,
    source?: string,
    signature?: string
  ): Promise<{ eventId: string; scanTriggered: boolean }> {
    const payloadStr = JSON.stringify(payload);
    if (signature && !this.verifySignature(payloadStr, signature)) {
      throw new Error("Webhook signature verification failed");
    }
    const eventTypeNormalized = this.normalizeEventType(eventType);
    const event = await prisma.webhookEvent.create({
      data: {
        eventType: eventTypeNormalized,
        payload: payloadStr,
        source,
        signature,
      },
    });
    let scanTriggered = false;
    if (eventTypeNormalized === WebhookEventType.SCAN_TRIGGER) {
      const userId = payload.userId as string | undefined;
      // Renamed from `source`, which shadowed the function parameter above.
      const requestedSource = (payload.dataSource as string) || undefined;
      if (userId) {
        scanTriggered = await this.triggerScanFromWebhook(event.id, userId, requestedSource);
      }
    }
    // The event is marked processed even when no scan was triggered (e.g.
    // missing/unknown userId); the retry loop only re-drives events that
    // failed before reaching this point.
    await prisma.webhookEvent.update({
      where: { id: event.id },
      data: {
        processed: true,
        processedAt: new Date(),
      },
    });
    return { eventId: event.id, scanTriggered };
  }

  /**
   * Create a PENDING scan job for the user and link it back to the
   * webhook event. Returns false (never throws) on unknown user or DB error.
   */
  private async triggerScanFromWebhook(
    eventId: string,
    userId: string,
    dataSource?: string
  ): Promise<boolean> {
    try {
      const user = await prisma.user.findUnique({ where: { id: userId } });
      if (!user) {
        return false;
      }
      const job = await prisma.scanJob.create({
        data: {
          userId,
          status: "PENDING",
          source: (dataSource as DataSource) || undefined,
          scheduledBy: "webhook",
        },
      });
      await prisma.webhookEvent.update({
        where: { id: eventId },
        data: { scanJobId: job.id },
      });
      return true;
    } catch (err) {
      // Deliberately best-effort: a failed trigger is logged, not rethrown,
      // so one bad event cannot break batch processing.
      console.error(`[Webhook] Scan trigger failed for event ${eventId}:`, err);
      return false;
    }
  }

  /**
   * Webhook event history, newest first, with any linked scan job.
   */
  async getEventHistory(limit = 50, offset = 0) {
    return prisma.webhookEvent.findMany({
      orderBy: { createdAt: "desc" },
      take: limit,
      skip: offset,
      include: { scanJob: true },
    });
  }

  /**
   * Events for a specific user, resolved via the linked scan job's userId.
   * Note: events that never produced a scan job are not attributable to a
   * user and will not appear here.
   */
  async getUserEvents(userId: string, limit = 50, offset = 0) {
    return prisma.webhookEvent.findMany({
      where: {
        scanJob: { userId },
      },
      orderBy: { createdAt: "desc" },
      take: limit,
      skip: offset,
    });
  }

  /**
   * Retry unprocessed SCAN_TRIGGER events (oldest first, batch of 50).
   * Each successfully re-triggered event is marked processed.
   *
   * @returns Number of events successfully processed this pass.
   */
  async processPendingEvents(): Promise<number> {
    const pending = await prisma.webhookEvent.findMany({
      where: {
        processed: false,
        eventType: WebhookEventType.SCAN_TRIGGER,
      },
      orderBy: { createdAt: "asc" },
      take: 50,
    });
    let processed = 0;
    for (const event of pending) {
      try {
        const payload = JSON.parse(event.payload) as Record<string, unknown>;
        const userId = payload.userId as string | undefined;
        if (userId) {
          const success = await this.triggerScanFromWebhook(
            event.id,
            userId,
            payload.dataSource as string | undefined
          );
          if (success) {
            await prisma.webhookEvent.update({
              where: { id: event.id },
              data: { processed: true, processedAt: new Date() },
            });
            processed++;
          }
        }
      } catch (err) {
        // Leave the event unprocessed so a later pass retries it.
        console.error(`[Webhook] Retry failed for event ${event.id}:`, err);
      }
    }
    return processed;
  }

  /** Hex HMAC-SHA256 of the payload under this handler's secret. */
  private computeSignature(payload: string): string {
    return createHmac("sha256", this.secret).update(payload).digest("hex");
  }

  /**
   * Map a raw event-type string (any case, spaces allowed) onto the
   * WebhookEventType enum.
   * NOTE(review): unknown types default to SCAN_TRIGGER, which means an
   * unrecognized event can enqueue a scan — consider rejecting instead.
   */
  private normalizeEventType(eventType: string): WebhookEventType {
    const upper = eventType.toUpperCase().replace(/\s+/g, "_");
    const validTypes: WebhookEventType[] = [WebhookEventType.SCAN_TRIGGER, WebhookEventType.BREACH_DETECTED, WebhookEventType.SUBSCRIPTION_CHANGE];
    return validTypes.includes(upper as WebhookEventType) ? (upper as WebhookEventType) : WebhookEventType.SCAN_TRIGGER;
  }
}

View File

@@ -0,0 +1,195 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { ScanScheduler } from "../src/scheduler/ScanScheduler";
import { SubscriptionTier } from "@shieldai/types";
import prisma from "@shieldai/db";
// Suffix used to keep generated test-user emails unique; refreshed in each
// beforeEach *after* the user is created, so the next test gets a new value.
let runId = Date.now();
describe("ScanScheduler", () => {
let scheduler: ScanScheduler;
beforeEach(() => {
// Fresh scheduler per test; guards against any future instance state.
scheduler = new ScanScheduler();
});
// Pure lookups into the tier table — no database involved.
describe("static tier configuration", () => {
it("returns correct interval for BASIC tier", () => {
expect(ScanScheduler.getIntervalForTier(SubscriptionTier.BASIC)).toBe(1440);
});
it("returns correct interval for PLUS tier", () => {
expect(ScanScheduler.getIntervalForTier(SubscriptionTier.PLUS)).toBe(360);
});
it("returns correct interval for PREMIUM tier", () => {
expect(ScanScheduler.getIntervalForTier(SubscriptionTier.PREMIUM)).toBe(60);
});
it("returns correct cron for BASIC tier", () => {
expect(ScanScheduler.getCronForTier(SubscriptionTier.BASIC)).toBe("0 0 * * *");
});
it("returns correct cron for PLUS tier", () => {
expect(ScanScheduler.getCronForTier(SubscriptionTier.PLUS)).toBe("0 */6 * * *");
});
it("returns correct cron for PREMIUM tier", () => {
expect(ScanScheduler.getCronForTier(SubscriptionTier.PREMIUM)).toBe("0 * * * *");
});
});
// The suites below hit the real database through prisma; they assume a
// dedicated test DB and delete the rows they create in afterEach.
describe("ensureScheduleForUser", () => {
let userId: string;
beforeEach(async () => {
const user = await prisma.user.create({
data: {
email: `scheduler-test-${runId}@shieldai.local`,
subscriptionTier: "BASIC",
},
});
userId = user.id;
// Rotate runId so the next created user gets a distinct email.
runId = Date.now();
});
afterEach(async () => {
// Schedule rows must go before the user to satisfy the FK.
await prisma.scanSchedule.deleteMany({ where: { userId } });
await prisma.user.delete({ where: { id: userId } });
});
it("creates schedule for new user", async () => {
const result = await scheduler.ensureScheduleForUser(userId);
expect(result.scheduled).toBe(true);
expect(result.intervalMinutes).toBe(1440);
const schedule = await scheduler.getSchedule(userId);
expect(schedule).not.toBeNull();
expect(schedule?.status).toBe("ACTIVE");
expect(schedule?.cronExpression).toBe("0 0 * * *");
});
it("updates schedule on tier change", async () => {
await scheduler.ensureScheduleForUser(userId);
await prisma.user.update({
where: { id: userId },
data: { subscriptionTier: "PREMIUM" },
});
const result = await scheduler.ensureScheduleForUser(userId);
expect(result.intervalMinutes).toBe(60);
const schedule = await scheduler.getSchedule(userId);
expect(schedule?.cronExpression).toBe("0 * * * *");
});
it("returns false for non-existent user", async () => {
const result = await scheduler.ensureScheduleForUser("non-existent-id");
expect(result.scheduled).toBe(false);
expect(result.intervalMinutes).toBe(0);
});
});
describe("schedule lifecycle", () => {
let userId: string;
beforeEach(async () => {
const user = await prisma.user.create({
data: {
email: `lifecycle-test-${runId}@shieldai.local`,
subscriptionTier: "PLUS",
},
});
userId = user.id;
runId = Date.now();
await scheduler.ensureScheduleForUser(userId);
});
afterEach(async () => {
await prisma.scanSchedule.deleteMany({ where: { userId } });
await prisma.user.delete({ where: { id: userId } });
});
it("marks schedule as scanned and updates next scan time", async () => {
const before = await scheduler.getSchedule(userId);
const nextScan = await scheduler.markScanned(userId);
const after = await scheduler.getSchedule(userId);
expect(after?.lastScanAt).not.toBeNull();
// 5s tolerance: the returned value and the stored row are computed at
// slightly different moments.
expect(after?.nextScanAt?.getTime()).toBeGreaterThan(nextScan.getTime() - 5000);
expect(after?.nextScanAt).not.toEqual(before?.nextScanAt);
});
it("pauses schedule", async () => {
await scheduler.pauseSchedule(userId);
const schedule = await scheduler.getSchedule(userId);
expect(schedule?.status).toBe("PAUSED");
});
it("resumes paused schedule", async () => {
await scheduler.pauseSchedule(userId);
await scheduler.resumeSchedule(userId);
const schedule = await scheduler.getSchedule(userId);
// NOTE(review): this relies on ensureScheduleForUser re-activating an
// existing schedule — verify the upsert's update branch sets status to
// ACTIVE, otherwise this assertion fails.
expect(schedule?.status).toBe("ACTIVE");
});
});
describe("getDueSchedules", () => {
let userId1: string;
let userId2: string;
beforeEach(async () => {
const user1 = await prisma.user.create({
data: {
email: `due-test-1-${runId}@shieldai.local`,
subscriptionTier: "PREMIUM",
},
});
userId1 = user1.id;
const user2 = await prisma.user.create({
data: {
email: `due-test-2-${runId}@shieldai.local`,
subscriptionTier: "BASIC",
},
});
userId2 = user2.id;
runId = Date.now();
await scheduler.ensureScheduleForUser(userId1);
await scheduler.ensureScheduleForUser(userId2);
});
afterEach(async () => {
await prisma.scanSchedule.deleteMany({ where: { userId: userId1 } });
await prisma.scanSchedule.deleteMany({ where: { userId: userId2 } });
await prisma.user.delete({ where: { id: userId1 } });
await prisma.user.delete({ where: { id: userId2 } });
});
it("returns schedules that are due", async () => {
// Force the schedule into the past so it qualifies as due.
const pastDate = new Date(Date.now() - 60000);
await prisma.scanSchedule.update({
where: { userId: userId1 },
data: { nextScanAt: pastDate },
});
const due = await scheduler.getDueSchedules();
const dueUserIds = due.map((s) => s.userId);
expect(dueUserIds).toContain(userId1);
});
it("includes schedules with null nextScanAt", async () => {
await prisma.scanSchedule.update({
where: { userId: userId2 },
data: { nextScanAt: null },
});
const due = await scheduler.getDueSchedules();
const dueUserIds = due.map((s) => s.userId);
expect(dueUserIds).toContain(userId2);
});
});
});

View File

@@ -0,0 +1,201 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { WebhookHandler } from "../src/webhooks/WebhookHandler";
import prisma from "@shieldai/db";
// Fixed secret so signatures computed in tests match the handler's HMAC key.
const TEST_SECRET = "test-webhook-secret-2026";
// Suffix for unique test-user emails; rotated after each user creation.
let runId = Date.now();
describe("WebhookHandler", () => {
let handler: WebhookHandler;
beforeEach(() => {
handler = new WebhookHandler(TEST_SECRET);
});
describe("signature verification", () => {
it("verifies valid signature", () => {
const payload = JSON.stringify({ userId: "test-123" });
// Bracket access reaches the private computeSignature helper.
const sig = handler["computeSignature"](payload);
expect(handler.verifySignature(payload, sig)).toBe(true);
});
it("rejects invalid signature", () => {
const payload = JSON.stringify({ userId: "test-123" });
// NOTE(review): "invalid-sig" is shorter than a sha256 hex digest —
// verifySignature must length-check before timingSafeEqual, which
// otherwise throws a RangeError here instead of returning false.
expect(handler.verifySignature(payload, "invalid-sig")).toBe(false);
});
it("rejects missing signature", () => {
expect(handler.verifySignature("payload", "")).toBe(false);
});
it("accepts signature from array", () => {
const payload = JSON.stringify({ userId: "test-123" });
const sig = handler["computeSignature"](payload);
expect(handler.verifySignature(payload, ["other", sig, "another"])).toBe(true);
});
});
describe("processEvent", () => {
let userId: string;
beforeEach(async () => {
const user = await prisma.user.create({
data: {
email: `webhook-test-${runId}@shieldai.local`,
subscriptionTier: "PREMIUM",
},
});
userId = user.id;
runId = Date.now();
});
afterEach(async () => {
// NOTE(review): this deleteMany has no where-clause, so it wipes ALL
// webhook events — assumes an isolated test database.
await prisma.webhookEvent.deleteMany();
await prisma.scanJob.deleteMany({ where: { userId } });
await prisma.user.delete({ where: { id: userId } });
});
it("processes SCAN_TRIGGER event", async () => {
const result = await handler.processEvent("SCAN_TRIGGER", {
userId,
dataSource: "HIBP",
});
expect(result.eventId).toBeDefined();
expect(result.scanTriggered).toBe(true);
const job = await prisma.scanJob.findFirst({
where: { userId, scheduledBy: "webhook" },
});
expect(job).not.toBeNull();
});
it("processes BREACH_DETECTED event", async () => {
const result = await handler.processEvent("BREACH_DETECTED", {
userId,
breachName: "TestBreach",
});
expect(result.eventId).toBeDefined();
// Only SCAN_TRIGGER events enqueue scans.
expect(result.scanTriggered).toBe(false);
});
it("normalizes event type", async () => {
// Lower-case input should be stored upper-cased.
const result = await handler.processEvent("scan_trigger", {
userId,
});
expect(result.eventId).toBeDefined();
const event = await prisma.webhookEvent.findUnique({
where: { id: result.eventId },
});
expect(event?.eventType).toBe("SCAN_TRIGGER");
});
it("returns false for non-existent user", async () => {
const result = await handler.processEvent("SCAN_TRIGGER", {
userId: "non-existent-user-id",
});
expect(result.scanTriggered).toBe(false);
});
it("links scan job to webhook event", async () => {
const result = await handler.processEvent("SCAN_TRIGGER", {
userId,
});
expect(result.scanTriggered).toBe(true);
const event = await prisma.webhookEvent.findUnique({
where: { id: result.eventId },
});
expect(event?.scanJobId).toBeDefined();
expect(event?.processed).toBe(true);
});
});
describe("signature validation in processEvent", () => {
it("accepts event with valid signature", async () => {
const payload = { userId: "test" };
const payloadStr = JSON.stringify(payload);
const sig = handler["computeSignature"](payloadStr);
const result = await handler.processEvent("SCAN_TRIGGER", payload, undefined, sig);
expect(result.eventId).toBeDefined();
});
it("rejects event with invalid signature", async () => {
const payload = { userId: "test" };
try {
await handler.processEvent("SCAN_TRIGGER", payload, undefined, "bad-signature");
// Reaching here means no error was thrown — force a failure.
expect(true).toBe(false);
} catch (err) {
// NOTE(review): expects the handler's own "signature" error message;
// if verifySignature lets timingSafeEqual throw a RangeError on a
// length mismatch, that message won't contain "signature".
expect((err as Error).message).toContain("signature");
}
});
it("accepts event without signature when no signature provided", async () => {
const result = await handler.processEvent("SCAN_TRIGGER", { userId: "test" });
expect(result.eventId).toBeDefined();
});
});
describe("processPendingEvents", () => {
it("retries unprocessed events", async () => {
const user = await prisma.user.create({
data: {
email: `retry-test-${runId}@shieldai.local`,
subscriptionTier: "BASIC",
},
});
runId = Date.now();
// Seed an unprocessed event directly so the retry loop picks it up.
await prisma.webhookEvent.create({
data: {
eventType: "SCAN_TRIGGER",
payload: JSON.stringify({ userId: user.id }),
processed: false,
},
});
const processed = await handler.processPendingEvents();
expect(processed).toBeGreaterThanOrEqual(1);
const job = await prisma.scanJob.findFirst({
where: { userId: user.id, scheduledBy: "webhook" },
});
expect(job).not.toBeNull();
// Inline cleanup: this test creates its own user outside the suite hooks.
await prisma.scanJob.deleteMany({ where: { userId: user.id } });
await prisma.user.delete({ where: { id: user.id } });
});
});
describe("getEventHistory", () => {
afterEach(async () => {
await prisma.webhookEvent.deleteMany();
});
it("returns events ordered by creation time", async () => {
await handler.processEvent("SCAN_TRIGGER", { userId: "user-1" });
await handler.processEvent("BREACH_DETECTED", { userId: "user-2" });
const events = await handler.getEventHistory();
expect(events.length).toBeGreaterThanOrEqual(2);
// Newest first (descending createdAt).
expect(events[0].createdAt.getTime()).toBeGreaterThanOrEqual(events[1].createdAt.getTime());
});
it("respects limit and offset", async () => {
for (let i = 0; i < 5; i++) {
await handler.processEvent("SCAN_TRIGGER", { userId: `user-${i}` });
}
const events = await handler.getEventHistory(3, 0);
expect(events).toHaveLength(3);
});
});
});

View File

@@ -0,0 +1,44 @@
# ---- Build stage: install workspace deps and compile the service ----
FROM node:20-alpine AS builder
WORKDIR /app

# Copy manifests first so the npm ci layer stays cached until a manifest changes.
COPY package.json package-lock.json turbo.json ./
COPY packages/api/package.json ./packages/api/
COPY packages/db/package.json ./packages/db/
COPY packages/types/package.json ./packages/types/
# Bug fix: `2>/dev/null || true` is shell syntax, but COPY is a Dockerfile
# instruction, not a shell command — those tokens were treated as extra COPY
# source paths and broke the build. The [n] wildcard makes this copy a
# no-op instead of an error when packages/core does not exist.
COPY packages/core/package.jso[n] ./packages/core/
COPY packages/jobs/package.json ./packages/jobs/
COPY packages/shared-notifications/package.json ./packages/shared-notifications/
COPY services/darkwatch/package.json ./services/darkwatch/
COPY services/spamshield/package.json ./services/spamshield/
COPY services/voiceprint/package.json ./services/voiceprint/
RUN npm ci

# Sources for this service and the local packages it builds against.
COPY tsconfig.json ./
COPY packages/types/tsconfig.json ./packages/types/
COPY packages/db/tsconfig.json ./packages/db/
COPY services/spamshield/tsconfig.json ./services/spamshield/
COPY services/spamshield/ ./services/spamshield/
COPY packages/types/ ./packages/types/
COPY packages/db/ ./packages/db/
RUN npm run build --workspace=@shieldai/types --workspace=@shieldai/db --workspace=@shieldai/spamshield

# ---- Runtime stage: compiled output only, running as a non-root user ----
FROM node:20-alpine AS runner
WORKDIR /app
RUN addgroup --system --gid 1001 nodejs && \
adduser --system --uid 1001 shieldai
COPY --from=builder --chown=shieldai:nodejs /app/services/spamshield/dist ./dist
COPY --from=builder --chown=shieldai:nodejs /app/node_modules ./node_modules
COPY --from=builder --chown=shieldai:nodejs /app/services/spamshield/package.json ./package.json
COPY --from=builder --chown=shieldai:nodejs /app/packages/db ./packages/db
USER shieldai
EXPOSE 3002
CMD ["node", "dist/index.js"]

View File

@@ -0,0 +1,22 @@
{
"name": "@shieldai/spamshield",
"version": "0.1.0",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"scripts": {
"build": "tsc",
"dev": "tsx watch src/index.ts",
"lint": "eslint src/",
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@shieldai/db": "0.1.0",
"@prisma/client": "^6.2.0",
"libphonenumber-js": "^1.10.50"
},
"devDependencies": {
"typescript": "^5.3.3",
"tsx": "^4.19.0",
"eslint": "^8.56.0"
}
}

View File

@@ -0,0 +1,22 @@
// Per-tier request limits for spam lookups.
// NOTE(review): the rate window (per minute? per day?) is not defined here —
// confirm against the rate limiter that consumes these values.
export const spamRateLimits = {
BASIC: 100,
PLUS: 500,
PREMIUM: 2000,
} as const;
// Feature toggles for the spam-shield integrations and pipelines.
export const spamFeatureFlags = {
enableHiyaIntegration: true,
enableTruecallerIntegration: true,
enableSMSClassification: true,
enableCallAnalysis: true,
enableFeedbackLoop: true,
} as const;
// Tunable limits and thresholds for spam classification.
export const spamConfig = {
maxPhoneNumberLength: 20,
minPhoneNumberLength: 10,
// Minimum confidence for a positive spam classification.
defaultConfidenceThreshold: 0.7,
maxMetadataSize: 1024 * 10, // 10KB
// Consecutive failures before the circuit opens, and how long (ms) it
// stays open before retrying.
circuitBreakerThreshold: 5,
circuitBreakerTimeout: 60000,
} as const;
View File

@@ -0,0 +1,25 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"lib": ["ES2022"],
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"baseUrl": "./src",
"paths": {
"@shieldai/db": ["../../packages/db/src/index.ts"],
"@shieldai/db/*": ["../../packages/db/src/*"]
}
},
"include": ["src/**/*.ts"],
"exclude": ["node_modules", "dist"]
}

View File

@@ -0,0 +1,44 @@
# ---- Build stage: install workspace deps and compile the service ----
FROM node:20-alpine AS builder
WORKDIR /app

# Copy manifests first so the npm ci layer stays cached until a manifest changes.
COPY package.json package-lock.json turbo.json ./
COPY packages/api/package.json ./packages/api/
COPY packages/db/package.json ./packages/db/
COPY packages/types/package.json ./packages/types/
# Bug fix: `2>/dev/null || true` is shell syntax, but COPY is a Dockerfile
# instruction, not a shell command — those tokens were treated as extra COPY
# source paths and broke the build. The [n] wildcard makes this copy a
# no-op instead of an error when packages/core does not exist.
COPY packages/core/package.jso[n] ./packages/core/
COPY packages/jobs/package.json ./packages/jobs/
COPY packages/shared-notifications/package.json ./packages/shared-notifications/
COPY services/darkwatch/package.json ./services/darkwatch/
COPY services/spamshield/package.json ./services/spamshield/
COPY services/voiceprint/package.json ./services/voiceprint/
RUN npm ci

# Sources for this service and the local packages it builds against.
COPY tsconfig.json ./
COPY packages/types/tsconfig.json ./packages/types/
COPY packages/db/tsconfig.json ./packages/db/
COPY services/voiceprint/tsconfig.json ./services/voiceprint/
COPY services/voiceprint/ ./services/voiceprint/
COPY packages/types/ ./packages/types/
COPY packages/db/ ./packages/db/
RUN npm run build --workspace=@shieldai/types --workspace=@shieldai/db --workspace=@shieldai/voiceprint

# ---- Runtime stage: compiled output only, running as a non-root user ----
FROM node:20-alpine AS runner
WORKDIR /app
RUN addgroup --system --gid 1001 nodejs && \
adduser --system --uid 1001 shieldai
COPY --from=builder --chown=shieldai:nodejs /app/services/voiceprint/dist ./dist
COPY --from=builder --chown=shieldai:nodejs /app/node_modules ./node_modules
COPY --from=builder --chown=shieldai:nodejs /app/services/voiceprint/package.json ./package.json
COPY --from=builder --chown=shieldai:nodejs /app/packages/db ./packages/db
USER shieldai
EXPOSE 3003
CMD ["node", "dist/index.js"]