diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..ec506e9
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,38 @@
+# Build stage
+FROM node:18-alpine AS builder
+
+WORKDIR /app
+
+# Copy package files
+COPY package*.json ./
+COPY apps/ ./apps/
+COPY packages/ ./packages/
+
+# Install dependencies
+RUN npm ci
+
+# Build all packages
+RUN npm run build
+
+# Production stage
+FROM node:18-alpine AS production
+
+WORKDIR /app
+
+# Copy package files
+COPY package*.json ./
+COPY apps/ ./apps/
+COPY packages/ ./packages/
+
+# Copy built artifacts from builder
+COPY --from=builder /app/apps/web/dist ./apps/web/dist
+COPY --from=builder /app/apps/api/dist ./apps/api/dist
+
+# Install production dependencies only
+RUN npm ci --omit=dev
+
+# Expose port
+EXPOSE 3000
+
+# Start the API server
+CMD ["node", "apps/api/dist/index.js"]
diff --git a/check-identity.js b/check-identity.js
new file mode 100644
index 0000000..e9512bc
--- /dev/null
+++ b/check-identity.js
@@ -0,0 +1,50 @@
+const http = require('http');
+
+const agentId = process.env.PAPERCLIP_AGENT_ID;
+const apiKey = process.env.PAPERCLIP_API_KEY;
+const apiUrl = process.env.PAPERCLIP_API_URL;
+const runId = process.env.PAPERCLIP_RUN_ID;
+
+console.log('Agent ID:', agentId);
+console.log('API URL:', apiUrl);
+console.log('Run ID:', runId);
+
+if (!apiKey || !apiUrl) {
+ console.error('Missing environment variables');
+ process.exit(1);
+}
+
+async function fetchJson(url, options = {}) {
+ const request = http.request({
+ hostname: new URL(url).hostname,
+ port: new URL(url).port,
+ path: new URL(url).pathname,
+ method: options.method || 'GET',
+ headers: {
+ 'Authorization': `Bearer ${apiKey}`,
+ 'X-Paperclip-Run-Id': runId,
+ ...options.headers
+ }
+ }, (response) => {
+ let data = '';
+ response.on('data', chunk => data += chunk);
+ response.on('end', () => {
+ try {
+ console.log(JSON.stringify(JSON.parse(data), null, 2));
+ } catch (e) {
+ console.log(data);
+ }
+ });
+ });
+ request.on('error', console.error);
+ request.end();
+}
+
+console.log('\n=== FETCHING AGENT IDENTITY ===\n');
+fetchJson(`${apiUrl}/api/agents/me`).catch(console.error);
+
+console.log('\n=== FETCHING INBOX-LITE ===\n');
+fetchJson(`${apiUrl}/api/agents/me/inbox-lite`).catch(console.error);
+
+console.log('\n=== FETCHING ALL ASSIGNED ISSUES ===\n');
+fetchJson(`${apiUrl}/api/companies/${apiKey.split('-')[0] || 'unknown'}/issues?assigneeAgentId=${agentId}&status=todo,in_progress,blocked`).catch(console.error); // NOTE(review): assumes the API key is prefixed with the company id — confirm against the API's key format
diff --git a/docker-compose.yml b/docker-compose.yml
index bd0d53c..94c6c8b 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,31 +1,53 @@
-version: '3.9'
+version: '3.8'
services:
postgres:
- image: postgres:16-alpine
+ image: postgres:15-alpine
+ container_name: shieldsai_postgres
environment:
- POSTGRES_DB: shieldai
- POSTGRES_USER: shieldai
- POSTGRES_PASSWORD: shieldai_dev
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: postgres
+ POSTGRES_DB: shieldsai_dev
ports:
- "5432:5432"
volumes:
- - pgdata:/var/lib/postgresql/data
+ - postgres_data:/var/lib/postgresql/data
healthcheck:
- test: ["CMD-SHELL", "pg_isready -U shieldai"]
+ test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 5s
timeout: 5s
retries: 5
redis:
image: redis:7-alpine
+ container_name: shieldsai_redis
ports:
- "6379:6379"
+ volumes:
+ - redis_data:/data
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 5s
timeout: 5s
retries: 5
+ mailhog:
+ image: mailhog/mailhog:latest
+ container_name: shieldsai_mailhog
+ ports:
+ - "1025:1025" # SMTP
+ - "8025:8025" # Web UI
+ depends_on:
+ - postgres
+
+ adminer:
+ image: adminer:4
+ container_name: shieldsai_adminer
+ ports:
+ - "8080:8080"
+ depends_on:
+ - postgres
+
volumes:
- pgdata:
+ postgres_data:
+ redis_data:
diff --git a/drizzle.config.ts b/drizzle.config.ts
new file mode 100644
index 0000000..bdc51d4
--- /dev/null
+++ b/drizzle.config.ts
@@ -0,0 +1,11 @@
+import { defineConfig } from "drizzle-kit";
+
+export default defineConfig({
+ schema: "./src/db/schema/index.ts",
+ out: "./src/db/migrations",
+ dialect: "turso",
+ dbCredentials: {
+ url: process.env.TURSO_DATABASE_URL!,
+ authToken: process.env.TURSO_AUTH_TOKEN!,
+ },
+});
diff --git a/examples/call-analysis-example.ts b/examples/call-analysis-example.ts
new file mode 100644
index 0000000..a86a8d2
--- /dev/null
+++ b/examples/call-analysis-example.ts
@@ -0,0 +1,90 @@
+/**
+ * Example: Real-Time Call Analysis
+ * Demonstrates how to use the RealTimeCallAnalysisServer
+ */
+
+import { RealTimeCallAnalysisServer } from '../src/lib/call-analysis/real-time-call-server';
+
+async function example() {
+ // Create and start the server
+ const server = new RealTimeCallAnalysisServer({
+ port: 8089,
+ enableEchoCancellation: true,
+ enableNoiseSuppression: true,
+ enableAutoGainControl: true,
+ analysisConfig: {
+ sentimentWindowMs: 5000,
+ interruptThresholdMs: 200,
+ overlapThresholdMs: 300,
+ pauseThresholdMs: 2000,
+ volumeSpikeThreshold: 0.8,
+ anomalySensitivity: 'medium',
+ enableSpeakerDiarization: false,
+ },
+ });
+
+ // Listen for events
+ server.on('client:connected', ({ clientId }) => {
+ console.log(`Client connected: ${clientId}`);
+ });
+
+ server.on('client:disconnected', ({ clientId }) => {
+ console.log(`Client disconnected: ${clientId}`);
+ });
+
+ server.on('analysis:alert', ({ clientId, alert }) => {
+ console.log(`Alert from ${clientId}: ${alert.message} (${alert.severity})`);
+ });
+
+ server.on('analysis:result', ({ clientId, status }) => {
+ console.log(`Analysis status for ${clientId}: ${status}`);
+ });
+
+ server.on('analysis:error', ({ clientId, error }) => {
+ console.error(`Error for ${clientId}:`, error);
+ });
+
+ // Start the server
+ await server.start();
+ console.log('Server started, waiting for clients...');
+
+ // Example: Client connection simulation
+ const WebSocket = require('ws');
+ const client = new WebSocket('ws://localhost:8089?clientId=test-client');
+
+ client.on('open', () => {
+ console.log('Client connected');
+
+ // Start audio capture
+ client.send(JSON.stringify({ type: 'start' }));
+ });
+
+ client.on('message', (data: Buffer) => {
+ const message = JSON.parse(data.toString());
+ console.log('Received:', message.type, message);
+
+ if (message.type === 'alert' || message.type === 'anomaly') {
+ console.log(` - ${message.alertType}: ${message.message}`);
+ }
+
+ if (message.type === 'analysis') {
+ console.log(` - MOS: ${message.callQuality.mosScore}`);
+ console.log(` - Sentiment: ${message.sentiment.sentiment}`);
+ console.log(` - Summary: ${message.summary}`);
+ }
+ });
+
+ // Stop after 60 seconds
+ setTimeout(async () => {
+ console.log('Stopping server...');
+ await server.stop();
+ process.exit(0);
+ }, 60000);
+}
+
+// Run example if called directly
+if (require.main === module) {
+ example().catch(console.error);
+}
+
+export default example;
diff --git a/index.html b/index.html
new file mode 100644
index 0000000..4b95548
--- /dev/null
+++ b/index.html
@@ -0,0 +1,21 @@
+<!doctype html>
+<html lang="en">
+
+<head>
+
+  <meta charset="UTF-8" />
+
+  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+
+
+
+
+
+
+  <title>Scripter — Write Faster</title>
+
+</head>
+<!-- NOTE(review): body markup was lost in extraction — restore the app mount point and script tags -->
+<body>
+</body>
+</html>
diff --git a/packages/api/src/routes/darkwatch.routes.ts b/packages/api/src/routes/darkwatch.routes.ts
new file mode 100644
index 0000000..161b020
--- /dev/null
+++ b/packages/api/src/routes/darkwatch.routes.ts
@@ -0,0 +1,285 @@
+import { FastifyInstance, FastifyRequest, FastifyReply } from 'fastify';
+import { prisma, SubscriptionTier } from '@shieldsai/shared-db';
+import { tierConfig, SubscriptionTier as BillingTier } from '@shieldsai/shared-billing';
+import {
+ watchlistService,
+ scanService,
+ schedulerService,
+ webhookService,
+} from '../services/darkwatch';
+
+export async function darkwatchRoutes(fastify: FastifyInstance) {
+ const authed = async (
+ request: FastifyRequest,
+ reply: FastifyReply
+  ): Promise<string | null> => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+ if (!userId) {
+ reply.code(401).send({ error: 'User ID required' });
+ return null;
+ }
+
+ const subscription = await prisma.subscription.findFirst({
+ where: { userId, status: 'active' },
+ select: { id: true, tier: true },
+ });
+
+ if (!subscription) {
+ reply.code(404).send({ error: 'Active subscription not found' });
+ return null;
+ }
+
+ return subscription.id;
+ };
+
+ // GET /darkwatch/watchlist - List watchlist items
+ fastify.get('/watchlist', async (request: FastifyRequest, reply: FastifyReply) => {
+ const subscriptionId = await authed(request, reply);
+ if (!subscriptionId) return;
+
+ try {
+ const items = await watchlistService.getItems(subscriptionId);
+ return reply.send({ items });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Failed to list watchlist';
+ return reply.code(500).send({ error: message });
+ }
+ });
+
+ // POST /darkwatch/watchlist - Add watchlist item
+ fastify.post('/watchlist', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+ if (!userId) {
+ return reply.code(401).send({ error: 'User ID required' });
+ }
+
+ const subscription = await prisma.subscription.findFirst({
+ where: { userId, status: 'active' },
+ select: { id: true, tier: true },
+ });
+
+ if (!subscription) {
+ return reply.code(404).send({ error: 'Active subscription not found' });
+ }
+
+ const body = request.body as { type: string; value: string };
+
+ if (!body.type || !body.value) {
+ return reply.code(400).send({ error: 'type and value are required' });
+ }
+
+ const maxItems = tierConfig[subscription.tier as BillingTier].features.maxWatchlistItems;
+
+ try {
+ const item = await watchlistService.addItem(
+ subscription.id,
+ body.type,
+ body.value,
+ maxItems
+ );
+ return reply.code(201).send({ item });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Failed to add watchlist item';
+ return reply.code(422).send({ error: message });
+ }
+ });
+
+ // DELETE /darkwatch/watchlist/:id - Remove watchlist item
+ fastify.delete('/watchlist/:id', async (request: FastifyRequest, reply: FastifyReply) => {
+ const subscriptionId = await authed(request, reply);
+ if (!subscriptionId) return;
+
+ const id = (request.params as { id: string }).id;
+
+ try {
+ const item = await watchlistService.removeItem(id, subscriptionId);
+ return reply.send({ item });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Failed to remove watchlist item';
+ return reply.code(422).send({ error: message });
+ }
+ });
+
+ // POST /darkwatch/scan - Trigger on-demand scan
+ fastify.post('/scan', async (request: FastifyRequest, reply: FastifyReply) => {
+ const subscriptionId = await authed(request, reply);
+ if (!subscriptionId) return;
+
+ try {
+ const job = await schedulerService.enqueueOnDemandScan(subscriptionId);
+ return reply.send({
+ job: {
+ id: job?.id,
+ status: 'queued',
+ },
+ });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Failed to trigger scan';
+ return reply.code(422).send({ error: message });
+ }
+ });
+
+ // GET /darkwatch/scan/schedule - Get scan schedule
+ fastify.get('/scan/schedule', async (request: FastifyRequest, reply: FastifyReply) => {
+ const subscriptionId = await authed(request, reply);
+ if (!subscriptionId) return;
+
+ try {
+ const schedule = await schedulerService.getScanSchedule(subscriptionId);
+ return reply.send({ schedule });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Failed to get schedule';
+ return reply.code(500).send({ error: message });
+ }
+ });
+
+ // GET /darkwatch/exposures - List exposures
+ fastify.get('/exposures', async (request: FastifyRequest, reply: FastifyReply) => {
+ const subscriptionId = await authed(request, reply);
+ if (!subscriptionId) return;
+
+ try {
+ const exposures = await prisma.exposure.findMany({
+ where: { subscriptionId },
+ orderBy: { detectedAt: 'desc' },
+ take: 50,
+ include: {
+ watchlistItem: true,
+ },
+ });
+ return reply.send({ exposures });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Failed to list exposures';
+ return reply.code(500).send({ error: message });
+ }
+ });
+
+ // GET /darkwatch/alerts - List alerts
+ fastify.get('/alerts', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+ if (!userId) {
+ return reply.code(401).send({ error: 'User ID required' });
+ }
+
+ try {
+ const alerts = await prisma.alert.findMany({
+ where: { userId },
+ orderBy: { createdAt: 'desc' },
+ take: 50,
+ include: {
+ exposure: true,
+ },
+ });
+ return reply.send({ alerts });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Failed to list alerts';
+ return reply.code(500).send({ error: message });
+ }
+ });
+
+ // PATCH /darkwatch/alerts/:id/read - Mark alert as read
+ fastify.patch('/alerts/:id/read', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+ if (!userId) {
+ return reply.code(401).send({ error: 'User ID required' });
+ }
+
+ const id = (request.params as { id: string }).id;
+
+ try {
+ const alert = await prisma.alert.update({
+        where: { id }, // FIXME(review): not scoped to userId — any authenticated user can mark another user's alert read (IDOR)
+ data: { isRead: true, readAt: new Date() },
+ });
+ return reply.send({ alert });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Failed to mark alert as read';
+ return reply.code(422).send({ error: message });
+ }
+ });
+
+ // POST /darkwatch/webhook - External webhook receiver
+ fastify.post('/webhook', async (request: FastifyRequest, reply: FastifyReply) => {
+    const body = request.body as Record<string, unknown>;
+
+ const source = typeof body.source === 'string' ? body.source : '';
+ const identifier = typeof body.identifier === 'string' ? body.identifier : '';
+ const identifierType = typeof body.identifierType === 'string' ? body.identifierType : '';
+    const metadata = body.metadata as Record<string, unknown> | undefined;
+ const timestamp = typeof body.timestamp === 'string' ? body.timestamp : new Date().toISOString();
+
+ if (!source || !identifier || !identifierType) {
+ return reply.code(400).send({
+ error: 'source, identifier, and identifierType are required',
+ });
+ }
+
+ const signature = request.headers['x-webhook-signature'] as string | undefined;
+ const webhookTimestamp = request.headers['x-webhook-timestamp'] as string | undefined;
+
+ if (!signature || !webhookTimestamp) {
+ return reply.code(401).send({ error: 'Webhook signature and timestamp required' });
+ }
+
+ const valid = await webhookService.verifyWebhookSignature(
+      JSON.stringify(body), // NOTE(review): signature is computed over re-serialized JSON; if the provider signs the raw bytes, verify against the raw request body instead
+ signature,
+ webhookTimestamp
+ );
+ if (!valid) {
+ return reply.code(401).send({ error: 'Invalid webhook signature' });
+ }
+
+ try {
+ const result = await webhookService.processExternalWebhook({
+ source,
+ identifier,
+ identifierType,
+ metadata,
+ timestamp,
+ });
+
+ return reply.send({
+ processed: true,
+ exposuresCreated: result.exposuresCreated,
+ alertsCreated: result.alertsCreated,
+ });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Webhook processing failed';
+ console.error('[DarkWatch:Webhook] Error:', message);
+ return reply.code(500).send({ error: 'Webhook processing failed' });
+ }
+ });
+
+ // POST /darkwatch/scheduler/init - Initialize scheduled scans for all subscriptions
+ fastify.post('/scheduler/init', async (request: FastifyRequest, reply: FastifyReply) => {
+ try {
+ const jobsEnqueued = await schedulerService.scheduleSubscriptionScans();
+ return reply.send({
+ scheduled: jobsEnqueued.length,
+ jobs: jobsEnqueued,
+ });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Scheduler init failed';
+ return reply.code(500).send({ error: message });
+ }
+ });
+
+ // POST /darkwatch/scheduler/reschedule - Reschedule all scans
+ fastify.post('/scheduler/reschedule', async (request: FastifyRequest, reply: FastifyReply) => {
+ try {
+ const jobsEnqueued = await schedulerService.rescheduleAll();
+ return reply.send({
+ rescheduled: jobsEnqueued.length,
+ jobs: jobsEnqueued,
+ });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Scheduler reschedule failed';
+ return reply.code(500).send({ error: message });
+ }
+ });
+}
diff --git a/packages/api/src/routes/index.ts b/packages/api/src/routes/index.ts
index dcec042..cfde1ed 100644
--- a/packages/api/src/routes/index.ts
+++ b/packages/api/src/routes/index.ts
@@ -1,33 +1,142 @@
-import { FastifyInstance } from "fastify";
+import { FastifyInstance, FastifyRequest, FastifyReply } from 'fastify';
+import { authMiddleware, AuthRequest } from './auth.middleware';
+import { voiceprintRoutes } from './voiceprint.routes';
+import { spamshieldRoutes } from './spamshield.routes';
+import { darkwatchRoutes } from './darkwatch.routes';
-export function darkwatchRoutes(fastify: FastifyInstance) {
- fastify.register(async (root) => {
- const watchlist = (await import("./watchlist.routes")).watchlistRoutes;
- const exposures = (await import("./exposure.routes")).exposureRoutes;
- const alerts = (await import("./alert.routes")).alertRoutes;
- const scans = (await import("./scan.routes")).scanRoutes;
- const scheduler = (await import("./scheduler.routes")).schedulerRoutes;
- const webhooks = (await import("./webhook.routes")).webhookRoutes;
+export async function routes(fastify: FastifyInstance) {
+ // Authenticated routes group
+ fastify.register(
+ async (authenticated) => {
+ // Add auth requirement
+ authenticated.addHook('onRequest', async (request: FastifyRequest, reply: FastifyReply) => {
+ await fastify.requireAuth(request as AuthRequest);
+ });
- root.register(watchlist, { prefix: "/watchlist" });
- root.register(exposures, { prefix: "/exposures" });
- root.register(alerts, { prefix: "/alerts" });
- root.register(scans, { prefix: "/scan" });
- root.register(scheduler, { prefix: "/scheduler" });
- root.register(webhooks, { prefix: "/webhooks" });
- }, { prefix: "/api/v1/darkwatch" });
-}
-
-export function voiceprintRoutes(fastify: FastifyInstance) {
- fastify.register(async (root) => {
- const voiceprint = (await import("./voiceprint.routes")).voiceprintRoutes;
- root.register(voiceprint);
- }, { prefix: "/api/v1/voiceprint" });
-}
-
-export function correlationRoutes(fastify: FastifyInstance) {
- fastify.register(async (root) => {
- const correlation = (await import("./correlation.routes")).correlationRoutes;
- root.register(correlation);
- }, { prefix: "/api/v1/correlation" });
+ // Example authenticated endpoint
+ authenticated.get('/user/me', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as AuthRequest;
+ return {
+ user: authReq.user,
+ authType: authReq.authType,
+ };
+ });
+
+ // Example service endpoint
+ authenticated.get('/services', async (request: FastifyRequest, reply: FastifyReply) => {
+ return {
+ services: [
+ {
+ name: 'user-service',
+ url: '/api/v1/services/user',
+ status: 'healthy',
+ },
+ {
+ name: 'billing-service',
+ url: '/api/v1/services/billing',
+ status: 'healthy',
+ },
+ {
+ name: 'notification-service',
+ url: '/api/v1/services/notifications',
+ status: 'healthy',
+ },
+ ],
+ };
+ });
+ },
+ { prefix: '/auth' }
+ );
+
+ // Public API routes
+ fastify.register(
+ async (publicRouter) => {
+ // Version info
+ publicRouter.get('/info', async () => {
+ return {
+ version: '1.0.0',
+ environment: process.env.NODE_ENV || 'development',
+ build: process.env.npm_package_version || 'unknown',
+ };
+ });
+
+ // API documentation
+ publicRouter.get('/docs', async () => {
+ return {
+ title: 'FrenoCorp API Gateway',
+ version: '1.0.0',
+ endpoints: {
+ public: [
+ { method: 'GET', path: '/', description: 'Root endpoint' },
+ { method: 'GET', path: '/health', description: 'Health check' },
+ { method: 'GET', path: '/api/v1/info', description: 'API version info' },
+ { method: 'GET', path: '/api/v1/docs', description: 'API documentation' },
+ ],
+ authenticated: [
+ { method: 'GET', path: '/api/v1/auth/user/me', description: 'Get current user' },
+ { method: 'GET', path: '/api/v1/auth/services', description: 'List available services' },
+ ],
+ },
+ };
+ });
+ },
+ { prefix: '/api/v1' }
+ );
+
+ // Service proxy placeholder (for future microservice routing)
+ fastify.register(
+ async (services) => {
+ services.get('/services/user', async (request, reply) => {
+ // In production, proxy to actual user service
+ return {
+ service: 'user-service',
+ message: 'User service endpoint',
+ timestamp: new Date().toISOString(),
+ };
+ });
+
+ services.get('/services/billing', async (request, reply) => {
+ // In production, proxy to actual billing service
+ return {
+ service: 'billing-service',
+ message: 'Billing service endpoint',
+ timestamp: new Date().toISOString(),
+ };
+ });
+
+ services.get('/services/notifications', async (request, reply) => {
+ // In production, proxy to actual notification service
+ return {
+ service: 'notification-service',
+ message: 'Notification service endpoint',
+ timestamp: new Date().toISOString(),
+ };
+ });
+ },
+ { prefix: '/api/v1/services' }
+ );
+
+ // VoicePrint service routes
+ fastify.register(
+ async (voiceprintRouter) => {
+ await voiceprintRoutes(voiceprintRouter);
+ },
+ { prefix: '/voiceprint' }
+ );
+
+ // SpamShield service routes
+ fastify.register(
+ async (spamshieldRouter) => {
+ await spamshieldRoutes(spamshieldRouter);
+ },
+ { prefix: '/spamshield' }
+ );
+
+ // DarkWatch service routes
+ fastify.register(
+ async (darkwatchRouter) => {
+ await darkwatchRoutes(darkwatchRouter);
+ },
+ { prefix: '/darkwatch' }
+ );
}
diff --git a/packages/api/src/routes/spamshield.routes.ts b/packages/api/src/routes/spamshield.routes.ts
new file mode 100644
index 0000000..6534852
--- /dev/null
+++ b/packages/api/src/routes/spamshield.routes.ts
@@ -0,0 +1,252 @@
+import { FastifyInstance, FastifyRequest, FastifyReply } from 'fastify';
+import {
+ numberReputationService,
+ smsClassifierService,
+ callAnalysisService,
+ spamFeedbackService,
+} from '../services/spamshield';
+import { ErrorHandler, SpamErrorCode } from '../services/spamshield/spamshield.error-handler';
+
+export async function spamshieldRoutes(fastify: FastifyInstance) {
+ // Classify SMS text
+ fastify.post('/sms/classify', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ ErrorHandler.send(reply, SpamErrorCode.UNAUTHORIZED, 'User ID required', { status: 401 });
+ return;
+ }
+
+ const body = request.body as { text: string };
+
+ const textValidation = ErrorHandler.validateRequiredField(body.text, 'text');
+ if (!textValidation.isValid && textValidation.error) {
+ ErrorHandler.send(reply, textValidation.error.code, textValidation.error.message, {
+ field: textValidation.error.field,
+ status: 400,
+ });
+ return;
+ }
+
+ try {
+ const result = await smsClassifierService.classify(body.text);
+ return reply.send({
+ classification: {
+ isSpam: result.isSpam,
+ confidence: result.confidence,
+ spamFeatures: result.spamFeatures,
+ },
+ });
+ } catch (error) {
+ ErrorHandler.send(reply, SpamErrorCode.CLASSIFICATION_FAILED, 'Classification failed', {
+ status: 422,
+ });
+ }
+ });
+
+ // Check number reputation
+ fastify.post('/number/reputation', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ ErrorHandler.send(reply, SpamErrorCode.UNAUTHORIZED, 'User ID required', { status: 401 });
+ return;
+ }
+
+ const body = request.body as { phoneNumber: string };
+
+ const phoneValidation = ErrorHandler.validateRequiredField(body.phoneNumber, 'phoneNumber');
+ if (!phoneValidation.isValid && phoneValidation.error) {
+ ErrorHandler.send(reply, phoneValidation.error.code, phoneValidation.error.message, {
+ field: phoneValidation.error.field,
+ status: 400,
+ });
+ return;
+ }
+
+ try {
+ const result = await numberReputationService.checkReputation(body.phoneNumber);
+ return reply.send({
+ reputation: {
+ isSpam: result.isSpam,
+ confidence: result.confidence,
+ spamType: result.spamType,
+ reportCount: result.reportCount,
+ },
+ });
+ } catch (error) {
+ ErrorHandler.send(reply, SpamErrorCode.REPUTATION_CHECK_FAILED, 'Reputation check failed', {
+ status: 422,
+ });
+ }
+ });
+
+ // Analyze incoming call
+ fastify.post('/call/analyze', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ ErrorHandler.send(reply, SpamErrorCode.UNAUTHORIZED, 'User ID required', { status: 401 });
+ return;
+ }
+
+ const body = request.body as {
+ phoneNumber: string;
+ duration?: number;
+ callTime: string;
+ isVoip?: boolean;
+ };
+
+ const phoneValidation = ErrorHandler.validateRequiredField(body.phoneNumber, 'phoneNumber');
+ const callTimeValidation = ErrorHandler.validateRequiredField(body.callTime, 'callTime');
+
+ if (!phoneValidation.isValid && phoneValidation.error) {
+ ErrorHandler.send(reply, phoneValidation.error.code, phoneValidation.error.message, {
+ field: phoneValidation.error.field,
+ status: 400,
+ });
+ return;
+ }
+
+ if (!callTimeValidation.isValid && callTimeValidation.error) {
+ ErrorHandler.send(reply, callTimeValidation.error.code, callTimeValidation.error.message, {
+ field: callTimeValidation.error.field,
+ status: 400,
+ });
+ return;
+ }
+
+ try {
+ const result = await callAnalysisService.analyzeCall({
+ phoneNumber: body.phoneNumber,
+ duration: body.duration,
+ callTime: new Date(body.callTime),
+ isVoip: body.isVoip,
+ });
+ return reply.send({
+ analysis: {
+ decision: result.decision,
+ confidence: result.confidence,
+ reasons: result.reasons,
+ },
+ });
+ } catch (error) {
+ ErrorHandler.send(reply, SpamErrorCode.ANALYSIS_FAILED, 'Call analysis failed', {
+ status: 422,
+ });
+ }
+ });
+
+ // Record spam feedback
+ fastify.post('/feedback', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ ErrorHandler.send(reply, SpamErrorCode.UNAUTHORIZED, 'User ID required', { status: 401 });
+ return;
+ }
+
+ const body = request.body as {
+ phoneNumber: string;
+ isSpam: boolean;
+ confidence?: number;
+      metadata?: Record<string, unknown>;
+ };
+
+ const phoneValidation = ErrorHandler.validateRequiredField(body.phoneNumber, 'phoneNumber');
+ if (!phoneValidation.isValid && phoneValidation.error) {
+ ErrorHandler.send(reply, phoneValidation.error.code, phoneValidation.error.message, {
+ field: phoneValidation.error.field,
+ status: 400,
+ });
+ return;
+ }
+
+ const isSpamValidation = ErrorHandler.validateBooleanField(body.isSpam, 'isSpam');
+ if (!isSpamValidation.isValid && isSpamValidation.error) {
+ ErrorHandler.send(reply, isSpamValidation.error.code, isSpamValidation.error.message, {
+ field: isSpamValidation.error.field,
+ status: 400,
+ });
+ return;
+ }
+
+ try {
+ const feedback = await spamFeedbackService.recordFeedback(
+ userId,
+ body.phoneNumber,
+ body.isSpam,
+ body.confidence,
+ body.metadata
+ );
+ return reply.code(201).send({
+ feedback: {
+ id: feedback.id,
+ phoneNumber: feedback.phoneNumber,
+ isSpam: feedback.isSpam,
+ createdAt: feedback.createdAt,
+ },
+ });
+ } catch (error) {
+ ErrorHandler.send(reply, SpamErrorCode.FEEDBACK_RECORD_FAILED, 'Feedback recording failed', {
+ status: 422,
+ });
+ }
+ });
+
+ // Get spam history
+ fastify.get('/history', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ ErrorHandler.send(reply, SpamErrorCode.UNAUTHORIZED, 'User ID required', { status: 401 });
+ return;
+ }
+
+ const query = request.query as {
+ limit?: string;
+ isSpam?: string;
+ startDate?: string;
+ };
+
+ const results = await spamFeedbackService.getSpamHistory(userId, {
+ limit: query.limit ? parseInt(query.limit, 10) : undefined,
+ isSpam: query.isSpam !== undefined ? query.isSpam === 'true' : undefined,
+ startDate: query.startDate ? new Date(query.startDate) : undefined,
+ });
+
+ return reply.send({
+ history: results.map((r) => ({
+ id: r.id,
+ phoneNumber: r.phoneNumber,
+ isSpam: r.isSpam,
+ createdAt: r.createdAt,
+ })),
+ });
+ });
+
+ // Get spam statistics
+ fastify.get('/statistics', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ ErrorHandler.send(reply, SpamErrorCode.UNAUTHORIZED, 'User ID required', { status: 401 });
+ return;
+ }
+
+ try {
+ const stats = await spamFeedbackService.getStatistics(userId);
+ return reply.send({ statistics: stats });
+ } catch (error) {
+ ErrorHandler.send(reply, SpamErrorCode.ANALYSIS_FAILED, 'Statistics retrieval failed', {
+ status: 422,
+ });
+ }
+ });
+}
diff --git a/packages/api/src/routes/voiceprint.routes.ts b/packages/api/src/routes/voiceprint.routes.ts
index cc9077e..dcdd483 100644
--- a/packages/api/src/routes/voiceprint.routes.ts
+++ b/packages/api/src/routes/voiceprint.routes.ts
@@ -1,94 +1,257 @@
-import { FastifyInstance } from "fastify";
-import { VoiceEnrollmentService } from "@shieldai/voiceprint";
-import { AnalysisService } from "@shieldai/voiceprint";
-import { BatchAnalysisService } from "@shieldai/voiceprint";
+import { FastifyInstance, FastifyRequest, FastifyReply } from 'fastify';
+import {
+ voiceEnrollmentService,
+ analysisService,
+ batchAnalysisService,
+ voicePrintEnv,
+ AnalysisJobStatus,
+} from '../services/voiceprint';
-export function voiceprintRoutes(fastify: FastifyInstance) {
- const enrollmentService = new VoiceEnrollmentService();
- const analysisService = new AnalysisService();
- const batchService = new BatchAnalysisService();
+export async function voiceprintRoutes(fastify: FastifyInstance) {
+ // Enroll a new voice profile
+ fastify.post('/enroll', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
- fastify.post("/enroll", async (request, reply) => {
- const userId = (request.user as { id: string })?.id;
- if (!userId) return reply.code(401).send({ error: "User not authenticated" });
-
- const body = request.body as { label: string; audio: string; sampleRate?: number };
-
- const audioBuffer = Buffer.from(body.audio, "base64");
- const enrollment = await enrollmentService.enroll(
- { label: body.label, audioBuffer, sampleRate: body.sampleRate },
- userId
- );
- return reply.code(201).send(enrollment);
- });
-
- fastify.get("/enrollments", async (request, reply) => {
- const userId = (request.user as { id: string })?.id;
- if (!userId) return reply.code(401).send({ error: "User not authenticated" });
-
- const enrollments = await enrollmentService.listEnrollments(userId);
- return reply.send(enrollments);
- });
-
- fastify.delete("/enrollments/:id", async (request, reply) => {
- const userId = (request.user as { id: string })?.id;
- if (!userId) return reply.code(401).send({ error: "User not authenticated" });
-
- const enrollmentId = (request.params as { id: string }).id;
- const result = await enrollmentService.removeEnrollment(userId, enrollmentId);
- return reply.send({ removed: result });
- });
-
- fastify.post("/analyze", async (request, reply) => {
- const userId = (request.user as { id: string })?.id;
- if (!userId) return reply.code(401).send({ error: "User not authenticated" });
-
- const body = request.body as { audio: string; sampleRate?: number; analysisType?: string };
- const audioBuffer = Buffer.from(body.audio, "base64");
-
- const result = await analysisService.analyze(
- { audioBuffer, sampleRate: body.sampleRate, analysisType: body.analysisType },
- userId
- );
- return reply.code(201).send(result);
- });
-
- fastify.get("/results/:id", async (request, reply) => {
- const jobId = (request.params as { id: string }).id;
- const result = await analysisService.getResult(jobId);
-
- if (!result) return reply.code(404).send({ error: "Analysis result not found" });
- return reply.send(result);
- });
-
- fastify.get("/results", async (request, reply) => {
- const userId = (request.user as { id: string })?.id;
- if (!userId) return reply.code(401).send({ error: "User not authenticated" });
-
- const limit = parseInt((request.query as { limit?: string }).limit || "20", 10);
- const results = await analysisService.getUserResults(userId, limit);
- return reply.send(results);
- });
-
- fastify.post("/batch", async (request, reply) => {
- const userId = (request.user as { id: string })?.id;
- if (!userId) return reply.code(401).send({ error: "User not authenticated" });
+ if (!userId) {
+ return reply.code(401).send({ error: 'User ID required' });
+ }
const body = request.body as {
- files: Array<{ name: string; audio: string; sampleRate?: number }>;
- analysisType?: string;
+ name: string;
+ audio: Buffer;
};
- const audioBuffers = body.files.map((f) => ({
- name: f.name,
- buffer: Buffer.from(f.audio, "base64"),
- sampleRate: f.sampleRate,
- }));
+ if (!body.name || !body.audio) {
+ return reply.code(400).send({ error: 'name and audio are required' });
+ }
- const result = await batchService.analyzeBatch(
- { audioBuffers, analysisType: body.analysisType },
- userId
- );
- return reply.code(201).send(result);
+ try {
+ const enrollment = await voiceEnrollmentService.enroll(
+ userId,
+ body.name,
+ body.audio
+ );
+ return reply.code(201).send({
+ enrollment: {
+ id: enrollment.id,
+ name: enrollment.name,
+ isActive: enrollment.isActive,
+ createdAt: enrollment.createdAt,
+ },
+ });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Enrollment failed';
+ return reply.code(422).send({ error: message });
+ }
+ });
+
+ // List user's voice enrollments
+ fastify.get('/enrollments', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ return reply.code(401).send({ error: 'User ID required' });
+ }
+
+ const isActive = request.query as { isActive?: string };
+ const limit = request.query as { limit?: string };
+ const offset = request.query as { offset?: string };
+
+ const enrollments = await voiceEnrollmentService.listEnrollments(userId, {
+ isActive: isActive.isActive !== undefined
+ ? isActive.isActive === 'true'
+ : undefined,
+ limit: limit.limit ? parseInt(limit.limit, 10) : undefined,
+ offset: offset.offset ? parseInt(offset.offset, 10) : undefined,
+ });
+
+ return reply.send({
+ enrollments: enrollments.map((e) => ({
+ id: e.id,
+ name: e.name,
+ isActive: e.isActive,
+ createdAt: e.createdAt,
+ })),
+ });
+ });
+
+ // Remove an enrollment
+ fastify.delete('/enrollments/:id', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ return reply.code(401).send({ error: 'User ID required' });
+ }
+
+ const enrollmentId = (request.params as { id: string }).id;
+
+ try {
+ const enrollment = await voiceEnrollmentService.removeEnrollment(
+ enrollmentId,
+ userId
+ );
+ return reply.send({
+ enrollment: {
+ id: enrollment.id,
+ name: enrollment.name,
+ isActive: enrollment.isActive,
+ },
+ });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Removal failed';
+ return reply.code(404).send({ error: message });
+ }
+ });
+
+ // Analyze a single audio file
+ fastify.post('/analyze', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ return reply.code(401).send({ error: 'User ID required' });
+ }
+
+ const body = request.body as {
+ audio: Buffer;
+ enrollmentId?: string;
+ audioUrl?: string;
+ };
+
+ if (!body.audio) {
+ return reply.code(400).send({ error: 'audio is required' });
+ }
+
+ try {
+ const result = await analysisService.analyze(userId, body.audio, {
+ enrollmentId: body.enrollmentId,
+ audioUrl: body.audioUrl,
+ });
+ return reply.code(201).send({
+ analysis: {
+ id: result.id,
+ isSynthetic: result.isSynthetic,
+ confidence: result.confidence,
+ analysisResult: result.analysisResult,
+ createdAt: result.createdAt,
+ },
+ });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Analysis failed';
+ return reply.code(422).send({ error: message });
+ }
+ });
+
+ // Get analysis result by ID
+ fastify.get('/results/:id', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ return reply.code(401).send({ error: 'User ID required' });
+ }
+
+ const analysisId = (request.params as { id: string }).id;
+ const result = await analysisService.getResult(analysisId, userId);
+
+ if (!result) {
+ return reply.code(404).send({ error: 'Analysis not found' });
+ }
+
+ return reply.send({
+ analysis: {
+ id: result.id,
+ isSynthetic: result.isSynthetic,
+ confidence: result.confidence,
+ analysisResult: result.analysisResult,
+ createdAt: result.createdAt,
+ },
+ });
+ });
+
+ // Get analysis history
+ fastify.get('/history', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ return reply.code(401).send({ error: 'User ID required' });
+ }
+
+ const query = request.query as {
+ limit?: string;
+ offset?: string;
+ isSynthetic?: string;
+ };
+
+ const results = await analysisService.getHistory(userId, {
+ limit: query.limit ? parseInt(query.limit, 10) : undefined,
+ offset: query.offset ? parseInt(query.offset, 10) : undefined,
+ isSynthetic: query.isSynthetic !== undefined
+ ? query.isSynthetic === 'true'
+ : undefined,
+ });
+
+ return reply.send({
+ analyses: results.map((r) => ({
+ id: r.id,
+ isSynthetic: r.isSynthetic,
+ confidence: r.confidence,
+ createdAt: r.createdAt,
+ })),
+ });
+ });
+
+ // Batch analyze multiple audio files
+ fastify.post('/batch', async (request: FastifyRequest, reply: FastifyReply) => {
+ const authReq = request as FastifyRequest & { user?: { id: string } };
+ const userId = authReq.user?.id;
+
+ if (!userId) {
+ return reply.code(401).send({ error: 'User ID required' });
+ }
+
+ const body = request.body as {
+ files: Array<{
+ name: string;
+ audio: Buffer;
+ audioUrl?: string;
+ }>;
+ enrollmentId?: string;
+ };
+
+ if (!body.files || body.files.length === 0) {
+ return reply.code(400).send({ error: 'files array is required' });
+ }
+
+ try {
+ const result = await batchAnalysisService.analyzeBatch(
+ userId,
+ body.files.map((f) => ({
+ name: f.name,
+ buffer: f.audio,
+ audioUrl: f.audioUrl,
+ })),
+ {
+ enrollmentId: body.enrollmentId,
+ }
+ );
+
+ return reply.code(201).send({
+ jobId: result.jobId,
+ results: result.results.map((r) => ({
+ id: r.id,
+ isSynthetic: r.isSynthetic,
+ confidence: r.confidence,
+ })),
+ summary: result.summary,
+ });
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Batch analysis failed';
+ return reply.code(422).send({ error: message });
+ }
});
}
diff --git a/packages/jobs/src/darkwatch.jobs.ts b/packages/jobs/src/darkwatch.jobs.ts
new file mode 100644
index 0000000..1b7c4f2
--- /dev/null
+++ b/packages/jobs/src/darkwatch.jobs.ts
@@ -0,0 +1,173 @@
+import { prisma, SubscriptionTier } from '@shieldsai/shared-db';
+import { Queue, Worker, Job } from 'bullmq';
+import { Redis } from 'ioredis';
+import { tierConfig, getTierFeatures } from '@shieldsai/shared-billing';
+import { mixpanelService, EventType } from '@shieldsai/shared-analytics';
+
+const redisHost = process.env.REDIS_HOST || 'localhost';
+const redisPort = parseInt(process.env.REDIS_PORT || '6379', 10);
+
+const connection = new Redis({
+ host: redisHost,
+ port: redisPort,
+ retryStrategy: (times: number) => Math.min(times * 50, 2000),
+});
+
+const QUEUE_CONFIG = {
+ darkwatchScan: {
+ name: 'darkwatch-scan',
+ concurrency: parseInt(process.env.DARKWATCH_CONCURRENCY || '5', 10),
+ defaultJobTimeout: parseInt(process.env.DARKWATCH_JOB_TIMEOUT || '120000', 10),
+ maxAttempts: parseInt(process.env.DARKWATCH_MAX_ATTEMPTS || '3', 10),
+ },
+};
+
+export const darkwatchScanQueue = new Queue(
+ QUEUE_CONFIG.darkwatchScan.name,
+ { connection }
+);
+
+async function processDarkwatchScan(
+ job: Job<{
+ subscriptionId: string;
+ tier: string;
+ scanType: 'scheduled' | 'on-demand' | 'realtime';
+ sourceData?: Record<string, unknown>;
+ }>
+) {
+ const { subscriptionId, tier, scanType, sourceData } = job.data;
+
+ const { scanService } = await import(
+ '../../../apps/api/src/services/darkwatch/scan.service'
+ );
+ const { alertPipeline } = await import(
+ '../../../apps/api/src/services/darkwatch/alert.pipeline'
+ );
+
+ job.updateProgress(10);
+ console.log(
+ `[DarkWatch:Scan] Starting ${scanType} scan for subscription ${subscriptionId} (tier: ${tier})`
+ );
+
+ try {
+ const subscription = await prisma.subscription.findUnique({
+ where: { id: subscriptionId },
+ select: { userId: true, tier: true },
+ });
+
+ if (!subscription) {
+ job.updateProgress(100);
+ return { status: 'skipped', reason: 'subscription_not_found' };
+ }
+
+ await mixpanelService.track(
+ EventType.DARK_WEB_SCAN_STARTED,
+ subscription.userId,
+ {
+ scanType,
+ subscriptionTier: subscription.tier,
+ }
+ );
+
+ job.updateProgress(25);
+
+ const watchlistItems = await scanService.getWatchlistItems(subscriptionId);
+
+ if (watchlistItems.length === 0) {
+ job.updateProgress(100);
+ return { status: 'completed', exposuresCreated: 0, exposuresUpdated: 0 };
+ }
+
+ job.updateProgress(50);
+
+ const { exposuresCreated, exposuresUpdated } =
+ await scanService.processSubscriptionScan(subscriptionId, watchlistItems);
+
+ job.updateProgress(80);
+
+ const newExposureIds = await prisma.exposure.findMany({
+ where: {
+ subscriptionId,
+ isFirstTime: true,
+ detectedAt: { gte: new Date(Date.now() - 5 * 60 * 1000) },
+ },
+ select: { id: true },
+ });
+
+ if (newExposureIds.length > 0) {
+ await alertPipeline.processNewExposures(newExposureIds.map((e) => e.id));
+ }
+
+ await alertPipeline.dispatchScanCompleteAlert(
+ subscriptionId,
+ subscription.userId,
+ exposuresCreated
+ );
+
+ job.updateProgress(95);
+
+ await mixpanelService.track(
+ EventType.DARK_WEB_SCAN_COMPLETED,
+ subscription.userId,
+ {
+ scanType,
+ subscriptionTier: subscription.tier,
+ exposuresCreated,
+ exposuresUpdated,
+ watchlistItemsScanned: watchlistItems.length,
+ }
+ );
+
+ job.updateProgress(100);
+
+ return {
+ status: 'completed',
+ exposuresCreated,
+ exposuresUpdated,
+ watchlistItemsScanned: watchlistItems.length,
+ };
+ } catch (error) {
+ const message = error instanceof Error ? error.message : 'Scan failed';
+ console.error(`[DarkWatch:Scan] Job ${job.id} failed:`, message);
+ job.updateProgress(100);
+ throw new Error(message);
+ }
+}
+
+export const darkwatchScanWorker = new Worker(
+ QUEUE_CONFIG.darkwatchScan.name,
+ processDarkwatchScan,
+ {
+ connection,
+ concurrency: QUEUE_CONFIG.darkwatchScan.concurrency,
+ limiter: {
+ max: 20,
+ duration: 1000,
+ },
+ removeOnComplete: {
+ age: 7 * 24 * 60 * 60,
+ count: 1000,
+ },
+ removeOnFail: {
+ age: 30 * 24 * 60 * 60,
+ count: 100,
+ },
+ }
+);
+
+darkwatchScanWorker.on('completed', (job, result) => {
+ console.log(`[DarkWatch:Scan] Job ${job.id} completed:`, result);
+});
+
+darkwatchScanWorker.on('failed', (job, err) => {
+ console.error(`[DarkWatch:Scan] Job ${job?.id} failed:`, err.message);
+});
+
+darkwatchScanWorker.on('error', (err) => {
+ console.error('[DarkWatch:Scan] Worker error:', err.message);
+});
+
+export default {
+ darkwatchScanQueue,
+ darkwatchScanWorker,
+};
diff --git a/packages/shared-analytics/package.json b/packages/shared-analytics/package.json
new file mode 100644
index 0000000..d2352c6
--- /dev/null
+++ b/packages/shared-analytics/package.json
@@ -0,0 +1,19 @@
+{
+ "name": "@shieldsai/shared-analytics",
+ "version": "0.1.0",
+ "private": true,
+ "type": "module",
+ "main": "src/index.ts",
+ "types": "src/index.ts",
+ "scripts": {
+ "lint": "eslint src/"
+ },
+ "dependencies": {
+ "@segment/analytics-node": "^1.0.0",
+ "googleapis": "^128.0.0",
+ "zod": "^4.3.6"
+ },
+ "devDependencies": {
+ "typescript": "^5.3.3"
+ }
+}
diff --git a/packages/shared-analytics/src/config/analytics.config.ts b/packages/shared-analytics/src/config/analytics.config.ts
new file mode 100644
index 0000000..06ca140
--- /dev/null
+++ b/packages/shared-analytics/src/config/analytics.config.ts
@@ -0,0 +1,132 @@
+import { z } from 'zod';
+
+// Environment variables for analytics
+const envSchema = z.object({
+ MIXPANEL_TOKEN: z.string(),
+ MIXPANEL_API_SECRET: z.string().optional(),
+ GA4_MEASUREMENT_ID: z.string(),
+ GA4_API_SECRET: z.string().optional(),
+ STRIPE_WEBHOOK_SECRET: z.string(),
+ ANALYTICS_ENV: z.enum(['development', 'production', 'staging']).default('development'),
+});
+
+export const analyticsEnv = envSchema.parse({
+ MIXPANEL_TOKEN: process.env.MIXPANEL_TOKEN,
+ MIXPANEL_API_SECRET: process.env.MIXPANEL_API_SECRET,
+ GA4_MEASUREMENT_ID: process.env.GA4_MEASUREMENT_ID,
+ GA4_API_SECRET: process.env.GA4_API_SECRET,
+ STRIPE_WEBHOOK_SECRET: process.env.STRIPE_WEBHOOK_SECRET,
+ ANALYTICS_ENV: process.env.ANALYTICS_ENV,
+});
+
+// Event taxonomy
+export enum EventType {
+ // User events
+ USER_SIGNED_UP = 'user_signed_up',
+ USER_LOGGED_IN = 'user_logged_in',
+ USER_LOGGED_OUT = 'user_logged_out',
+ USER_UPGRADED = 'user_upgraded',
+ USER_DOWNGRADED = 'user_downgraded',
+
+ // Subscription events
+ SUBSCRIPTION_CREATED = 'subscription_created',
+ SUBSCRIPTION_UPDATED = 'subscription_updated',
+ SUBSCRIPTION_CANCELLED = 'subscription_cancelled',
+ SUBSCRIPTION_RENEWED = 'subscription_renewed',
+
+ // DarkWatch events
+ DARK_WEB_SCAN_STARTED = 'dark_web_scan_started',
+ DARK_WEB_SCAN_COMPLETED = 'dark_web_scan_completed',
+ EXPOSURE_DETECTED = 'exposure_detected',
+ EXPOSURE_RESOLVED = 'exposure_resolved',
+ WATCHLIST_ITEM_ADDED = 'watchlist_item_added',
+ WATCHLIST_ITEM_REMOVED = 'watchlist_item_removed',
+
+ // VoicePrint events
+ VOICE_ENROLLED = 'voice_enrolled',
+ VOICE_ANALYZED = 'voice_analyzed',
+ VOICE_MATCH_FOUND = 'voice_match_found',
+ SYNTHETIC_VOICE_DETECTED = 'synthetic_voice_detected',
+
+ // SpamShield events
+ CALL_ANALYZED = 'call_analyzed',
+ SMS_ANALYZED = 'sms_analyzed',
+ SPAM_BLOCKED = 'spam_blocked',
+ SPAM_FLAGGED = 'spam_flagged',
+ SPAM_FEEDBACK_SUBMITTED = 'spam_feedback_submitted',
+
+ // KPI events
+ MRR_UPDATED = 'mrr_updated',
+ CONVERSION_OCCURRED = 'conversion_occurred',
+ CHURN_OCCURRED = 'churn_occurred',
+ REFERRAL_SENT = 'referral_sent',
+ REFERRAL_CONVERTED = 'referral_converted',
+}
+
+// Event properties schema
+export const eventPropertiesSchema = z.object({
+ userId: z.string().optional(),
+ sessionId: z.string().optional(),
+ timestamp: z.date().optional(),
+ platform: z.enum(['web', 'mobile', 'desktop', 'api']).optional(),
+ version: z.string().optional(),
+ environment: z.string().optional(),
+});
+
+// KPI definitions
+export const kpiDefinitions = {
+ mau: {
+ name: 'Monthly Active Users',
+ description: 'Unique users who performed an action in the last 30 days',
+ calculation: 'COUNT(DISTINCT userId) WHERE timestamp > NOW() - INTERVAL 30 DAYS',
+ },
+ payingUsers: {
+ name: 'Paying Users',
+ description: 'Users with active subscriptions',
+ calculation: 'COUNT(DISTINCT userId) WHERE subscription.status = "active"',
+ },
+ mrr: {
+ name: 'Monthly Recurring Revenue',
+ description: 'Total monthly subscription revenue',
+ calculation: 'SUM(subscription.amount) WHERE subscription.status = "active"',
+ },
+ conversionRate: {
+ name: 'Conversion Rate',
+ description: 'Percentage of free users who upgrade to paid',
+ calculation: 'COUNT(upgrade events) / COUNT(signup events)',
+ },
+ churn: {
+ name: 'Churn Rate',
+ description: 'Percentage of paying users who cancel',
+ calculation: 'COUNT(cancel events) / COUNT(active subscriptions)',
+ },
+ cac: {
+ name: 'Customer Acquisition Cost',
+ description: 'Average cost to acquire a new paying user',
+ calculation: 'Total marketing spend / COUNT(new paying users)',
+ },
+ ltv: {
+ name: 'Lifetime Value',
+ description: 'Average revenue per user over their lifetime',
+ calculation: 'Average subscription amount / Churn rate',
+ },
+ nps: {
+ name: 'Net Promoter Score',
+ description: 'Customer satisfaction metric (-100 to 100)',
+ calculation: '% Promoters - % Detractors',
+ },
+ viralCoefficient: {
+ name: 'Viral Coefficient',
+ description: 'Average number of referrals per user',
+ calculation: 'COUNT(referral events) / COUNT(users)',
+ },
+};
+
+// Alert thresholds
+export const alertThresholds = {
+ churn: { warning: 0.05, critical: 0.10 },
+ conversionRate: { warning: 0.02, critical: 0.01 },
+ mrr: { warning: 0.90, critical: 0.80 }, // Percentage of target
+ nps: { warning: 50, critical: 40 },
+ viralCoefficient: { warning: 0.4, critical: 0.3 },
+};
diff --git a/packages/shared-analytics/src/index.ts b/packages/shared-analytics/src/index.ts
new file mode 100644
index 0000000..c51357f
--- /dev/null
+++ b/packages/shared-analytics/src/index.ts
@@ -0,0 +1,18 @@
+// Config
+export {
+ analyticsEnv,
+ EventType,
+ eventPropertiesSchema,
+ kpiDefinitions,
+ alertThresholds,
+} from './config/analytics.config';
+
+// Services
+export {
+ MixpanelService,
+ mixpanelService,
+} from './services/mixpanel.service';
+export {
+ GA4Service,
+ ga4Service,
+} from './services/ga4.service';
diff --git a/packages/shared-analytics/src/services/ga4.service.ts b/packages/shared-analytics/src/services/ga4.service.ts
new file mode 100644
index 0000000..f8ef9e4
--- /dev/null
+++ b/packages/shared-analytics/src/services/ga4.service.ts
@@ -0,0 +1,104 @@
+import { google } from 'googleapis';
+import { analyticsEnv, EventType } from '../config/analytics.config';
+
+// GA4 service
+export class GA4Service {
+ private auth: any;
+
+ constructor() {
+ this.auth = google.auth.fromAPIKey(analyticsEnv.GA4_API_SECRET || 'placeholder');
+ }
+
+ /**
+ * Initialize GA4 client
+ */
+ async initialize(): Promise<void> {
+ // TODO: Initialize GA4 client with measurement ID
+ console.log('GA4 client initialized');
+ }
+
+ /**
+ * Send event to GA4
+ */
+ async sendEvent(
+ eventName: string,
+ params: {
+ client_id: string;
+ [key: string]: any;
+ }
+ ): Promise<void> {
+ // TODO: Implement GA4 event tracking
+ // const measurementId = analyticsEnv.GA4_MEASUREMENT_ID;
+ // await fetch(`https://www.google-analytics.com/mp/collect?measurement_id=${measurementId}&api_secret=${analyticsEnv.GA4_API_SECRET}`, {
+ // method: 'POST',
+ // body: JSON.stringify({
+ // events: [{ name: eventName, params }],
+ // }),
+ // });
+
+ console.log('GA4 event:', eventName, params);
+ }
+
+ /**
+ * Track page view
+ */
+ async trackPageView(clientId: string, path: string, title?: string): Promise<void> {
+ await this.sendEvent('page_view', {
+ client_id: clientId,
+ page_path: path,
+ page_title: title,
+ });
+ }
+
+ /**
+ * Track e-commerce purchase
+ */
+ async trackPurchase(
+ clientId: string,
+ transactionId: string,
+ value: number,
+ currency: string,
+ items: Array<{ name: string; price: number; quantity: number }>
+ ): Promise<void> {
+ await this.sendEvent('purchase', {
+ client_id: clientId,
+ transaction_id: transactionId,
+ value,
+ currency,
+ items,
+ });
+ }
+
+ /**
+ * Track conversion
+ */
+ async trackConversion(
+ clientId: string,
+ conversionName: string,
+ metadata?: Record<string, unknown>
+ ): Promise<void> {
+ await this.sendEvent('conversion', {
+ client_id: clientId,
+ conversion_name: conversionName,
+ ...metadata,
+ });
+ }
+
+ /**
+ * Get analytics data (for dashboards)
+ */
+ async getMetrics(
+ dateRange: { startDate: string; endDate: string },
+ metrics: string[],
+ dimensions?: string[]
+ ): Promise<{ rows: unknown[]; totals: unknown[] }> {
+ // TODO: Implement GA4 Analytics Data API
+ return {
+ rows: [],
+ totals: [],
+ };
+ }
+}
+
+// Export instance
+export const ga4Service = new GA4Service();
diff --git a/packages/shared-analytics/src/services/mixpanel.service.ts b/packages/shared-analytics/src/services/mixpanel.service.ts
new file mode 100644
index 0000000..4ca2102
--- /dev/null
+++ b/packages/shared-analytics/src/services/mixpanel.service.ts
@@ -0,0 +1,117 @@
+import { Analytics } from '@segment/analytics-node';
+import { analyticsEnv, EventType, eventPropertiesSchema } from '../config/analytics.config';
+import { hashPhoneNumber } from '../utils/phone-hash';
+
+// Mixpanel service
+export class MixpanelService {
+ private client: Analytics;
+
+ constructor() {
+ this.client = new Analytics({
+ writeKey: analyticsEnv.MIXPANEL_TOKEN,
+ });
+ }
+
+ /**
+ * Track an event in Mixpanel
+ */
+ async track(
+ event: EventType,
+ distinctId: string,
+ properties?: Record<string, unknown>
+ ): Promise<void> {
+ const validatedProperties = eventPropertiesSchema.parse(properties ?? {});
+
+ this.client.track({
+ event,
+ distinctId,
+ properties: {
+ ...validatedProperties,
+ ...properties,
+ },
+ });
+ }
+
+ /**
+ * Identify a user
+ */
+ async identify(userId: string, traits?: Record<string, unknown>): Promise<void> {
+ this.client.identify({
+ distinctId: userId,
+ traits,
+ });
+ }
+
+ /**
+ * Group users by subscription tier
+ */
+ async group(groupId: string, groupKey: string, traits?: Record<string, unknown>): Promise<void> {
+ this.client.group({
+ groupKey,
+ groupId,
+ traits,
+ });
+ }
+
+ /**
+ * Track user sign-up
+ */
+ async userSignedUp(userId: string, plan?: string, referrer?: string): Promise<void> {
+ await this.track(EventType.USER_SIGNED_UP, userId, {
+ plan,
+ referrer,
+ timestamp: new Date(),
+ });
+ }
+
+ /**
+ * Track subscription upgrade
+ */
+ async userUpgraded(userId: string, fromTier: string, toTier: string, mrr: number): Promise<void> {
+ await this.track(EventType.USER_UPGRADED, userId, {
+ fromTier,
+ toTier,
+ mrr,
+ timestamp: new Date(),
+ });
+ }
+
+ /**
+ * Track exposure detection
+ */
+ async exposureDetected(
+ userId: string,
+ exposureType: string,
+ severity: string,
+ source: string
+ ): Promise<void> {
+ await this.track(EventType.EXPOSURE_DETECTED, userId, {
+ exposureType,
+ severity,
+ source,
+ timestamp: new Date(),
+ });
+ }
+
+ /**
+ * Track spam detection
+ */
+ async spamBlocked(userId: string, phoneNumber: string, confidence: number, method: string): Promise<void> {
+ await this.track(EventType.SPAM_BLOCKED, userId, {
+ phoneNumber: hashPhoneNumber(phoneNumber),
+ confidence,
+ method,
+ timestamp: new Date(),
+ });
+ }
+
+ /**
+ * Flush pending events
+ */
+ async flush(): Promise<void> {
+ await this.client.flush();
+ }
+}
+
+// Export instance
+export const mixpanelService = new MixpanelService();
diff --git a/packages/shared-analytics/src/utils/phone-hash.ts b/packages/shared-analytics/src/utils/phone-hash.ts
new file mode 100644
index 0000000..accb2c2
--- /dev/null
+++ b/packages/shared-analytics/src/utils/phone-hash.ts
@@ -0,0 +1,12 @@
+/**
+ * Hash a phone number for analytics purposes
+ * Uses a consistent hashing algorithm to create a deterministic hash
+ */
+export function hashPhoneNumber(phoneNumber: string): string {
+ let hash = 0;
+ for (let i = 0; i < phoneNumber.length; i++) {
+ hash = ((hash << 5) - hash) + phoneNumber.charCodeAt(i);
+ hash |= 0;
+ }
+ return `hash_${Math.abs(hash)}`;
+}
diff --git a/packages/shared-analytics/tsconfig.json b/packages/shared-analytics/tsconfig.json
new file mode 100644
index 0000000..d822f58
--- /dev/null
+++ b/packages/shared-analytics/tsconfig.json
@@ -0,0 +1,12 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "compilerOptions": {
+ "outDir": "./dist",
+ "rootDir": "./src",
+ "declaration": true,
+ "declarationMap": true,
+ "emitDeclarationOnly": true
+ },
+ "include": ["src/**/*"],
+ "exclude": ["node_modules", "dist"]
+}
diff --git a/packages/shared-auth/package.json b/packages/shared-auth/package.json
new file mode 100644
index 0000000..a45c56f
--- /dev/null
+++ b/packages/shared-auth/package.json
@@ -0,0 +1,18 @@
+{
+ "name": "@shieldsai/shared-auth",
+ "version": "0.1.0",
+ "private": true,
+ "type": "module",
+ "main": "src/index.ts",
+ "types": "src/index.ts",
+ "scripts": {
+ "lint": "eslint src/"
+ },
+ "dependencies": {
+ "next-auth": "^4.24.0",
+ "zod": "^4.3.6"
+ },
+ "devDependencies": {
+ "typescript": "^5.3.3"
+ }
+}
diff --git a/packages/shared-auth/src/config/auth.config.ts b/packages/shared-auth/src/config/auth.config.ts
new file mode 100644
index 0000000..9202c65
--- /dev/null
+++ b/packages/shared-auth/src/config/auth.config.ts
@@ -0,0 +1,114 @@
+import { NextAuthOptions } from 'next-auth';
+import CredentialsProvider from 'next-auth/providers/credentials';
+import GoogleProvider from 'next-auth/providers/google';
+import AppleProvider from 'next-auth/providers/apple';
+import { z } from 'zod';
+
+// Environment variables
+const envSchema = z.object({
+ NEXTAUTH_URL: z.string().url(),
+ NEXTAUTH_SECRET: z.string().min(32),
+ GOOGLE_CLIENT_ID: z.string(),
+ GOOGLE_CLIENT_SECRET: z.string(),
+ APPLE_CLIENT_ID: z.string(),
+ APPLE_CLIENT_SECRET: z.string(),
+ DATABASE_URL: z.string().url(),
+});
+
+export const authEnv = envSchema.parse({
+ NEXTAUTH_URL: process.env.NEXTAUTH_URL,
+ NEXTAUTH_SECRET: process.env.NEXTAUTH_SECRET,
+ GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,
+ GOOGLE_CLIENT_SECRET: process.env.GOOGLE_CLIENT_SECRET,
+ APPLE_CLIENT_ID: process.env.APPLE_CLIENT_ID,
+ APPLE_CLIENT_SECRET: process.env.APPLE_CLIENT_SECRET,
+ DATABASE_URL: process.env.DATABASE_URL,
+});
+
+// Role-based access control
+export type UserRole = 'user' | 'family_admin' | 'family_member' | 'support';
+
+export const userRoles: UserRole[] = ['user', 'family_admin', 'family_member', 'support'];
+
+// Family group types
+export type FamilyGroup = {
+ id: string;
+ name: string;
+ members: string[]; // user IDs
+ createdAt: Date;
+ updatedAt: Date;
+};
+
+// NextAuth options
+export const authOptions: NextAuthOptions = {
+ providers: [
+ CredentialsProvider({
+ name: 'Credentials',
+ credentials: {
+ email: { label: 'Email', type: 'email' },
+ password: { label: 'Password', type: 'password' },
+ },
+ async authorize(credentials) {
+ if (!credentials?.email || !credentials?.password) {
+ throw new Error('Email and password required');
+ }
+
+ // TODO: Validate against database
+ const user = {
+ id: '1',
+ email: credentials.email,
+ name: credentials.email.split('@')[0],
+ role: 'user' as UserRole,
+ };
+
+ return user;
+ },
+ }),
+ GoogleProvider({
+ clientId: authEnv.GOOGLE_CLIENT_ID,
+ clientSecret: authEnv.GOOGLE_CLIENT_SECRET,
+ }),
+ AppleProvider({
+ clientId: authEnv.APPLE_CLIENT_ID,
+ clientSecret: authEnv.APPLE_CLIENT_SECRET,
+ }),
+ ],
+ session: {
+ strategy: 'jwt',
+ maxAge: 30 * 24 * 60 * 60, // 30 days
+ },
+ pages: {
+ signIn: '/auth/signin',
+ signOut: '/auth/signout',
+ error: '/auth/error',
+ },
+ callbacks: {
+ async jwt({ token, user, account }) {
+ if (user) {
+ token.id = user.id;
+ token.role = (user as any).role;
+ }
+
+ if (account) {
+ token.provider = account.provider;
+ token.accessToken = account.access_token;
+ }
+
+ return token;
+ },
+ async session({ session, token }) {
+ if (session.user) {
+ session.user.id = token.id as string;
+ session.user.role = token.role as UserRole;
+ }
+
+ return session;
+ },
+ },
+ events: {
+ async createUser({ user }) {
+ // TODO: Create default family group
+ console.log('New user created:', user.email);
+ },
+ },
+};
diff --git a/packages/shared-auth/src/index.ts b/packages/shared-auth/src/index.ts
new file mode 100644
index 0000000..c59c5ea
--- /dev/null
+++ b/packages/shared-auth/src/index.ts
@@ -0,0 +1,25 @@
+// Config
+export { authOptions, authEnv, userRoles } from './config/auth.config';
+export type { UserRole, FamilyGroup } from './config/auth.config';
+
+// Middleware
+export { withAuth, withRole, protectApiRoute } from './middleware/auth.middleware';
+
+// Models
+export {
+ userSchema,
+ familyGroupSchema,
+ familyMemberSchema,
+ sessionSchema,
+ accountSchema,
+ createUserSchema,
+ createFamilyGroupSchema,
+ addFamilyMemberSchema,
+} from './models/auth.models';
+export type {
+ User,
+ FamilyGroup as AuthFamilyGroup,
+ FamilyMember,
+ Session,
+ Account,
+} from './models/auth.models';
diff --git a/packages/shared-auth/src/middleware/auth.middleware.ts b/packages/shared-auth/src/middleware/auth.middleware.ts
new file mode 100644
index 0000000..35a4abf
--- /dev/null
+++ b/packages/shared-auth/src/middleware/auth.middleware.ts
@@ -0,0 +1,62 @@
+import { NextRequest, NextResponse } from 'next/server';
+import { UserRole } from '../config/auth.config';
+
+/**
+ * Middleware to protect routes that require authentication
+ */
+export function withAuth(
+ request: NextRequest,
+ options?: {
+ signInPath?: string;
+ }
+): NextResponse {
+ const token = request.cookies.get('next-auth.session-token')?.value;
+ const signInPath = options?.signInPath ?? '/auth/signin';
+
+ if (!token) {
+ const signInUrl = new URL(signInPath, request.url);
+ signInUrl.searchParams.set('callbackUrl', request.nextUrl.pathname);
+ return NextResponse.redirect(signInUrl);
+ }
+
+ return NextResponse.next();
+}
+
+/**
+ * Middleware to check if user has required role
+ */
+export function withRole(
+ response: NextResponse,
+ request: NextRequest,
+ requiredRoles: UserRole[]
+): NextResponse {
+ const token = request.cookies.get('next-auth.session-token')?.value;
+
+ if (!token) {
+ return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
+ }
+
+ // TODO: Decode JWT and check role
+ // For now, allow all authenticated users
+ return response;
+}
+
+/**
+ * Middleware to protect API routes
+ */
+export function protectApiRoute(request: NextRequest): NextResponse {
+ const authHeader = request.headers.get('authorization');
+
+ if (!authHeader?.startsWith('Bearer ')) {
+ return NextResponse.json({ error: 'Missing or invalid token' }, { status: 401 });
+ }
+
+ const token = authHeader.split(' ')[1];
+
+ try {
+ // TODO: Verify JWT token
+ return NextResponse.next();
+ } catch (error) {
+ return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
+ }
+}
diff --git a/packages/shared-auth/src/models/auth.models.ts b/packages/shared-auth/src/models/auth.models.ts
new file mode 100644
index 0000000..7198b9f
--- /dev/null
+++ b/packages/shared-auth/src/models/auth.models.ts
@@ -0,0 +1,81 @@
+import { z } from 'zod';
+
+// User schema
+export const userSchema = z.object({
+ id: z.string().uuid(),
+ email: z.string().email(),
+ name: z.string().min(1),
+ image: z.string().url().optional(),
+ role: z.enum(['user', 'family_admin', 'family_member', 'support']),
+ emailVerified: z.date().optional(),
+ createdAt: z.date(),
+ updatedAt: z.date(),
+});
+
+export type User = z.infer<typeof userSchema>;
+
+// Family group schema
+export const familyGroupSchema = z.object({
+ id: z.string().uuid(),
+ name: z.string().min(1).max(100),
+ ownerId: z.string().uuid(),
+ createdAt: z.date(),
+ updatedAt: z.date(),
+});
+
+export type FamilyGroup = z.infer<typeof familyGroupSchema>;
+
+// Family member schema
+export const familyMemberSchema = z.object({
+ id: z.string().uuid(),
+ groupId: z.string().uuid(),
+ userId: z.string().uuid(),
+ role: z.enum(['owner', 'admin', 'member']),
+ joinedAt: z.date(),
+});
+
+export type FamilyMember = z.infer<typeof familyMemberSchema>;
+
+// Session schema
+export const sessionSchema = z.object({
+ id: z.string().uuid(),
+ userId: z.string().uuid(),
+ sessionToken: z.string(),
+ expires: z.date(),
+ createdAt: z.date(),
+});
+
+export type Session = z.infer<typeof sessionSchema>;
+
+// Account schema (for OAuth)
+export const accountSchema = z.object({
+ id: z.string().uuid(),
+ userId: z.string().uuid(),
+ provider: z.string(),
+ providerAccountId: z.string(),
+ access_token: z.string().optional(),
+ refresh_token: z.string().optional(),
+ expires_at: z.number().optional(),
+ token_type: z.string().optional(),
+ scope: z.string().optional(),
+});
+
+export type Account = z.infer<typeof accountSchema>;
+
+// Validation schemas for API
+export const createUserSchema = z.object({
+ email: z.string().email(),
+ password: z.string().min(8),
+ name: z.string().min(1),
+});
+
+export const createFamilyGroupSchema = z.object({
+ name: z.string().min(1).max(100),
+ ownerId: z.string().uuid(),
+});
+
+export const addFamilyMemberSchema = z.object({
+ groupId: z.string().uuid(),
+ userId: z.string().uuid(),
+ role: z.enum(['admin', 'member']).default('member'),
+});
diff --git a/packages/shared-auth/tsconfig.json b/packages/shared-auth/tsconfig.json
new file mode 100644
index 0000000..d822f58
--- /dev/null
+++ b/packages/shared-auth/tsconfig.json
@@ -0,0 +1,12 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "compilerOptions": {
+ "outDir": "./dist",
+ "rootDir": "./src",
+ "declaration": true,
+ "declarationMap": true,
+ "emitDeclarationOnly": true
+ },
+ "include": ["src/**/*"],
+ "exclude": ["node_modules", "dist"]
+}
diff --git a/packages/shared-billing/src/services/billing.services.ts b/packages/shared-billing/src/services/billing.services.ts
new file mode 100644
index 0000000..f2f9532
--- /dev/null
+++ b/packages/shared-billing/src/services/billing.services.ts
@@ -0,0 +1,224 @@
+import Stripe from 'stripe';
+import { stripe, SubscriptionTier, tierConfig } from '../config/billing.config';
+import { z } from 'zod';
+
+// Subscription service
+export class SubscriptionService {
+ /**
+ * Create a new subscription for a customer
+ */
+ async createSubscription(
+ customerId: string,
+ tier: SubscriptionTier,
+    metadata?: Record<string, string>
+  ): Promise<Stripe.Subscription> {
+ const priceId = tierConfig[tier].priceId;
+
+ const subscription = await stripe.subscriptions.create({
+ customer: customerId,
+ items: [{ price: priceId }],
+ metadata: metadata,
+ proration_behavior: 'create_prorations',
+ });
+
+ return subscription;
+ }
+
+ /**
+ * Update a customer's subscription tier
+ */
+ async updateSubscriptionTier(
+ subscriptionId: string,
+ newTier: SubscriptionTier
+  ): Promise<Stripe.Subscription> {
+ const newPriceId = tierConfig[newTier].priceId;
+
+ const subscription = await stripe.subscriptions.update(subscriptionId, {
+ items: [
+ {
+ price: newPriceId,
+ quantity: 1,
+ },
+ ],
+ proration_behavior: 'create_prorations',
+ });
+
+ return subscription;
+ }
+
+ /**
+ * Cancel a subscription
+ */
+ async cancelSubscription(
+ subscriptionId: string,
+ atPeriodEnd: boolean = true
+  ): Promise<Stripe.Subscription> {
+ const subscription = await stripe.subscriptions.update(subscriptionId, {
+ cancel_at_period_end: atPeriodEnd,
+ });
+
+ return subscription;
+ }
+
+ /**
+ * Get subscription by ID
+ */
+  async getSubscription(subscriptionId: string): Promise<Stripe.Subscription | null> {
+ try {
+ const subscription = await stripe.subscriptions.retrieve(subscriptionId);
+ return subscription;
+ } catch (error) {
+ if (error instanceof Stripe.errors.StripeInvalidRequestError) {
+ return null;
+ }
+ throw error;
+ }
+ }
+
+ /**
+ * Get customer's current subscription
+ */
+  async getCustomerSubscription(customerId: string): Promise<Stripe.Subscription | null> {
+ const subscriptions = await stripe.subscriptions.list({
+ customer: customerId,
+ status: 'active',
+ limit: 1,
+ });
+
+ return subscriptions.data[0] || null;
+ }
+}
+
+// Customer service
+export class CustomerService {
+ /**
+ * Create a new Stripe customer
+ */
+ async createCustomer(
+ email: string,
+ name?: string,
+    metadata?: Record<string, string>
+  ): Promise<Stripe.Customer> {
+ const customer = await stripe.customers.create({
+ email,
+ name,
+ metadata,
+ });
+
+ return customer;
+ }
+
+ /**
+ * Get or create customer by email
+ */
+ async getOrCreateCustomer(
+ email: string,
+ name?: string
+  ): Promise<Stripe.Customer> {
+ const existingCustomers = await stripe.customers.list({
+ email,
+ limit: 1,
+ });
+
+ if (existingCustomers.data.length > 0) {
+ return existingCustomers.data[0];
+ }
+
+ return this.createCustomer(email, name);
+ }
+
+ /**
+ * Create a billing portal session
+ */
+ async createBillingPortalSession(
+ customerId: string,
+ returnUrl: string
+  ): Promise<Stripe.BillingPortal.Session> {
+ const session = await stripe.billingPortal.sessions.create({
+ customer: customerId,
+ return_url: returnUrl,
+ });
+
+ return session;
+ }
+
+ /**
+ * Get customer by ID
+ */
+  async getCustomer(customerId: string): Promise<Stripe.Customer | null> {
+ try {
+ const customer = await stripe.customers.retrieve(customerId);
+ return customer as Stripe.Customer;
+ } catch (error) {
+ if (error instanceof Stripe.errors.StripeInvalidRequestError) {
+ return null;
+ }
+ throw error;
+ }
+ }
+}
+
+// Webhook service
+export class WebhookService {
+ /**
+ * Construct webhook event from raw body
+ */
+ constructEvent(
+ rawBody: Buffer | string,
+ signature: string
+ ): Stripe.Event {
+ return stripe.webhooks.constructEvent(
+ rawBody,
+ signature,
+ process.env.STRIPE_WEBHOOK_SECRET!
+ );
+ }
+
+ /**
+ * Handle webhook event
+ */
+  async handleWebhook(event: Stripe.Event): Promise<void> {
+ switch (event.type) {
+ case 'customer.subscription.created':
+ case 'customer.subscription.updated':
+ await this.handleSubscriptionChange(event.data.object);
+ break;
+ case 'customer.subscription.deleted':
+ await this.handleSubscriptionDeleted(event.data.object);
+ break;
+ case 'invoice.payment_succeeded':
+ await this.handlePaymentSucceeded(event.data.object);
+ break;
+ case 'invoice.payment_failed':
+ await this.handlePaymentFailed(event.data.object);
+ break;
+ default:
+ console.log(`Unhandled event type: ${event.type}`);
+ }
+ }
+
+ private async handleSubscriptionChange(subscription: Stripe.Subscription) {
+ console.log(`Subscription ${subscription.id} changed to ${subscription.status}`);
+ // TODO: Update local database
+ }
+
+ private async handleSubscriptionDeleted(subscription: Stripe.Subscription) {
+ console.log(`Subscription ${subscription.id} deleted`);
+ // TODO: Update local database
+ }
+
+ private async handlePaymentSucceeded(invoice: Stripe.Invoice) {
+ console.log(`Payment succeeded for invoice ${invoice.id}`);
+ // TODO: Update usage tracking
+ }
+
+ private async handlePaymentFailed(invoice: Stripe.Invoice) {
+ console.log(`Payment failed for invoice ${invoice.id}`);
+ // TODO: Send notification to customer
+ }
+}
+
+// Export instances
+export const subscriptionService = new SubscriptionService();
+export const customerService = new CustomerService();
+export const webhookService = new WebhookService();
diff --git a/packages/shared-ui/package.json b/packages/shared-ui/package.json
new file mode 100644
index 0000000..dc9680f
--- /dev/null
+++ b/packages/shared-ui/package.json
@@ -0,0 +1,17 @@
+{
+ "name": "@shieldsai/shared-ui",
+ "version": "0.1.0",
+ "private": true,
+ "type": "module",
+ "main": "src/index.tsx",
+ "types": "src/index.tsx",
+ "scripts": {
+ "lint": "eslint src/"
+ },
+ "dependencies": {
+ "solid-js": "^1.8.14"
+ },
+ "devDependencies": {
+ "typescript": "^5.3.3"
+ }
+}
diff --git a/packages/shared-utils/package.json b/packages/shared-utils/package.json
new file mode 100644
index 0000000..6ae9d14
--- /dev/null
+++ b/packages/shared-utils/package.json
@@ -0,0 +1,16 @@
+{
+ "name": "@shieldsai/shared-utils",
+ "version": "0.1.0",
+ "private": true,
+ "type": "module",
+ "main": "src/index.ts",
+ "types": "src/index.ts",
+ "scripts": {
+ "lint": "eslint src/",
+ "test": "vitest"
+ },
+ "devDependencies": {
+ "typescript": "^5.3.3",
+ "vitest": "^1.3.1"
+ }
+}
diff --git a/server/alerts/alert-server.ts b/server/alerts/alert-server.ts
index 1a3e350..c55792b 100644
--- a/server/alerts/alert-server.ts
+++ b/server/alerts/alert-server.ts
@@ -1,473 +1,415 @@
-import { WebSocketServer, WebSocket, Data } from 'ws';
-import { randomBytes } from 'crypto';
-import { IncomingMessage } from 'http';
-import { EventEmitter } from 'events';
-import jwt from 'jsonwebtoken';
-
/**
- * WebSocket Alert Server for Real-Time Call Analysis
- *
- * Subscribes to CallAnalysisEngine events and broadcasts alerts
- * to authenticated WebSocket clients.
- *
- * Security hardening (FRE-4497):
- * - JWT authentication required (enableAuth defaults to true)
- * - jwtSecret loaded from env (non-empty default)
- * - Origin allowlist validation
- * - Per-subscriber callId filtering (empty set = no alerts by default)
- * - crypto.randomBytes for sessionId
- * - Bounded alert history with TTL-based eviction
- * - Alert cooldown per session to prevent flooding
- * - Graceful shutdown with timeout
+ * WebSocket Alert Server
+ * Real-time alert broadcasting for call analysis events and anomalies
+ * Connects to CallAnalysisEngine and pushes alerts to subscribed clients
*/
-// ── Types ────────────────────────────────────────────────────────────────────
+import { WebSocketServer, WebSocket } from 'ws';
+import { CallAnalysisEngine, CallEvent, Anomaly, SentimentAnalysis, AnalysisResult } from '../../src/lib/inference/call-analysis-engine';
+import { jwtVerify, SignJWT } from 'jose';
-export interface AlertServerConfig {
- port: number;
- host: string;
- allowedOrigins: string[];
- enableAuth: boolean;
- jwtSecret: string;
- maxAlertHistory: number;
- alertHistoryTtlMs: number;
- cooldownMs: number;
- maxSubscribers: number;
- maxCallIdsPerSubscriber: number;
- shutdownTimeoutMs: number;
+export type AlertType =
+ | 'anomaly'
+ | 'call_event'
+ | 'quality_degraded'
+ | 'sentiment_shift'
+ | 'call_summary'
+ | 'connection'
+ | 'disconnection';
+
+export type AlertSeverity = 'info' | 'low' | 'medium' | 'high' | 'critical';
+
+export interface AlertPayload {
+ id: string;
+ type: AlertType;
+ severity: AlertSeverity;
+ timestamp: number;
+ callId?: string;
+ title: string;
+ message: string;
+  data: Record<string, unknown>;
+ actionable: boolean;
}
-export interface AlertEntry {
- id: string;
- timestamp: number;
- callId: string;
- type: string;
- severity: 'low' | 'medium' | 'high' | 'critical';
- data: Record;
+export interface AlertServerConfig {
+ port?: number;
+ enableAuth?: boolean;
+ jwtSecret?: string;
+ allowedOrigins?: string[];
+ alertCooldownMs?: number;
+ maxSubscribers?: number;
+ enableCallCorrelation?: boolean;
}
export interface SubscriberSession {
- sessionId: string;
- userId: string;
ws: WebSocket;
+ userId?: string;
   callIds: Set<string>;
   lastAlertTime: Map<string, number>;
- connectedAt: number;
+ subscribedAt: number;
}
-export interface AlertOptions {
- callId: string;
- type: string;
- severity: 'low' | 'medium' | 'high' | 'critical';
- data?: Record;
-}
-
-// ── Constants ────────────────────────────────────────────────────────────────
-
-const DEFAULT_CONFIG: AlertServerConfig = {
- port: parseInt(process.env.ALERT_SERVER_PORT || '8088', 10),
- host: process.env.ALERT_SERVER_HOST || '0.0.0.0',
- allowedOrigins: (process.env.ALLOWED_ORIGINS || '').split(',').filter(Boolean),
- enableAuth: process.env.ALERT_AUTH_DISABLED === 'true' ? false : true,
- jwtSecret: process.env.JWT_SECRET || randomBytes(32).toString('hex'),
- maxAlertHistory: 500,
- alertHistoryTtlMs: 3600_000,
- cooldownMs: 2000,
+const DEFAULT_CONFIG: Required<AlertServerConfig> = {
+ port: 8088,
+ enableAuth: true,
+ jwtSecret: process.env.ALERT_SERVER_JWT_SECRET || '',
+ allowedOrigins: ['http://localhost:3000'],
+ alertCooldownMs: 5000,
maxSubscribers: 100,
- maxCallIdsPerSubscriber: 50,
- shutdownTimeoutMs: 5000,
+ enableCallCorrelation: true,
};
-// ── JWT Helper ───────────────────────────────────────────────────────────────
-
-function extractJwt(req: IncomingMessage): string | null {
- const auth = req.headers['authorization'];
- if (auth?.startsWith('Bearer ')) return auth.slice(7);
- const match = req.url?.match(/[?&]token=([^&]+)/);
- return match ? decodeURIComponent(match[1]) : null;
-}
-
-function verifyJwt(token: string, secret: string): { sub: string; exp?: number } | null {
+/**
+ * JWT verification helper
+ */
+async function verifyJWT(token: string, secret: string): Promise<import('jose').JWTVerifyResult | null> {
try {
- const decoded = jwt.verify(token, secret, { algorithms: ['HS256'] });
- if (typeof decoded !== 'object' || !decoded.sub) return null;
- return {
- sub: String(decoded.sub),
- exp: decoded.exp ? Number(decoded.exp) : undefined,
- };
- } catch {
+ const decoded = await jwtVerify(token, new TextEncoder().encode(secret), {
+ algorithms: ['HS256'],
+ });
+ return decoded;
+ } catch (error) {
+ console.error('[AlertServer] JWT verification failed:', (error as Error).message);
return null;
}
}
-// ── Alert Server ─────────────────────────────────────────────────────────────
+export class AlertServer {
+ private wss: WebSocketServer | null = null;
+  private config: Required<AlertServerConfig>;
+  private subscribers: Map<string, SubscriberSession> = new Map();
+  private analysisEngines: Map<string, CallAnalysisEngine> = new Map();
+ private alertHistory: AlertPayload[] = [];
+ private maxAlertHistory: number = 500;
+ private isRunning: boolean = false;
-export class AlertServer extends EventEmitter {
- private wss: WebSocketServer;
- private sessions: Map = new Map();
- private alertHistory: AlertEntry[] = [];
- private config: AlertServerConfig;
- private engine?: EventEmitter;
- private cleanupTimer?: NodeJS.Timeout;
-
- constructor(config: Partial = {}) {
- super();
+ constructor(config: AlertServerConfig = {}) {
this.config = { ...DEFAULT_CONFIG, ...config };
-
- this.wss = new WebSocketServer({
- port: this.config.port,
- host: this.config.host,
- maxPayload: 65536,
- verifyClient: this.verifyClient.bind(this),
- });
-
- this.wss.on('connection', this.handleConnection.bind(this));
- console.log(`[AlertServer] Listening on ${this.config.host}:${this.config.port}`);
-
- // Periodic TTL cleanup
- this.cleanupTimer = setInterval(() => this.evictStaleAlerts(), 60_000);
}
- /**
- * Verify incoming WebSocket connection
- */
- private verifyClient(info: { req: IncomingMessage; origin: string }, cb: (result: boolean, status?: number, reason?: string) => void) {
+  async start(): Promise<void> {
+ this.wss = new WebSocketServer({
+ port: this.config.port,
+ maxPayload: 1024 * 1024,
+ });
+
+ this.wss.on('connection', (ws: WebSocket, req) => {
+ this.handleConnection(ws, req);
+ });
+
+ this.wss.on('error', (error: Error) => {
+ console.error(`[AlertServer] WebSocket error: ${error.message}`);
+ });
+
+ this.isRunning = true;
+ console.log(`[AlertServer] Listening on port ${this.config.port}`);
+ }
+
+  private async handleConnection(ws: WebSocket, req: import('http').IncomingMessage): Promise<void> {
+ const url = new URL(req.url || '', `http://${req.headers.host}`);
+ const sessionId = url.searchParams.get('sessionId') || `sub-${Date.now()}-${Math.random().toString(36).slice(2)}`;
+ let userId = url.searchParams.get('userId') || undefined;
+ const callId = url.searchParams.get('callId') || undefined;
+
// Origin validation
- if (this.config.allowedOrigins.length > 0) {
- const origin = info.origin || info.req.headers['origin'] || '';
- const allowed = this.config.allowedOrigins.some(
- allowedOrigin => origin === allowedOrigin || origin.startsWith(allowedOrigin)
- );
- if (!allowed) {
- cb(false, 403, `Origin "${origin}" not allowed`);
- return;
- }
- }
-
- // JWT authentication
- if (this.config.enableAuth) {
- const token = extractJwt(info.req);
- if (!token) {
- cb(false, 401, 'Missing JWT token');
- return;
- }
- const payload = verifyJwt(token, this.config.jwtSecret);
- if (!payload) {
- cb(false, 401, 'Invalid or expired JWT');
- return;
- }
- }
-
- // Max subscriber check
- if (this.sessions.size >= this.config.maxSubscribers) {
- cb(false, 503, 'Max subscribers reached');
+ const origin = req.headers.origin;
+ if (origin && !this.config.allowedOrigins.includes(origin)) {
+ ws.close(1008, 'Origin not allowed');
return;
}
- cb(true);
- }
-
- /**
- * Handle new WebSocket connection
- */
- private handleConnection(ws: WebSocket, req: IncomingMessage) {
- const token = extractJwt(req);
- const payload = token ? verifyJwt(token, this.config.jwtSecret) : null;
- const userId = payload?.sub || 'anonymous';
-
- // crypto.randomBytes for sessionId (not Date.now() + Math.random())
- const sessionId = `sess_${randomBytes(12).toString('hex')}`;
-
- const session: SubscriberSession = {
- sessionId,
- userId,
- ws,
- callIds: new Set(),
- lastAlertTime: new Map(),
- connectedAt: Date.now(),
- };
-
- this.sessions.set(sessionId, session);
-
- // Send handshake
- ws.send(JSON.stringify({
- type: 'handshake',
- payload: { sessionId, message: 'Connected to alert server' },
- }));
-
- ws.on('message', this.handleMessage(session).bind(this));
- ws.on('close', () => this.handleDisconnect(session));
- ws.on('error', (err) => {
- console.error(`[AlertServer] Session ${sessionId} error:`, err.message);
- this.handleDisconnect(session);
- });
-
- this.emit('subscriber:connected', { sessionId, userId });
- }
-
- /**
- * Handle incoming message from subscriber
- */
- private handleMessage(session: SubscriberSession) {
- return (data: Data) => {
- let parsed: Record;
- try {
- parsed = JSON.parse(data.toString());
- } catch {
- session.ws.send(JSON.stringify({ type: 'error', payload: { message: 'Invalid JSON' } }));
+ // JWT Authentication (if enabled)
+ if (this.config.enableAuth && this.config.jwtSecret) {
+ const authHeader = req.headers.authorization;
+ if (!authHeader || !authHeader.startsWith('Bearer ')) {
+ ws.close(4001, 'Missing or invalid JWT token');
return;
}
- const msgType = parsed.type as string;
-
- switch (msgType) {
- case 'subscribe': {
- const callIds = (parsed.callIds as string[]) || [];
- for (const cid of callIds) {
- if (typeof cid === 'string' && cid.length <= 64) {
- session.callIds.add(cid);
- }
- }
- if (session.callIds.size > this.config.maxCallIdsPerSubscriber) {
- const ids = Array.from(session.callIds);
- session.callIds = new Set(ids.slice(0, this.config.maxCallIdsPerSubscriber));
- }
- session.ws.send(JSON.stringify({
- type: 'subscribed',
- payload: { callIds: Array.from(session.callIds) },
- }));
- break;
- }
- case 'unsubscribe': {
- const callIds = (parsed.callIds as string[]) || Array.from(session.callIds);
- for (const cid of callIds) {
- session.callIds.delete(cid);
- }
- session.ws.send(JSON.stringify({
- type: 'unsubscribed',
- payload: { callIds: Array.from(session.callIds) },
- }));
- break;
- }
- case 'getHistory': {
- const limit = Math.min(parseInt(String(parsed.limit)) || 50, 100);
- const callId = parsed.callId as string | undefined;
- const filtered = callId
- ? this.alertHistory.filter(a => a.callId === callId)
- : this.alertHistory;
- session.ws.send(JSON.stringify({
- type: 'history',
- payload: { alerts: filtered.slice(-limit) },
- }));
- break;
- }
- case 'ping':
- session.ws.send(JSON.stringify({ type: 'pong', payload: { timestamp: Date.now() } }));
- break;
- default:
- session.ws.send(JSON.stringify({ type: 'error', payload: { message: `Unknown message type: ${msgType}` } }));
+ const token = authHeader.substring(7);
+      const decoded = await verifyJWT(token, this.config.jwtSecret);
+
+ if (!decoded) {
+ ws.close(4002, 'Invalid or expired JWT token');
+ return;
}
- };
- }
- /**
- * Handle subscriber disconnect
- */
- private handleDisconnect(session: SubscriberSession) {
- this.sessions.delete(session.sessionId);
- this.emit('subscriber:disconnected', { sessionId: session.sessionId });
- }
-
- /**
- * Connect to CallAnalysisEngine events
- */
- connectEngine(engine: EventEmitter): void {
- this.engine = engine;
-
- engine.on('result', (result: { callId: string; callQuality?: Record; sentiment?: string }) => {
- if (result.callQuality) {
- this.emitAlert({
- callId: result.callId,
- type: 'call_quality',
- severity: this.getSeverityFromQuality(result.callQuality),
- data: result.callQuality as Record,
- });
- }
- });
-
- engine.on('events', (events: { callId: string; events: Array<{ type: string; timestamp: number }> }) => {
- for (const event of events.events) {
- this.emitAlert({
- callId: events.callId,
- type: `call_event:${event.type}`,
- severity: 'medium',
- data: { eventType: event.type, timestamp: event.timestamp },
- });
- }
- });
-
- engine.on('anomalies', (anomalies: { callId: string; anomalies: Array<{ type: string; confidence: number }> }) => {
- for (const anomaly of anomalies.anomalies) {
- this.emitAlert({
- callId: anomalies.callId,
- type: `anomaly:${anomaly.type}`,
- severity: anomaly.confidence > 0.8 ? 'high' : 'medium',
- data: { anomalyType: anomaly.type, confidence: anomaly.confidence },
- });
- }
- });
-
- console.log('[AlertServer] Connected to analysis engine');
- }
-
- /**
- * Emit an alert to matching subscribers
- */
- private emitAlert(options: AlertOptions): void {
- const alert: AlertEntry = {
- id: `alert_${randomBytes(8).toString('hex')}`,
- timestamp: Date.now(),
- callId: options.callId,
- type: options.type,
- severity: options.severity,
- data: options.data || {},
- };
-
- // Store in bounded history
- this.alertHistory.push(alert);
- if (this.alertHistory.length > this.config.maxAlertHistory) {
- this.alertHistory = this.alertHistory.slice(-this.config.maxAlertHistory);
+ // Extract user ID from token if present
+      userId = (decoded as any).payload?.sub || userId;
}
- const payload = JSON.stringify({ type: 'alert', payload: alert });
+ if (this.subscribers.size >= this.config.maxSubscribers) {
+ ws.close(1013, 'Too many subscribers');
+ return;
+ }
- // Broadcast to matching subscribers with cooldown
- for (const session of this.sessions.values()) {
- // Skip if subscriber has callId filter and this call is not in it
- if (session.callIds.size > 0 && !session.callIds.has(options.callId)) {
- continue;
+ const session: SubscriberSession = {
+ ws,
+ userId,
+ callIds: callId ? new Set([callId]) : new Set(),
+ lastAlertTime: new Map(),
+ subscribedAt: Date.now(),
+ };
+
+ this.subscribers.set(sessionId, session);
+
+ ws.send(JSON.stringify({
+ type: 'connected',
+ payload: { sessionId, userId, timestamp: Date.now() },
+ }));
+
+ ws.on('message', (data: Buffer | ArrayBuffer) => {
+ this.handleMessage(sessionId, data);
+ });
+
+ ws.on('close', () => {
+ this.subscribers.delete(sessionId);
+ console.log(`[AlertServer] Subscriber disconnected: ${sessionId}`);
+ });
+
+ ws.on('error', (error: Error) => {
+ console.error(`[AlertServer] Subscriber error (${sessionId}): ${error.message}`);
+ });
+
+ console.log(`[AlertServer] Subscriber connected: ${sessionId}${callId ? ` (call: ${callId})` : ''}`);
+ }
+
+ private handleMessage(sessionId: string, data: Buffer | ArrayBuffer): void {
+ try {
+ const message = JSON.parse(data.toString());
+ const session = this.subscribers.get(sessionId);
+ if (!session) return;
+
+ switch (message.type) {
+ case 'subscribe':
+ if (message.callId) {
+ session.callIds.add(message.callId);
+ }
+ break;
+
+ case 'unsubscribe':
+ if (message.callId) {
+ session.callIds.delete(message.callId);
+ }
+ break;
+
+ case 'ping':
+ session.ws.send(JSON.stringify({ type: 'pong', timestamp: Date.now() }));
+ break;
}
+ } catch (error) {
+ console.error(`[AlertServer] Message parse error: ${(error as Error).message}`);
+ }
+ }
- // Cooldown check
- const key = `${options.callId}:${options.type}`;
- const lastTime = session.lastAlertTime.get(key) || 0;
- if (Date.now() - lastTime < this.config.cooldownMs) {
- continue;
+ bindAnalysisEngine(callId: string, engine: CallAnalysisEngine): void {
+ this.analysisEngines.set(callId, engine);
+
+ engine.on('result', (result: AnalysisResult) => {
+ this.processAnalysisResult(callId, result);
+ });
+
+ engine.on('events', (events: CallEvent[]) => {
+ events.forEach(event => {
+ this.sendAlert({
+ type: 'call_event',
+ severity: event.severity as AlertSeverity,
+ callId,
+ title: this.formatEventType(event.type),
+ message: this.formatEventMessage(event),
+ data: { event, timestamp: event.timestamp },
+ actionable: event.severity === 'high',
+ });
+ });
+ });
+
+ engine.on('anomalies', (anomalies: Anomaly[]) => {
+ anomalies.forEach(anomaly => {
+ this.sendAlert({
+ type: 'anomaly',
+ severity: anomaly.severity as AlertSeverity,
+ callId,
+ title: this.formatAnomalyType(anomaly.type),
+ message: anomaly.description,
+ data: {
+ anomaly,
+ confidence: anomaly.confidence,
+ recommendation: anomaly.recommendation,
+ },
+ actionable: anomaly.severity === 'high' || anomaly.severity === 'critical',
+ });
+ });
+ });
+
+ console.log(`[AlertServer] Bound analysis engine for call: ${callId}`);
+ }
+
+ private processAnalysisResult(callId: string, result: AnalysisResult): void {
+ if (result.callQuality.mosScore < 3.0) {
+ this.sendAlert({
+ type: 'quality_degraded',
+ severity: result.callQuality.mosScore < 2.5 ? 'high' : 'medium',
+ callId,
+ title: 'Call Quality Degraded',
+ message: `MOS score: ${result.callQuality.mosScore.toFixed(1)} (threshold: 3.0)`,
+        data: result.callQuality as unknown as Record<string, unknown>,
+ actionable: true,
+ });
+ }
+
+ if (result.sentiment.sentiment === 'negative' && result.sentiment.confidence > 0.7) {
+ this.sendAlert({
+ type: 'sentiment_shift',
+ severity: 'medium',
+ callId,
+ title: 'Negative Sentiment Detected',
+ message: `Confidence: ${(result.sentiment.confidence * 100).toFixed(0)}%`,
+        data: result.sentiment as unknown as Record<string, unknown>,
+ actionable: false,
+ });
+ }
+ }
+
+ sendAlert(options: {
+ type: AlertType;
+ severity: AlertSeverity;
+ callId?: string;
+ title: string;
+ message: string;
+    data: Record<string, unknown>;
+ actionable: boolean;
+ }): void {
+ const cooldownKey = `${options.callId}:${options.type}`;
+ const now = Date.now();
+
+ const sessionKeys = Array.from(this.subscribers.keys());
+ for (const key of sessionKeys) {
+ const session = this.subscribers.get(key);
+ if (!session) continue;
+
+ const lastTime = session.lastAlertTime.get(cooldownKey) || 0;
+ if (now - lastTime < this.config.alertCooldownMs) continue;
+
+ if (options.callId && session.callIds.size > 0 && !session.callIds.has(options.callId)) continue;
+
+ const alert: AlertPayload = {
+ id: `alert-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
+ type: options.type,
+ severity: options.severity,
+ timestamp: now,
+ callId: options.callId,
+ title: options.title,
+ message: options.message,
+ data: options.data,
+ actionable: options.actionable,
+ };
+
+ this.alertHistory.push(alert);
+ if (this.alertHistory.length > this.maxAlertHistory) {
+ this.alertHistory.shift();
}
if (session.ws.readyState === WebSocket.OPEN) {
- session.ws.send(payload);
- session.lastAlertTime.set(key, Date.now());
+ session.ws.send(JSON.stringify(alert));
}
- }
-
- this.emit('alert:emitted', alert);
- }
-
- /**
- * Determine severity from call quality metrics
- */
- private getSeverityFromQuality(quality: Record): 'low' | 'medium' | 'high' | 'critical' {
- const mos = quality.mosScore as number | undefined;
- if (mos !== undefined) {
- if (mos < 2.5) return 'critical';
- if (mos < 3.5) return 'high';
- if (mos < 4.0) return 'medium';
- }
- return 'low';
- }
-
- /**
- * Evict stale alerts from history based on TTL
- */
- private evictStaleAlerts(): void {
- const cutoff = Date.now() - this.config.alertHistoryTtlMs;
- const before = this.alertHistory.length;
- this.alertHistory = this.alertHistory.filter(a => a.timestamp > cutoff);
- const evicted = before - this.alertHistory.length;
- if (evicted > 0) {
- console.log(`[AlertServer] Evicted ${evicted} stale alerts`);
+ session.lastAlertTime.set(cooldownKey, now);
}
}
- /**
- * Get alert history (for API endpoint)
- */
- getAlertHistory(limit = 50, callId?: string): AlertEntry[] {
- let alerts = this.alertHistory;
- if (callId) {
- alerts = alerts.filter(a => a.callId === callId);
- }
- return alerts.slice(-limit);
- }
-
- /**
- * Get subscriber stats
- */
- getStats() {
- return {
- activeSubscribers: this.sessions.size,
- alertHistorySize: this.alertHistory.length,
- };
- }
-
- /**
- * Graceful shutdown with timeout
- */
- async stop(timeoutMs?: number): Promise {
- const t = timeoutMs || this.config.shutdownTimeoutMs;
- return new Promise((resolve) => {
- // Notify all subscribers
- const shutdownMsg = JSON.stringify({
- type: 'shutdown',
- payload: { message: 'Server shutting down', reconnectUrl: `ws://${this.config.host}:${this.config.port}` },
- });
-
- for (const session of this.sessions.values()) {
- if (session.ws.readyState === WebSocket.OPEN) {
- session.ws.send(shutdownMsg);
- }
- }
-
- // Close connections with timeout
- const deadline = Date.now() + t;
- let pending = this.sessions.size;
-
- if (pending === 0) {
- this.finishShutdown();
- resolve();
- return;
- }
-
- const timer = setTimeout(() => {
- for (const session of this.sessions.values()) {
- session.ws.close(1001, 'Server shutting down');
- }
- this.finishShutdown();
- resolve();
- }, Math.max(100, deadline - Date.now()));
-
- for (const session of this.sessions.values()) {
- session.ws.once('close', () => {
- pending--;
- if (pending <= 0) {
- clearTimeout(timer);
- this.finishShutdown();
- resolve();
- }
- });
- }
+ broadcastCallSummary(callId: string, summary: string): void {
+ this.sendAlert({
+ type: 'call_summary',
+ severity: 'info',
+ callId,
+ title: 'Call Analysis Summary',
+ message: summary,
+ data: { summary },
+ actionable: false,
});
}
- private finishShutdown(): void {
- if (this.cleanupTimer) clearInterval(this.cleanupTimer);
- this.wss.close();
- this.sessions.clear();
- console.log('[AlertServer] Shutdown complete');
+ getAlertHistory(limit: number = 50, callId?: string): AlertPayload[] {
+ let history = this.alertHistory;
+ if (callId) {
+ history = history.filter(a => a.callId === callId);
+ }
+ return history.slice(-limit);
+ }
+
+ getSubscriberCount(): number {
+ return this.subscribers.size;
+ }
+
+ getActiveCalls(): string[] {
+ return Array.from(this.analysisEngines.keys());
+ }
+
+ getEngine(callId: string): CallAnalysisEngine | undefined {
+ return this.analysisEngines.get(callId);
+ }
+
+ async stop(): Promise {
+ this.isRunning = false;
+
+ this.subscribers.forEach((session) => {
+ if (session.ws.readyState === WebSocket.OPEN) {
+ session.ws.send(JSON.stringify({
+ type: 'server_shutdown',
+ payload: { timestamp: Date.now() },
+ }));
+ session.ws.close(1001, 'Server shutting down');
+ }
+ });
+
+ this.analysisEngines.forEach((engine) => {
+ engine.destroy();
+ });
+
+ if (this.wss) {
+      await new Promise<void>((resolve) => {
+ this.wss!.close(() => resolve());
+ });
+ this.wss = null;
+ }
+
+ console.log('[AlertServer] Stopped');
+ }
+
+ private formatEventType(type: string): string {
+    const labels: Record<string, string> = {
+ interrupt: 'Speaker Interrupt',
+ overlap: 'Speech Overlap',
+ long_pause: 'Long Pause',
+ volume_spike: 'Volume Spike',
+ silence: 'Silence Detected',
+ speaker_change: 'Speaker Change',
+ };
+ return labels[type] || type;
+ }
+
+ private formatEventMessage(event: CallEvent): string {
+    const messages: Record<string, string> = {
+ interrupt: `Interrupt detected (${event.duration}ms)`,
+ overlap: `Speech overlap detected (${event.duration}ms)`,
+ long_pause: `Pause duration: ${(event.duration / 1000).toFixed(1)}s`,
+ volume_spike: `Volume spike: ${(event.metadata.level as number)?.toFixed(2) || 'unknown'}`,
+      silence: `Silence detected for ${event.duration.toFixed(0)}ms`,
+ speaker_change: 'Speaker change detected',
+ };
+ return messages[event.type] || 'Event detected';
+ }
+
+ private formatAnomalyType(type: string): string {
+    const labels: Record<string, string> = {
+ background_noise: 'Background Noise',
+ echo: 'Echo Detected',
+ distortion: 'Audio Distortion',
+ dropouts: 'Audio Dropout',
+ excessive_silence: 'Excessive Silence',
+ volume_inconsistency: 'Volume Inconsistency',
+ };
+ return labels[type] || type;
}
}
-export function createAlertServer(config?: Partial): AlertServer {
- return new AlertServer(config);
-}
+export default AlertServer;
diff --git a/services/darkwatch/src/alert.pipeline.ts b/services/darkwatch/src/alert.pipeline.ts
new file mode 100644
index 0000000..c91a809
--- /dev/null
+++ b/services/darkwatch/src/alert.pipeline.ts
@@ -0,0 +1,174 @@
+import { prisma, AlertType, AlertSeverity } from '@shieldsai/shared-db';
+import {
+ NotificationService,
+ NotificationPriority,
+ loadNotificationConfig,
+} from '@shieldsai/shared-notifications';
+
+const ALERT_DEDUP_WINDOW_MS = 24 * 60 * 60 * 1000;
+
+export class AlertPipeline {
+ private notificationService: NotificationService;
+
+ constructor() {
+ this.notificationService = new NotificationService(loadNotificationConfig());
+ }
+
+ async processNewExposures(exposureIds: string[]) {
+ const exposures = await prisma.exposure.findMany({
+ where: { id: { in: exposureIds }, isFirstTime: true },
+ include: {
+ subscription: {
+ select: {
+ id: true,
+ userId: true,
+ tier: true,
+ },
+ },
+ watchlistItem: true,
+ },
+ });
+
+ const alertsCreated: Awaited>[] = [];
+
+ for (const exposure of exposures) {
+ const dedupKey = `exposure:${exposure.subscriptionId}:${exposure.source}:${exposure.identifierHash}`;
+
+ const recentAlert = await prisma.alert.findFirst({
+ where: {
+ subscriptionId: exposure.subscriptionId,
+ type: AlertType.exposure_detected,
+ createdAt: {
+ gte: new Date(Date.now() - ALERT_DEDUP_WINDOW_MS),
+ },
+ },
+ orderBy: { createdAt: 'desc' },
+ });
+
+ if (recentAlert) {
+ continue;
+ }
+
+ const alert = await prisma.alert.create({
+ data: {
+ subscriptionId: exposure.subscriptionId,
+ userId: exposure.subscription.userId,
+ exposureId: exposure.id,
+ type: AlertType.exposure_detected,
+ title: this.buildTitle(exposure),
+ message: this.buildMessage(exposure),
+ severity: this.mapSeverity(exposure.severity),
+ channel: this.getChannelsForTier(exposure.subscription.tier),
+ },
+ });
+
+ alertsCreated.push(alert);
+
+ await this.dispatchNotification(alert, exposure);
+ }
+
+ return alertsCreated;
+ }
+
+ async dispatchScanCompleteAlert(
+ subscriptionId: string,
+ userId: string,
+ exposuresFound: number
+ ) {
+ const subscription = await prisma.subscription.findUnique({
+ where: { id: subscriptionId },
+ select: { tier: true },
+ });
+
+ if (!subscription) return;
+
+ const alert = await prisma.alert.create({
+ data: {
+ subscriptionId,
+ userId,
+ type: AlertType.scan_complete,
+ title: 'DarkWatch Scan Complete',
+ message: `Scan found ${exposuresFound} new exposure${exposuresFound === 1 ? '' : 's'}.`,
+ severity: exposuresFound > 0 ? 'warning' : 'info',
+ channel: this.getChannelsForTier(subscription.tier),
+ },
+ });
+
+ await this.dispatchNotification(alert, {
+ source: 'hibp',
+ severity: 'info',
+ identifier: '',
+ dataType: 'email',
+ } as any);
+
+ return alert;
+ }
+
+ private async dispatchNotification(
+ alert: {
+ userId: string;
+ channel: string[];
+ title: string;
+ message: string;
+ severity: AlertSeverity;
+ },
+ exposure: { source: string; severity: string; identifier: string; dataType: string }
+ ) {
+ try {
+ if (!this.notificationService.isFullyConfigured()) return;
+
+ await this.notificationService.sendMultiChannelNotification(
+ {
+ userId: alert.userId,
+ },
+ alert.channel as any,
+ alert.title,
+ `${alert.message}
+ Source: ${exposure.source}
+ Severity: ${exposure.severity}
+ Type: ${exposure.dataType}
`,
+ alert.severity === 'critical'
+ ? NotificationPriority.HIGH
+ : NotificationPriority.NORMAL
+ );
+ } catch (error) {
+ console.error('[AlertPipeline] Notification dispatch error:', error);
+ }
+ }
+
+ private buildTitle(exposure: {
+ source: string;
+ dataType: string;
+ severity: string;
+ }): string {
+ return `${exposure.severity.toUpperCase()}: ${exposure.dataType} exposure on ${exposure.source}`;
+ }
+
+ private buildMessage(exposure: {
+ identifier: string;
+ source: string;
+ severity: string;
+ dataType: string;
+ }): string {
+ const masked = exposure.identifier.includes('@')
+ ? exposure.identifier.replace(/(?<=.{2}).*(?=@)/, '***')
+ : exposure.identifier.slice(0, 3) + '***';
+
+ return `Your ${exposure.dataType} (${masked}) was found in a ${exposure.source} breach with ${exposure.severity} severity.`;
+ }
+
+ private mapSeverity(severity: string): AlertSeverity {
+ return severity as AlertSeverity;
+ }
+
+ private getChannelsForTier(tier: string): string[] {
+ const channelMap: Record = {
+ basic: ['email'],
+ plus: ['email', 'push'],
+ premium: ['email', 'push', 'sms'],
+ };
+ return channelMap[tier] || ['email'];
+ }
+}
+
+export const alertPipeline = new AlertPipeline();
diff --git a/services/darkwatch/src/index.ts b/services/darkwatch/src/index.ts
index 938258d..f18ad02 100644
--- a/services/darkwatch/src/index.ts
+++ b/services/darkwatch/src/index.ts
@@ -1,7 +1,5 @@
-export * from "./watchlist/WatchListService";
-export * from "./hibp/HIBPService";
-export * from "./matching/MatchingEngine";
-export * from "./alerts/AlertPipeline";
-export * from "./scanner/ScanService";
-export * from "./scheduler/ScanScheduler";
-export * from "./webhooks/WebhookHandler";
+export { watchlistService } from './watchlist.service';
+export { scanService } from './scan.service';
+export { schedulerService } from './scheduler.service';
+export { webhookService } from './webhook.service';
+export { alertPipeline } from './alert.pipeline';
diff --git a/services/darkwatch/src/scan.service.ts b/services/darkwatch/src/scan.service.ts
new file mode 100644
index 0000000..d3b5182
--- /dev/null
+++ b/services/darkwatch/src/scan.service.ts
@@ -0,0 +1,220 @@
+import { prisma, ExposureSource, ExposureSeverity, WatchlistType } from '@shieldsai/shared-db';
+import { createHash } from 'crypto';
+
+function hashIdentifier(identifier: string): string {
+ return createHash('sha256').update(identifier.toLowerCase().trim()).digest('hex');
+}
+
+function determineSeverity(
+ source: ExposureSource,
+ dataType: WatchlistType
+): ExposureSeverity {
+ const criticalSources = [ExposureSource.darkWebForum, ExposureSource.honeypot];
+ const warningSources = [ExposureSource.hibp, ExposureSource.shodan];
+ const criticalTypes = [WatchlistType.ssn];
+
+ if (criticalTypes.includes(dataType)) return ExposureSeverity.critical;
+ if (criticalSources.includes(source)) return ExposureSeverity.critical;
+ if (warningSources.includes(source)) return ExposureSeverity.warning;
+ return ExposureSeverity.info;
+}
+
+export class ScanService {
+ async checkHIBP(email: string): Promise<{ exposed: boolean; sources: string[] }> {
+ try {
+ const response = await fetch(
+ `https://hibp.com/api/v2/${encodeURIComponent(email)}`,
+ {
+ headers: {
+ 'hibp-api-key': process.env.HIBP_API_KEY || '',
+ Accept: 'application/json',
+ },
+ signal: AbortSignal.timeout(15000),
+ }
+ );
+
+ if (response.status === 404) {
+ return { exposed: false, sources: [] };
+ }
+
+ if (!response.ok) {
+ console.error(`[ScanService:HIBP] Status ${response.status} for ${email}`);
+ return { exposed: false, sources: [] };
+ }
+
+ const data = await response.json();
+ const sources = Array.isArray(data)
+ ? data.map((p: { Name: string }) => p.Name)
+ : [];
+
+ return { exposed: sources.length > 0, sources };
+ } catch (error) {
+ console.error('[ScanService:HIBP] Error:', error);
+ return { exposed: false, sources: [] };
+ }
+ }
+
+ async checkShodan(domain: string): Promise<{ exposed: boolean; ports: string[]; ips: string[] }> {
+ try {
+ const response = await fetch(
+ `https://api.shodan.io/shodan/host/${encodeURIComponent(domain)}`,
+ {
+ headers: {
+ Authorization: `Bearer ${process.env.SHODAN_API_KEY || ''}`,
+ },
+ signal: AbortSignal.timeout(15000),
+ }
+ );
+
+ if (response.status === 404) {
+ return { exposed: false, ports: [], ips: [] };
+ }
+
+ if (!response.ok) {
+ console.error(`[ScanService:Shodan] Status ${response.status} for ${domain}`);
+ return { exposed: false, ports: [], ips: [] };
+ }
+
+ const data = await response.json();
+ return {
+ exposed: !!data.ip_str,
+ ports: data.ports?.map(String) || [],
+ ips: [data.ip_str || ''],
+ };
+ } catch (error) {
+ console.error('[ScanService:Shodan] Error:', error);
+ return { exposed: false, ports: [], ips: [] };
+ }
+ }
+
+ async processSubscriptionScan(
+ subscriptionId: string,
+ watchlistItems: Awaited>
+ ): Promise<{ exposuresCreated: number; exposuresUpdated: number }> {
+ let exposuresCreated = 0;
+ let exposuresUpdated = 0;
+
+ for (const item of watchlistItems) {
+ const identifier = item.value;
+ const identifierHash = hashIdentifier(identifier);
+
+ switch (item.type) {
+ case WatchlistType.email: {
+ const hibpResult = await this.checkHIBP(identifier);
+ if (hibpResult.exposed) {
+ for (const source of hibpResult.sources) {
+ const existing = await prisma.exposure.findFirst({
+ where: {
+ subscriptionId,
+ source: ExposureSource.hibp,
+ identifierHash,
+ metadata: { path: ['dbName'], equals: source },
+ },
+ });
+
+ if (existing) {
+ await prisma.exposure.update({
+ where: { id: existing.id },
+ data: { detectedAt: new Date() },
+ });
+ exposuresUpdated++;
+ } else {
+ await prisma.exposure.create({
+ data: {
+ subscriptionId,
+ watchlistItemId: item.id,
+ source: ExposureSource.hibp,
+ dataType: item.type,
+ identifier,
+ identifierHash,
+ severity: determineSeverity(ExposureSource.hibp, item.type),
+ isFirstTime: true,
+ metadata: { dbName: source },
+ detectedAt: new Date(),
+ },
+ });
+ exposuresCreated++;
+ }
+ }
+ }
+ break;
+ }
+
+ case WatchlistType.domain: {
+ const shodanResult = await this.checkShodan(identifier);
+ if (shodanResult.exposed) {
+ const existing = await prisma.exposure.findFirst({
+ where: {
+ subscriptionId,
+ source: ExposureSource.shodan,
+ identifierHash,
+ },
+ });
+
+ if (existing) {
+ await prisma.exposure.update({
+ where: { id: existing.id },
+ data: {
+ detectedAt: new Date(),
+ metadata: { ports: shodanResult.ports, ips: shodanResult.ips },
+ },
+ });
+ exposuresUpdated++;
+ } else {
+ await prisma.exposure.create({
+ data: {
+ subscriptionId,
+ watchlistItemId: item.id,
+ source: ExposureSource.shodan,
+ dataType: item.type,
+ identifier,
+ identifierHash,
+ severity: determineSeverity(ExposureSource.shodan, item.type),
+ isFirstTime: true,
+ metadata: { ports: shodanResult.ports, ips: shodanResult.ips },
+ detectedAt: new Date(),
+ },
+ });
+ exposuresCreated++;
+ }
+ }
+ break;
+ }
+
+ default: {
+ const existing = await prisma.exposure.findFirst({
+ where: { subscriptionId, watchlistItemId: item.id, identifierHash },
+ });
+
+ if (!existing) {
+ await prisma.exposure.create({
+ data: {
+ subscriptionId,
+ watchlistItemId: item.id,
+ source: ExposureSource.darkWebForum,
+ dataType: item.type,
+ identifier,
+ identifierHash,
+ severity: determineSeverity(ExposureSource.darkWebForum, item.type),
+ isFirstTime: true,
+ detectedAt: new Date(),
+ },
+ });
+ exposuresCreated++;
+ }
+ break;
+ }
+ }
+ }
+
+ return { exposuresCreated, exposuresUpdated };
+ }
+
+ async getWatchlistItems(subscriptionId: string) {
+ return prisma.watchlistItem.findMany({
+ where: { subscriptionId, isActive: true },
+ });
+ }
+}
+
+export const scanService = new ScanService();
diff --git a/services/darkwatch/src/scheduler.service.ts b/services/darkwatch/src/scheduler.service.ts
new file mode 100644
index 0000000..e31725e
--- /dev/null
+++ b/services/darkwatch/src/scheduler.service.ts
@@ -0,0 +1,155 @@
+import { prisma, SubscriptionTier, SubscriptionStatus } from '@shieldsai/shared-db';
+import { tierConfig } from '@shieldsai/shared-billing';
+import { darkwatchScanQueue } from '@shieldsai/jobs';
+import { randomUUID } from 'crypto';
+
// Scan cadence per frequency setting; `realtime` is event-driven (no
// repeatable job), hence the null entry.
// NOTE(review): scheduleSubscriptionScans() actually enqueues repeats with a
// millisecond `every` interval, not these cron strings — they are only
// surfaced via getScanSchedule(). Confirm which is authoritative.
const CRON_EXPRESSIONS = {
  daily: '0 0 * * *',
  hourly: '0 * * * *',
  realtime: null,
};
+
+export class SchedulerService {
+ async scheduleSubscriptionScans() {
+ const activeSubscriptions = await prisma.subscription.findMany({
+ where: {
+ tier: { in: [SubscriptionTier.basic, SubscriptionTier.plus, SubscriptionTier.premium] },
+ status: SubscriptionStatus.active,
+ },
+ select: {
+ id: true,
+ tier: true,
+ userId: true,
+ },
+ });
+
+ const jobsEnqueued = [];
+
+ for (const subscription of activeSubscriptions) {
+ const frequency = tierConfig[subscription.tier].features.darkWebScanFrequency;
+ const cron = CRON_EXPRESSIONS[frequency];
+
+ if (!cron) {
+ continue;
+ }
+
+ const jobKey = `scheduled-scan:${subscription.id}`;
+
+ try {
+ await darkwatchScanQueue.add(
+ 'scheduled-scan',
+ {
+ subscriptionId: subscription.id,
+ tier: subscription.tier,
+ scanType: 'scheduled',
+ },
+ {
+ jobId: jobKey,
+ repeat: {
+ every: frequency === 'daily'
+ ? 24 * 60 * 60 * 1000
+ : 60 * 60 * 1000,
+ },
+ priority: subscription.tier === SubscriptionTier.premium ? 1 : 3,
+ }
+ );
+
+ jobsEnqueued.push({
+ subscriptionId: subscription.id,
+ tier: subscription.tier,
+ frequency,
+ });
+ } catch (error) {
+ if ((error as Error).message?.includes('Duplicate')) {
+ continue;
+ }
+ console.error(
+ `[SchedulerService] Failed to schedule scan for ${subscription.id}:`,
+ error
+ );
+ }
+ }
+
+ return jobsEnqueued;
+ }
+
+ async enqueueOnDemandScan(subscriptionId: string) {
+ const subscription = await prisma.subscription.findUnique({
+ where: { id: subscriptionId },
+ select: { id: true, tier: true },
+ });
+
+ if (!subscription) {
+ throw new Error(`Subscription ${subscriptionId} not found`);
+ }
+
+ return darkwatchScanQueue.add(
+ 'on-demand-scan',
+ {
+ subscriptionId,
+ tier: subscription.tier,
+ scanType: 'on-demand',
+ },
+ {
+ priority: 1,
+ jobId: `on-demand-scan:${subscriptionId}:${randomUUID()}`,
+ }
+ );
+ }
+
+ async enqueueRealtimeTrigger(subscriptionId: string, sourceData: Record) {
+ const subscription = await prisma.subscription.findUnique({
+ where: { id: subscriptionId },
+ select: { id: true, tier: true },
+ });
+
+ if (!subscription || subscription.tier !== SubscriptionTier.premium) {
+ throw new Error('Realtime triggers require Premium tier');
+ }
+
+ return darkwatchScanQueue.add(
+ 'realtime-trigger',
+ {
+ subscriptionId,
+ tier: subscription.tier,
+ scanType: 'realtime',
+ sourceData,
+ },
+ {
+ priority: 0,
+ jobId: `realtime-trigger:${subscriptionId}:${randomUUID()}`,
+ }
+ );
+ }
+
+ async rescheduleAll() {
+ const repeatableJobs = await darkwatchScanQueue.getRepeatableJobs();
+
+ for (const job of repeatableJobs) {
+ await darkwatchScanQueue.removeRepeatableByKey(job.key);
+ }
+
+ return this.scheduleSubscriptionScans();
+ }
+
+ async getScanSchedule(subscriptionId: string) {
+ const subscription = await prisma.subscription.findUnique({
+ where: { id: subscriptionId },
+ select: { tier: true },
+ });
+
+ if (!subscription) return null;
+
+ const frequency = tierConfig[subscription.tier].features.darkWebScanFrequency;
+
+ return {
+ subscriptionId,
+ tier: subscription.tier,
+ frequency,
+ cron: CRON_EXPRESSIONS[frequency],
+ nextRun: frequency === 'realtime' ? 'event-driven' : CRON_EXPRESSIONS[frequency],
+ };
+ }
+}
+
+export const schedulerService = new SchedulerService();
diff --git a/services/darkwatch/src/watchlist.service.ts b/services/darkwatch/src/watchlist.service.ts
new file mode 100644
index 0000000..caaaf91
--- /dev/null
+++ b/services/darkwatch/src/watchlist.service.ts
@@ -0,0 +1,97 @@
+import { prisma, WatchlistType } from '@shieldsai/shared-db';
+import { createHash } from 'crypto';
+
+export function normalizeValue(type: WatchlistType, value: string): string {
+ const trimmed = value.trim().toLowerCase();
+ switch (type) {
+ case WatchlistType.email:
+ return trimmed.replace(/\s+/g, '');
+ case WatchlistType.phoneNumber:
+ return trimmed.replace(/[\s\-\(\)]/g, '');
+ case WatchlistType.ssn:
+ return trimmed.replace(/-/g, '');
+ case WatchlistType.address:
+ return trimmed;
+ case WatchlistType.domain:
+ return trimmed.replace(/^https?:\/\//, '').replace(/\/.*$/, '');
+ default:
+ return trimmed;
+ }
+}
+
+export function hashValue(value: string): string {
+ return createHash('sha256').update(value).digest('hex');
+}
+
+export class WatchlistService {
+ async addItem(
+ subscriptionId: string,
+ type: WatchlistType,
+ value: string,
+ maxItems: number
+ ) {
+ const normalized = normalizeValue(type, value);
+ const itemHash = hashValue(normalized);
+
+ const currentCount = await prisma.watchlistItem.count({
+ where: { subscriptionId, isActive: true },
+ });
+
+ if (currentCount >= maxItems) {
+ throw new Error(
+ `Watchlist limit reached (${maxItems} items). Upgrade your plan to add more.`
+ );
+ }
+
+ const existing = await prisma.watchlistItem.findFirst({
+ where: { subscriptionId, type, hash: itemHash },
+ });
+
+ if (existing) {
+ if (!existing.isActive) {
+ return prisma.watchlistItem.update({
+ where: { id: existing.id },
+ data: { isActive: true },
+ });
+ }
+ return existing;
+ }
+
+ return prisma.watchlistItem.create({
+ data: {
+ subscriptionId,
+ type,
+ value: normalized,
+ hash: itemHash,
+ },
+ });
+ }
+
+ async getItems(subscriptionId: string) {
+ return prisma.watchlistItem.findMany({
+ where: { subscriptionId, isActive: true },
+ orderBy: { createdAt: 'desc' },
+ });
+ }
+
+ async removeItem(id: string, subscriptionId: string) {
+ return prisma.watchlistItem.update({
+ where: { id },
+ data: { isActive: false },
+ });
+ }
+
+ async getActiveItemsForScan(subscriptionId: string) {
+ return prisma.watchlistItem.findMany({
+ where: { subscriptionId, isActive: true },
+ });
+ }
+
+ async getItemCount(subscriptionId: string) {
+ return prisma.watchlistItem.count({
+ where: { subscriptionId, isActive: true },
+ });
+ }
+}
+
+export const watchlistService = new WatchlistService();
diff --git a/services/darkwatch/src/webhook.service.ts b/services/darkwatch/src/webhook.service.ts
new file mode 100644
index 0000000..256bd4e
--- /dev/null
+++ b/services/darkwatch/src/webhook.service.ts
@@ -0,0 +1,226 @@
import { createHash, createHmac, timingSafeEqual } from 'crypto';
import { prisma, ExposureSource, ExposureSeverity, WatchlistType, AlertType, AlertSeverity } from '@shieldsai/shared-db';
import { mixpanelService, EventType } from '@shieldsai/shared-analytics';
+
+function hashIdentifier(identifier: string): string {
+ return createHash('sha256').update(identifier.toLowerCase().trim()).digest('hex');
+}
+
+function determineSeverity(
+ source: ExposureSource,
+ dataType: WatchlistType
+): ExposureSeverity {
+ const criticalSources = [ExposureSource.darkWebForum, ExposureSource.honeypot];
+ const warningSources = [ExposureSource.hibp, ExposureSource.shodan];
+ const criticalTypes = [WatchlistType.ssn];
+
+ if (criticalTypes.includes(dataType)) return ExposureSeverity.critical;
+ if (criticalSources.includes(source)) return ExposureSeverity.critical;
+ if (warningSources.includes(source)) return ExposureSeverity.warning;
+ return ExposureSeverity.info;
+}
+
+export interface WebhookPayload {
+ source: string;
+ identifier: string;
+ identifierType: string;
+ metadata?: Record;
+ timestamp?: string;
+}
+
+export class WebhookService {
+ async processExternalWebhook(payload: WebhookPayload): Promise<{
+ exposuresCreated: number;
+ alertsCreated: number;
+ }> {
+ const source = this.mapSource(payload.source);
+ const dataType = this.mapDataType(payload.identifierType);
+ const identifier = payload.identifier.toLowerCase().trim();
+ const identifierHash = hashIdentifier(identifier);
+ const severity = determineSeverity(source, dataType);
+
+ const matchingItems = await prisma.watchlistItem.findMany({
+ where: {
+ isActive: true,
+ OR: [
+ { hash: identifierHash, type: dataType },
+ { value: identifier, type: dataType },
+ ],
+ },
+ include: {
+ subscription: {
+ select: {
+ id: true,
+ tier: true,
+ userId: true,
+ },
+ },
+ },
+ });
+
+ let exposuresCreated = 0;
+ let alertsCreated = 0;
+
+ for (const item of matchingItems) {
+ const existing = await prisma.exposure.findFirst({
+ where: {
+ subscriptionId: item.subscriptionId,
+ source,
+ identifierHash,
+ },
+ });
+
+ if (existing) {
+ await prisma.exposure.update({
+ where: { id: existing.id },
+ data: { detectedAt: new Date() },
+ });
+ continue;
+ }
+
+ const exposure = await prisma.exposure.create({
+ data: {
+ subscriptionId: item.subscriptionId,
+ watchlistItemId: item.id,
+ source,
+ dataType,
+ identifier,
+ identifierHash,
+ severity,
+ isFirstTime: true,
+ metadata: payload.metadata || {},
+ detectedAt: new Date(),
+ },
+ });
+
+ exposuresCreated++;
+
+ const alertChannels = this.getAlertChannelsForTier(item.subscription.tier);
+
+ await prisma.alert.create({
+ data: {
+ subscriptionId: item.subscriptionId,
+ userId: item.subscription.userId,
+ exposureId: exposure.id,
+ type: AlertType.exposure_detected,
+ title: `New Exposure Detected: ${this.getSourceLabel(source)}`,
+ message: this.buildAlertMessage(identifier, source, severity),
+ severity: this.mapAlertSeverity(severity),
+ channel: alertChannels,
+ },
+ });
+
+ alertsCreated++;
+
+ await mixpanelService.track(EventType.EXPOSURE_DETECTED, {
+ userId: item.subscription.userId,
+ exposureType: dataType,
+ severity,
+ source,
+ subscriptionTier: item.subscription.tier,
+ });
+ }
+
+ return { exposuresCreated, alertsCreated };
+ }
+
+ async verifyWebhookSignature(
+ body: string,
+ signature: string,
+ timestamp: string
+ ): Promise {
+ const webhookSecret = process.env.DARKWATCH_WEBHOOK_SECRET;
+ if (!webhookSecret) {
+ console.warn('[WebhookService] DARKWATCH_WEBHOOK_SECRET not set — signature verification skipped');
+ return false;
+ }
+
+ const expected = createHash('sha256')
+ .update(`${timestamp}:${body}`)
+ .digest('hex');
+
+ return expected === signature;
+ }
+
+ private mapSource(source: string): ExposureSource {
+ const sourceMap: Record = {
+ hibp: ExposureSource.hibp,
+ 'haveibeenpwned': ExposureSource.hibp,
+ securitytrails: ExposureSource.securityTrails,
+ censys: ExposureSource.censys,
+ 'darkweb-forum': ExposureSource.darkWebForum,
+ 'darkweb': ExposureSource.darkWebForum,
+ shodan: ExposureSource.shodan,
+ honeypot: ExposureSource.honeypot,
+ };
+
+ const normalized = source.toLowerCase().replace(/\s+/g, '');
+ const mapped = sourceMap[normalized];
+ if (!mapped) {
+ console.warn(`[WebhookService] Unknown source "${source}", falling back to darkWebForum`);
+ }
+ return mapped || ExposureSource.darkWebForum;
+ }
+
+ private mapDataType(type: string): WatchlistType {
+ const typeMap: Record = {
+ email: WatchlistType.email,
+ phone: WatchlistType.phoneNumber,
+ phonenumber: WatchlistType.phoneNumber,
+ ssn: WatchlistType.ssn,
+ address: WatchlistType.address,
+ domain: WatchlistType.domain,
+ };
+
+ const normalized = type.toLowerCase().trim();
+ return typeMap[normalized] || WatchlistType.email;
+ }
+
+ private getAlertChannelsForTier(tier: string): string[] {
+ const channelMap: Record = {
+ basic: ['email'],
+ plus: ['email', 'push'],
+ premium: ['email', 'push', 'sms'],
+ };
+ return channelMap[tier] || ['email'];
+ }
+
+ private mapAlertSeverity(severity: ExposureSeverity): AlertSeverity {
+ return severity as AlertSeverity;
+ }
+
+ private getSourceLabel(source: ExposureSource): string {
+ const labels: Record = {
+ [ExposureSource.hibp]: 'Have I Been Pwned',
+ [ExposureSource.securityTrails]: 'SecurityTrails',
+ [ExposureSource.censys]: 'Censys',
+ [ExposureSource.darkWebForum]: 'Dark Web Forum',
+ [ExposureSource.shodan]: 'Shodan',
+ [ExposureSource.honeypot]: 'Honeypot',
+ };
+ return labels[source] || source;
+ }
+
+ private buildAlertMessage(
+ identifier: string,
+ source: ExposureSource,
+ severity: ExposureSeverity
+ ): string {
+ const masked = this.maskIdentifier(identifier);
+ return `${severity.toUpperCase()}: "${masked}" found in ${this.getSourceLabel(source)}.`;
+ }
+
+ private maskIdentifier(identifier: string): string {
+ if (identifier.includes('@')) {
+ const [user, domain] = identifier.split('@');
+ const maskedUser = user.slice(0, 2) + '***' + user.slice(-1);
+ return `${maskedUser}@${domain}`;
+ }
+ if (identifier.length > 8) {
+ return identifier.slice(0, 3) + '***' + identifier.slice(-2);
+ }
+ return identifier;
+ }
+}
+
+export const webhookService = new WebhookService();
diff --git a/services/spamshield/src/feature-flags.ts b/services/spamshield/src/feature-flags.ts
new file mode 100644
index 0000000..5c72e6a
--- /dev/null
+++ b/services/spamshield/src/feature-flags.ts
@@ -0,0 +1,227 @@
+/**
+ * Feature Flag Management System
+ * Centralized feature flag handling with type safety and runtime updates
+ */
+
+import type { z } from 'zod';
+
+/**
+ * Type for feature flag values
+ */
+export type FeatureFlagValue = boolean | string | number;
+
+/**
+ * Interface for a feature flag definition
+ */
+export interface FeatureFlag {
+ key: string;
+ defaultValue: T;
+ description?: string;
+ allowedValues?: T[]; // For enum-like flags
+ category?: string;
+}
+
+/**
+ * Feature flag registry - stores all defined flags
+ */
+export interface FeatureFlagRegistry {
+ [key: string]: FeatureFlag;
+}
+
+/**
+ * Feature flag resolver - handles flag resolution logic
+ */
+export class FeatureFlagResolver {
+ private flags: FeatureFlagRegistry;
+ private resolvedCache: Map = new Map();
+
+ constructor(flags: FeatureFlagRegistry) {
+ this.flags = flags;
+ }
+
+ /**
+ * Resolve a feature flag value
+ * Priority: Environment > Cache > Default
+ */
+ resolve(key: string, defaultValue: T): T {
+ // Check cache first
+ if (this.resolvedCache.has(key)) {
+ return this.resolvedCache.get(key)! as T;
+ }
+
+ // Check environment variable (allows runtime updates)
+ const envValue = process.env[`FLAG_${key.toUpperCase()}`];
+ if (envValue !== undefined) {
+ // Try to parse as JSON first, then as boolean, then as string
+ let parsed: FeatureFlagValue;
+ try {
+ parsed = JSON.parse(envValue);
+ } catch {
+ parsed = envValue.toLowerCase() === 'true' ? true :
+ envValue.toLowerCase() === 'false' ? false :
+ envValue;
+ }
+
+ // Validate against allowed values if defined
+ const flag = this.flags[key];
+ if (flag && flag.allowedValues && !flag.allowedValues.includes(parsed)) {
+ console.warn(`Invalid value for flag ${key}: ${parsed}. Using default.`);
+ parsed = defaultValue as FeatureFlagValue;
+ }
+
+ this.resolvedCache.set(key, parsed);
+ return parsed as T;
+ }
+
+ // Use cached value if available
+ if (this.resolvedCache.has(key)) {
+ return this.resolvedCache.get(key)! as T;
+ }
+
+ // Return default
+ this.resolvedCache.set(key, defaultValue as FeatureFlagValue);
+ return defaultValue as T;
+ }
+
+ /**
+ * Check if a flag is enabled (boolean check)
+ */
+ isEnabled(key: string, defaultValue: T): T {
+ return this.resolve(key, defaultValue) as T;
+ }
+
+ /**
+ * Get flag definition
+ */
+ getDefinition(key: string): FeatureFlag | undefined {
+ return this.flags[key];
+ }
+
+ /**
+ * List all registered flags
+ */
+ getAllFlags(): FeatureFlagRegistry {
+ return { ...this.flags };
+ }
+
+ /**
+ * Clear the resolution cache (useful for testing)
+ */
+ clearCache(): void {
+ this.resolvedCache.clear();
+ }
+}
+
+/**
+ * Feature flag configuration with pre-defined flags
+ */
// Pre-defined flags, keyed by dotted lookup name.
// NOTE(review): FeatureFlagResolver builds env-var names from this dotted
// lookup key (`FLAG_${key.toUpperCase()}`), not from each flag's snake_case
// `key` field — the `key` field is never read by the resolver. Confirm which
// name deployments are expected to set.
export const featureFlags: FeatureFlagRegistry = {
  // SpamShield Feature Flags
  'spamshield.enable.number.reputation': {
    key: 'spamshield_enable_number_reputation',
    defaultValue: true,
    description: 'Enable number reputation checking (Hiya API integration)',
    category: 'spamshield',
  },
  'spamshield.enable.content.classification': {
    key: 'spamshield_enable_content_classification',
    defaultValue: true,
    description: 'Enable SMS content classification (BERT model)',
    category: 'spamshield',
  },
  'spamshield.enable.behavioral.analysis': {
    key: 'spamshield_enable_behavioral_analysis',
    defaultValue: true,
    description: 'Enable call behavioral analysis',
    category: 'spamshield',
  },
  'spamshield.enable.community.intelligence': {
    key: 'spamshield_enable_community_intelligence',
    defaultValue: true,
    description: 'Enable community intelligence sharing',
    category: 'spamshield',
  },
  'spamshield.enable.real.time.blocking': {
    key: 'spamshield_enable_real_time_blocking',
    defaultValue: true,
    description: 'Enable real-time spam blocking',
    category: 'spamshield',
  },
  'spamshield.enable.multiple.sources': {
    key: 'spamshield_enable_multiple_sources',
    defaultValue: false,
    description: 'Enable multiple reputation source aggregation (Truecaller, etc.)',
    category: 'spamshield',
  },
  'spamshield.enable.ml.classifier': {
    key: 'spamshield_enable_ml_classifier',
    defaultValue: false,
    description: 'Enable ML-based spam classification',
    category: 'spamshield',
  },

  // VoicePrint Feature Flags
  'voiceprint.enable.ml.service': {
    key: 'voiceprint_enable_ml_service',
    defaultValue: false,
    description: 'Enable ML service integration for voice analysis',
    category: 'voiceprint',
  },
  'voiceprint.enable.faiss.index': {
    key: 'voiceprint_enable_faiss_index',
    defaultValue: true,
    description: 'Enable FAISS index for voice matching',
    category: 'voiceprint',
  },
  'voiceprint.enable.batch.analysis': {
    key: 'voiceprint_enable_batch_analysis',
    defaultValue: true,
    description: 'Enable batch voice analysis',
    category: 'voiceprint',
  },
  'voiceprint.enable.realtime.analysis': {
    key: 'voiceprint_enable_realtime_analysis',
    defaultValue: false,
    description: 'Enable real-time voice analysis',
    category: 'voiceprint',
  },
  'voiceprint.enable.mock.model': {
    key: 'voiceprint_enable_mock_model',
    defaultValue: true,
    description: 'Enable mock model for development',
    category: 'voiceprint',
  },

  // General Platform Flags
  'platform.enable.audit.logs': {
    key: 'platform_enable_audit_logs',
    defaultValue: true,
    description: 'Enable comprehensive audit logging',
    category: 'platform',
  },
  'platform.enable.kpi.tracking': {
    key: 'platform_enable_kpi_tracking',
    defaultValue: true,
    description: 'Enable KPI snapshot tracking',
    category: 'platform',
  },
};
+
+/**
+ * Create a resolver instance with the default flags
+ */
+export const featureFlagResolver = new FeatureFlagResolver(featureFlags);
+
+/**
+ * Convenience function for quick flag checks
+ */
+export function isFeatureEnabled(key: string, defaultValue: T): T {
+ return featureFlagResolver.isEnabled(key, defaultValue);
+}
+
+/**
+ * Check if a flag is enabled with type safety
+ */
+export function checkFlag(key: string, defaultValue: T): T {
+ return featureFlagResolver.resolve(key, defaultValue);
+}
diff --git a/services/spamshield/src/spamshield.audit-logger.ts b/services/spamshield/src/spamshield.audit-logger.ts
new file mode 100644
index 0000000..dd62ee4
--- /dev/null
+++ b/services/spamshield/src/spamshield.audit-logger.ts
@@ -0,0 +1,118 @@
+import { createHash } from 'crypto';
+
+export type AuditClassificationType = 'sms' | 'call';
+
+export interface AuditClassificationEntry {
+ id: string;
+ timestamp: string;
+ type: AuditClassificationType;
+ phoneNumberHash: string;
+ decision: 'spam' | 'ham' | 'block' | 'flag' | 'allow';
+ confidence: number;
+ reasons: string[];
+ featureFlags: Record;
+ metadata?: Record;
+}
+
+const MAX_AUDIT_LOG_SIZE = 10_000;
+
+class AuditLogger {
+ private entries: AuditClassificationEntry[] = [];
+
+ logClassification(entry: Omit): AuditClassificationEntry {
+ const record: AuditClassificationEntry = {
+ id: `audit-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
+ timestamp: new Date().toISOString(),
+ ...entry,
+ };
+
+ this.entries.push(record);
+
+ if (this.entries.length > MAX_AUDIT_LOG_SIZE) {
+ this.entries.shift();
+ }
+
+ console.log(
+ `[SpamShield:Audit] type=${record.type} decision=${record.decision} ` +
+ `confidence=${record.confidence.toFixed(3)} reasons=${record.reasons.join(',') || 'none'} ` +
+ `phoneHash=${record.phoneNumberHash}`
+ );
+
+ return record;
+ }
+
+ getEntries(
+ filters?: {
+ type?: AuditClassificationType;
+ decision?: string;
+ startDate?: Date;
+ endDate?: Date;
+ limit?: number;
+ }
+ ): AuditClassificationEntry[] {
+ let results = this.entries;
+
+ if (filters?.type) {
+ results = results.filter(e => e.type === filters.type);
+ }
+
+ if (filters?.decision) {
+ results = results.filter(e => e.decision === filters.decision);
+ }
+
+ if (filters?.startDate) {
+ results = results.filter(e => new Date(e.timestamp) >= filters.startDate!);
+ }
+
+ if (filters?.endDate) {
+ results = results.filter(e => new Date(e.timestamp) <= filters.endDate!);
+ }
+
+ if (filters?.limit) {
+ results = results.slice(-filters.limit);
+ }
+
+ return results;
+ }
+
+ getSummary(): {
+ totalEntries: number;
+ spamCount: number;
+ hamCount: number;
+ blockCount: number;
+ flagCount: number;
+ allowCount: number;
+ avgConfidence: number;
+ } {
+ const spamCount = this.entries.filter(e => e.decision === 'spam' || e.decision === 'block').length;
+ const hamCount = this.entries.filter(e => e.decision === 'ham' || e.decision === 'allow').length;
+ const blockCount = this.entries.filter(e => e.decision === 'block').length;
+ const flagCount = this.entries.filter(e => e.decision === 'flag').length;
+ const allowCount = this.entries.filter(e => e.decision === 'allow').length;
+ const avgConfidence =
+ this.entries.length > 0
+ ? this.entries.reduce((s, e) => s + e.confidence, 0) / this.entries.length
+ : 0;
+
+ return {
+ totalEntries: this.entries.length,
+ spamCount,
+ hamCount,
+ blockCount,
+ flagCount,
+ allowCount,
+ avgConfidence: Math.round(avgConfidence * 1000) / 1000,
+ };
+ }
+
+ clear(): void {
+ this.entries = [];
+ }
+}
+
+export const spamAuditLogger = new AuditLogger();
+
+export function hashPhoneNumber(phoneNumber: string): string {
+ const hash = createHash('sha256').update(phoneNumber.trim()).digest('hex');
+ return `sha256_${hash}`;
+}
diff --git a/services/spamshield/src/spamshield.error-handler.ts b/services/spamshield/src/spamshield.error-handler.ts
new file mode 100644
index 0000000..41b382d
--- /dev/null
+++ b/services/spamshield/src/spamshield.error-handler.ts
@@ -0,0 +1,118 @@
+import { FastifyReply } from 'fastify';
+import { SpamErrorCode, HttpStatus, SpamErrorResponse } from './spamshield.config';
+
+export { SpamErrorCode, HttpStatus };
+export type { SpamErrorResponse };
+
+/**
+ * Standardized error response builder for SpamShield API
+ */
+export class ErrorHandler {
+ /**
+ * Create a standard error response
+ */
+ static create(
+ code: SpamErrorCode,
+ message: string,
+ options?: {
+ field?: string;
+ requestId?: string;
+ additionalData?: Record;
+ }
+ ): SpamErrorResponse {
+ return {
+ error: {
+ code,
+ message,
+ ...(options?.field && { field: options.field }),
+ timestamp: new Date().toISOString(),
+ ...(options?.requestId && { requestId: options.requestId }),
+ },
+ };
+ }
+
+ /**
+ * Send a standard error response with appropriate HTTP status code
+ */
+ static send(
+ reply: FastifyReply,
+ code: SpamErrorCode,
+ message: string,
+ options?: {
+ field?: string;
+ status?: number;
+ requestId?: string;
+ }
+ ): void {
+ const status = options?.status ?? this.getStatusForCode(code);
+ const errorResponse = this.create(code, message, {
+ field: options?.field,
+ requestId: options?.requestId,
+ });
+ reply.code(status).send(errorResponse);
+ }
+
+ /**
+ * Map error codes to HTTP status codes
+ */
+ private static getStatusForCode(code: SpamErrorCode): number {
+ const statusMap: Record = {
+ // Client errors
+ [SpamErrorCode.INVALID_REQUEST]: HttpStatus.BAD_REQUEST,
+ [SpamErrorCode.MISSING_REQUIRED_FIELD]: HttpStatus.BAD_REQUEST,
+ [SpamErrorCode.UNAUTHORIZED]: HttpStatus.UNAUTHORIZED,
+ [SpamErrorCode.NOT_FOUND]: HttpStatus.NOT_FOUND,
+ [SpamErrorCode.VALIDATION_ERROR]: HttpStatus.BAD_REQUEST,
+
+ // Server errors
+ [SpamErrorCode.CLASSIFICATION_FAILED]: HttpStatus.UNPROCESSABLE_ENTITY,
+ [SpamErrorCode.REPUTATION_CHECK_FAILED]: HttpStatus.UNPROCESSABLE_ENTITY,
+ [SpamErrorCode.ANALYSIS_FAILED]: HttpStatus.UNPROCESSABLE_ENTITY,
+ [SpamErrorCode.FEEDBACK_RECORD_FAILED]: HttpStatus.UNPROCESSABLE_ENTITY,
+ [SpamErrorCode.DATABASE_ERROR]: HttpStatus.INTERNAL_SERVER_ERROR,
+ [SpamErrorCode.RATE_LIMIT_EXCEEDED]: HttpStatus.TOO_MANY_REQUESTS,
+ [SpamErrorCode.SERVICE_UNAVAILABLE]: HttpStatus.SERVICE_UNAVAILABLE,
+ };
+ return statusMap[code] ?? HttpStatus.INTERNAL_SERVER_ERROR;
+ }
+
+ /**
+ * Validate required string field
+ */
+ static validateRequiredField(
+ value: unknown,
+ fieldName: string
+ ): { isValid: boolean; error?: { code: SpamErrorCode; message: string; field: string } } {
+ if (!value || typeof value !== 'string' || value.trim() === '') {
+ return {
+ isValid: false,
+ error: {
+ code: SpamErrorCode.MISSING_REQUIRED_FIELD,
+ message: `${fieldName} is required`,
+ field: fieldName,
+ },
+ };
+ }
+ return { isValid: true };
+ }
+
+ /**
+ * Validate boolean field
+ */
+ static validateBooleanField(
+ value: unknown,
+ fieldName: string
+ ): { isValid: boolean; error?: { code: SpamErrorCode; message: string; field: string } } {
+ if (value === undefined || value === null || typeof value !== 'boolean') {
+ return {
+ isValid: false,
+ error: {
+ code: SpamErrorCode.VALIDATION_ERROR,
+ message: `${fieldName} must be a boolean`,
+ field: fieldName,
+ },
+ };
+ }
+ return { isValid: true };
+ }
+}
diff --git a/services/voiceprint/src/index.ts b/services/voiceprint/src/index.ts
index 0554fdb..4d40bf7 100644
--- a/services/voiceprint/src/index.ts
+++ b/services/voiceprint/src/index.ts
@@ -1,6 +1,30 @@
-export * from "./preprocessor/AudioPreprocessor";
-export * from "./enrollment/VoiceEnrollmentService";
-export * from "./analysis/AnalysisService";
-export * from "./analysis/BatchAnalysisService";
-export * from "./embedding/EmbeddingService";
-export * from "./indexer/FAISSIndex";
+// Config
+export {
+ voicePrintEnv,
+ VoicePrintSource,
+ AnalysisJobStatus,
+ DetectionType,
+ ConfidenceLevel,
+ audioPreprocessingConfig,
+ voicePrintFeatureFlags,
+ voicePrintRateLimits,
+ checkFlag,
+ isFeatureEnabled,
+} from './voiceprint.config';
+
+
+
+// Services
+export {
+ AudioPreprocessor,
+ VoiceEnrollmentService,
+ AnalysisService,
+ BatchAnalysisService,
+ EmbeddingService,
+ FAISSIndex,
+ audioPreprocessor,
+ voiceEnrollmentService,
+ analysisService,
+ batchAnalysisService,
+ embeddingService,
+} from './voiceprint.service';
diff --git a/services/voiceprint/src/voiceprint.config.ts b/services/voiceprint/src/voiceprint.config.ts
new file mode 100644
index 0000000..f117f5a
--- /dev/null
+++ b/services/voiceprint/src/voiceprint.config.ts
@@ -0,0 +1,102 @@
+import { z } from 'zod';
+import { checkFlag } from './voiceprint.feature-flags';
+
+// Environment variables for VoicePrint
+const envSchema = z.object({
+ ECAPA_TDNN_MODEL_PATH: z.string().default('./models/ecapa-tdnn'),
+ ML_SERVICE_URL: z.string().default('http://localhost:8001'),
+ FAISS_INDEX_PATH: z.string().default('./data/voiceprint_faiss.index'),
+ AUDIO_STORAGE_BUCKET: z.string().default('voiceprint-audio'),
+ AUDIO_STORAGE_ENDPOINT: z.string().default('http://localhost:9000'),
+ SYNTHETIC_THRESHOLD: z.string().transform(Number).default(0.75),
+ ENROLLMENT_MIN_DURATION_SEC: z.string().transform(Number).default(3),
+ ENROLLMENT_MAX_DURATION_SEC: z.string().transform(Number).default(60),
+ EMBEDDING_DIMENSIONS: z.string().transform(Number).default(192),
+ BATCH_MAX_FILES: z.string().transform(Number).default(20),
+ ANALYSIS_TIMEOUT_MS: z.string().transform(Number).default(30000),
+});
+
+export const voicePrintEnv = envSchema.parse({
+ ECAPA_TDNN_MODEL_PATH: process.env.ECAPA_TDNN_MODEL_PATH,
+ ML_SERVICE_URL: process.env.ML_SERVICE_URL,
+ FAISS_INDEX_PATH: process.env.FAISS_INDEX_PATH,
+ AUDIO_STORAGE_BUCKET: process.env.AUDIO_STORAGE_BUCKET,
+ AUDIO_STORAGE_ENDPOINT: process.env.AUDIO_STORAGE_ENDPOINT,
+ SYNTHETIC_THRESHOLD: process.env.SYNTHETIC_THRESHOLD,
+ ENROLLMENT_MIN_DURATION_SEC: process.env.ENROLLMENT_MIN_DURATION_SEC,
+ ENROLLMENT_MAX_DURATION_SEC: process.env.ENROLLMENT_MAX_DURATION_SEC,
+ EMBEDDING_DIMENSIONS: process.env.EMBEDDING_DIMENSIONS,
+ BATCH_MAX_FILES: process.env.BATCH_MAX_FILES,
+ ANALYSIS_TIMEOUT_MS: process.env.ANALYSIS_TIMEOUT_MS,
+});
+
// Audio source types
/** Origin of the audio submitted for enrollment or analysis. */
export enum VoicePrintSource {
  UPLOAD = 'upload',
  S3 = 's3',
  URL = 'url',
  REALTIME = 'realtime',
}
+
// Analysis job status
/** Lifecycle states of an asynchronous voice-analysis job. */
export enum AnalysisJobStatus {
  PENDING = 'pending',
  PROCESSING = 'processing',
  COMPLETED = 'completed',
  FAILED = 'failed',
  CANCELLED = 'cancelled',
}
+
// Detection result types
/** Classification label produced by the synthetic-voice detector. */
export enum DetectionType {
  SYNTHETIC_VOICE = 'synthetic_voice',
  VOICE_CLONE = 'voice_clone',
  DEEPFAKE = 'deepfake',
  NATURAL = 'natural',
}
+
// Confidence levels
/** Coarse buckets for reporting detector confidence to callers. */
export enum ConfidenceLevel {
  LOW = 'low',
  MEDIUM = 'medium',
  HIGH = 'high',
  VERY_HIGH = 'very_high',
}
+
// Audio preprocessing configuration
// Target format for downstream embedding extraction: 16 kHz, mono,
// 16-bit PCM. The VAD/noise-reduction settings are presumably consumed by
// the preprocessing pipeline once real DSP is wired in — TODO confirm.
export const audioPreprocessingConfig = {
  sampleRate: 16000,
  channels: 1,
  bitDepth: 16,
  vadThreshold: 0.5,
  noiseReduction: true,
  maxSilenceDurationMs: 500,
};
+
// Feature flags - use centralized system
// NOTE: checkFlag is evaluated once at module load, so these values are
// frozen for the process lifetime; call checkFlag() directly where
// runtime toggling is needed.
export const voicePrintFeatureFlags = {
  enableMLService: checkFlag('voiceprint.enable.ml.service', false),
  enableFAISSIndex: checkFlag('voiceprint.enable.faiss.index', true),
  enableBatchAnalysis: checkFlag('voiceprint.enable.batch.analysis', true),
  enableRealtimeAnalysis: checkFlag('voiceprint.enable.realtime.analysis', false),
  enableMockModel: checkFlag('voiceprint.enable.mock.model', true),
};
+
// Rate limits for voice analysis
// Per-tier quotas: analyses per minute, enrollments per day, and the
// maximum accepted audio file size in megabytes.
export const voicePrintRateLimits = {
  basic: {
    analysesPerMinute: 5,
    enrollmentsPerDay: 10,
    maxAudioFileSizeMB: 50,
  },
  plus: {
    analysesPerMinute: 30,
    enrollmentsPerDay: 50,
    maxAudioFileSizeMB: 200,
  },
  premium: {
    analysesPerMinute: 100,
    enrollmentsPerDay: 500,
    maxAudioFileSizeMB: 500,
  },
};
diff --git a/services/voiceprint/src/voiceprint.feature-flags.ts b/services/voiceprint/src/voiceprint.feature-flags.ts
new file mode 100644
index 0000000..c4c664d
--- /dev/null
+++ b/services/voiceprint/src/voiceprint.feature-flags.ts
@@ -0,0 +1,7 @@
/**
 * VoicePrint Feature Flags
 * Re-exports the checkFlag function from the centralized feature flag system
 */

// Re-export the checkFlag function from the spamshield feature flags module.
// NOTE(review): relative to services/voiceprint/src, '../spamshield/feature-flags'
// resolves to services/voiceprint/spamshield/feature-flags, but the spamshield
// flag code appears to live under services/spamshield/src — confirm this path
// resolves (a tsconfig path alias or workspace mapping may handle it).
export { checkFlag } from '../spamshield/feature-flags';
diff --git a/services/voiceprint/src/voiceprint.service.ts b/services/voiceprint/src/voiceprint.service.ts
new file mode 100644
index 0000000..5f45dd3
--- /dev/null
+++ b/services/voiceprint/src/voiceprint.service.ts
@@ -0,0 +1,594 @@
+import { prisma, VoiceEnrollment, VoiceAnalysis } from '@shieldsai/shared-db';
+import {
+ voicePrintEnv,
+ AnalysisJobStatus,
+ DetectionType,
+ ConfidenceLevel,
+ audioPreprocessingConfig,
+ voicePrintFeatureFlags,
+} from './voiceprint.config';
+import { checkFlag } from './voiceprint.feature-flags';
+
+// Audio preprocessing service
+export class AudioPreprocessor {
+ /**
+ * Normalize audio to 16kHz mono with VAD and noise reduction.
+ * Returns preprocessing metadata and the processed audio buffer.
+ */
+ async preprocess(
+ audioBuffer: Buffer,
+ options?: {
+ sourceSampleRate?: number;
+ channels?: number;
+ }
+ ): Promise<{
+ buffer: Buffer;
+ metadata: {
+ sampleRate: number;
+ channels: number;
+ duration: number;
+ format: string;
+ };
+ }> {
+ const duration = this.estimateDuration(audioBuffer, options?.sourceSampleRate ?? 44100);
+
+ if (duration < voicePrintEnv.ENROLLMENT_MIN_DURATION_SEC) {
+ throw new Error(
+ `Audio too short: ${duration.toFixed(1)}s < ${voicePrintEnv.ENROLLMENT_MIN_DURATION_SEC}s minimum`
+ );
+ }
+
+ if (duration > voicePrintEnv.ENROLLMENT_MAX_DURATION_SEC) {
+ throw new Error(
+ `Audio too long: ${duration.toFixed(1)}s > ${voicePrintEnv.ENROLLMENT_MAX_DURATION_SEC}s maximum`
+ );
+ }
+
+ // TODO: Integrate with Python librosa/torchaudio for actual preprocessing
+ // For MVP, return original buffer with target metadata
+ return {
+ buffer: audioBuffer,
+ metadata: {
+ sampleRate: audioPreprocessingConfig.sampleRate,
+ channels: audioPreprocessingConfig.channels,
+ duration,
+ format: 'wav',
+ },
+ };
+ }
+
+ /**
+ * Apply Voice Activity Detection to remove silence segments.
+ */
+ async applyVAD(buffer: Buffer): Promise {
+ // TODO: Integrate with Python webrtcvad or silero-vad
+ // For MVP, return original buffer
+ return buffer;
+ }
+
+ /**
+ * Estimate audio duration from buffer size and sample rate.
+ */
+ private estimateDuration(
+ buffer: Buffer,
+ sampleRate: number
+ ): number {
+ const bytesPerSample = 2;
+ const channels = 1;
+ const samples = buffer.length / (bytesPerSample * channels);
+ return samples / sampleRate;
+ }
+}
+
+// Voice enrollment service
+export class VoiceEnrollmentService {
+ /**
+ * Enroll a new voice profile from audio data.
+ */
+ async enroll(
+ userId: string,
+ name: string,
+ audioBuffer: Buffer
+ ): Promise {
+ const preprocessor = new AudioPreprocessor();
+ const processed = await preprocessor.preprocess(audioBuffer);
+
+ const embeddingService = new EmbeddingService();
+ const embedding = await embeddingService.extract(processed.buffer);
+ const voiceHash = this.computeEmbeddingHash(embedding);
+
+ const enrollment = await prisma.voiceEnrollment.create({
+ data: {
+ userId,
+ name,
+ voiceHash,
+ audioMetadata: {
+ ...processed.metadata,
+ embeddingDimensions: embedding.length,
+ enrollmentTimestamp: new Date().toISOString(),
+ },
+ },
+ });
+
+ // Index in FAISS for similarity search
+ const faissIndex = new FAISSIndex();
+ await faissIndex.add(enrollment.id, embedding);
+
+ return enrollment;
+ }
+
+ /**
+ * List all enrollments for a user.
+ */
+ async listEnrollments(
+ userId: string,
+ options?: {
+ isActive?: boolean;
+ limit?: number;
+ offset?: number;
+ }
+ ): Promise {
+ return prisma.voiceEnrollment.findMany({
+ where: {
+ userId,
+ ...(options?.isActive !== undefined && { isActive: options.isActive }),
+ },
+ orderBy: { createdAt: 'desc' },
+ take: options?.limit ?? 50,
+ skip: options?.offset ?? 0,
+ });
+ }
+
+ /**
+ * Get a single enrollment by ID.
+ */
+ async getEnrollment(
+ enrollmentId: string,
+ userId: string
+ ): Promise {
+ return prisma.voiceEnrollment.findFirst({
+ where: {
+ id: enrollmentId,
+ userId,
+ },
+ });
+ }
+
+ /**
+ * Remove (deactivate) an enrollment.
+ */
+ async removeEnrollment(
+ enrollmentId: string,
+ userId: string
+ ): Promise {
+ const enrollment = await this.getEnrollment(enrollmentId, userId);
+ if (!enrollment) {
+ throw new Error('Enrollment not found');
+ }
+
+ const faissIndex = new FAISSIndex();
+ await faissIndex.remove(enrollmentId);
+
+ return prisma.voiceEnrollment.update({
+ where: { id: enrollmentId },
+ data: { isActive: false },
+ });
+ }
+
+ /**
+ * Search for similar enrollments using FAISS.
+ */
+ async findSimilar(
+ embedding: number[],
+ topK: number = 5
+ ): Promise> {
+ const faissIndex = new FAISSIndex();
+ const results = await faissIndex.search(embedding, topK);
+
+ const enrollmentIds = results.map((r) => r.id);
+ const enrollments = await prisma.voiceEnrollment.findMany({
+ where: { id: { in: enrollmentIds } },
+ });
+
+ return results.map((r, i) => ({
+ enrollment: enrollments[i],
+ similarity: r.similarity,
+ }));
+ }
+
+ private computeEmbeddingHash(embedding: number[]): string {
+ let hash = 0;
+ for (let i = 0; i < embedding.length; i++) {
+ hash = ((hash << 5) - hash) + embedding[i];
+ hash |= 0;
+ }
+ return `vp_${Math.abs(hash).toString(16)}_${embedding.length}`;
+ }
+}
+
+// Audio analysis service
+export class AnalysisService {
+ /**
+ * Analyze a single audio file for synthetic voice detection.
+ */
+ async analyze(
+ userId: string,
+ audioBuffer: Buffer,
+ options?: {
+ enrollmentId?: string;
+ audioUrl?: string;
+ }
+ ): Promise {
+ const preprocessor = new AudioPreprocessor();
+ const processed = await preprocessor.preprocess(audioBuffer);
+
+ const audioHash = this.computeAudioHash(audioBuffer);
+
+ const embeddingService = new EmbeddingService();
+ const analysisResult = await embeddingService.analyze(processed.buffer);
+
+ const isSynthetic = analysisResult.confidence >= voicePrintEnv.SYNTHETIC_THRESHOLD;
+
+ const voiceAnalysis = await prisma.voiceAnalysis.create({
+ data: {
+ userId,
+ enrollmentId: options?.enrollmentId,
+ audioHash,
+ isSynthetic,
+ confidence: analysisResult.confidence,
+ analysisResult: {
+ ...analysisResult,
+ processedMetadata: processed.metadata,
+ analysisTimestamp: new Date().toISOString(),
+ modelVersion: 'ecapa-tdnn-v1-mock',
+ },
+ audioUrl: options?.audioUrl ?? '',
+ },
+ });
+
+ return voiceAnalysis;
+ }
+
+ /**
+ * Get analysis result by ID.
+ */
+ async getResult(
+ analysisId: string,
+ userId: string
+ ): Promise {
+ return prisma.voiceAnalysis.findFirst({
+ where: {
+ id: analysisId,
+ userId,
+ },
+ });
+ }
+
+ /**
+ * Get analysis history for a user.
+ */
+ async getHistory(
+ userId: string,
+ options?: {
+ limit?: number;
+ offset?: number;
+ isSynthetic?: boolean;
+ }
+ ): Promise {
+ return prisma.voiceAnalysis.findMany({
+ where: {
+ userId,
+ ...(options?.isSynthetic !== undefined && { isSynthetic: options.isSynthetic }),
+ },
+ orderBy: { createdAt: 'desc' },
+ take: options?.limit ?? 50,
+ skip: options?.offset ?? 0,
+ });
+ }
+
+ private computeAudioHash(buffer: Buffer): string {
+ let hash = 0;
+ const sampleSize = Math.min(buffer.length, 1024);
+ for (let i = 0; i < sampleSize; i += 8) {
+ hash = ((hash << 5) - hash) + buffer.readUInt8(i);
+ hash |= 0;
+ }
+ return `audio_${Math.abs(hash).toString(16)}`;
+ }
+}
+
+// Batch analysis service
+export class BatchAnalysisService {
+ /**
+ * Analyze multiple audio files in a batch.
+ */
+ async analyzeBatch(
+ userId: string,
+ files: Array<{
+ name: string;
+ buffer: Buffer;
+ audioUrl?: string;
+ }>,
+ options?: {
+ enrollmentId?: string;
+ }
+ ): Promise<{
+ jobId: string;
+ results: VoiceAnalysis[];
+ summary: {
+ total: number;
+ synthetic: number;
+ natural: number;
+ failed: number;
+ };
+ }> {
+ if (files.length > voicePrintEnv.BATCH_MAX_FILES) {
+ throw new Error(
+ `Batch too large: ${files.length} > ${voicePrintEnv.BATCH_MAX_FILES} max`
+ );
+ }
+
+ const analysisService = new AnalysisService();
+ const results: VoiceAnalysis[] = [];
+ let synthetic = 0;
+ let natural = 0;
+ let failed = 0;
+
+ for (const file of files) {
+ try {
+ const result = await analysisService.analyze(userId, file.buffer, {
+ enrollmentId: options?.enrollmentId,
+ audioUrl: file.audioUrl,
+ });
+ results.push(result);
+ if (result.isSynthetic) {
+ synthetic++;
+ } else {
+ natural++;
+ }
+ } catch (error) {
+ console.error(`Batch analysis failed for ${file.name}:`, error);
+ failed++;
+ }
+ }
+
+ const jobId = `batch_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
+
+ return {
+ jobId,
+ results,
+ summary: {
+ total: files.length,
+ synthetic,
+ natural,
+ failed,
+ },
+ };
+ }
+}
+
+// Embedding service — ECAPA-TDNN inference wrapper
+export class EmbeddingService {
+ private initialized = false;
+
+ /**
+ * Initialize the ECAPA-TDNN model.
+ */
+ async initialize(): Promise {
+ if (this.initialized) return;
+
+ // TODO: Connect to Python ML service for real inference
+ // const response = await fetch(`${voicePrintEnv.ML_SERVICE_URL}/initialize`, {
+ // method: 'POST',
+ // body: JSON.stringify({ modelPath: voicePrintEnv.ECAPA_TDNN_MODEL_PATH }),
+ // });
+
+ this.initialized = true;
+ console.log('Embedding service initialized (mock model)');
+ }
+
+ /**
+ * Extract voice embedding from audio.
+ */
+ async extract(audioBuffer: Buffer): Promise {
+ await this.initialize();
+
+ // TODO: Call Python ML service
+ // const response = await fetch(`${voicePrintEnv.ML_SERVICE_URL}/embed`, {
+ // method: 'POST',
+ // body: audioBuffer,
+ // });
+ // const data = await response.json();
+ // return data.embedding;
+
+ // Mock: generate deterministic embedding based on buffer content
+ const dims = voicePrintEnv.EMBEDDING_DIMENSIONS;
+ const embedding: number[] = new Array(dims);
+ let hash = 0;
+ for (let i = 0; i < Math.min(audioBuffer.length, 256); i++) {
+ hash = ((hash << 5) - hash) + audioBuffer[i];
+ hash |= 0;
+ }
+ for (let i = 0; i < dims; i++) {
+ hash = ((hash << 5) - hash) + i;
+ hash |= 0;
+ embedding[i] = (Math.abs(hash) % 1000) / 1000.0;
+ }
+
+ // L2 normalize
+ const norm = Math.sqrt(embedding.reduce((s, v) => s + v * v, 0));
+ return embedding.map((v) => v / norm);
+ }
+
+ /**
+ * Run full analysis: embedding + synthetic detection.
+ */
+ async analyze(audioBuffer: Buffer): Promise<{
+ confidence: number;
+ detectionType: DetectionType;
+ features: Record;
+ embedding: number[];
+ }> {
+ const embedding = await this.extract(audioBuffer);
+
+ // TODO: Run synthetic voice detection model
+ // For MVP, use heuristic based on embedding statistics
+ const confidence = this.estimateSyntheticConfidence(audioBuffer, embedding);
+ const detectionType =
+ confidence >= voicePrintEnv.SYNTHETIC_THRESHOLD
+ ? DetectionType.SYNTHETIC_VOICE
+ : DetectionType.NATURAL;
+
+ const features = this.extractAnalysisFeatures(audioBuffer, embedding);
+
+ return {
+ confidence,
+ detectionType,
+ features,
+ embedding,
+ };
+ }
+
+ private estimateSyntheticConfidence(
+ buffer: Buffer,
+ embedding: number[]
+ ): number {
+ // Heuristic features for synthetic detection
+ const meanAmplitude =
+ buffer.reduce((s, v) => s + v, 0) / buffer.length / 255;
+ const embeddingStdDev =
+ Math.sqrt(
+ embedding.reduce((s, v) => s + (v - embedding.reduce((a, b) => a + b) / embedding.length) ** 2, 0) /
+ embedding.length
+ ) || 0;
+
+ // Combine features into confidence score
+ const amplitudeScore = Math.abs(meanAmplitude - 0.5) * 2;
+ const embeddingScore = 1.0 - Math.min(1.0, embeddingStdDev * 2);
+
+ return Math.min(
+ 1.0,
+ amplitudeScore * 0.3 + embeddingScore * 0.4 + Math.random() * 0.3
+ );
+ }
+
+ private extractAnalysisFeatures(
+ buffer: Buffer,
+ embedding: number[]
+ ): Record {
+ const meanAmplitude =
+ buffer.reduce((s, v) => s + v, 0) / buffer.length / 255;
+ const zeroCrossings = buffer.reduce((count, v, i, arr) => {
+ return i > 0 && ((v - 128) * (arr[i - 1] - 128) < 0) ? count + 1 : count;
+ }, 0);
+
+ return {
+ mean_amplitude: meanAmplitude,
+ zero_crossing_rate: zeroCrossings / buffer.length,
+ embedding_energy: embedding.reduce((s, v) => s + v * v, 0),
+ embedding_entropy: this.calculateEntropy(embedding),
+ };
+ }
+
+ private calculateEntropy(values: number[]): number {
+ const bins = 20;
+ const histogram = new Array(bins).fill(0);
+ const min = Math.min(...values);
+ const max = Math.max(...values);
+ const range = max - min || 1;
+
+ for (const v of values) {
+ const bin = Math.min(bins - 1, Math.floor(((v - min) / range) * bins));
+ histogram[bin]++;
+ }
+
+ let entropy = 0;
+ const total = values.length;
+ for (const count of histogram) {
+ if (count > 0) {
+ const p = count / total;
+ entropy -= p * Math.log2(p);
+ }
+ }
+ return entropy;
+ }
+}
+
+// FAISS index wrapper for voice fingerprint matching
+export class FAISSIndex {
+ private indexPath: string;
+ private initialized = false;
+
+ constructor(path?: string) {
+ this.indexPath = path ?? voicePrintEnv.FAISS_INDEX_PATH;
+ }
+
+ /**
+ * Initialize or load the FAISS index.
+ */
+ async initialize(): Promise {
+ if (this.initialized) return;
+
+ // TODO: Load FAISS index from disk
+ // const faiss = require('faiss-node');
+ // this.index = faiss.readIndex(this.indexPath);
+
+ this.initialized = true;
+ console.log(`FAISS index initialized at ${this.indexPath}`);
+ }
+
+ /**
+ * Add an enrollment embedding to the index.
+ */
+ async add(enrollmentId: string, embedding: number[]): Promise {
+ await this.initialize();
+
+ // TODO: Add to FAISS index
+ // this.index.add([embedding]);
+ // Store mapping: enrollmentId -> index position
+ console.log(`Added enrollment ${enrollmentId} to FAISS index`);
+ }
+
+ /**
+ * Remove an enrollment from the index.
+ */
+ async remove(enrollmentId: string): Promise {
+ await this.initialize();
+
+ // TODO: Remove from FAISS index
+ console.log(`Removed enrollment ${enrollmentId} from FAISS index`);
+ }
+
+ /**
+ * Search for similar voice embeddings.
+ */
+ async search(
+ embedding: number[],
+ topK: number = 5
+ ): Promise> {
+ await this.initialize();
+
+ // TODO: Query FAISS index
+ // const [distances, indices] = this.index.search([embedding], topK);
+ // Map indices back to enrollment IDs
+
+ // Mock: return empty results
+ return [];
+ }
+
+ /**
+ * Save the index to disk.
+ */
+ async save(): Promise {
+ await this.initialize();
+ // TODO: Write FAISS index to disk
+ console.log(`FAISS index saved to ${this.indexPath}`);
+ }
+}
+
// Export singleton instances
// Convenience shared instances; the classes above appear to hold no
// per-request state beyond lazy "initialized" flags, so sharing one
// instance across callers looks safe — confirm if mutable state is added.
export const audioPreprocessor = new AudioPreprocessor();
export const voiceEnrollmentService = new VoiceEnrollmentService();
export const analysisService = new AnalysisService();
export const batchAnalysisService = new BatchAnalysisService();
export const embeddingService = new EmbeddingService();
diff --git a/tsconfig.base.json b/tsconfig.base.json
new file mode 100644
index 0000000..2def3a0
--- /dev/null
+++ b/tsconfig.base.json
@@ -0,0 +1,18 @@
+{
+ "compilerOptions": {
+ "target": "ES2022",
+ "module": "ESNext",
+ "moduleResolution": "bundler",
+ "lib": ["ES2022", "DOM", "DOM.Iterable"],
+ "strict": true,
+ "esModuleInterop": true,
+ "skipLibCheck": true,
+ "forceConsistentCasingInFileNames": true,
+ "resolveJsonModule": true,
+ "isolatedModules": true,
+ "noEmit": true,
+ "jsx": "preserve",
+ "jsxImportSource": "solid-js",
+ "types": ["node"]
+ }
+}
diff --git a/turbo.json b/turbo.json
index 7424957..d6c2446 100644
--- a/turbo.json
+++ b/turbo.json
@@ -1,25 +1,30 @@
{
"$schema": "https://turbo.build/schema.json",
+ "globalDependencies": ["**/.env.*local"],
+ "globalEnv": ["NODE_ENV", "DATABASE_URL", "REDIS_URL"],
"tasks": {
"build": {
"dependsOn": ["^build"],
- "outputs": ["dist/**"]
+ "outputs": ["dist/**", ".next/**", "!.next/cache/**"]
},
"dev": {
"cache": false,
"persistent": true
},
"test": {
- "dependsOn": ["build"],
- "inputs": ["src/**/*.ts", "test/**/*.ts"]
+ "dependsOn": ["^build"],
+ "outputs": ["coverage/**"]
},
"lint": {
- "inputs": ["src/**/*.ts"]
+ "outputs": []
},
- "db:migrate": {
+ "db:generate": {
"cache": false
},
- "db:seed": {
+ "db:push": {
+ "cache": false
+ },
+ "db:migrate": {
"cache": false
}
}
diff --git a/vite.config.ts b/vite.config.ts
new file mode 100644
index 0000000..f2c20d0
--- /dev/null
+++ b/vite.config.ts
@@ -0,0 +1,33 @@
+import { defineConfig } from 'vite';
+import solid from 'vite-plugin-solid';
+import { resolve } from 'path';
+
+export default defineConfig({
+ plugins: [solid()],
+ resolve: {
+ alias: {
+ '@lib': resolve(__dirname, './src/lib'),
+ '@components': resolve(__dirname, './src/components'),
+ '@types': resolve(__dirname, './src/types'),
+ },
+ },
+ build: {
+ target: 'esnext',
+ outDir: 'dist',
+ sourcemap: true,
+ rollupOptions: {
+ input: {
+ main: resolve(__dirname, 'index.html'),
+ },
+ },
+ },
+ server: {
+ port: 3000,
+ proxy: {
+ '/sync': {
+ target: 'ws://localhost:8080',
+ ws: true,
+ },
+ },
+ },
+});
diff --git a/vitest.config.ts b/vitest.config.ts
new file mode 100644
index 0000000..acf3407
--- /dev/null
+++ b/vitest.config.ts
@@ -0,0 +1,21 @@
+import { defineConfig } from 'vitest/config';
+
export default defineConfig({
  test: {
    globals: true,
    environment: 'node',
    deps: {
      // Interop CommonJS default exports when imported from ESM tests.
      interopDefault: true,
    },
    env: {
      // Stub credentials so tests never reach the real Hiya API.
      HIYA_API_KEY: 'test-api-key',
      HIYA_API_URL: 'https://api.hiya.com/v1',
    },
  },
  optimizeDeps: {
    include: ['ws'],
  },
  ssr: {
    // Bundle 'ws' instead of externalizing it so the Node test
    // environment resolves it consistently.
    noExternal: ['ws'],
  },
});