FRE-4529: Transfer ShieldAI code from FrenoCorp repo

Transferred ShieldAI-related files mistakenly placed in ~/code/FrenoCorp:
- Services: spamshield (feature-flags, audit-logger, error-handler), voiceprint (config, service, feature-flags), darkwatch (pipeline, scan, scheduler, watchlist, webhook)
- Packages: shared-analytics, shared-auth, shared-ui, shared-utils (new); shared-billing, jobs supplemented with unique FC files
- Server: alerts (FC version newer), routes (spamshield, darkwatch, voiceprint)
- Config: turbo.json, tsconfig.base.json, vite/vitest configs, drizzle, Dockerfile
- VoicePrint ML service
- Examples

Pending: apps/{api,web,mobile}/ structured merge, shared-db/db mapping

Co-Authored-By: Paperclip <noreply@paperclip.ing>
This commit is contained in:
2026-05-02 10:13:13 -04:00
parent 8687868632
commit 1e42c4a5c2
45 changed files with 4837 additions and 562 deletions

View File

@@ -0,0 +1,173 @@
import { prisma, SubscriptionTier } from '@shieldsai/shared-db';
import { Queue, Worker, Job } from 'bullmq';
import { Redis } from 'ioredis';
import { tierConfig, getTierFeatures } from '@shieldsai/shared-billing';
import { mixpanelService, EventType } from '@shieldsai/shared-analytics';
// Shared ioredis connection for the DarkWatch queue and worker.
const redisHost = process.env.REDIS_HOST || 'localhost';
const redisPort = parseInt(process.env.REDIS_PORT || '6379', 10);
const connection = new Redis({
  host: redisHost,
  port: redisPort,
  // BullMQ requires maxRetriesPerRequest to be null on connections used by
  // Workers: blocking commands (e.g. BZPOPMIN) must not be interrupted by
  // ioredis per-request retries. Recent BullMQ versions throw at Worker
  // construction when this is left at the ioredis default.
  maxRetriesPerRequest: null,
  // Exponential-ish backoff on reconnect, capped at 2s between attempts.
  retryStrategy: (times: number) => Math.min(times * 50, 2000),
});
/** Read an integer from the environment, using `fallback` when unset/empty. */
const envInt = (name: string, fallback: string): number =>
  parseInt(process.env[name] || fallback, 10);

/** Tunables for the DarkWatch scan queue, overridable via environment. */
const QUEUE_CONFIG = {
  darkwatchScan: {
    name: 'darkwatch-scan',
    concurrency: envInt('DARKWATCH_CONCURRENCY', '5'),
    defaultJobTimeout: envInt('DARKWATCH_JOB_TIMEOUT', '120000'),
    maxAttempts: envInt('DARKWATCH_MAX_ATTEMPTS', '3'),
  },
};
/** Queue for DarkWatch scan jobs; processed by darkwatchScanWorker below. */
export const darkwatchScanQueue = new Queue(QUEUE_CONFIG.darkwatchScan.name, {
  connection,
});
/**
 * Process one DarkWatch scan job.
 *
 * Looks up the subscription, scans its watchlist items via scanService,
 * pushes any freshly detected exposures through the alert pipeline, and
 * emits scan-started/scan-completed analytics events.
 *
 * @param job BullMQ job carrying the subscription id, tier, and the scan
 *   trigger type (`scheduled` | `on-demand` | `realtime`).
 * @returns `{ status: 'skipped' }` when the subscription no longer exists,
 *   otherwise `{ status: 'completed', … }` with exposure/watchlist counts.
 * @throws Re-throws any failure (with original stack preserved) so BullMQ
 *   can record it and apply its retry policy.
 */
async function processDarkwatchScan(
  job: Job<{
    subscriptionId: string;
    tier: string;
    scanType: 'scheduled' | 'on-demand' | 'realtime';
    sourceData?: Record<string, unknown>;
  }>
) {
  // NOTE: sourceData is accepted in the payload but unused here.
  const { subscriptionId, tier, scanType } = job.data;
  // Dynamic imports — presumably to break a circular dependency between the
  // jobs package and the API services; TODO confirm before hoisting these.
  const { scanService } = await import(
    '../../../apps/api/src/services/darkwatch/scan.service'
  );
  const { alertPipeline } = await import(
    '../../../apps/api/src/services/darkwatch/alert.pipeline'
  );
  // updateProgress returns a Promise; await it so a failed progress write
  // surfaces here instead of becoming an unhandled rejection.
  await job.updateProgress(10);
  console.log(
    `[DarkWatch:Scan] Starting ${scanType} scan for subscription ${subscriptionId} (tier: ${tier})`
  );
  try {
    const subscription = await prisma.subscription.findUnique({
      where: { id: subscriptionId },
      select: { userId: true, tier: true },
    });
    if (!subscription) {
      // Subscription was removed between enqueue and processing: not an error.
      await job.updateProgress(100);
      return { status: 'skipped', reason: 'subscription_not_found' };
    }
    await mixpanelService.track(
      EventType.DARK_WEB_SCAN_STARTED,
      subscription.userId,
      {
        scanType,
        subscriptionTier: subscription.tier,
      }
    );
    await job.updateProgress(25);
    const watchlistItems = await scanService.getWatchlistItems(subscriptionId);
    if (watchlistItems.length === 0) {
      // Nothing to scan; report a successful no-op.
      await job.updateProgress(100);
      return { status: 'completed', exposuresCreated: 0, exposuresUpdated: 0 };
    }
    await job.updateProgress(50);
    const { exposuresCreated, exposuresUpdated } =
      await scanService.processSubscriptionScan(subscriptionId, watchlistItems);
    await job.updateProgress(80);
    // Pick up exposures flagged as first-time within the last 5 minutes —
    // i.e. the ones this scan run just created — for alerting.
    const newExposureIds = await prisma.exposure.findMany({
      where: {
        subscriptionId,
        isFirstTime: true,
        detectedAt: { gte: new Date(Date.now() - 5 * 60 * 1000) },
      },
      select: { id: true },
    });
    if (newExposureIds.length > 0) {
      await alertPipeline.processNewExposures(newExposureIds.map((e) => e.id));
    }
    await alertPipeline.dispatchScanCompleteAlert(
      subscriptionId,
      subscription.userId,
      exposuresCreated
    );
    await job.updateProgress(95);
    await mixpanelService.track(
      EventType.DARK_WEB_SCAN_COMPLETED,
      subscription.userId,
      {
        scanType,
        subscriptionTier: subscription.tier,
        exposuresCreated,
        exposuresUpdated,
        watchlistItemsScanned: watchlistItems.length,
      }
    );
    await job.updateProgress(100);
    return {
      status: 'completed',
      exposuresCreated,
      exposuresUpdated,
      watchlistItemsScanned: watchlistItems.length,
    };
  } catch (error) {
    const message = error instanceof Error ? error.message : 'Scan failed';
    console.error(`[DarkWatch:Scan] Job ${job.id} failed:`, message);
    await job.updateProgress(100);
    // Re-throw the original Error to preserve its stack trace and subclass;
    // wrap non-Error throwables so BullMQ always records an Error instance.
    if (error instanceof Error) {
      throw error;
    }
    throw new Error(message);
  }
}
// Retention ages below are in seconds (BullMQ KeepJobs semantics).
const SECONDS_PER_DAY = 24 * 60 * 60;

/**
 * Worker that executes DarkWatch scan jobs from the queue above.
 * NOTE(review): QUEUE_CONFIG.darkwatchScan.defaultJobTimeout and maxAttempts
 * are not wired into these options in this file — confirm where they apply.
 */
export const darkwatchScanWorker = new Worker(
  QUEUE_CONFIG.darkwatchScan.name,
  processDarkwatchScan,
  {
    connection,
    concurrency: QUEUE_CONFIG.darkwatchScan.concurrency,
    // Rate limit: start at most 20 jobs per 1000ms window.
    limiter: { max: 20, duration: 1000 },
    // Keep completed jobs 7 days, capped at the 1000 most recent.
    removeOnComplete: { age: 7 * SECONDS_PER_DAY, count: 1000 },
    // Keep failed jobs 30 days, capped at the 100 most recent.
    removeOnFail: { age: 30 * SECONDS_PER_DAY, count: 100 },
  }
);
darkwatchScanWorker.on('completed', (job, result) => {
console.log(`[DarkWatch:Scan] Job ${job.id} completed:`, result);
});
darkwatchScanWorker.on('failed', (job, err) => {
console.error(`[DarkWatch:Scan] Job ${job?.id} failed:`, err.message);
});
darkwatchScanWorker.on('error', (err) => {
console.error('[DarkWatch:Scan] Worker error:', err.message);
});
export default {
darkwatchScanQueue,
darkwatchScanWorker,
};