ids.alfacom.it/server/routes.ts
marco370 c8efe5c942 Improve ML stats display and add database fallback
Add backend logic for ML stats to use database when unavailable and update frontend to show offline status.

Replit-Commit-Author: Agent
Replit-Commit-Session-Id: 7a657272-55ba-4a79-9a2e-f1ed9bc7a528
Replit-Commit-Checkpoint-Type: intermediate_checkpoint
Replit-Commit-Event-Id: b69b4d7d-5491-401d-a003-d99b33ae655d
Replit-Commit-Screenshot-Url: https://storage.googleapis.com/screenshot-production-us-central1/449cf7c4-c97a-45ae-8234-e5c5b8d6a84f/7a657272-55ba-4a79-9a2e-f1ed9bc7a528/MmMtYN7
2026-02-16 11:29:12 +00:00

978 lines
34 KiB
TypeScript

import type { Express } from "express";
import { createServer, type Server } from "http";
import { storage } from "./storage";
import { insertRouterSchema, insertDetectionSchema, insertWhitelistSchema, insertPublicListSchema, networkAnalytics, routers, detections, networkLogs, trainingHistory } from "@shared/schema";
import { db } from "./db";
import { desc, eq, gte, sql } from "drizzle-orm";
import { blockIpOnAllRouters, unblockIpOnAllRouters, bulkBlockIps, testRouterConnection } from "./mikrotik";
export async function registerRoutes(app: Express): Promise<Server> {
  // --- Router CRUD -------------------------------------------------------
  // List every configured MikroTik router.
  app.get("/api/routers", async (_req, res) => {
    try {
      res.json(await storage.getAllRouters());
    } catch (error) {
      console.error('[DB ERROR] Failed to fetch routers:', error);
      res.status(500).json({ error: "Failed to fetch routers" });
    }
  });
  // Create a router; the payload is validated with the shared Zod schema.
  app.post("/api/routers", async (req, res) => {
    try {
      const payload = insertRouterSchema.parse(req.body);
      res.json(await storage.createRouter(payload));
    } catch (error) {
      res.status(400).json({ error: "Invalid router data" });
    }
  });
  // Replace a router's configuration; 404 when the id is unknown.
  app.put("/api/routers/:id", async (req, res) => {
    try {
      const payload = insertRouterSchema.parse(req.body);
      const updated = await storage.updateRouter(req.params.id, payload);
      if (updated) {
        res.json(updated);
      } else {
        res.status(404).json({ error: "Router not found" });
      }
    } catch (error) {
      console.error('[Router UPDATE] Error:', error);
      res.status(400).json({ error: "Invalid router data" });
    }
  });
  // Delete a router; 404 when the id is unknown.
  app.delete("/api/routers/:id", async (req, res) => {
    try {
      const removed = await storage.deleteRouter(req.params.id);
      if (!removed) {
        res.status(404).json({ error: "Router not found" });
        return;
      }
      res.json({ success: true });
    } catch (error) {
      res.status(500).json({ error: "Failed to delete router" });
    }
  });
// Network Logs
app.get("/api/logs", async (req, res) => {
try {
const limit = parseInt(req.query.limit as string) || 100;
const logs = await storage.getRecentLogs(limit);
res.json(logs);
} catch (error) {
res.status(500).json({ error: "Failed to fetch logs" });
}
});
app.get("/api/logs/ip/:ip", async (req, res) => {
try {
const limit = parseInt(req.query.limit as string) || 50;
const logs = await storage.getLogsByIp(req.params.ip, limit);
res.json(logs);
} catch (error) {
res.status(500).json({ error: "Failed to fetch logs for IP" });
}
});
// Detections
app.get("/api/detections", async (req, res) => {
try {
const limit = req.query.limit ? parseInt(req.query.limit as string) : 50;
const offset = req.query.offset ? parseInt(req.query.offset as string) : 0;
const anomalyType = req.query.anomalyType as string | undefined;
const minScore = req.query.minScore ? parseFloat(req.query.minScore as string) : undefined;
const maxScore = req.query.maxScore ? parseFloat(req.query.maxScore as string) : undefined;
const search = req.query.search as string | undefined;
const result = await storage.getAllDetections({
limit,
offset,
anomalyType,
minScore,
maxScore,
search
});
res.json(result);
} catch (error) {
console.error('[DB ERROR] Failed to fetch detections:', error);
res.status(500).json({ error: "Failed to fetch detections" });
}
});
app.get("/api/dashboard/live", async (req, res) => {
try {
const hours = parseInt(req.query.hours as string) || 72;
const stats = await storage.getLiveDashboardStats(hours);
res.json(stats);
} catch (error) {
console.error('[DB ERROR] Failed to fetch dashboard stats:', error);
res.status(500).json({ error: "Failed to fetch dashboard stats" });
}
});
app.get("/api/detections/unblocked", async (req, res) => {
try {
const detections = await storage.getUnblockedDetections();
res.json(detections);
} catch (error) {
res.status(500).json({ error: "Failed to fetch unblocked detections" });
}
});
  // --- Whitelist ---------------------------------------------------------
  // Paginated whitelist listing with optional free-text search.
  app.get("/api/whitelist", async (req, res) => {
    try {
      const limit = parseInt(req.query.limit as string) || 50;
      const offset = parseInt(req.query.offset as string) || 0;
      const search = req.query.search as string || undefined;
      const result = await storage.getAllWhitelist({ limit, offset, search });
      res.json(result);
    } catch (error) {
      console.error('[DB ERROR] Failed to fetch whitelist:', error);
      res.status(500).json({ error: "Failed to fetch whitelist" });
    }
  });
  // Create a whitelist entry, then best-effort unblock that IP on every
  // enabled router. A failed auto-unblock only logs a warning — the DB
  // entry has already been created and the request still succeeds.
  app.post("/api/whitelist", async (req, res) => {
    try {
      const validatedData = insertWhitelistSchema.parse(req.body);
      const item = await storage.createWhitelist(validatedData);
      try {
        const allRouters = await storage.getAllRouters();
        const enabledRouters = allRouters.filter(r => r.enabled);
        if (enabledRouters.length > 0) {
          const results = await unblockIpOnAllRouters(enabledRouters as any, validatedData.ipAddress);
          const unblocked = results.filter(r => r.success).length;
          console.log(`[WHITELIST] Auto-unblocked ${validatedData.ipAddress} from ${unblocked}/${enabledRouters.length} routers`);
        }
      } catch (unblockError) {
        // Deliberate best-effort: swallow router errors, keep the entry.
        console.warn(`[WHITELIST] Auto-unblock failed for ${validatedData.ipAddress}:`, unblockError);
      }
      res.json(item);
    } catch (error) {
      res.status(400).json({ error: "Invalid whitelist data" });
    }
  });
  // Manually remove an IP from the given address-list on all enabled
  // routers and mark its detections as unblocked in the database.
  app.post("/api/unblock-ip", async (req, res) => {
    try {
      const { ipAddress, listName = "ddos_blocked" } = req.body;
      if (!ipAddress) {
        return res.status(400).json({ error: "IP address is required" });
      }
      const allRouters = await storage.getAllRouters();
      const enabledRouters = allRouters.filter(r => r.enabled);
      if (enabledRouters.length === 0) {
        return res.status(400).json({ error: "Nessun router abilitato" });
      }
      const results = await unblockIpOnAllRouters(enabledRouters as any, ipAddress, listName);
      const successCount = results.filter(r => r.success).length;
      // The detections table is updated even if some routers failed, so the
      // UI stops showing the IP as blocked.
      await db.update(detections)
        .set({ blocked: false })
        .where(eq(detections.sourceIp, ipAddress));
      console.log(`[UNBLOCK] ${ipAddress} rimosso da ${successCount}/${enabledRouters.length} router`);
      res.json({
        message: `IP ${ipAddress} sbloccato da ${successCount} router`,
        unblocked_from: successCount,
        total_routers: enabledRouters.length,
        results: results.map(r => ({ router: r.routerIp, success: r.success, error: r.error }))
      });
    } catch (error: any) {
      console.error('[UNBLOCK] Error:', error);
      res.status(500).json({ error: error.message || "Errore sblocco IP" });
    }
  });
  // Delete a whitelist entry by id; 404 when unknown.
  app.delete("/api/whitelist/:id", async (req, res) => {
    try {
      const success = await storage.deleteWhitelist(req.params.id);
      if (!success) {
        return res.status(404).json({ error: "Whitelist entry not found" });
      }
      res.json({ success: true });
    } catch (error) {
      res.status(500).json({ error: "Failed to delete whitelist entry" });
    }
  });
// Public Lists
app.get("/api/public-lists", async (req, res) => {
try {
const lists = await storage.getAllPublicLists();
res.json(lists);
} catch (error) {
console.error('[DB ERROR] Failed to fetch public lists:', error);
res.status(500).json({ error: "Failed to fetch public lists" });
}
});
app.get("/api/public-lists/:id", async (req, res) => {
try {
const list = await storage.getPublicListById(req.params.id);
if (!list) {
return res.status(404).json({ error: "List not found" });
}
res.json(list);
} catch (error) {
res.status(500).json({ error: "Failed to fetch list" });
}
});
app.post("/api/public-lists", async (req, res) => {
try {
const validatedData = insertPublicListSchema.parse(req.body);
const list = await storage.createPublicList(validatedData);
res.json(list);
} catch (error: any) {
console.error('[API ERROR] Failed to create public list:', error);
if (error.name === 'ZodError') {
return res.status(400).json({ error: "Invalid list data", details: error.errors });
}
res.status(400).json({ error: "Invalid list data" });
}
});
app.patch("/api/public-lists/:id", async (req, res) => {
try {
const validatedData = insertPublicListSchema.partial().parse(req.body);
const list = await storage.updatePublicList(req.params.id, validatedData);
if (!list) {
return res.status(404).json({ error: "List not found" });
}
res.json(list);
} catch (error: any) {
console.error('[API ERROR] Failed to update public list:', error);
if (error.name === 'ZodError') {
return res.status(400).json({ error: "Invalid list data", details: error.errors });
}
res.status(400).json({ error: "Invalid list data" });
}
});
app.delete("/api/public-lists/:id", async (req, res) => {
try {
const success = await storage.deletePublicList(req.params.id);
if (!success) {
return res.status(404).json({ error: "List not found" });
}
res.json({ success: true });
} catch (error) {
res.status(500).json({ error: "Failed to delete list" });
}
});
app.post("/api/public-lists/:id/sync", async (req, res) => {
try {
const list = await storage.getPublicListById(req.params.id);
if (!list) {
return res.status(404).json({ error: "List not found" });
}
console.log(`[SYNC] Starting sync for list: ${list.name} (${list.url})`);
// Fetch the list from URL
const response = await fetch(list.url, {
headers: {
'User-Agent': 'IDS-MikroTik-PublicListFetcher/2.0',
'Accept': 'application/json, text/plain, */*',
},
signal: AbortSignal.timeout(30000),
});
if (!response.ok) {
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
const contentType = response.headers.get('content-type') || '';
const text = await response.text();
// Parse IPs based on content type
let ips: Array<{ip: string, cidr: string | null}> = [];
if (contentType.includes('json') || list.url.endsWith('.json')) {
// JSON format (Spamhaus DROP v4 JSON)
try {
const data = JSON.parse(text);
if (Array.isArray(data)) {
for (const entry of data) {
if (entry.cidr) {
const [ip] = entry.cidr.split('/');
ips.push({ ip, cidr: entry.cidr });
} else if (entry.ip) {
ips.push({ ip: entry.ip, cidr: null });
}
}
}
} catch (e) {
console.error('[SYNC] Failed to parse JSON:', e);
throw new Error('Invalid JSON format');
}
} else {
// Plain text format (one IP/CIDR per line)
const lines = text.split('\n');
for (const line of lines) {
const trimmed = line.trim();
if (!trimmed || trimmed.startsWith('#') || trimmed.startsWith(';')) continue;
// Extract IP/CIDR from line
const match = trimmed.match(/^(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(\/\d{1,2})?/);
if (match) {
const ip = match[1];
const cidr = match[2] ? `${match[1]}${match[2]}` : null;
ips.push({ ip, cidr });
}
}
}
console.log(`[SYNC] Parsed ${ips.length} IPs from ${list.name}`);
// Save IPs to database
let added = 0;
let updated = 0;
for (const { ip, cidr } of ips) {
const result = await storage.upsertBlacklistIp(list.id, ip, cidr);
if (result.created) added++;
else updated++;
}
// Update list stats
await storage.updatePublicList(list.id, {
lastFetch: new Date(),
lastSuccess: new Date(),
totalIps: ips.length,
activeIps: ips.length,
errorCount: 0,
lastError: null,
});
console.log(`[SYNC] Completed: ${added} added, ${updated} updated for ${list.name}`);
res.json({
success: true,
message: `Sync completed: ${ips.length} IPs processed`,
added,
updated,
total: ips.length,
});
} catch (error: any) {
console.error('[API ERROR] Failed to sync:', error);
// Update error count
const list = await storage.getPublicListById(req.params.id);
if (list) {
await storage.updatePublicList(req.params.id, {
errorCount: (list.errorCount || 0) + 1,
lastError: error.message,
lastFetch: new Date(),
});
}
res.status(500).json({ error: `Sync failed: ${error.message}` });
}
});
// Public Blacklist IPs
app.get("/api/public-blacklist", async (req, res) => {
try {
const limit = parseInt(req.query.limit as string) || 1000;
const listId = req.query.listId as string | undefined;
const ipAddress = req.query.ipAddress as string | undefined;
const isActive = req.query.isActive === 'true';
const ips = await storage.getPublicBlacklistIps({
limit,
listId,
ipAddress,
isActive: req.query.isActive !== undefined ? isActive : undefined,
});
res.json(ips);
} catch (error) {
console.error('[DB ERROR] Failed to fetch blacklist IPs:', error);
res.status(500).json({ error: "Failed to fetch blacklist IPs" });
}
});
app.get("/api/public-blacklist/stats", async (req, res) => {
try {
const stats = await storage.getPublicBlacklistStats();
res.json(stats);
} catch (error) {
console.error('[DB ERROR] Failed to fetch blacklist stats:', error);
res.status(500).json({ error: "Failed to fetch stats" });
}
});
// Training History
app.get("/api/training-history", async (req, res) => {
try {
const limit = parseInt(req.query.limit as string) || 10;
const history = await storage.getTrainingHistory(limit);
res.json(history);
} catch (error) {
console.error('[DB ERROR] Failed to fetch training history:', error);
res.status(500).json({ error: "Failed to fetch training history" });
}
});
app.get("/api/training-history/latest", async (req, res) => {
try {
const latest = await storage.getLatestTraining();
res.json(latest || null);
} catch (error) {
res.status(500).json({ error: "Failed to fetch latest training" });
}
});
// Network Analytics
app.get("/api/analytics/recent", async (req, res) => {
try {
const days = parseInt(req.query.days as string) || 3;
const hourly = req.query.hourly === 'true';
const analytics = await storage.getRecentAnalytics(days, hourly);
res.json(analytics);
} catch (error) {
console.error('[DB ERROR] Failed to fetch recent analytics:', error);
res.status(500).json({ error: "Failed to fetch analytics" });
}
});
app.get("/api/analytics/range", async (req, res) => {
try {
const startDate = new Date(req.query.start as string);
const endDate = new Date(req.query.end as string);
const hourly = req.query.hourly === 'true';
if (isNaN(startDate.getTime()) || isNaN(endDate.getTime())) {
return res.status(400).json({ error: "Invalid date range" });
}
const analytics = await storage.getAnalyticsByDateRange(startDate, endDate, hourly);
res.json(analytics);
} catch (error) {
console.error('[DB ERROR] Failed to fetch analytics range:', error);
res.status(500).json({ error: "Failed to fetch analytics" });
}
});
  // --- Combined dashboard stats -----------------------------------------
  // One endpoint aggregating router, detection, log, whitelist and
  // training counters for the overview page.
  app.get("/api/stats", async (req, res) => {
    try {
      const routersList = await storage.getAllRouters();
      const whitelistResult = await storage.getAllWhitelist({ limit: 1 });
      const latestTraining = await storage.getLatestTraining();
      // Detection counters in a single pass using FILTER clauses;
      // risk_score is stored as text, hence the ::numeric casts.
      const detectionStats = await db.select({
        total: sql<number>`count(*)::int`,
        blocked: sql<number>`count(*) filter (where blocked = true)::int`,
        critical: sql<number>`count(*) filter (where ${detections.riskScore}::numeric >= 85)::int`,
        high: sql<number>`count(*) filter (where ${detections.riskScore}::numeric >= 70 and ${detections.riskScore}::numeric < 85)::int`,
      }).from(detections);
      // Log volume over the last 24 h; a failure here degrades to 0 rather
      // than failing the whole stats endpoint.
      let logCount = 0;
      try {
        const logStats = await db.execute(
          sql`SELECT count(*)::int as count FROM network_logs WHERE timestamp >= NOW() - INTERVAL '24 hours'`
        );
        // Driver differences: some return { rows: [...] }, others the row
        // array directly — probe both shapes.
        logCount = (logStats as any).rows?.[0]?.count ?? (logStats as any)[0]?.count ?? 0;
      } catch (logError) {
        console.error('[DB WARN] Log count query failed:', logError);
        logCount = 0;
      }
      res.json({
        routers: {
          total: routersList.length,
          enabled: routersList.filter(r => r.enabled).length
        },
        detections: {
          total: detectionStats[0]?.total || 0,
          blocked: detectionStats[0]?.blocked || 0,
          critical: detectionStats[0]?.critical || 0,
          high: detectionStats[0]?.high || 0
        },
        logs: {
          recent: logCount
        },
        whitelist: {
          total: whitelistResult.total
        },
        latestTraining: latestTraining
      });
    } catch (error) {
      console.error('[DB ERROR] Failed to fetch stats:', error);
      res.status(500).json({ error: "Failed to fetch stats" });
    }
  });
  // ML Actions - Trigger training/detection on Python backend
  // Connection settings for the Python ML service the routes below proxy to.
  const ML_BACKEND_URL = process.env.ML_BACKEND_URL || "http://localhost:8000";
  const ML_TIMEOUT = 120000; // 2 minutes timeout
  const IDS_API_KEY = process.env.IDS_API_KEY; // API Key for secure ML backend communication
  // Helper to create authenticated fetch headers; the X-API-Key header is
  // only attached when a key is configured.
  const getMLBackendHeaders = () => {
    const headers: HeadersInit = {
      "Content-Type": "application/json",
    };
    if (IDS_API_KEY) {
      headers["X-API-Key"] = IDS_API_KEY;
    }
    return headers;
  };
  // Trigger a model training run on the ML backend.
  // Body: { max_records? (1..1000000), hours_back? (1..720) }.
  app.post("/api/ml/train", async (req, res) => {
    try {
      const { max_records = 100000, hours_back = 24 } = req.body;
      // Validate input
      if (typeof max_records !== 'number' || max_records <= 0 || max_records > 1000000) {
        return res.status(400).json({ error: "max_records must be between 1 and 1000000" });
      }
      if (typeof hours_back !== 'number' || hours_back <= 0 || hours_back > 720) {
        return res.status(400).json({ error: "hours_back must be between 1 and 720" });
      }
      // Abort the proxied request if the backend takes longer than ML_TIMEOUT.
      const controller = new AbortController();
      const timeout = setTimeout(() => controller.abort(), ML_TIMEOUT);
      const response = await fetch(`${ML_BACKEND_URL}/train`, {
        method: "POST",
        headers: getMLBackendHeaders(),
        body: JSON.stringify({ max_records, hours_back }),
        signal: controller.signal,
      });
      clearTimeout(timeout);
      if (!response.ok) {
        // Forward the backend's status and detail message when available.
        const errorData = await response.json().catch(() => ({}));
        return res.status(response.status).json({
          error: errorData.detail || "Training failed",
          status: response.status,
        });
      }
      const data = await response.json();
      res.json(data);
    } catch (error: any) {
      // Map transport failures to meaningful HTTP statuses.
      if (error.name === 'AbortError') {
        return res.status(504).json({ error: "Training timeout - operation took too long" });
      }
      if (error.code === 'ECONNREFUSED') {
        return res.status(503).json({ error: "ML backend not available - is Python server running?" });
      }
      res.status(500).json({ error: error.message || "Failed to trigger training" });
    }
  });
  // Trigger a detection run on the ML backend.
  // Body: { max_records? (1..1000000), hours_back? (1..720),
  //         risk_threshold? (0..100), auto_block? }.
  app.post("/api/ml/detect", async (req, res) => {
    try {
      const { max_records = 50000, hours_back = 1, risk_threshold = 75, auto_block = false } = req.body;
      // Validate input
      if (typeof max_records !== 'number' || max_records <= 0 || max_records > 1000000) {
        return res.status(400).json({ error: "max_records must be between 1 and 1000000" });
      }
      if (typeof hours_back !== 'number' || hours_back <= 0 || hours_back > 720) {
        return res.status(400).json({ error: "hours_back must be between 1 and 720" });
      }
      if (typeof risk_threshold !== 'number' || risk_threshold < 0 || risk_threshold > 100) {
        return res.status(400).json({ error: "risk_threshold must be between 0 and 100" });
      }
      // Abort the proxied request if the backend exceeds ML_TIMEOUT.
      const controller = new AbortController();
      const timeout = setTimeout(() => controller.abort(), ML_TIMEOUT);
      const response = await fetch(`${ML_BACKEND_URL}/detect`, {
        method: "POST",
        headers: getMLBackendHeaders(),
        body: JSON.stringify({ max_records, hours_back, risk_threshold, auto_block }),
        signal: controller.signal,
      });
      clearTimeout(timeout);
      if (!response.ok) {
        // Forward the backend's status and detail message when available.
        const errorData = await response.json().catch(() => ({}));
        return res.status(response.status).json({
          error: errorData.detail || "Detection failed",
          status: response.status,
        });
      }
      const data = await response.json();
      res.json(data);
    } catch (error: any) {
      // Map transport failures to meaningful HTTP statuses.
      if (error.name === 'AbortError') {
        return res.status(504).json({ error: "Detection timeout - operation took too long" });
      }
      if (error.code === 'ECONNREFUSED') {
        return res.status(503).json({ error: "ML backend not available - is Python server running?" });
      }
      res.status(500).json({ error: error.message || "Failed to trigger detection" });
    }
  });
app.post("/api/ml/block-all-critical", async (req, res) => {
try {
const { min_score = 80, list_name = "ddos_blocked", limit = 100 } = req.body;
const maxIps = Math.min(Number(limit) || 100, 500);
const allRouters = await storage.getAllRouters();
const enabledRouters = allRouters.filter(r => r.enabled);
if (enabledRouters.length === 0) {
return res.status(400).json({ error: "Nessun router abilitato" });
}
const unblockedDetections = await db.execute(
sql`SELECT DISTINCT source_ip, MAX(CAST(risk_score AS FLOAT)) as max_score, MAX(anomaly_type) as anomaly_type
FROM detections
WHERE CAST(risk_score AS FLOAT) >= ${min_score}
AND blocked = false
AND source_ip NOT IN (SELECT ip_address FROM whitelist WHERE active = true)
GROUP BY source_ip
ORDER BY max_score DESC
LIMIT ${maxIps}`
);
const rows = (unblockedDetections as any).rows || unblockedDetections;
const totalUnblockedResult = await db.execute(
sql`SELECT COUNT(DISTINCT source_ip) as count
FROM detections
WHERE CAST(risk_score AS FLOAT) >= ${min_score}
AND blocked = false
AND source_ip NOT IN (SELECT ip_address FROM whitelist WHERE active = true)`
);
const totalUnblockedRows = (totalUnblockedResult as any).rows || totalUnblockedResult;
const totalUnblocked = parseInt(totalUnblockedRows[0]?.count || "0");
if (!rows || rows.length === 0) {
return res.json({
message: "Nessun IP critico da bloccare",
blocked: 0,
failed: 0,
total_critical: 0,
remaining: 0,
skipped: 0
});
}
const ipList = rows.map((r: any) => r.source_ip);
console.log(`[BLOCK-ALL] Avvio blocco massivo: ${ipList.length}/${totalUnblocked} IP con score >= ${min_score} su ${enabledRouters.length} router`);
const result = await bulkBlockIps(
enabledRouters as any,
ipList,
list_name,
`IDS bulk-block (score>=${min_score})`,
"1h",
10
);
if (result.blocked > 0) {
const blockedIps = result.details
.filter(d => d.status === "blocked")
.map(d => d.ip);
const batchSize = 200;
for (let i = 0; i < blockedIps.length; i += batchSize) {
const batch = blockedIps.slice(i, i + batchSize);
const ipValues = batch.map(ip => `'${ip.replace(/'/g, "''")}'`).join(',');
await db.execute(
sql`UPDATE detections SET blocked = true, blocked_at = NOW() WHERE source_ip IN (${sql.raw(ipValues)}) AND blocked = false`
);
}
console.log(`[BLOCK-ALL] Database aggiornato: ${blockedIps.length} IP marcati come bloccati`);
}
const remaining = totalUnblocked - ipList.length;
res.json({
message: `Blocco massivo completato: ${result.blocked} IP bloccati, ${result.failed} falliti, ${result.skipped} già bloccati` +
(remaining > 0 ? `. Rimangono ${remaining} IP da bloccare.` : ''),
blocked: result.blocked,
failed: result.failed,
skipped: result.skipped,
total_critical: ipList.length,
remaining,
details: result.details.slice(0, 100)
});
} catch (error: any) {
console.error('[BLOCK-ALL] Error:', error);
res.status(500).json({ error: error.message || "Errore blocco massivo" });
}
});
  // ML backend stats with a database fallback: when the backend is offline
  // the response carries source/ml_backend_status markers so the frontend
  // can render an "offline" badge instead of failing.
  app.get("/api/ml/stats", async (req, res) => {
    try {
      // 5 s budget for the backend probe.
      const controller = new AbortController();
      const timeout = setTimeout(() => controller.abort(), 5000);
      const response = await fetch(`${ML_BACKEND_URL}/stats`, {
        headers: getMLBackendHeaders(),
        signal: controller.signal,
      });
      clearTimeout(timeout);
      if (!response.ok) {
        throw new Error(`HTTP ${response.status}`);
      }
      const data = await response.json();
      res.json(data);
    } catch (error: any) {
      // Backend unreachable — rebuild a reduced stats payload from the DB.
      try {
        const latestTraining = await db
          .select()
          .from(trainingHistory)
          .orderBy(desc(trainingHistory.trainedAt))
          .limit(1);
        const detectionStats = await db.execute(
          sql`SELECT
            COUNT(*) as total_detections,
            COUNT(*) FILTER (WHERE blocked = true) as blocked_count,
            COUNT(*) FILTER (WHERE CAST(risk_score AS FLOAT) >= 80) as critical_count,
            COUNT(DISTINCT source_ip) as unique_ips
            FROM detections`
        );
        // Driver differences: some return { rows }, others the array itself.
        const statsRows = (detectionStats as any).rows || detectionStats;
        const logCount = await db.execute(
          sql`SELECT COUNT(*) as count FROM network_logs WHERE timestamp > NOW() - INTERVAL '24 hours'`
        );
        const logRows = (logCount as any).rows || logCount;
        res.json({
          source: "database_fallback",
          ml_backend_status: "offline",
          latest_training: latestTraining[0] || null,
          detections: {
            total: parseInt(statsRows[0]?.total_detections || "0"),
            blocked: parseInt(statsRows[0]?.blocked_count || "0"),
            critical: parseInt(statsRows[0]?.critical_count || "0"),
            unique_ips: parseInt(statsRows[0]?.unique_ips || "0"),
          },
          logs_24h: parseInt(logRows[0]?.count || "0"),
        });
      } catch (dbError: any) {
        res.status(503).json({ error: "ML backend offline and database fallback failed" });
      }
    }
  });
  // Services monitoring
  // Probes the four moving parts of the IDS (ML backend, Postgres, syslog
  // parser, analytics aggregator) and reports status/healthy/details for
  // each; probe failures are captured per-service, never thrown.
  app.get("/api/services/status", async (req, res) => {
    try {
      const services = {
        mlBackend: { name: "ML Backend Python", status: "unknown", healthy: false, details: null as any },
        database: { name: "PostgreSQL Database", status: "unknown", healthy: false, details: null as any },
        syslogParser: { name: "Syslog Parser", status: "unknown", healthy: false, details: null as any },
        analyticsAggregator: { name: "Analytics Aggregator Timer", status: "unknown", healthy: false, details: null as any },
      };
      // Check ML Backend Python (5 s timeout on /health).
      try {
        const controller = new AbortController();
        const timeout = setTimeout(() => controller.abort(), 5000);
        const response = await fetch(`${ML_BACKEND_URL}/health`, {
          signal: controller.signal,
        });
        clearTimeout(timeout);
        if (response.ok) {
          const data = await response.json();
          services.mlBackend.status = "running";
          services.mlBackend.healthy = true;
          services.mlBackend.details = {
            modelLoaded: data.ml_model === "loaded",
            timestamp: data.timestamp,
          };
        } else {
          services.mlBackend.status = "error";
          services.mlBackend.details = { error: `HTTP ${response.status}` };
        }
      } catch (error: any) {
        services.mlBackend.status = "offline";
        services.mlBackend.details = { error: error.code === 'ECONNREFUSED' ? "Connection refused" : error.message };
      }
      // Check Database
      try {
        const conn = await storage.testConnection();
        if (conn) {
          services.database.status = "running";
          services.database.healthy = true;
          services.database.details = { connected: true };
        }
        // NOTE(review): if testConnection() resolves falsy the status stays
        // "unknown" rather than "error" — confirm that is intended.
      } catch (error: any) {
        services.database.status = "error";
        services.database.details = { error: error.message };
      }
      // Check Syslog Parser via database (independent of ML Backend): the
      // parser is considered alive if any log arrived in the last 30 min.
      try {
        const recentLogsResult = await db.execute(
          sql`SELECT COUNT(*) as count, MAX(timestamp) as last_log
            FROM network_logs
            WHERE timestamp > NOW() - INTERVAL '30 minutes'`
        );
        const logRows = (recentLogsResult as any).rows || recentLogsResult;
        const recentLogCount = parseInt(logRows[0]?.count || "0");
        const lastLogTime = logRows[0]?.last_log;
        if (recentLogCount > 0) {
          services.syslogParser.status = "running";
          services.syslogParser.healthy = true;
          services.syslogParser.details = {
            recentLogs30min: recentLogCount,
            lastLog: lastLogTime,
          };
        } else {
          // No recent traffic: include the last log ever seen for context.
          const lastLogEverResult = await db.execute(
            sql`SELECT MAX(timestamp) as last_log FROM network_logs`
          );
          const lastLogEverRows = (lastLogEverResult as any).rows || lastLogEverResult;
          const lastLogEver = lastLogEverRows[0]?.last_log;
          services.syslogParser.status = "offline";
          services.syslogParser.healthy = false;
          services.syslogParser.details = {
            recentLogs30min: 0,
            lastLog: lastLogEver || "Never",
            warning: "No logs received in last 30 minutes",
          };
        }
      } catch (error: any) {
        services.syslogParser.status = "error";
        services.syslogParser.healthy = false;
        services.syslogParser.details = { error: error.message };
      }
      // Check Analytics Aggregator (via last record timestamp): healthy if
      // an aggregate row was written within the last 2 hours.
      try {
        const latestAnalytics = await db
          .select()
          .from(networkAnalytics)
          .orderBy(desc(networkAnalytics.date), desc(networkAnalytics.hour))
          .limit(1);
        if (latestAnalytics.length > 0) {
          const lastRun = new Date(latestAnalytics[0].date);
          const lastTimestamp = lastRun.toISOString();
          const hoursSinceLastRun = (Date.now() - lastRun.getTime()) / (1000 * 60 * 60);
          if (hoursSinceLastRun < 2) {
            services.analyticsAggregator.status = "running";
            services.analyticsAggregator.healthy = true;
            services.analyticsAggregator.details = {
              lastRun: latestAnalytics[0].date,
              lastTimestamp,
              hoursSinceLastRun: hoursSinceLastRun.toFixed(1),
            };
          } else {
            services.analyticsAggregator.status = "idle";
            services.analyticsAggregator.healthy = false;
            services.analyticsAggregator.details = {
              lastRun: latestAnalytics[0].date,
              lastTimestamp,
              hoursSinceLastRun: hoursSinceLastRun.toFixed(1),
              warning: "No aggregation in last 2 hours",
            };
          }
        } else {
          services.analyticsAggregator.status = "idle";
          services.analyticsAggregator.healthy = false;
          services.analyticsAggregator.details = { error: "No analytics data found" };
        }
      } catch (error: any) {
        services.analyticsAggregator.status = "error";
        services.analyticsAggregator.healthy = false;
        services.analyticsAggregator.details = { error: error.message };
      }
      res.json({ services });
    } catch (error: any) {
      res.status(500).json({ error: "Failed to check services status" });
    }
  });
  // Service Control Endpoints (Secured - only allow specific systemd operations)
  // Both path params are checked against these allowlists before any shell
  // command string is built.
  const ALLOWED_SERVICES = ["ids-ml-backend", "ids-syslog-parser"];
  const ALLOWED_ACTIONS = ["start", "stop", "restart", "status"];
  // Run `systemctl <action> <service>` for an allowed unit/action pair.
  app.post("/api/services/:service/:action", async (req, res) => {
    try {
      const { service, action } = req.params;
      // Validate service name
      if (!ALLOWED_SERVICES.includes(service)) {
        return res.status(400).json({ error: "Invalid service name" });
      }
      // Validate action
      if (!ALLOWED_ACTIONS.includes(action)) {
        return res.status(400).json({ error: "Invalid action" });
      }
      // Execute systemd command (lazy import keeps child_process off the
      // module's load path).
      const { exec } = await import("child_process");
      const { promisify } = await import("util");
      const execAsync = promisify(exec);
      try {
        const systemdAction = action === "status" ? "status" : action;
        const { stdout, stderr } = await execAsync(
          `systemctl ${systemdAction} ${service}`,
          { timeout: 10000 }
        );
        res.json({
          success: true,
          service,
          action,
          output: stdout || stderr,
          timestamp: new Date().toISOString(),
        });
      } catch (execError: any) {
        // systemctl returns non-zero exit for stopped services in status command
        if (action === "status") {
          res.json({
            success: true,
            service,
            action,
            output: execError.stdout || execError.stderr,
            timestamp: new Date().toISOString(),
          });
        } else {
          throw execError;
        }
      }
    } catch (error: any) {
      res.status(500).json({
        error: "Service control failed",
        details: error.message,
      });
    }
  });
  // Wrap the fully-configured Express app in a plain HTTP server; the
  // caller is responsible for listen()/close().
  const httpServer = createServer(app);
  return httpServer;
}