ids.alfacom.it/server/routes.ts
marco370 4118d60d6d Update service monitoring to display detailed status and health
Refactor the services page to dynamically fetch and display the status of various systemd services and timers, improving the observability of the application's backend components.

Replit-Commit-Author: Agent
Replit-Commit-Session-Id: 7a657272-55ba-4a79-9a2e-f1ed9bc7a528
Replit-Commit-Checkpoint-Type: intermediate_checkpoint
Replit-Commit-Event-Id: 48245392-3f34-4eac-aeaf-99e52684ddf2
Replit-Commit-Screenshot-Url: https://storage.googleapis.com/screenshot-production-us-central1/449cf7c4-c97a-45ae-8234-e5c5b8d6a84f/7a657272-55ba-4a79-9a2e-f1ed9bc7a528/s2eMVCL
2026-02-17 09:09:26 +00:00

1057 lines
40 KiB
TypeScript

import type { Express } from "express";
import { createServer, type Server } from "http";
import { storage } from "./storage";
import { insertRouterSchema, insertDetectionSchema, insertWhitelistSchema, insertPublicListSchema, networkAnalytics, routers, detections, networkLogs, trainingHistory } from "@shared/schema";
import { db } from "./db";
import { desc, eq, gte, sql } from "drizzle-orm";
import { blockIpOnAllRouters, unblockIpOnAllRouters, bulkBlockIps, testRouterConnection } from "./mikrotik";
// Registers every HTTP API route on the Express app and returns the wrapped
// HTTP server (the caller is responsible for calling .listen()).
// Routes talk to Postgres via `storage`/`db` and proxy ML actions to a Python backend.
export async function registerRoutes(app: Express): Promise<Server> {
// Routers
// GET /api/routers — return every configured router row.
app.get("/api/routers", async (req, res) => {
try {
const routers = await storage.getAllRouters();
res.json(routers);
} catch (error) {
console.error('[DB ERROR] Failed to fetch routers:', error);
res.status(500).json({ error: "Failed to fetch routers" });
}
});
app.post("/api/routers", async (req, res) => {
try {
const validatedData = insertRouterSchema.parse(req.body);
const router = await storage.createRouter(validatedData);
res.json(router);
} catch (error) {
res.status(400).json({ error: "Invalid router data" });
}
});
app.put("/api/routers/:id", async (req, res) => {
try {
const validatedData = insertRouterSchema.parse(req.body);
const router = await storage.updateRouter(req.params.id, validatedData);
if (!router) {
return res.status(404).json({ error: "Router not found" });
}
res.json(router);
} catch (error) {
console.error('[Router UPDATE] Error:', error);
res.status(400).json({ error: "Invalid router data" });
}
});
app.delete("/api/routers/:id", async (req, res) => {
try {
const success = await storage.deleteRouter(req.params.id);
if (!success) {
return res.status(404).json({ error: "Router not found" });
}
res.json({ success: true });
} catch (error) {
res.status(500).json({ error: "Failed to delete router" });
}
});
// Network Logs
app.get("/api/logs", async (req, res) => {
try {
const limit = parseInt(req.query.limit as string) || 100;
const logs = await storage.getRecentLogs(limit);
res.json(logs);
} catch (error) {
res.status(500).json({ error: "Failed to fetch logs" });
}
});
app.get("/api/logs/ip/:ip", async (req, res) => {
try {
const limit = parseInt(req.query.limit as string) || 50;
const logs = await storage.getLogsByIp(req.params.ip, limit);
res.json(logs);
} catch (error) {
res.status(500).json({ error: "Failed to fetch logs for IP" });
}
});
// Detections
app.get("/api/detections", async (req, res) => {
try {
const limit = req.query.limit ? parseInt(req.query.limit as string) : 50;
const offset = req.query.offset ? parseInt(req.query.offset as string) : 0;
const anomalyType = req.query.anomalyType as string | undefined;
const minScore = req.query.minScore ? parseFloat(req.query.minScore as string) : undefined;
const maxScore = req.query.maxScore ? parseFloat(req.query.maxScore as string) : undefined;
const search = req.query.search as string | undefined;
const result = await storage.getAllDetections({
limit,
offset,
anomalyType,
minScore,
maxScore,
search
});
res.json(result);
} catch (error) {
console.error('[DB ERROR] Failed to fetch detections:', error);
res.status(500).json({ error: "Failed to fetch detections" });
}
});
app.get("/api/dashboard/live", async (req, res) => {
try {
const hours = parseInt(req.query.hours as string) || 72;
const stats = await storage.getLiveDashboardStats(hours);
res.json(stats);
} catch (error) {
console.error('[DB ERROR] Failed to fetch dashboard stats:', error);
res.status(500).json({ error: "Failed to fetch dashboard stats" });
}
});
app.get("/api/detections/unblocked", async (req, res) => {
try {
const detections = await storage.getUnblockedDetections();
res.json(detections);
} catch (error) {
res.status(500).json({ error: "Failed to fetch unblocked detections" });
}
});
// Whitelist
app.get("/api/whitelist", async (req, res) => {
try {
const limit = parseInt(req.query.limit as string) || 50;
const offset = parseInt(req.query.offset as string) || 0;
const search = req.query.search as string || undefined;
const result = await storage.getAllWhitelist({ limit, offset, search });
res.json(result);
} catch (error) {
console.error('[DB ERROR] Failed to fetch whitelist:', error);
res.status(500).json({ error: "Failed to fetch whitelist" });
}
});
app.post("/api/whitelist", async (req, res) => {
try {
const validatedData = insertWhitelistSchema.parse(req.body);
const item = await storage.createWhitelist(validatedData);
try {
const allRouters = await storage.getAllRouters();
const enabledRouters = allRouters.filter(r => r.enabled);
if (enabledRouters.length > 0) {
const results = await unblockIpOnAllRouters(enabledRouters as any, validatedData.ipAddress);
const unblocked = results.filter(r => r.success).length;
console.log(`[WHITELIST] Auto-unblocked ${validatedData.ipAddress} from ${unblocked}/${enabledRouters.length} routers`);
}
} catch (unblockError) {
console.warn(`[WHITELIST] Auto-unblock failed for ${validatedData.ipAddress}:`, unblockError);
}
res.json(item);
} catch (error) {
res.status(400).json({ error: "Invalid whitelist data" });
}
});
app.post("/api/unblock-ip", async (req, res) => {
try {
const { ipAddress, listName = "ddos_blocked" } = req.body;
if (!ipAddress) {
return res.status(400).json({ error: "IP address is required" });
}
const allRouters = await storage.getAllRouters();
const enabledRouters = allRouters.filter(r => r.enabled);
if (enabledRouters.length === 0) {
return res.status(400).json({ error: "Nessun router abilitato" });
}
const results = await unblockIpOnAllRouters(enabledRouters as any, ipAddress, listName);
const successCount = results.filter(r => r.success).length;
await db.update(detections)
.set({ blocked: false })
.where(eq(detections.sourceIp, ipAddress));
console.log(`[UNBLOCK] ${ipAddress} rimosso da ${successCount}/${enabledRouters.length} router`);
res.json({
message: `IP ${ipAddress} sbloccato da ${successCount} router`,
unblocked_from: successCount,
total_routers: enabledRouters.length,
results: results.map(r => ({ router: r.routerIp, success: r.success, error: r.error }))
});
} catch (error: any) {
console.error('[UNBLOCK] Error:', error);
res.status(500).json({ error: error.message || "Errore sblocco IP" });
}
});
app.delete("/api/whitelist/:id", async (req, res) => {
try {
const success = await storage.deleteWhitelist(req.params.id);
if (!success) {
return res.status(404).json({ error: "Whitelist entry not found" });
}
res.json({ success: true });
} catch (error) {
res.status(500).json({ error: "Failed to delete whitelist entry" });
}
});
// Public Lists
app.get("/api/public-lists", async (req, res) => {
try {
const lists = await storage.getAllPublicLists();
res.json(lists);
} catch (error) {
console.error('[DB ERROR] Failed to fetch public lists:', error);
res.status(500).json({ error: "Failed to fetch public lists" });
}
});
app.get("/api/public-lists/:id", async (req, res) => {
try {
const list = await storage.getPublicListById(req.params.id);
if (!list) {
return res.status(404).json({ error: "List not found" });
}
res.json(list);
} catch (error) {
res.status(500).json({ error: "Failed to fetch list" });
}
});
app.post("/api/public-lists", async (req, res) => {
try {
const validatedData = insertPublicListSchema.parse(req.body);
const list = await storage.createPublicList(validatedData);
res.json(list);
} catch (error: any) {
console.error('[API ERROR] Failed to create public list:', error);
if (error.name === 'ZodError') {
return res.status(400).json({ error: "Invalid list data", details: error.errors });
}
res.status(400).json({ error: "Invalid list data" });
}
});
app.patch("/api/public-lists/:id", async (req, res) => {
try {
const validatedData = insertPublicListSchema.partial().parse(req.body);
const list = await storage.updatePublicList(req.params.id, validatedData);
if (!list) {
return res.status(404).json({ error: "List not found" });
}
res.json(list);
} catch (error: any) {
console.error('[API ERROR] Failed to update public list:', error);
if (error.name === 'ZodError') {
return res.status(400).json({ error: "Invalid list data", details: error.errors });
}
res.status(400).json({ error: "Invalid list data" });
}
});
app.delete("/api/public-lists/:id", async (req, res) => {
try {
const success = await storage.deletePublicList(req.params.id);
if (!success) {
return res.status(404).json({ error: "List not found" });
}
res.json({ success: true });
} catch (error) {
res.status(500).json({ error: "Failed to delete list" });
}
});
app.post("/api/public-lists/:id/sync", async (req, res) => {
try {
const list = await storage.getPublicListById(req.params.id);
if (!list) {
return res.status(404).json({ error: "List not found" });
}
console.log(`[SYNC] Starting sync for list: ${list.name} (${list.url})`);
// Fetch the list from URL
const response = await fetch(list.url, {
headers: {
'User-Agent': 'IDS-MikroTik-PublicListFetcher/2.0',
'Accept': 'application/json, text/plain, */*',
},
signal: AbortSignal.timeout(30000),
});
if (!response.ok) {
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
const contentType = response.headers.get('content-type') || '';
const text = await response.text();
// Parse IPs based on content type
let ips: Array<{ip: string, cidr: string | null}> = [];
if (contentType.includes('json') || list.url.endsWith('.json')) {
// JSON format (Spamhaus DROP v4 JSON)
try {
const data = JSON.parse(text);
if (Array.isArray(data)) {
for (const entry of data) {
if (entry.cidr) {
const [ip] = entry.cidr.split('/');
ips.push({ ip, cidr: entry.cidr });
} else if (entry.ip) {
ips.push({ ip: entry.ip, cidr: null });
}
}
}
} catch (e) {
console.error('[SYNC] Failed to parse JSON:', e);
throw new Error('Invalid JSON format');
}
} else {
// Plain text format (one IP/CIDR per line)
const lines = text.split('\n');
for (const line of lines) {
const trimmed = line.trim();
if (!trimmed || trimmed.startsWith('#') || trimmed.startsWith(';')) continue;
// Extract IP/CIDR from line
const match = trimmed.match(/^(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(\/\d{1,2})?/);
if (match) {
const ip = match[1];
const cidr = match[2] ? `${match[1]}${match[2]}` : null;
ips.push({ ip, cidr });
}
}
}
console.log(`[SYNC] Parsed ${ips.length} IPs from ${list.name}`);
// Save IPs to database
let added = 0;
let updated = 0;
for (const { ip, cidr } of ips) {
const result = await storage.upsertBlacklistIp(list.id, ip, cidr);
if (result.created) added++;
else updated++;
}
// Update list stats
await storage.updatePublicList(list.id, {
lastFetch: new Date(),
lastSuccess: new Date(),
totalIps: ips.length,
activeIps: ips.length,
errorCount: 0,
lastError: null,
});
console.log(`[SYNC] Completed: ${added} added, ${updated} updated for ${list.name}`);
res.json({
success: true,
message: `Sync completed: ${ips.length} IPs processed`,
added,
updated,
total: ips.length,
});
} catch (error: any) {
console.error('[API ERROR] Failed to sync:', error);
// Update error count
const list = await storage.getPublicListById(req.params.id);
if (list) {
await storage.updatePublicList(req.params.id, {
errorCount: (list.errorCount || 0) + 1,
lastError: error.message,
lastFetch: new Date(),
});
}
res.status(500).json({ error: `Sync failed: ${error.message}` });
}
});
// Public Blacklist IPs
app.get("/api/public-blacklist", async (req, res) => {
try {
const limit = parseInt(req.query.limit as string) || 1000;
const listId = req.query.listId as string | undefined;
const ipAddress = req.query.ipAddress as string | undefined;
const isActive = req.query.isActive === 'true';
const ips = await storage.getPublicBlacklistIps({
limit,
listId,
ipAddress,
isActive: req.query.isActive !== undefined ? isActive : undefined,
});
res.json(ips);
} catch (error) {
console.error('[DB ERROR] Failed to fetch blacklist IPs:', error);
res.status(500).json({ error: "Failed to fetch blacklist IPs" });
}
});
app.get("/api/public-blacklist/stats", async (req, res) => {
try {
const stats = await storage.getPublicBlacklistStats();
res.json(stats);
} catch (error) {
console.error('[DB ERROR] Failed to fetch blacklist stats:', error);
res.status(500).json({ error: "Failed to fetch stats" });
}
});
// Training History
app.get("/api/training-history", async (req, res) => {
try {
const limit = parseInt(req.query.limit as string) || 10;
const history = await storage.getTrainingHistory(limit);
res.json(history);
} catch (error) {
console.error('[DB ERROR] Failed to fetch training history:', error);
res.status(500).json({ error: "Failed to fetch training history" });
}
});
app.get("/api/training-history/latest", async (req, res) => {
try {
const latest = await storage.getLatestTraining();
res.json(latest || null);
} catch (error) {
res.status(500).json({ error: "Failed to fetch latest training" });
}
});
// Network Analytics
app.get("/api/analytics/recent", async (req, res) => {
try {
const days = parseInt(req.query.days as string) || 3;
const hourly = req.query.hourly === 'true';
const analytics = await storage.getRecentAnalytics(days, hourly);
res.json(analytics);
} catch (error) {
console.error('[DB ERROR] Failed to fetch recent analytics:', error);
res.status(500).json({ error: "Failed to fetch analytics" });
}
});
app.get("/api/analytics/range", async (req, res) => {
try {
const startDate = new Date(req.query.start as string);
const endDate = new Date(req.query.end as string);
const hourly = req.query.hourly === 'true';
if (isNaN(startDate.getTime()) || isNaN(endDate.getTime())) {
return res.status(400).json({ error: "Invalid date range" });
}
const analytics = await storage.getAnalyticsByDateRange(startDate, endDate, hourly);
res.json(analytics);
} catch (error) {
console.error('[DB ERROR] Failed to fetch analytics range:', error);
res.status(500).json({ error: "Failed to fetch analytics" });
}
});
// Stats
app.get("/api/stats", async (req, res) => {
try {
const routersList = await storage.getAllRouters();
const whitelistResult = await storage.getAllWhitelist({ limit: 1 });
const latestTraining = await storage.getLatestTraining();
const detectionStats = await db.select({
total: sql<number>`count(*)::int`,
blocked: sql<number>`count(*) filter (where blocked = true)::int`,
critical: sql<number>`count(*) filter (where ${detections.riskScore}::numeric >= 85)::int`,
high: sql<number>`count(*) filter (where ${detections.riskScore}::numeric >= 70 and ${detections.riskScore}::numeric < 85)::int`,
}).from(detections);
let logCount = 0;
try {
const logStats = await db.execute(
sql`SELECT count(*)::int as count FROM network_logs WHERE timestamp >= NOW() - INTERVAL '24 hours'`
);
logCount = (logStats as any).rows?.[0]?.count ?? (logStats as any)[0]?.count ?? 0;
} catch (logError) {
console.error('[DB WARN] Log count query failed:', logError);
logCount = 0;
}
res.json({
routers: {
total: routersList.length,
enabled: routersList.filter(r => r.enabled).length
},
detections: {
total: detectionStats[0]?.total || 0,
blocked: detectionStats[0]?.blocked || 0,
critical: detectionStats[0]?.critical || 0,
high: detectionStats[0]?.high || 0
},
logs: {
recent: logCount
},
whitelist: {
total: whitelistResult.total
},
latestTraining: latestTraining
});
} catch (error) {
console.error('[DB ERROR] Failed to fetch stats:', error);
res.status(500).json({ error: "Failed to fetch stats" });
}
});
// ML Actions - Trigger training/detection on Python backend
// Base URL of the Python ML service; overridable via environment.
const ML_BACKEND_URL = process.env.ML_BACKEND_URL || "http://localhost:8000";
const ML_TIMEOUT = 120000; // 2 minutes timeout
const IDS_API_KEY = process.env.IDS_API_KEY; // API Key for secure ML backend communication
// Helper to create authenticated fetch headers
// Adds X-API-Key only when IDS_API_KEY is configured, so local setups
// without a key still work.
const getMLBackendHeaders = () => {
const headers: HeadersInit = {
"Content-Type": "application/json",
};
if (IDS_API_KEY) {
headers["X-API-Key"] = IDS_API_KEY;
}
return headers;
};
app.post("/api/ml/train", async (req, res) => {
try {
const { max_records = 100000, hours_back = 24 } = req.body;
// Validate input
if (typeof max_records !== 'number' || max_records <= 0 || max_records > 1000000) {
return res.status(400).json({ error: "max_records must be between 1 and 1000000" });
}
if (typeof hours_back !== 'number' || hours_back <= 0 || hours_back > 720) {
return res.status(400).json({ error: "hours_back must be between 1 and 720" });
}
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), ML_TIMEOUT);
const response = await fetch(`${ML_BACKEND_URL}/train`, {
method: "POST",
headers: getMLBackendHeaders(),
body: JSON.stringify({ max_records, hours_back }),
signal: controller.signal,
});
clearTimeout(timeout);
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
return res.status(response.status).json({
error: errorData.detail || "Training failed",
status: response.status,
});
}
const data = await response.json();
res.json(data);
} catch (error: any) {
if (error.name === 'AbortError') {
return res.status(504).json({ error: "Training timeout - operation took too long" });
}
if (error.code === 'ECONNREFUSED') {
return res.status(503).json({ error: "ML backend not available - is Python server running?" });
}
res.status(500).json({ error: error.message || "Failed to trigger training" });
}
});
app.post("/api/ml/detect", async (req, res) => {
try {
const { max_records = 50000, hours_back = 1, risk_threshold = 75, auto_block = false } = req.body;
// Validate input
if (typeof max_records !== 'number' || max_records <= 0 || max_records > 1000000) {
return res.status(400).json({ error: "max_records must be between 1 and 1000000" });
}
if (typeof hours_back !== 'number' || hours_back <= 0 || hours_back > 720) {
return res.status(400).json({ error: "hours_back must be between 1 and 720" });
}
if (typeof risk_threshold !== 'number' || risk_threshold < 0 || risk_threshold > 100) {
return res.status(400).json({ error: "risk_threshold must be between 0 and 100" });
}
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), ML_TIMEOUT);
const response = await fetch(`${ML_BACKEND_URL}/detect`, {
method: "POST",
headers: getMLBackendHeaders(),
body: JSON.stringify({ max_records, hours_back, risk_threshold, auto_block }),
signal: controller.signal,
});
clearTimeout(timeout);
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
return res.status(response.status).json({
error: errorData.detail || "Detection failed",
status: response.status,
});
}
const data = await response.json();
res.json(data);
} catch (error: any) {
if (error.name === 'AbortError') {
return res.status(504).json({ error: "Detection timeout - operation took too long" });
}
if (error.code === 'ECONNREFUSED') {
return res.status(503).json({ error: "ML backend not available - is Python server running?" });
}
res.status(500).json({ error: error.message || "Failed to trigger detection" });
}
});
app.post("/api/ml/block-all-critical", async (req, res) => {
try {
const { min_score = 80, list_name = "ddos_blocked", limit = 100 } = req.body;
const maxIps = Math.min(Number(limit) || 100, 500);
const allRouters = await storage.getAllRouters();
const enabledRouters = allRouters.filter(r => r.enabled);
if (enabledRouters.length === 0) {
return res.status(400).json({ error: "Nessun router abilitato" });
}
const unblockedDetections = await db.execute(
sql`SELECT DISTINCT source_ip, MAX(CAST(risk_score AS FLOAT)) as max_score, MAX(anomaly_type) as anomaly_type
FROM detections
WHERE CAST(risk_score AS FLOAT) >= ${min_score}
AND blocked = false
AND source_ip NOT IN (SELECT ip_address FROM whitelist WHERE active = true)
GROUP BY source_ip
ORDER BY max_score DESC
LIMIT ${maxIps}`
);
const rows = (unblockedDetections as any).rows || unblockedDetections;
const totalUnblockedResult = await db.execute(
sql`SELECT COUNT(DISTINCT source_ip) as count
FROM detections
WHERE CAST(risk_score AS FLOAT) >= ${min_score}
AND blocked = false
AND source_ip NOT IN (SELECT ip_address FROM whitelist WHERE active = true)`
);
const totalUnblockedRows = (totalUnblockedResult as any).rows || totalUnblockedResult;
const totalUnblocked = parseInt(totalUnblockedRows[0]?.count || "0");
if (!rows || rows.length === 0) {
return res.json({
message: "Nessun IP critico da bloccare",
blocked: 0,
failed: 0,
total_critical: 0,
remaining: 0,
skipped: 0
});
}
const ipList = rows.map((r: any) => r.source_ip);
const routerInfo = enabledRouters.map((r: any) => `${r.name || r.ipAddress}(${r.ipAddress}:${r.apiPort})`).join(', ');
console.log(`[BLOCK-ALL] Avvio blocco massivo: ${ipList.length}/${totalUnblocked} IP con score >= ${min_score} su ${enabledRouters.length} router: ${routerInfo}`);
const result = await bulkBlockIps(
enabledRouters as any,
ipList,
list_name,
`IDS bulk-block (score>=${min_score})`,
"1h",
10
);
if (result.blocked > 0) {
const blockedIps = result.details
.filter(d => d.status === "blocked")
.map(d => d.ip);
const batchSize = 200;
for (let i = 0; i < blockedIps.length; i += batchSize) {
const batch = blockedIps.slice(i, i + batchSize);
const ipValues = batch.map(ip => `'${ip.replace(/'/g, "''")}'`).join(',');
await db.execute(
sql`UPDATE detections SET blocked = true, blocked_at = NOW() WHERE source_ip IN (${sql.raw(ipValues)}) AND blocked = false`
);
}
console.log(`[BLOCK-ALL] Database aggiornato: ${blockedIps.length} IP marcati come bloccati`);
}
const remaining = totalUnblocked - ipList.length;
res.json({
message: `Blocco massivo completato: ${result.blocked} IP bloccati, ${result.failed} falliti, ${result.skipped} già bloccati` +
(remaining > 0 ? `. Rimangono ${remaining} IP da bloccare.` : ''),
blocked: result.blocked,
failed: result.failed,
skipped: result.skipped,
total_critical: ipList.length,
remaining,
details: result.details.slice(0, 100)
});
} catch (error: any) {
console.error('[BLOCK-ALL] Error:', error);
res.status(500).json({ error: error.message || "Errore blocco massivo" });
}
});
// GET /api/ml/stats — live stats from the Python ML backend; when it is
// unreachable or errors, fall back to equivalent aggregates computed directly
// from the database (marked with source: "database_fallback").
app.get("/api/ml/stats", async (req, res) => {
try {
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 15000);
const response = await fetch(`${ML_BACKEND_URL}/stats`, {
headers: getMLBackendHeaders(),
signal: controller.signal,
});
clearTimeout(timeout);
if (!response.ok) {
throw new Error(`HTTP ${response.status}`);
}
const data = await response.json();
res.json(data);
} catch (error: any) {
console.warn(`[ML Stats] Fallback to database - ML Backend error: ${error.message || error.code || 'unknown'}`);
// Database fallback: compute the same headline numbers from local tables.
try {
const latestTraining = await db
.select()
.from(trainingHistory)
.orderBy(desc(trainingHistory.trainedAt))
.limit(1);
const detectionStats = await db.execute(
sql`SELECT
COUNT(*) as total_detections,
COUNT(*) FILTER (WHERE blocked = true) as blocked_count,
COUNT(*) FILTER (WHERE CAST(risk_score AS FLOAT) >= 80) as critical_count,
COUNT(DISTINCT source_ip) as unique_ips
FROM detections`
);
// Driver compatibility: some drivers return { rows }, others a bare array.
const statsRows = (detectionStats as any).rows || detectionStats;
const logCount = await db.execute(
sql`SELECT COUNT(*) as count FROM network_logs WHERE timestamp > NOW() - INTERVAL '24 hours'`
);
const logRows = (logCount as any).rows || logCount;
res.json({
source: "database_fallback",
ml_backend_status: "offline",
latest_training: latestTraining[0] || null,
detections: {
total: parseInt(statsRows[0]?.total_detections || "0"),
blocked: parseInt(statsRows[0]?.blocked_count || "0"),
critical: parseInt(statsRows[0]?.critical_count || "0"),
unique_ips: parseInt(statsRows[0]?.unique_ips || "0"),
},
logs_24h: parseInt(logRows[0]?.count || "0"),
});
} catch (dbError: any) {
// Both the ML backend and the database are unavailable.
res.status(503).json({ error: "ML backend offline and database fallback failed" });
}
}
});
// Services monitoring
// GET /api/services/status — health snapshot of every backend component.
// Direct probes are used where possible (HTTP health check, DB ping); the
// systemd timer jobs are assessed via data-freshness heuristics on their
// output tables. Each check is individually try/catch'd so one failing
// component cannot hide the status of the others.
app.get("/api/services/status", async (req, res) => {
try {
// Factory for a default "unknown / unhealthy" service descriptor.
const mkService = (name: string) => ({ name, status: "unknown" as string, healthy: false, details: null as any, systemdUnit: "" as string, type: "service" as string });
const services = {
nodeBackend: { ...mkService("Node.js Backend"), systemdUnit: "ids-backend", type: "service" },
mlBackend: { ...mkService("ML Backend Python"), systemdUnit: "ids-ml-backend", type: "service" },
database: { ...mkService("PostgreSQL Database"), systemdUnit: "postgresql-16", type: "service" },
syslogParser: { ...mkService("Syslog Parser"), systemdUnit: "ids-syslog-parser", type: "service" },
analyticsAggregator: { ...mkService("Analytics Aggregator"), systemdUnit: "ids-analytics-aggregator", type: "timer" },
autoBlock: { ...mkService("Auto Block"), systemdUnit: "ids-auto-block", type: "timer" },
cleanup: { ...mkService("Cleanup Detections"), systemdUnit: "ids-cleanup", type: "timer" },
listFetcher: { ...mkService("Public Lists Fetcher"), systemdUnit: "ids-list-fetcher", type: "timer" },
mlTraining: { ...mkService("ML Training Settimanale"), systemdUnit: "ids-ml-training", type: "timer" },
};
// Node.js Backend - always running if this endpoint responds
services.nodeBackend.status = "running";
services.nodeBackend.healthy = true;
services.nodeBackend.details = { port: 5000, uptime: process.uptime().toFixed(0) + "s" };
// Check ML Backend Python
// Direct probe of the Python backend's /health endpoint with a 5s timeout.
try {
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 5000);
const response = await fetch(`${ML_BACKEND_URL}/health`, { signal: controller.signal });
clearTimeout(timeout);
if (response.ok) {
const data = await response.json();
services.mlBackend.status = "running";
services.mlBackend.healthy = true;
services.mlBackend.details = { modelLoaded: data.ml_model === "loaded", timestamp: data.timestamp };
} else {
services.mlBackend.status = "error";
services.mlBackend.details = { error: `HTTP ${response.status}` };
}
} catch (error: any) {
services.mlBackend.status = "offline";
services.mlBackend.details = { error: error.code === 'ECONNREFUSED' ? "Connessione rifiutata" : error.message };
}
// Check Database
try {
const conn = await storage.testConnection();
if (conn) {
services.database.status = "running";
services.database.healthy = true;
services.database.details = { connected: true };
}
} catch (error: any) {
services.database.status = "error";
services.database.details = { error: error.message };
}
// Check Syslog Parser via database
// Heuristic: the parser is considered alive when any log row landed in the
// last 30 minutes; otherwise report the most recent log ever seen.
try {
const recentLogsResult = await db.execute(
sql`SELECT COUNT(*) as count, MAX(timestamp) as last_log FROM network_logs WHERE timestamp > NOW() - INTERVAL '30 minutes'`
);
const logRows = (recentLogsResult as any).rows || recentLogsResult;
const recentLogCount = parseInt(logRows[0]?.count || "0");
const lastLogTime = logRows[0]?.last_log;
if (recentLogCount > 0) {
services.syslogParser.status = "running";
services.syslogParser.healthy = true;
services.syslogParser.details = { recentLogs30min: recentLogCount, lastLog: lastLogTime };
} else {
const lastLogEverResult = await db.execute(sql`SELECT MAX(timestamp) as last_log FROM network_logs`);
const lastLogEverRows = (lastLogEverResult as any).rows || lastLogEverResult;
services.syslogParser.status = "offline";
services.syslogParser.healthy = false;
services.syslogParser.details = { recentLogs30min: 0, lastLog: lastLogEverRows[0]?.last_log || "Mai", warning: "Nessun log negli ultimi 30 minuti" };
}
} catch (error: any) {
services.syslogParser.status = "error";
services.syslogParser.details = { error: error.message };
}
// Check Analytics Aggregator (via last record timestamp)
// NOTE(review): the 2h freshness window assumes the aggregator runs at least
// hourly — confirm against the ids-analytics-aggregator timer schedule.
try {
const latestAnalytics = await db.select().from(networkAnalytics).orderBy(desc(networkAnalytics.date), desc(networkAnalytics.hour)).limit(1);
if (latestAnalytics.length > 0) {
const lastRun = new Date(latestAnalytics[0].date);
const hoursSince = (Date.now() - lastRun.getTime()) / (1000 * 60 * 60);
if (hoursSince < 2) {
services.analyticsAggregator.status = "running";
services.analyticsAggregator.healthy = true;
services.analyticsAggregator.details = { lastRun: latestAnalytics[0].date, hoursSinceLastRun: hoursSince.toFixed(1) };
} else {
services.analyticsAggregator.status = "idle";
services.analyticsAggregator.details = { lastRun: latestAnalytics[0].date, hoursSinceLastRun: hoursSince.toFixed(1), warning: "Nessuna aggregazione nelle ultime 2 ore" };
}
} else {
services.analyticsAggregator.status = "idle";
services.analyticsAggregator.details = { error: "Nessun dato analytics trovato" };
}
} catch (error: any) {
services.analyticsAggregator.status = "error";
services.analyticsAggregator.details = { error: error.message };
}
// Check Auto Block (via recent blocked detections)
// Heuristic: "running" only when something was blocked in the last 10
// minutes; an idle auto-blocker is still reported healthy.
try {
const recentBlockResult = await db.execute(
sql`SELECT COUNT(*) as count, MAX(detected_at) as last_block FROM detections WHERE blocked = true AND detected_at > NOW() - INTERVAL '10 minutes'`
);
const blockRows = (recentBlockResult as any).rows || recentBlockResult;
const recentBlocks = parseInt(blockRows[0]?.count || "0");
const lastBlock = blockRows[0]?.last_block;
const totalBlockedResult = await db.execute(sql`SELECT COUNT(*) as count FROM detections WHERE blocked = true`);
const totalBlockedRows = (totalBlockedResult as any).rows || totalBlockedResult;
const totalBlocked = parseInt(totalBlockedRows[0]?.count || "0");
services.autoBlock.status = recentBlocks > 0 ? "running" : "idle";
services.autoBlock.healthy = true;
services.autoBlock.details = {
recentBlocks10min: recentBlocks,
totalBlocked,
lastBlock: lastBlock || "Mai",
interval: "ogni 5 minuti"
};
} catch (error: any) {
services.autoBlock.status = "error";
services.autoBlock.details = { error: error.message };
}
// Check Cleanup (via absence of old detections)
// Healthy only when no detections older than 48h remain in the table.
try {
const oldDetResult = await db.execute(
sql`SELECT COUNT(*) as count FROM detections WHERE detected_at < NOW() - INTERVAL '48 hours'`
);
const oldRows = (oldDetResult as any).rows || oldDetResult;
const oldDetections = parseInt(oldRows[0]?.count || "0");
const totalDetResult = await db.execute(sql`SELECT COUNT(*) as count FROM detections`);
const totalRows = (totalDetResult as any).rows || totalDetResult;
const totalDetections = parseInt(totalRows[0]?.count || "0");
services.cleanup.status = oldDetections === 0 ? "running" : "idle";
services.cleanup.healthy = oldDetections === 0;
services.cleanup.details = {
oldDetections48h: oldDetections,
totalDetections,
interval: "ogni ora",
warning: oldDetections > 0 ? `${oldDetections} detection vecchie non ancora pulite` : undefined
};
} catch (error: any) {
services.cleanup.status = "error";
services.cleanup.details = { error: error.message };
}
// Check List Fetcher (via public lists last_updated)
// Healthy when the most recent fetch across all lists is under 1 hour old.
try {
const listsResult = await db.execute(
sql`SELECT COUNT(*) as total,
COUNT(*) FILTER (WHERE enabled = true) as enabled,
MAX(last_fetch) as last_fetch
FROM public_lists`
);
const listRows = (listsResult as any).rows || listsResult;
const totalLists = parseInt(listRows[0]?.total || "0");
const enabledLists = parseInt(listRows[0]?.enabled || "0");
const lastFetched = listRows[0]?.last_fetch;
if (lastFetched) {
const hoursSince = (Date.now() - new Date(lastFetched).getTime()) / (1000 * 60 * 60);
services.listFetcher.status = hoursSince < 1 ? "running" : "idle";
services.listFetcher.healthy = hoursSince < 1;
services.listFetcher.details = { totalLists, enabledLists, lastFetched, hoursSinceLastFetch: hoursSince.toFixed(1), interval: "ogni 10 minuti" };
} else {
services.listFetcher.status = "idle";
services.listFetcher.details = { totalLists, enabledLists, lastFetched: "Mai", interval: "ogni 10 minuti" };
}
} catch (error: any) {
services.listFetcher.status = "error";
services.listFetcher.details = { error: error.message };
}
// Check ML Training (via training history)
// Healthy when the last training run is under 8 days old (weekly cadence).
try {
const latestTraining = await db.select().from(trainingHistory).orderBy(desc(trainingHistory.trainedAt)).limit(1);
if (latestTraining.length > 0) {
const lastTrainDate = new Date(latestTraining[0].trainedAt);
const daysSince = (Date.now() - lastTrainDate.getTime()) / (1000 * 60 * 60 * 24);
services.mlTraining.status = daysSince < 8 ? "running" : "idle";
services.mlTraining.healthy = daysSince < 8;
services.mlTraining.details = {
lastTraining: latestTraining[0].trainedAt,
daysSinceLastTraining: daysSince.toFixed(1),
lastStatus: latestTraining[0].status,
lastModel: latestTraining[0].modelVersion,
recordsProcessed: latestTraining[0].recordsProcessed,
interval: "settimanale"
};
} else {
services.mlTraining.status = "idle";
services.mlTraining.details = { lastTraining: "Mai", interval: "settimanale" };
}
} catch (error: any) {
services.mlTraining.status = "error";
services.mlTraining.details = { error: error.message };
}
res.json({ services });
} catch (error: any) {
res.status(500).json({ error: "Failed to check services status" });
}
});
// Service Control Endpoints (Secured - only allow specific systemd operations)
// Closed allow-list of systemd units this API may touch; anything else is rejected.
const ALLOWED_SERVICES = [
"ids-ml-backend", "ids-syslog-parser", "ids-backend",
"ids-analytics-aggregator", "ids-auto-block", "ids-cleanup",
"ids-list-fetcher", "ids-ml-training"
];
// Closed allow-list of systemctl verbs.
const ALLOWED_ACTIONS = ["start", "stop", "restart", "status"];
app.post("/api/services/:service/:action", async (req, res) => {
try {
const { service, action } = req.params;
// Validate service name
if (!ALLOWED_SERVICES.includes(service)) {
return res.status(400).json({ error: "Invalid service name" });
}
// Validate action
if (!ALLOWED_ACTIONS.includes(action)) {
return res.status(400).json({ error: "Invalid action" });
}
// Execute systemd command
const { exec } = await import("child_process");
const { promisify } = await import("util");
const execAsync = promisify(exec);
try {
const systemdAction = action === "status" ? "status" : action;
const { stdout, stderr } = await execAsync(
`systemctl ${systemdAction} ${service}`,
{ timeout: 10000 }
);
res.json({
success: true,
service,
action,
output: stdout || stderr,
timestamp: new Date().toISOString(),
});
} catch (execError: any) {
// systemctl returns non-zero exit for stopped services in status command
if (action === "status") {
res.json({
success: true,
service,
action,
output: execError.stdout || execError.stderr,
timestamp: new Date().toISOString(),
});
} else {
throw execError;
}
}
} catch (error: any) {
res.status(500).json({
error: "Service control failed",
details: error.message,
});
}
});
// Wrap the configured Express app in a plain HTTP server and hand it back;
// the caller is responsible for binding/listening.
const httpServer = createServer(app);
return httpServer;
}