diff --git a/attached_assets/Pasted--AGGIORNAMENTO-COMPLETATO--1763997526366_1763997526366.txt b/attached_assets/Pasted--AGGIORNAMENTO-COMPLETATO--1763997526366_1763997526366.txt
new file mode 100644
index 0000000..1f50f90
--- /dev/null
+++ b/attached_assets/Pasted--AGGIORNAMENTO-COMPLETATO--1763997526366_1763997526366.txt
@@ -0,0 +1,55 @@
+
+╔═══════════════════════════════════════════════╗
+║            ✅ UPDATE COMPLETE                 ║
+╚═══════════════════════════════════════════════╝
+
+📋 SYSTEM CHECK:
+  • Backend log:  tail -f /var/log/ids/backend.log
+  • Frontend log: tail -f /var/log/ids/frontend.log
+  • Backend API:  curl http://localhost:8000/health
+  • Frontend:     curl http://localhost:5000
+
+📊 SERVICE STATUS:
+root     20860  0.0  0.0   18344   6400 pts/3  S+  Nov22  0:00 sudo tail -f /var/log/ids/syslog_parser.log
+root     20862  0.0  0.0    3088   1536 pts/3  S+  Nov22  0:02 tail -f /var/log/ids/syslog_parser.log
+ids      64096  4.0  1.8 1394944 291304 ?      Ssl 12:12  9:44 /opt/ids/python_ml/venv/bin/python3 main.py
+ids      64102 16.0  0.1   52084  19456 ?      Ss  12:12 38:36 /opt/ids/python_ml/venv/bin/python3 syslog_parser.py
+root     69074  0.0  0.2  731152  33612 pts/0  Rl+ 16:13  0:00 /usr/bin/node /usr/bin/npm run dev
+
+[root@ids ids]# sudo /opt/ids/deployment/setup_analytics_timer.sh
+╔═══════════════════════════════════════════════╗
+║          IDS Analytics Timer Setup            ║
+╚═══════════════════════════════════════════════╝
+
+📋 Copying systemd files...
+🔄 Reloading systemd daemon...
+⚙  Enabling and starting timer...
+
+📊 Timer status:
+● ids-analytics-aggregator.timer - IDS Analytics Aggregation Timer - Runs every hour
+     Loaded: loaded (/etc/systemd/system/ids-analytics-aggregator.timer; enabled; preset: disabled)
+     Active: active (waiting) since Mon 2025-11-24 12:13:35 CET; 4h 3min ago
+      Until: Mon 2025-11-24 12:13:35 CET; 4h 3min ago
+    Trigger: Mon 2025-11-24 17:05:00 CET; 47min left
+   Triggers: ● ids-analytics-aggregator.service
+
+Nov 24 12:13:35 ids.alfacom.it systemd[1]: Stopped IDS Analytics Aggregation Timer - Runs every hour.
+Nov 24 12:13:35 ids.alfacom.it systemd[1]: Stopping IDS Analytics Aggregation Timer - Runs every hour...
+Nov 24 12:13:35 ids.alfacom.it systemd[1]: Started IDS Analytics Aggregation Timer - Runs every hour.
+
+📅 Next runs:
+NEXT                         LEFT        LAST                         PASSED     UNIT                            ACTIVATES
+Mon 2025-11-24 17:05:00 CET  47min left  Mon 2025-11-24 16:05:01 CET  12min ago  ids-analytics-aggregator.timer  ids-analytics-aggregator.service
+
+1 timers listed.
+Pass --all to see loaded but inactive timers, too.
+
+╔═══════════════════════════════════════════════╗
+║        ✅ ANALYTICS TIMER CONFIGURED          ║
+╚═══════════════════════════════════════════════╝
+
+📝 Useful commands:
+   Timer status:     sudo systemctl status ids-analytics-aggregator.timer
+   Next runs:        sudo systemctl list-timers
+   Aggregation log:  sudo journalctl -u ids-analytics-aggregator -f
+   Manual run:       sudo systemctl start ids-analytics-aggregator
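Note on the hand-off between the timer above and the aggregator patched below: the timer fires a few minutes past every hour (next trigger 17:05 in the output), and the aggregation query filters detections between hour_start and hour_end. The window computation itself is not part of this diff, so the sketch below only illustrates the assumed behaviour (aggregate the last fully completed hour); the helper name previous_hour_window is hypothetical.

# A minimal sketch, assuming the hourly job aggregates the last completed hour.
# previous_hour_window is a hypothetical helper, not part of analytics_aggregator.py.
from datetime import datetime, timedelta
from typing import Optional, Tuple


def previous_hour_window(now: Optional[datetime] = None) -> Tuple[datetime, datetime]:
    """Return (hour_start, hour_end) covering the last fully elapsed hour."""
    now = now or datetime.now()
    hour_end = now.replace(minute=0, second=0, microsecond=0)   # top of the current hour
    hour_start = hour_end - timedelta(hours=1)                  # one hour earlier
    return hour_start, hour_end


if __name__ == "__main__":
    start, end = previous_hour_window()
    print(f"Aggregating detections from {start} to {end}")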
diff --git a/python_ml/analytics_aggregator.py b/python_ml/analytics_aggregator.py
index 8bf856c..d86c9ba 100644
--- a/python_ml/analytics_aggregator.py
+++ b/python_ml/analytics_aggregator.py
@@ -90,19 +90,22 @@ class AnalyticsAggregator:
                 return
 
             # 2. Identify attacker IPs (detections within the hour)
+            # NOTE: DISTINCT dropped: it spanned every column, so one IP could still return several rows (one per anomaly_type)
             cursor.execute("""
-                SELECT DISTINCT source_ip, anomaly_type, risk_score, country
+                SELECT source_ip, anomaly_type, risk_score, country
                 FROM detections
                 WHERE detected_at >= %s AND detected_at < %s
+                ORDER BY source_ip, risk_score DESC
             """, (hour_start, hour_end))
 
             attacker_ips = {}
-            attacks_by_type = defaultdict(int)
 
             for row in cursor.fetchall():
                 ip = row['source_ip']
-                attacker_ips[ip] = row
-                attacks_by_type[row['anomaly_type']] += 1
+                # Keep only the detection with the highest risk_score per IP
+                # (avoids duplicates while still tracking the dominant type)
+                if ip not in attacker_ips:
+                    attacker_ips[ip] = row
 
             # 3. Classify traffic: normal vs attack
             total_packets = 0
@@ -114,6 +117,7 @@ class AnalyticsAggregator:
 
             traffic_by_country = defaultdict(lambda: {'normal': 0, 'attacks': 0})
             attacks_by_country = defaultdict(int)
+            attacks_by_type = defaultdict(int)
 
             top_normal = []
             top_attackers = []
@@ -130,14 +134,26 @@ class AnalyticsAggregator:
                     attack_packets += packets
                     attack_bytes += bytes_count
 
+                    # Count packets per attack type (not just occurrence counts!)
+                    anomaly_type = attacker_ips[ip].get('anomaly_type')
+                    if anomaly_type:
+                        attacks_by_type[anomaly_type] += packets
+                    else:
+                        # Fallback for attacks without a classified type
+                        attacks_by_type['unknown'] += packets
+
                     country = attacker_ips[ip].get('country')
                     if country:
                         traffic_by_country[country]['attacks'] += packets
-                        attacks_by_country[country] += 1
+                        attacks_by_country[country] += packets
+                    else:
+                        # Fallback for attacks without geolocation
+                        traffic_by_country['Unknown']['attacks'] += packets
+                        attacks_by_country['Unknown'] += packets
 
                     top_attackers.append({
                         'ip': ip,
-                        'country': country,
+                        'country': country or 'Unknown',
                         'risk_score': float(attacker_ips[ip]['risk_score']),
                         'packets': packets,
                         'bytes': bytes_count
@@ -218,10 +234,21 @@ class AnalyticsAggregator:
 
             conn.commit()
 
+            # Consistency check on the breakdowns
+            breakdown_total_type = sum(attacks_by_type.values())
+            breakdown_total_country = sum(attacks_by_country.values())
+
             print(f"[ANALYTICS] ✅ Aggregation complete:")
             print(f"   - Total: {total_packets} packets, {len(traffic_by_ip)} unique IPs")
             print(f"   - Normal: {normal_packets} packets ({normal_packets*100//total_packets if total_packets else 0}%)")
             print(f"   - Attacks: {attack_packets} packets ({attack_packets*100//total_packets if total_packets else 0}%), {len(attacker_ips)} IPs")
+            print(f"   - Breakdown by type: {breakdown_total_type} packets (match: {breakdown_total_type == attack_packets})")
+            print(f"   - Breakdown by country: {breakdown_total_country} packets (match: {breakdown_total_country == attack_packets})")
+
+            if breakdown_total_type != attack_packets:
+                print(f"   ⚠️ Type MISMATCH: {attack_packets - breakdown_total_type} packets unclassified")
+            if breakdown_total_country != attack_packets:
+                print(f"   ⚠️ Country MISMATCH: {attack_packets - breakdown_total_country} packets without geo")
 
         except Exception as e:
             print(f"[ANALYTICS] ❌ Hourly aggregation error: {e}")
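The first hunk replaces SELECT DISTINCT with ORDER BY source_ip, risk_score DESC plus a first-row-wins dict, so each IP keeps only its highest-risk detection. A standalone sketch of that reduction follows, usable as a quick sanity test; the sample rows and the helper name dedupe_highest_risk are made up for illustration.

# Minimal sketch of the "first row wins" reduction used in the first hunk.
# Rows are assumed to arrive sorted by (source_ip, risk_score DESC),
# exactly as the new ORDER BY clause returns them.
def dedupe_highest_risk(rows):
    attacker_ips = {}
    for row in rows:
        ip = row['source_ip']
        if ip not in attacker_ips:   # first row per IP carries the highest risk_score
            attacker_ips[ip] = row
    return attacker_ips


rows = [  # hypothetical detections, already sorted as the query would return them
    {'source_ip': '203.0.113.7', 'anomaly_type': 'port_scan', 'risk_score': 0.9, 'country': 'IT'},
    {'source_ip': '203.0.113.7', 'anomaly_type': 'brute_force', 'risk_score': 0.4, 'country': 'IT'},
    {'source_ip': '198.51.100.2', 'anomaly_type': None, 'risk_score': 0.6, 'country': None},
]

deduped = dedupe_highest_risk(rows)
assert deduped['203.0.113.7']['anomaly_type'] == 'port_scan'   # highest-risk row kept
assert len(deduped) == 2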
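The later hunks route every attack packet into attacks_by_type and attacks_by_country, with 'unknown'/'Unknown' fallback buckets so that both breakdowns always sum to attack_packets, which is exactly the invariant the new consistency check prints. A small self-contained sketch of that invariant; the dictionaries and packet counts are illustrative only.

# Sketch of the breakdown invariant enforced by the fallback buckets.
from collections import defaultdict

traffic = {'203.0.113.7': 120, '198.51.100.2': 30}   # packets per attacker IP (made up)
meta = {
    '203.0.113.7': {'anomaly_type': 'port_scan', 'country': 'IT'},
    '198.51.100.2': {'anomaly_type': None, 'country': None},   # unclassified, no geo
}

attacks_by_type = defaultdict(int)
attacks_by_country = defaultdict(int)
attack_packets = 0

for ip, packets in traffic.items():
    attack_packets += packets
    attacks_by_type[meta[ip]['anomaly_type'] or 'unknown'] += packets      # same fallback as the patch
    attacks_by_country[meta[ip]['country'] or 'Unknown'] += packets

# With the fallbacks in place, both breakdowns cover every attack packet.
assert sum(attacks_by_type.values()) == attack_packets
assert sum(attacks_by_country.values()) == attack_packets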