diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..1750ff7 --- /dev/null +++ b/.env.example @@ -0,0 +1,15 @@ +# Database PostgreSQL +PGHOST=localhost +PGPORT=5432 +PGDATABASE=ids_database +PGUSER=ids_user +PGPASSWORD=ids_password_change_me + +# Session Secret (genera una stringa random sicura) +SESSION_SECRET=change_me_to_random_string_minimum_32_chars + +# Python Backend URL (per frontend) +VITE_PYTHON_API_URL=http://localhost:8000 + +# Node Environment +NODE_ENV=production diff --git a/.replit b/.replit index af7007c..08dde1b 100644 --- a/.replit +++ b/.replit @@ -14,6 +14,14 @@ run = ["npm", "run", "start"] localPort = 5000 externalPort = 80 +[[ports]] +localPort = 42197 +externalPort = 3000 + +[[ports]] +localPort = 43291 +externalPort = 3001 + [env] PORT = "5000" diff --git a/RISPOSTA_DEPLOYMENT.md b/RISPOSTA_DEPLOYMENT.md new file mode 100644 index 0000000..50cde72 --- /dev/null +++ b/RISPOSTA_DEPLOYMENT.md @@ -0,0 +1,344 @@ +# πŸ“‹ Risposta alle Tue Domande + +## 1️⃣ Sistema di Esportazione (come guardianshift) + +Ho analizzato il progetto precedente e creato la stessa struttura di deployment: + +### βœ… Script Creati (in `/deployment/`) + +1. **`setup_almalinux.sh`** - Installazione iniziale su AlmaLinux 9 + - Installa Python 3.11, Node.js 20, PostgreSQL + - Configura database e utente `ids` + - Prepara ambiente di produzione + +2. **`setup_crontab.sh`** - Configurazione automazione + - Training ML ogni 12 ore (00:00 e 12:00) + - Detection automatica ogni 5 minuti + - Monitoring processi (riavvio automatico se down) + - Backup database giornaliero + - Restart settimanale completo + +3. **`update_from_git.sh`** - Aggiornamenti da git.alfacom.it + - Pull automatico da git + - Backup e restore configurazione locale (.env) + - Aggiornamento dipendenze + - Sync database schema + - Restart automatico servizi + +4. **Script di supporto**: + - `check_backend.sh` - Monitora backend Python + - `check_frontend.sh` - Monitora frontend Node + - `restart_all.sh` - Restart completo sistema + - `backup_db.sh` - Backup PostgreSQL + +### πŸ”„ Workflow Aggiornamento + +```bash +# Sul server AlmaLinux +cd /opt/ids +sudo -u ids ./deployment/update_from_git.sh +``` + +**Cosa fa lo script:** +1. Salva `.env` (password NON vanno su git!) +2. `git pull origin main` da git.alfacom.it +3. Ripristina `.env` locale +4. Aggiorna dipendenze Node.js e Python +5. Sync schema database +6. Restart automatico tutti i servizi + +## 2️⃣ Deployment su AlmaLinux 9 con Git + +### πŸ“¦ Installazione Iniziale + +```bash +# 1. Clone da git.alfacom.it +cd /opt/ids +git clone https://git.alfacom.it/your-repo/ids.git . + +# 2. Esegui setup +./deployment/setup_almalinux.sh + +# 3. Configura environment (IMPORTANTE!) +cp .env.example .env +nano .env # Inserisci password sicure (vedi sotto) + +# 4. Installa dipendenze +npm install +cd python_ml && pip3.11 install -r requirements.txt + +# 5. Setup database +npm run db:push + +# 6. Configura syslog (per ricevere log router) +./deployment/setup_syslog_server.sh + +# 7. 
Avvia sistema +./deployment/setup_crontab.sh +``` + +### πŸ” Configurazione Sicura (.env) + +**File `.env` NON va mai committato su git!** + +```bash +# Genera password sicure +PGPASSWORD=$(openssl rand -base64 32) +SESSION_SECRET=$(openssl rand -base64 32) + +# Modifica .env +nano .env +``` + +File `.env`: +```bash +PGHOST=localhost +PGPORT=5432 +PGDATABASE=ids_database +PGUSER=ids_user +PGPASSWORD= # openssl rand -base64 32 +SESSION_SECRET= # openssl rand -base64 32 +VITE_PYTHON_API_URL=http://localhost:8000 +NODE_ENV=production +``` + +### πŸ”„ Aggiornamenti Futuri + +```bash +cd /opt/ids +sudo -u ids ./deployment/update_from_git.sh +``` + +**Git mantiene solo codice, mai password!** + +### πŸ“ Struttura su Server + +``` +/opt/ids/ # Directory principale +β”œβ”€β”€ .env # Configurazione locale (NON su git!) +β”œβ”€β”€ deployment/ # Script deployment +β”‚ β”œβ”€β”€ setup_almalinux.sh +β”‚ β”œβ”€β”€ setup_crontab.sh +β”‚ β”œβ”€β”€ update_from_git.sh +β”‚ └── ... +β”œβ”€β”€ python_ml/ # Backend Python +β”‚ β”œβ”€β”€ main.py # FastAPI +β”‚ β”œβ”€β”€ ml_analyzer.py # ML core +β”‚ β”œβ”€β”€ mikrotik_manager.py +β”‚ └── syslog_parser.py # Parser log router +β”œβ”€β”€ client/ # Frontend React +β”œβ”€β”€ server/ # Backend Node.js +└── backups/ # Backup database (auto) + +/var/log/ids/ # Log sistema +β”œβ”€β”€ backend.log +β”œβ”€β”€ frontend.log +β”œβ”€β”€ training.log +β”œβ”€β”€ detect.log +└── syslog_parser.log + +/var/log/mikrotik/ # Log router in arrivo +└── raw.log +``` + +## 3️⃣ Raccolta Dati dai Router MikroTik + +**Questo Γ¨ il punto CRITICO!** Ecco come funziona: + +### πŸ”„ Flusso Completo + +``` +Router MikroTik (10+ router) + β”‚ + β”‚ Syslog UDP:514 + β”‚ (log firewall, connessioni) + β–Ό +Server AlmaLinux - RSyslog + β”‚ Riceve log + β”‚ /var/log/mikrotik/raw.log + β–Ό +syslog_parser.py (Python) + β”‚ Legge file log in tempo reale + β”‚ Parsa righe (IP, porte, protocollo, ecc) + β–Ό +PostgreSQL Database + β”‚ Tabella: network_logs + β”‚ (timestamp, source_ip, dest_ip, protocol, ecc) + β–Ό +ML Analyzer (Python) + β”‚ Training & Detection + β–Ό +Backend FastAPI + β”‚ API per frontend + β–Ό +Dashboard React +``` + +### πŸ“‘ Configurazione Router MikroTik + +**Su OGNI router MikroTik**, esegui: + +```mikrotik +# 1. Configura destinazione syslog +/system logging action +add name=ids-server target=remote remote=192.168.1.100 remote-port=514 + +# IMPORTANTE: Sostituisci 192.168.1.100 con IP del tuo server AlmaLinux! + +# 2. Abilita logging firewall +/system logging +add action=ids-server topics=firewall,info +add action=ids-server topics=account,info + +# 3. Aggiungi regole firewall per loggare connessioni +/ip firewall filter +add chain=forward action=accept log=yes log-prefix="ACCEPT: " comment="Log accepted" +add chain=forward action=drop log=yes log-prefix="DROP: " comment="Log dropped" +``` + +### πŸ–₯️ Configurazione Server AlmaLinux + +```bash +# 1. Installa e configura rsyslog +./deployment/setup_syslog_server.sh + +# 2. Apri firewall per porta 514/UDP +firewall-cmd --permanent --add-port=514/udp +firewall-cmd --reload + +# 3. 
Avvia syslog parser +cd /opt/ids/python_ml +nohup python3.11 syslog_parser.py > /var/log/ids/syslog_parser.log 2>&1 & +``` + +### πŸ” Esempio Log Router β†’ Database + +**Log inviato dal router:** +``` +Jan 15 10:30:45 router1 firewall,info: DROP: src-address=203.0.113.45:54321->192.168.1.10:80, proto TCP, len 60 +``` + +**Parsato da `syslog_parser.py`:** +```python +{ + "timestamp": "2025-01-15 10:30:45", + "router_name": "router1", + "source_ip": "203.0.113.45", + "source_port": 54321, + "destination_ip": "192.168.1.10", + "destination_port": 80, + "protocol": "tcp", + "packet_length": 60, + "action": "drop" +} +``` + +**Salvato in database PostgreSQL:** +```sql +INSERT INTO network_logs +(timestamp, router_name, source_ip, source_port, + destination_ip, destination_port, protocol, + packet_length, action, raw_message) +VALUES (...); +``` + +### βœ… Verifica Sistema Funzionante + +```bash +# 1. Verifica rsyslog in ascolto +netstat -ulnp | grep 514 + +# 2. Verifica log in arrivo dai router +tail -f /var/log/mikrotik/raw.log + +# 3. Verifica parser funzionante +tail -f /var/log/ids/syslog_parser.log + +# 4. Verifica database popolato +psql -U ids_user -d ids_database -c "SELECT COUNT(*) FROM network_logs;" + +# 5. Verifica log recenti +psql -U ids_user -d ids_database -c "SELECT * FROM network_logs ORDER BY timestamp DESC LIMIT 10;" +``` + +### πŸ“Š Performance + +- **10+ router** β†’ ~100-1000 log/minuto per router +- **Totale** β†’ ~1000-10000 log/minuto +- **Parser** β†’ Processa in tempo reale (<1ms per log) +- **Database** β†’ Commit batch ogni 100 log + +### βš™οΈ Automazione + +**Syslog parser giΓ  incluso in crontab:** +```bash +# Monitoring ogni 5 minuti (riavvio automatico se down) +*/5 * * * * /opt/ids/deployment/check_syslog_parser.sh +``` + +## πŸ“š Documentazione Completa + +1. **`deployment/README_DEPLOYMENT.md`** - Guida completa deployment +2. **`deployment/CONFIGURAZIONE_ROUTER_MIKROTIK.md`** - Setup router +3. **`README.md`** - Documentazione generale sistema +4. 
**`python_ml/README.md`** - Dettagli backend Python ML + +## 🎯 Checklist Rapida + +### Installazione +- [ ] Server AlmaLinux 9 pronto +- [ ] Clone da git.alfacom.it in `/opt/ids` +- [ ] Eseguito `setup_almalinux.sh` +- [ ] Configurato `.env` con password sicure +- [ ] Installate dipendenze (npm install, pip install) +- [ ] Database schema sincronizzato (npm run db:push) + +### Raccolta Log Router +- [ ] Eseguito `setup_syslog_server.sh` +- [ ] Firewall aperto porta 514/UDP +- [ ] Router configurati per inviare log (vedi CONFIGURAZIONE_ROUTER_MIKROTIK.md) +- [ ] Log visibili in `/var/log/mikrotik/raw.log` +- [ ] `syslog_parser.py` in esecuzione +- [ ] Database `network_logs` si popola + +### Automazione +- [ ] Eseguito `setup_crontab.sh` +- [ ] Backend Python avviato (porta 8000) +- [ ] Frontend Node avviato (porta 5000) +- [ ] Training automatico ogni 12h +- [ ] Detection automatica ogni 5min +- [ ] Backup automatico funzionante + +### Primo Training +- [ ] Atteso 24h per raccolta log +- [ ] Almeno 10.000 log nel database +- [ ] Eseguito primo training manuale +- [ ] Modello ML salvato +- [ ] Detection funzionante + +## πŸš€ Comandi Rapidi + +```bash +# Aggiornamento da git +cd /opt/ids && sudo -u ids ./deployment/update_from_git.sh + +# Restart completo +/opt/ids/deployment/restart_all.sh + +# Verifica log +tail -f /var/log/mikrotik/raw.log +tail -f /var/log/ids/backend.log + +# Verifica database +psql -U ids_user -d ids_database -c "SELECT COUNT(*) FROM network_logs;" + +# Training manuale +curl -X POST http://localhost:8000/train -H "Content-Type: application/json" -d '{"max_records": 10000}' + +# Detection manuale +curl -X POST http://localhost:8000/detect -H "Content-Type: application/json" -d '{"auto_block": true}' +``` + +--- + +**Sistema completo per deployment su AlmaLinux 9 con raccolta automatica log da router MikroTik! πŸ›‘οΈ** diff --git a/deployment/.gitignore b/deployment/.gitignore new file mode 100644 index 0000000..22fdfca --- /dev/null +++ b/deployment/.gitignore @@ -0,0 +1,49 @@ +# File da NON committare su git + +# Environment variables (contiene password!) +.env +.env.local +.env.*.local + +# Backup configurazioni locali +*.backup + +# Log files +*.log + +# File PID +*.pid + +# Backup database +*.sql +*.sql.gz +backups/ + +# Modelli ML addestrati (troppo grandi, rigenerare sul server) +python_ml/models/*.pkl +python_ml/models/*.joblib + +# Cache Python +__pycache__/ +*.py[cod] +*$py.class +*.so + +# Node +node_modules/ +npm-debug.log* +yarn-debug.log* +yarn-error.log* +dist/ +.vite/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# OS +.DS_Store +Thumbs.db diff --git a/deployment/CONFIGURAZIONE_ROUTER_MIKROTIK.md b/deployment/CONFIGURAZIONE_ROUTER_MIKROTIK.md new file mode 100644 index 0000000..66632d3 --- /dev/null +++ b/deployment/CONFIGURAZIONE_ROUTER_MIKROTIK.md @@ -0,0 +1,216 @@ +# πŸ“‘ Configurazione Router MikroTik per IDS + +Questa guida spiega come configurare i router MikroTik per inviare i log al server IDS. + +## 🎯 Obiettivo + +Configurare i router MikroTik per: +1. Generare log delle connessioni di rete (firewall) +2. Inviare i log tramite Syslog al server AlmaLinux +3. Popolare automaticamente il database PostgreSQL + +## πŸ”§ Configurazione Passo-Passo + +### Step 1: Connettiti al Router MikroTik + +Via SSH o Winbox, connettiti al router MikroTik con utente amministratore. 
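+
+A titolo puramente indicativo (IP e utente sono esempi, non valori reali dell'installazione), l'accesso via SSH puΓ² essere:
+
+```bash
+# Accesso SSH al router MikroTik (sostituire IP e utente con quelli reali)
+ssh admin@192.168.88.1
+```
+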
+ +### Step 2: Crea Azione Syslog + +Configura il router per inviare log al server IDS: + +```mikrotik +/system logging action +add name=ids-server target=remote remote=192.168.1.100 remote-port=514 +``` + +**IMPORTANTE**: Sostituisci `192.168.1.100` con l'**IP reale del tuo server AlmaLinux**. + +### Step 3: Abilita Logging Firewall + +Configura quali log inviare al server IDS: + +```mikrotik +/system logging +add action=ids-server topics=firewall,info +add action=ids-server topics=account,info +add action=ids-server topics=system,error +``` + +Questo invia: +- **firewall,info**: Log di connessioni firewall (accept/drop/reject) +- **account,info**: Log di autenticazione +- **system,error**: Errori di sistema + +### Step 4: Abilita Logging Connessioni Firewall + +Aggiungi regole firewall per loggare le connessioni: + +```mikrotik +# Log connessioni accettate (per analisi traffico normale) +/ip firewall filter +add chain=forward action=accept log=yes log-prefix="ACCEPT: " comment="Log accepted connections" + +# Log connessioni droppate (potenziali attacchi) +add chain=forward action=drop log=yes log-prefix="DROP: " comment="Log dropped connections" + +# Log connessioni da internet (input chain) +add chain=input action=accept log=yes log-prefix="INPUT-ACCEPT: " in-interface=ether1 comment="Log input from WAN" +``` + +**NOTA**: Modifica `ether1` con il nome della tua interfaccia WAN. + +### Step 5: (Opzionale) Abilita Connection Tracking + +Per statistiche dettagliate sulle connessioni: + +```mikrotik +/ip firewall connection tracking +set enabled=yes + +/system logging +add action=ids-server topics=firewall,connection +``` + +## πŸ“Š Verifica Configurazione + +### Sul Router MikroTik + +Verifica che i log vengano generati: + +```mikrotik +/log print where topics~"firewall" +``` + +Dovresti vedere log delle connessioni. + +### Sul Server AlmaLinux + +Verifica ricezione log: + +```bash +# Controlla log syslog +tail -f /var/log/mikrotik/raw.log + +# Verifica porta 514 in ascolto +netstat -ulnp | grep 514 + +# Controlla database +psql -U ids_user -d ids_database -c "SELECT COUNT(*) FROM network_logs;" +``` + +## πŸ” Esempio Log MikroTik + +Log firewall tipico che arriva al server: + +``` +Jan 15 10:30:45 router1 firewall,info: DROP: in:ether1 out:bridge, src-mac 00:11:22:33:44:55, proto TCP, 203.0.113.45:54321->192.168.1.10:80, len 60 +``` + +Questo viene parsato e salvato nel database come: + +```json +{ + "timestamp": "2025-01-15 10:30:45", + "router_name": "router1", + "source_ip": "203.0.113.45", + "source_port": 54321, + "destination_ip": "192.168.1.10", + "destination_port": 80, + "protocol": "tcp", + "packet_length": 60, + "action": "drop" +} +``` + +## βš™οΈ Configurazione Avanzata + +### Filtrare Solo Traffico Esterno + +Per loggare solo traffico da internet (non LAN): + +```mikrotik +/ip firewall filter +add chain=forward action=accept log=yes log-prefix="WAN-TRAFFIC: " \ + in-interface=ether1 comment="Log only WAN traffic" +``` + +### Rate Limiting Log + +Per evitare troppi log e sovraccarico: + +```mikrotik +/ip firewall filter +add chain=forward action=accept log=yes log-prefix="ACCEPT: " \ + limit=100/1s,5:packet comment="Rate limited logging" +``` + +Questo limita a max 100 log/secondo. 
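+
+Per verificare lato server che il volume di log resti gestibile, un controllo indicativo Γ¨ contare i log ricevuti per router nel file raw (si assume che l'hostname sia la quarta colonna, come nel formato prodotto dal template rsyslog descritto in questa guida):
+
+```bash
+# Conta i log ricevuti per ogni router (campo 4 = hostname nel formato "Mese Giorno HH:MM:SS hostname ...")
+awk '{print $4}' /var/log/mikrotik/raw.log | sort | uniq -c | sort -rn | head
+```
+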
+ +## πŸš€ Automazione Multi-Router + +Per configurare automaticamente piΓΉ router, crea uno script: + +```bash +#!/bin/bash +# configure_all_routers.sh + +ROUTERS=( + "192.168.1.1:admin:password" + "192.168.2.1:admin:password" + "192.168.3.1:admin:password" +) + +IDS_SERVER="192.168.100.50" # IP server AlmaLinux + +for router in "${ROUTERS[@]}"; do + IFS=':' read -r ip user pass <<< "$router" + + echo "Configurando router $ip..." + + sshpass -p "$pass" ssh "$user@$ip" "/system logging action add name=ids-server target=remote remote=$IDS_SERVER remote-port=514" + sshpass -p "$pass" ssh "$user@$ip" "/system logging add action=ids-server topics=firewall,info" + + echo "Router $ip configurato!" +done +``` + +## πŸ“ Note Importanti + +### Prestazioni +- Ogni router invia ~100-1000 log/minuto in base al traffico +- Il server puΓ² gestire facilmente 10+ router +- I log vengono processati in tempo reale dal syslog_parser.py + +### Sicurezza +- Syslog usa UDP porta 514 (non criptato) +- Configura firewall per accettare solo da IP router fidati: + ```bash + firewall-cmd --permanent --add-rich-rule='rule family="ipv4" source address="192.168.0.0/16" port port="514" protocol="udp" accept' + ``` + +### Risoluzione Problemi + +**Log non arrivano al server:** +1. Verifica connettivitΓ : `ping ` dal router +2. Controlla firewall server: `firewall-cmd --list-all` +3. Verifica rsyslog: `systemctl status rsyslog` + +**Database non si popola:** +1. Verifica syslog_parser in esecuzione: `ps aux | grep syslog_parser` +2. Controlla log parser: `tail -f /var/log/ids/syslog_parser.log` +3. Verifica permessi file: `ls -la /var/log/mikrotik/raw.log` + +## βœ… Checklist Finale + +- [ ] Server rsyslog configurato e in ascolto su porta 514 +- [ ] Firewall server aperto per UDP 514 +- [ ] Router configurati per inviare log a server IDS +- [ ] Log visibili in `/var/log/mikrotik/raw.log` +- [ ] Syslog parser in esecuzione e popola database +- [ ] Database contiene log: `SELECT COUNT(*) FROM network_logs;` +- [ ] Sistema ML puΓ² addestrare con dati reali + +--- + +**Prossimo Step**: Dopo 24h di raccolta log, esegui il primo training ML! diff --git a/deployment/README_DEPLOYMENT.md b/deployment/README_DEPLOYMENT.md new file mode 100644 index 0000000..9436c2e --- /dev/null +++ b/deployment/README_DEPLOYMENT.md @@ -0,0 +1,509 @@ +# πŸš€ Guida Deployment IDS su AlmaLinux 9 + +Guida completa per il deployment del sistema IDS su server AlmaLinux 9 con aggiornamenti tramite git.alfacom.it + +## πŸ“‹ Prerequisiti + +- Server AlmaLinux 9 con accesso root +- Accesso a git.alfacom.it +- Almeno 4GB RAM, 20GB disco +- Connessione di rete ai router MikroTik + +## πŸ”§ Installazione Iniziale + +### Step 1: Preparazione Server + +Connettiti al server AlmaLinux come root: + +```bash +ssh root@ +``` + +### Step 2: Clone Repository + +```bash +cd /tmp +git clone https://git.alfacom.it/your-repo/ids.git +cd ids +``` + +### Step 3: Esegui Installazione Base + +```bash +chmod +x deployment/setup_almalinux.sh +./deployment/setup_almalinux.sh +``` + +Questo script: +- βœ… Aggiorna il sistema +- βœ… Installa Python 3.11, Node.js 20, PostgreSQL +- βœ… Configura database PostgreSQL +- βœ… Crea utente `ids` +- βœ… Crea directory `/opt/ids` + +### Step 4: Clone Repository in Destinazione Finale + +```bash +cd /opt/ids +sudo -u ids git clone https://git.alfacom.it/your-repo/ids.git . +``` + +### Step 5: Configura Environment Variables + +**IMPORTANTE**: NON usare mai password in chiaro nel codice! 
+ +```bash +sudo -u ids cp .env.example .env +sudo -u ids nano .env +``` + +Modifica il file `.env`: + +```bash +# Database PostgreSQL +PGHOST=localhost +PGPORT=5432 +PGDATABASE=ids_database +PGUSER=ids_user +PGPASSWORD= # Usa: openssl rand -base64 32 + +# Session Secret +SESSION_SECRET= # Usa: openssl rand -base64 32 + +# Python Backend URL +VITE_PYTHON_API_URL=http://localhost:8000 + +# Environment +NODE_ENV=production +``` + +**Genera password sicure**: +```bash +# Password database +openssl rand -base64 32 + +# Session secret +openssl rand -base64 32 +``` + +### Step 6: Aggiorna Password Database + +```bash +# Modifica password utente PostgreSQL +sudo -u postgres psql -c "ALTER USER ids_user WITH PASSWORD '';" +``` + +### Step 7: Installa Dipendenze + +```bash +# Dipendenze Node.js +cd /opt/ids +sudo -u ids npm install + +# Dipendenze Python +cd /opt/ids/python_ml +sudo -u ids pip3.11 install -r requirements.txt + +# Sync schema database +cd /opt/ids +sudo -u ids npm run db:push +``` + +### Step 8: Configura Syslog Server + +Questo Γ¨ **CRITICO** per ricevere log dai router: + +```bash +cd /opt/ids/deployment +chmod +x setup_syslog_server.sh +./setup_syslog_server.sh +``` + +Verifica che rsyslog sia in ascolto: +```bash +netstat -ulnp | grep 514 +``` + +### Step 9: Configura Router MikroTik + +Segui la guida: `deployment/CONFIGURAZIONE_ROUTER_MIKROTIK.md` + +Configurazione minima su ogni router: +```mikrotik +/system logging action +add name=ids-server target=remote remote= remote-port=514 + +/system logging +add action=ids-server topics=firewall,info +``` + +### Step 10: Avvia Syslog Parser + +```bash +cd /opt/ids/python_ml +sudo -u ids nohup python3.11 syslog_parser.py > /var/log/ids/syslog_parser.log 2>&1 & +``` + +Verifica log in arrivo: +```bash +tail -f /var/log/mikrotik/raw.log +tail -f /var/log/ids/syslog_parser.log +``` + +### Step 11: Configura Crontab e Avvio Automatico + +```bash +cd /opt/ids/deployment +chmod +x setup_crontab.sh +./setup_crontab.sh +``` + +Questo configura: +- βœ… Backend Python (FastAPI) - avvio automatico +- βœ… Frontend Node.js - avvio automatico +- βœ… Training ML ogni 12 ore +- βœ… Detection ogni 5 minuti +- βœ… Monitoring e restart automatici +- βœ… Backup database giornaliero + +### Step 12: Verifica Sistema + +```bash +# Verifica processi in esecuzione +ps aux | grep -E 'python.*main|npm.*dev|syslog_parser' + +# Verifica API backend +curl http://localhost:8000/health + +# Verifica frontend +curl http://localhost:5000 + +# Verifica database popolato +psql -U ids_user -d ids_database -c "SELECT COUNT(*) FROM network_logs;" +``` + +## πŸ”„ Aggiornamenti da Git + +### Procedura Standard di Aggiornamento + +```bash +cd /opt/ids +sudo -u ids ./deployment/update_from_git.sh +``` + +Questo script: +1. βœ… Salva backup configurazione locale (.env) +2. βœ… Scarica aggiornamenti da git.alfacom.it +3. βœ… Ripristina configurazione locale +4. βœ… Aggiorna dipendenze Node.js e Python +5. βœ… Aggiorna schema database +6. 
βœ… Restart automatico servizi + +### Aggiornamento Manuale (se necessario) + +```bash +cd /opt/ids + +# Backup configurazione +sudo -u ids cp .env .env.backup + +# Pull da git +sudo -u ids git pull origin main + +# Ripristina .env +sudo -u ids cp .env.backup .env + +# Aggiorna dipendenze +sudo -u ids npm install +cd python_ml && sudo -u ids pip3.11 install -r requirements.txt + +# Sync database +cd /opt/ids +sudo -u ids npm run db:push + +# Restart servizi +./deployment/restart_all.sh +``` + +## πŸ“Š Workflow Completo + +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ ROUTER MIKROTIK β”‚ +β”‚ (Generano log firewall via Syslog UDP:514) β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ SERVER ALMALINUX 9 - RSYSLOG β”‚ +β”‚ Riceve log β†’ /var/log/mikrotik/raw.log β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ SYSLOG_PARSER.PY β”‚ +β”‚ Parsa log β†’ Salva in PostgreSQL (network_logs) β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ DATABASE POSTGRESQL β”‚ +β”‚ Tabelle: network_logs, detections, routers, etc. 
β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” + β”‚ β”‚ + β–Ό β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ PYTHON BACKEND β”‚ β”‚ NODE.JS BACKEND β”‚ +β”‚ (FastAPI:8000) β”‚ β”‚ (Express:5000) β”‚ +β”‚ - Training ML β”‚ β”‚ - API REST β”‚ +β”‚ - Detection β”‚ β”‚ - Frontend serve β”‚ +β”‚ - Auto-block IPs β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ + β”‚ β”‚ + β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό + β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” + β”‚ REACT DASHBOARD β”‚ + β”‚ (Browser) β”‚ + β”‚ - Monitoring β”‚ + β”‚ - Gestione router β”‚ + β”‚ - Detections β”‚ + β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +``` + +## ⏰ Operazioni Automatiche + +### Training ML (ogni 12h - 00:00 e 12:00) +```bash +POST http://localhost:8000/train +{ + "max_records": 10000, + "hours_back": 24, + "contamination": 0.01 +} +``` + +### Detection (ogni 5 minuti) +```bash +POST http://localhost:8000/detect +{ + "max_records": 5000, + "auto_block": true, + "risk_threshold": 75 +} +``` + +### Monitoring Processi (ogni 5 minuti) +- Controlla backend Python (porta 8000) +- Controlla frontend Node (porta 5000) +- Riavvia automaticamente se down + +### Backup Database (ogni giorno alle 04:00) +```bash +/opt/ids/backups/ids_backup_YYYYMMDD_HHMMSS.sql.gz +``` +Mantiene 7 giorni di backup. 
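+
+Per verificare che il backup giornaliero venga effettivamente creato e ruotato (percorso e nome file come nello script `backup_db.sh` descritto piΓΉ avanti), un controllo indicativo:
+
+```bash
+# Elenca i backup presenti (ne restano al massimo 7) e verifica che l'ultimo sia leggibile
+ls -lh /opt/ids/backups/
+zcat "$(ls -t /opt/ids/backups/ids_backup_*.sql.gz | head -1)" | head -20
+```
+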
+ +### Restart Completo (ogni domenica 03:00) +```bash +./deployment/restart_all.sh +``` + +## πŸ” Monitoring e Log + +### Log Principali + +```bash +# Backend Python (FastAPI) +tail -f /var/log/ids/backend.log + +# Frontend Node.js +tail -f /var/log/ids/frontend.log + +# Syslog Parser +tail -f /var/log/ids/syslog_parser.log + +# Training ML +tail -f /var/log/ids/training.log + +# Detection +tail -f /var/log/ids/detect.log + +# Crontab +tail -f /var/log/ids/cron.log + +# Log router in arrivo +tail -f /var/log/mikrotik/raw.log +``` + +### Comandi Utili + +```bash +# Stato processi +ps aux | grep -E 'python.*main|npm.*dev|syslog_parser' + +# Stato database +psql -U ids_user -d ids_database + +# Query utili database +psql -U ids_user -d ids_database -c "SELECT COUNT(*) FROM network_logs;" +psql -U ids_user -d ids_database -c "SELECT COUNT(*) FROM detections WHERE blocked = true;" + +# Restart manuale servizi +/opt/ids/deployment/restart_all.sh + +# Visualizza crontab +crontab -u ids -l +``` + +## πŸ”’ Sicurezza + +### Password e Secrets + +- ❌ **MAI** committare `.env` su git +- βœ… Usa `.env.example` come template (committabile) +- βœ… Genera password sicure con `openssl rand -base64 32` +- βœ… Configura `.env` solo sul server di produzione +- βœ… File `.env` ha permessi 600 (solo utente ids puΓ² leggere) + +### Firewall + +```bash +# Porta 514/UDP per Syslog (solo da router fidati) +firewall-cmd --permanent --add-rich-rule='rule family="ipv4" source address="192.168.0.0/16" port port="514" protocol="udp" accept' + +# Porta 5000/TCP per frontend (accesso web) +firewall-cmd --permanent --add-port=5000/tcp + +# Porta 8000/TCP per API backend (solo localhost) +# NON esporre pubblicamente + +firewall-cmd --reload +``` + +### Backup + +```bash +# Backup manuale database +/opt/ids/deployment/backup_db.sh + +# Restore backup +gunzip /opt/ids/backups/ids_backup_YYYYMMDD_HHMMSS.sql.gz +psql -U ids_user -d ids_database < /opt/ids/backups/ids_backup_YYYYMMDD_HHMMSS.sql +``` + +## πŸ› Troubleshooting + +### Log non arrivano dai router + +```bash +# 1. Verifica rsyslog in ascolto +netstat -ulnp | grep 514 + +# 2. Test connettivitΓ  da router +# Sul router MikroTik: +/tool fetch url="http://:514" mode=udp + +# 3. Verifica firewall server +firewall-cmd --list-all + +# 4. Controlla log rsyslog +tail -f /var/log/messages | grep rsyslog +``` + +### Database non si popola + +```bash +# 1. Verifica syslog_parser in esecuzione +ps aux | grep syslog_parser + +# 2. Controlla log parser +tail -f /var/log/ids/syslog_parser.log + +# 3. Verifica file log esiste +ls -la /var/log/mikrotik/raw.log + +# 4. Test manuale parser +cd /opt/ids/python_ml +sudo -u ids python3.11 syslog_parser.py +``` + +### Backend non risponde + +```bash +# 1. Verifica processo +ps aux | grep "python.*main" + +# 2. Controlla log +tail -f /var/log/ids/backend.log + +# 3. Test manuale +cd /opt/ids/python_ml +sudo -u ids python3.11 main.py + +# 4. Verifica dipendenze +pip3.11 list | grep -E 'fastapi|psycopg2|scikit' +``` + +### Frontend non carica + +```bash +# 1. Verifica processo Node +ps aux | grep "npm.*dev" + +# 2. Controlla log +tail -f /var/log/ids/frontend.log + +# 3. Test manuale +cd /opt/ids +sudo -u ids npm run dev + +# 4. 
Verifica dipendenze +npm list +``` + +## πŸ“ Checklist Post-Installazione + +- [ ] Server AlmaLinux 9 aggiornato +- [ ] PostgreSQL installato e configurato +- [ ] Database `ids_database` creato e accessibile +- [ ] File `.env` configurato con password sicure +- [ ] Dipendenze Node.js e Python installate +- [ ] Rsyslog in ascolto su porta 514 +- [ ] Router MikroTik configurati per inviare log +- [ ] Log visibili in `/var/log/mikrotik/raw.log` +- [ ] Syslog parser in esecuzione +- [ ] Database `network_logs` si popola +- [ ] Backend Python risponde su porta 8000 +- [ ] Frontend Node accessibile su porta 5000 +- [ ] Crontab configurato per operazioni automatiche +- [ ] Firewall configurato correttamente +- [ ] Backup automatico funzionante + +## βœ… Prima Esecuzione Training + +Dopo 24h di raccolta log: + +```bash +# Controlla quanti log hai raccolto +psql -U ids_user -d ids_database -c "SELECT COUNT(*) FROM network_logs;" + +# Se hai almeno 10.000 log, esegui training +curl -X POST http://localhost:8000/train \ + -H "Content-Type: application/json" \ + -d '{"max_records": 10000, "hours_back": 24}' + +# Controlla log training +tail -f /var/log/ids/training.log + +# Dopo training, abilita detection automatica (giΓ  configurata in crontab) +``` + +--- + +**Sistema IDS pronto per la produzione! πŸ›‘οΈ** + +Per supporto: controllare log in `/var/log/ids/` e documentazione in `/opt/ids/deployment/` diff --git a/deployment/setup_almalinux.sh b/deployment/setup_almalinux.sh new file mode 100644 index 0000000..1807095 --- /dev/null +++ b/deployment/setup_almalinux.sh @@ -0,0 +1,95 @@ +#!/bin/bash + +# Script di installazione IDS su AlmaLinux 9 +# Eseguire con: chmod +x setup_almalinux.sh && sudo ./setup_almalinux.sh + +# Colori per output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' + +echo -e "${BLUE}πŸ›‘οΈ INSTALLAZIONE IDS - INTRUSION DETECTION SYSTEM${NC}" +echo "====================================================" + +# Verifica esecuzione come root +if [ "$EUID" -ne 0 ]; then + echo -e "${RED}❌ Esegui questo script come root (sudo)${NC}" + exit 1 +fi + +# 1. Aggiorna sistema +echo -e "\n${BLUE}πŸ“¦ Aggiornamento sistema AlmaLinux 9...${NC}" +dnf update -y + +# 2. Installa dipendenze sistema +echo -e "\n${BLUE}πŸ“¦ Installazione dipendenze sistema...${NC}" +dnf install -y epel-release +dnf install -y python3.11 python3.11-pip nodejs npm postgresql-server postgresql-contrib git + +# 3. Installa Node.js 20 (LTS) +echo -e "\n${BLUE}πŸ“¦ Installazione Node.js 20...${NC}" +dnf module reset nodejs -y +dnf module enable nodejs:20 -y +dnf install -y nodejs + +# 4. Inizializza PostgreSQL +echo -e "\n${BLUE}πŸ—„οΈ Inizializzazione PostgreSQL...${NC}" +if [ ! -d "/var/lib/pgsql/data/base" ]; then + postgresql-setup --initdb + systemctl enable postgresql + systemctl start postgresql + echo -e "${GREEN}βœ… PostgreSQL inizializzato${NC}" +else + echo -e "${YELLOW}ℹ️ PostgreSQL giΓ  inizializzato${NC}" +fi + +# 5. Configura PostgreSQL +echo -e "\n${BLUE}πŸ”§ Configurazione PostgreSQL...${NC}" +sudo -u postgres psql -c "CREATE DATABASE ids_database;" 2>/dev/null || echo "Database giΓ  esistente" +sudo -u postgres psql -c "CREATE USER ids_user WITH PASSWORD 'ids_password_change_me';" 2>/dev/null || echo "Utente giΓ  esistente" +sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE ids_database TO ids_user;" 2>/dev/null + +# Configura pg_hba.conf per connessioni locali +PG_HBA="/var/lib/pgsql/data/pg_hba.conf" +if ! 
grep -q "ids_user" "$PG_HBA"; then + echo "local ids_database ids_user md5" >> "$PG_HBA" + echo "host ids_database ids_user 127.0.0.1/32 md5" >> "$PG_HBA" + systemctl restart postgresql +fi + +# 6. Crea utente IDS +echo -e "\n${BLUE}πŸ‘€ Creazione utente ids...${NC}" +if ! id "ids" &>/dev/null; then + useradd -m -s /bin/bash ids + echo -e "${GREEN}βœ… Utente ids creato${NC}" +else + echo -e "${YELLOW}ℹ️ Utente ids giΓ  esistente${NC}" +fi + +# 7. Crea directory di lavoro +echo -e "\n${BLUE}πŸ“ Creazione directory di lavoro...${NC}" +mkdir -p /opt/ids +chown -R ids:ids /opt/ids + +echo -e "\n${GREEN}βœ… INSTALLAZIONE BASE COMPLETATA!${NC}" +echo "" +echo -e "${YELLOW}πŸ“‹ PROSSIMI PASSI:${NC}" +echo -e " 1. Clona il repository da git.alfacom.it:" +echo -e " ${BLUE}cd /opt/ids${NC}" +echo -e " ${BLUE}sudo -u ids git clone https://git.alfacom.it/your-repo/ids.git .${NC}" +echo "" +echo -e " 2. Configura environment variables:" +echo -e " ${BLUE}sudo -u ids nano /opt/ids/.env${NC}" +echo "" +echo -e " 3. Installa dipendenze Node.js:" +echo -e " ${BLUE}cd /opt/ids && sudo -u ids npm install${NC}" +echo "" +echo -e " 4. Installa dipendenze Python:" +echo -e " ${BLUE}cd /opt/ids/python_ml && sudo -u ids pip3.11 install -r requirements.txt${NC}" +echo "" +echo -e " 5. Esegui lo script di configurazione crontab:" +echo -e " ${BLUE}cd /opt/ids/deployment && sudo ./setup_crontab.sh${NC}" +echo "" +echo -e "${GREEN}πŸŽ‰ Sistema pronto per il deployment!${NC}" diff --git a/deployment/setup_crontab.sh b/deployment/setup_crontab.sh new file mode 100644 index 0000000..da6767f --- /dev/null +++ b/deployment/setup_crontab.sh @@ -0,0 +1,259 @@ +#!/bin/bash + +# Script per configurare crontab per il sistema IDS +# Eseguire con: chmod +x setup_crontab.sh && sudo ./setup_crontab.sh + +# Colori per output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' + +echo -e "${BLUE}πŸ”§ CONFIGURAZIONE CRONTAB SISTEMA IDS${NC}" +echo "==================================================" + +# Rileva percorso corrente +IDS_DIR="/opt/ids" +echo -e "${YELLOW}πŸ“ Directory IDS: ${IDS_DIR}${NC}" + +# Percorsi dei log +TRAINING_LOG="/var/log/ids/training.log" +DETECT_LOG="/var/log/ids/detect.log" +FRONTEND_LOG="/var/log/ids/frontend.log" +BACKEND_LOG="/var/log/ids/backend.log" +CRON_LOG="/var/log/ids/cron.log" + +# Crea directory log +mkdir -p /var/log/ids +chown -R ids:ids /var/log/ids + +echo -e "${YELLOW}πŸ“„ Log files:${NC}" +echo -e " β€’ Training: ${TRAINING_LOG}" +echo -e " β€’ Detection: ${DETECT_LOG}" +echo -e " β€’ Frontend: ${FRONTEND_LOG}" +echo -e " β€’ Backend Python: ${BACKEND_LOG}" +echo -e " β€’ Crontab: ${CRON_LOG}" + +# Crea backup del crontab esistente +echo -e "\n${BLUE}πŸ’Ύ Backup crontab esistente...${NC}" +crontab -u ids -l > /tmp/crontab_backup_$(date +%Y%m%d_%H%M%S) 2>/dev/null || echo "Nessun crontab esistente" + +# Crea il nuovo crontab +echo -e "\n${BLUE}βš™οΈ Configurazione nuovo crontab...${NC}" + +cat > /tmp/new_crontab << EOF +# ============================================ +# SISTEMA IDS - CONFIGURAZIONE AUTOMATICA +# ============================================ + +# Training ML ogni 12 ore (alle 00:00 e 12:00) +0 */12 * * * cd ${IDS_DIR}/python_ml && /usr/bin/python3.11 -c "import requests; requests.post('http://localhost:8000/train', json={'max_records': 10000, 'hours_back': 24})" >> ${TRAINING_LOG} 2>&1 + +# Detection automatica ogni 5 minuti +*/5 * * * * cd ${IDS_DIR}/python_ml && /usr/bin/python3.11 -c "import requests; 
requests.post('http://localhost:8000/detect', json={'max_records': 5000, 'auto_block': True, 'risk_threshold': 75})" >> ${DETECT_LOG} 2>&1 + +# Verifica processo backend Python ogni 5 minuti (riavvia se non attivo) +*/5 * * * * ${IDS_DIR}/deployment/check_backend.sh >> ${CRON_LOG} 2>&1 + +# Verifica processo frontend ogni 5 minuti (riavvia se non attivo) +*/5 * * * * ${IDS_DIR}/deployment/check_frontend.sh >> ${CRON_LOG} 2>&1 + +# Pulizia log settimanale (ogni domenica alle 02:00) +0 2 * * 0 find /var/log/ids -name "*.log" -size +100M -exec truncate -s 50M {} \; >> ${CRON_LOG} 2>&1 + +# Restart completo del sistema ogni settimana (domenica alle 03:00) +0 3 * * 0 ${IDS_DIR}/deployment/restart_all.sh >> ${CRON_LOG} 2>&1 + +# Backup database giornaliero (alle 04:00) +0 4 * * * ${IDS_DIR}/deployment/backup_db.sh >> ${CRON_LOG} 2>&1 +EOF + +# Installa il nuovo crontab +crontab -u ids /tmp/new_crontab + +echo -e "${GREEN}βœ… Crontab configurato con successo!${NC}" + +# Crea script di controllo backend Python +echo -e "\n${BLUE}πŸ“œ Creazione script di controllo backend...${NC}" + +cat > ${IDS_DIR}/deployment/check_backend.sh << 'EOF' +#!/bin/bash + +# Script per verificare e riavviare il backend Python se necessario +BACKEND_LOG="/var/log/ids/backend.log" +IDS_DIR="/opt/ids" +PIDFILE="/var/run/ids/backend.pid" + +mkdir -p /var/run/ids + +# Funzione per avviare backend +start_backend() { + echo "$(date): Avvio backend Python FastAPI..." >> "$BACKEND_LOG" + cd "$IDS_DIR/python_ml" + nohup /usr/bin/python3.11 main.py >> "$BACKEND_LOG" 2>&1 & + echo $! > "$PIDFILE" + echo "$(date): Backend avviato con PID $(cat $PIDFILE)" >> "$BACKEND_LOG" +} + +# Verifica se il processo Γ¨ attivo +if [ -f "$PIDFILE" ]; then + PID=$(cat "$PIDFILE") + if ps -p "$PID" > /dev/null 2>&1; then + # Processo attivo, verifica health endpoint + if ! curl -f http://localhost:8000/health > /dev/null 2>&1; then + echo "$(date): Backend non risponde, riavvio..." >> "$BACKEND_LOG" + kill "$PID" 2>/dev/null + sleep 5 + start_backend + fi + else + echo "$(date): Backend non trovato, riavvio..." >> "$BACKEND_LOG" + start_backend + fi +else + echo "$(date): File PID non trovato, avvio backend..." >> "$BACKEND_LOG" + start_backend +fi +EOF + +chmod +x ${IDS_DIR}/deployment/check_backend.sh + +# Crea script di controllo frontend +echo -e "\n${BLUE}πŸ“œ Creazione script di controllo frontend...${NC}" + +cat > ${IDS_DIR}/deployment/check_frontend.sh << 'EOF' +#!/bin/bash + +# Script per verificare e riavviare il frontend se necessario +FRONTEND_LOG="/var/log/ids/frontend.log" +IDS_DIR="/opt/ids" +PIDFILE="/var/run/ids/frontend.pid" + +mkdir -p /var/run/ids + +# Funzione per avviare frontend +start_frontend() { + echo "$(date): Avvio frontend Node.js..." >> "$FRONTEND_LOG" + cd "$IDS_DIR" + nohup npm run dev >> "$FRONTEND_LOG" 2>&1 & + echo $! > "$PIDFILE" + echo "$(date): Frontend avviato con PID $(cat $PIDFILE)" >> "$FRONTEND_LOG" +} + +# Verifica se il processo Γ¨ attivo +if [ -f "$PIDFILE" ]; then + PID=$(cat "$PIDFILE") + if ps -p "$PID" > /dev/null 2>&1; then + # Processo attivo, verifica se risponde + if ! curl -f http://localhost:5000 > /dev/null 2>&1; then + echo "$(date): Frontend non risponde, riavvio..." >> "$FRONTEND_LOG" + kill "$PID" 2>/dev/null + sleep 5 + start_frontend + fi + else + echo "$(date): Frontend non trovato, riavvio..." >> "$FRONTEND_LOG" + start_frontend + fi +else + echo "$(date): File PID non trovato, avvio frontend..." 
>> "$FRONTEND_LOG" + start_frontend +fi +EOF + +chmod +x ${IDS_DIR}/deployment/check_frontend.sh + +# Crea script di restart completo +echo -e "\n${BLUE}πŸ”„ Creazione script di restart...${NC}" + +cat > ${IDS_DIR}/deployment/restart_all.sh << 'EOF' +#!/bin/bash + +# Script per restart completo del sistema IDS +CRON_LOG="/var/log/ids/cron.log" +IDS_DIR="/opt/ids" + +echo "$(date): === RESTART SETTIMANALE SISTEMA IDS ===" >> "$CRON_LOG" + +# Termina backend +if [ -f "/var/run/ids/backend.pid" ]; then + PID=$(cat /var/run/ids/backend.pid) + kill -TERM "$PID" 2>/dev/null + rm -f /var/run/ids/backend.pid +fi + +# Termina frontend +if [ -f "/var/run/ids/frontend.pid" ]; then + PID=$(cat /var/run/ids/frontend.pid) + kill -TERM "$PID" 2>/dev/null + rm -f /var/run/ids/frontend.pid +fi + +sleep 10 + +# Pulizia file temporanei +echo "$(date): Pulizia file temporanei..." >> "$CRON_LOG" +find /tmp -name "*ids*" -mtime +1 -delete 2>/dev/null +find "$IDS_DIR" -name "*.pyc" -delete 2>/dev/null +find "$IDS_DIR" -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null + +# Riavvio servizi +echo "$(date): Riavvio servizi..." >> "$CRON_LOG" +${IDS_DIR}/deployment/check_backend.sh +${IDS_DIR}/deployment/check_frontend.sh +EOF + +chmod +x ${IDS_DIR}/deployment/restart_all.sh + +# Crea script di backup database +echo -e "\n${BLUE}πŸ’Ύ Creazione script di backup database...${NC}" + +cat > ${IDS_DIR}/deployment/backup_db.sh << 'EOF' +#!/bin/bash + +# Backup giornaliero database PostgreSQL +BACKUP_DIR="/opt/ids/backups" +TIMESTAMP=$(date +%Y%m%d_%H%M%S) +BACKUP_FILE="$BACKUP_DIR/ids_backup_$TIMESTAMP.sql" + +mkdir -p "$BACKUP_DIR" + +# Esegui backup +PGPASSWORD="ids_password_change_me" pg_dump -U ids_user -h localhost ids_database > "$BACKUP_FILE" + +# Comprimi backup +gzip "$BACKUP_FILE" + +# Mantieni solo gli ultimi 7 backup +find "$BACKUP_DIR" -name "ids_backup_*.sql.gz" -mtime +7 -delete + +echo "$(date): Backup completato: ${BACKUP_FILE}.gz" +EOF + +chmod +x ${IDS_DIR}/deployment/backup_db.sh + +# Avvio immediato dei processi +echo -e "\n${BLUE}πŸš€ Avvio immediato dei processi...${NC}" +sudo -u ids ${IDS_DIR}/deployment/check_backend.sh +sudo -u ids ${IDS_DIR}/deployment/check_frontend.sh + +echo -e "\n${GREEN}βœ… CONFIGURAZIONE COMPLETATA!${NC}" +echo "" +echo -e "${YELLOW}πŸ“‹ COMANDI UTILI:${NC}" +echo -e " β€’ Visualizza log backend: ${BLUE}tail -f ${BACKEND_LOG}${NC}" +echo -e " β€’ Visualizza log frontend: ${BLUE}tail -f ${FRONTEND_LOG}${NC}" +echo -e " β€’ Visualizza log training: ${BLUE}tail -f ${TRAINING_LOG}${NC}" +echo -e " β€’ Visualizza log detection: ${BLUE}tail -f ${DETECT_LOG}${NC}" +echo -e " β€’ Stato crontab: ${BLUE}crontab -u ids -l${NC}" +echo -e " β€’ Stato processi: ${BLUE}ps aux | grep -E 'python.*main|npm.*dev'${NC}" +echo -e " β€’ Restart manuale: ${BLUE}sudo ${IDS_DIR}/deployment/restart_all.sh${NC}" +echo "" +echo -e "${YELLOW}⚠️ IMPORTANTE:${NC}" +echo -e " β€’ Training automatico ogni 12 ore" +echo -e " β€’ Detection automatica ogni 5 minuti" +echo -e " β€’ Monitoring processi ogni 5 minuti" +echo -e " β€’ Restart automatico ogni domenica alle 03:00" +echo -e " β€’ Backup database giornaliero alle 04:00" +echo "" +echo -e "${GREEN}πŸŽ‰ Sistema IDS configurato per l'esecuzione automatica!${NC}" diff --git a/deployment/setup_syslog_server.sh b/deployment/setup_syslog_server.sh new file mode 100644 index 0000000..651839b --- /dev/null +++ b/deployment/setup_syslog_server.sh @@ -0,0 +1,102 @@ +#!/bin/bash + +# Script per configurare il server Syslog su AlmaLinux 9 +# Questo script 
riceve i log dai router MikroTik e li salva in PostgreSQL +# Eseguire con: chmod +x setup_syslog_server.sh && sudo ./setup_syslog_server.sh + +# Colori per output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' + +echo -e "${BLUE}πŸ“‘ CONFIGURAZIONE SERVER SYSLOG PER ROUTER MIKROTIK${NC}" +echo "====================================================" + +# 1. Installa rsyslog +echo -e "\n${BLUE}πŸ“¦ Installazione rsyslog...${NC}" +dnf install -y rsyslog rsyslog-pgsql + +# 2. Configura rsyslog per ricevere log UDP +echo -e "\n${BLUE}πŸ”§ Configurazione rsyslog...${NC}" + +cat > /etc/rsyslog.d/10-mikrotik.conf << 'EOF' +# Configurazione rsyslog per ricevere log dai router MikroTik + +# Abilita ricezione UDP sulla porta 514 +module(load="imudp") +input(type="imudp" port="514") + +# Template per parsare log MikroTik +# Formato: timestamp hostname tag: message +template(name="MikrotikFormat" type="string" + string="%timestamp% %hostname% %syslogtag%%msg:::drop-last-lf%\n" +) + +# Filtra solo log dai router MikroTik (basato su hostname o IP range) +# Salva in file temporaneo per processamento Python +if $fromhost-ip startswith '192.168.' or $fromhost-ip startswith '10.' then { + action(type="omfile" file="/var/log/mikrotik/raw.log" template="MikrotikFormat") +} + +# Ruota log giornalmente +$ActionFileDefaultTemplate RSYSLOG_TraditionalFileFormat +$ActionFileEnableSync on +EOF + +# 3. Crea directory log +echo -e "\n${BLUE}πŸ“ Creazione directory log...${NC}" +mkdir -p /var/log/mikrotik +chown -R syslog:adm /var/log/mikrotik +chmod 755 /var/log/mikrotik + +# 4. Configura logrotate +echo -e "\n${BLUE}πŸ”„ Configurazione logrotate...${NC}" + +cat > /etc/logrotate.d/mikrotik << 'EOF' +/var/log/mikrotik/*.log { + daily + rotate 7 + compress + delaycompress + missingok + notifempty + create 0644 syslog adm + sharedscripts + postrotate + /bin/kill -HUP $(cat /var/run/syslogd.pid 2>/dev/null) 2>/dev/null || true + endscript +} +EOF + +# 5. Configura firewall per porta 514/UDP +echo -e "\n${BLUE}πŸ”₯ Configurazione firewall...${NC}" +firewall-cmd --permanent --add-port=514/udp +firewall-cmd --reload + +# 6. 
Restart rsyslog +echo -e "\n${BLUE}πŸ”„ Restart rsyslog...${NC}" +systemctl enable rsyslog +systemctl restart rsyslog + +echo -e "\n${GREEN}βœ… SERVER SYSLOG CONFIGURATO!${NC}" +echo "" +echo -e "${YELLOW}πŸ“‹ CONFIGURAZIONE ROUTER MIKROTIK:${NC}" +echo "" +echo -e "${BLUE}Connettiti al router MikroTik e esegui:${NC}" +echo "" +echo -e " /system logging action" +echo -e " add name=ids-server target=remote remote= remote-port=514" +echo "" +echo -e " /system logging" +echo -e " add action=ids-server topics=firewall,info" +echo -e " add action=ids-server topics=account,info" +echo -e " add action=ids-server topics=system,error" +echo "" +echo -e "${YELLOW}Sostituisci con l'IP di questo server AlmaLinux${NC}" +echo "" +echo -e "${BLUE}πŸ“Š VERIFICA:${NC}" +echo -e " β€’ Test ricezione log: ${BLUE}tail -f /var/log/mikrotik/raw.log${NC}" +echo -e " β€’ Stato rsyslog: ${BLUE}systemctl status rsyslog${NC}" +echo -e " β€’ Porta aperta: ${BLUE}netstat -ulnp | grep 514${NC}" diff --git a/deployment/update_from_git.sh b/deployment/update_from_git.sh new file mode 100644 index 0000000..7894d25 --- /dev/null +++ b/deployment/update_from_git.sh @@ -0,0 +1,80 @@ +#!/bin/bash + +# Script per aggiornare il sistema IDS da git.alfacom.it +# Eseguire con: sudo -u ids ./update_from_git.sh + +# Colori per output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' + +echo -e "${BLUE}πŸ”„ AGGIORNAMENTO SISTEMA IDS DA GIT${NC}" +echo "====================================" + +IDS_DIR="/opt/ids" +cd "$IDS_DIR" || exit 1 + +# Backup configurazione locale +echo -e "\n${BLUE}πŸ’Ύ Backup configurazione locale...${NC}" +if [ -f ".env" ]; then + cp .env .env.backup + echo -e "${GREEN}βœ… .env salvato in .env.backup${NC}" +fi + +# Verifica modifiche locali +echo -e "\n${BLUE}πŸ” Verifica modifiche locali...${NC}" +if ! git diff-index --quiet HEAD --; then + echo -e "${YELLOW}⚠️ Ci sono modifiche locali non committate${NC}" + echo -e "${YELLOW} Esegui 'git status' per vedere i dettagli${NC}" + read -p "Vuoi procedere comunque? (y/n) " -n 1 -r + echo + if [[ ! $REPLY =~ ^[Yy]$ ]]; then + exit 1 + fi +fi + +# Pull da git +echo -e "\n${BLUE}πŸ“₯ Download aggiornamenti da git.alfacom.it...${NC}" +git fetch origin +git pull origin main + +if [ $? -eq 0 ]; then + echo -e "${GREEN}βœ… Aggiornamenti scaricati con successo${NC}" +else + echo -e "${RED}❌ Errore durante il download${NC}" + exit 1 +fi + +# Ripristina configurazione locale +if [ -f ".env.backup" ]; then + cp .env.backup .env + echo -e "${GREEN}βœ… Configurazione locale ripristinata${NC}" +fi + +# Aggiorna dipendenze Node.js +echo -e "\n${BLUE}πŸ“¦ Aggiornamento dipendenze Node.js...${NC}" +npm install + +# Aggiorna dipendenze Python +echo -e "\n${BLUE}πŸ“¦ Aggiornamento dipendenze Python...${NC}" +cd python_ml +/usr/bin/python3.11 -m pip install -r requirements.txt +cd .. 
+
+# Aggiorna schema database
+echo -e "\n${BLUE}πŸ—„οΈ Aggiornamento schema database...${NC}"
+npm run db:push
+
+# Restart servizi
+echo -e "\n${BLUE}πŸ”„ Restart servizi...${NC}"
+./deployment/restart_all.sh
+
+echo -e "\n${GREEN}βœ… AGGIORNAMENTO COMPLETATO!${NC}"
+echo ""
+echo -e "${YELLOW}πŸ“‹ VERIFICA:${NC}"
+echo -e "   β€’ Controlla log backend: ${BLUE}tail -f /var/log/ids/backend.log${NC}"
+echo -e "   β€’ Controlla log frontend: ${BLUE}tail -f /var/log/ids/frontend.log${NC}"
+echo -e "   β€’ Testa API backend: ${BLUE}curl http://localhost:8000/health${NC}"
+echo -e "   β€’ Testa frontend: ${BLUE}curl http://localhost:5000${NC}"
diff --git a/python_ml/syslog_parser.py b/python_ml/syslog_parser.py
new file mode 100644
index 0000000..ccf4e0c
--- /dev/null
+++ b/python_ml/syslog_parser.py
@@ -0,0 +1,215 @@
+"""
+Syslog Parser - Analizza log MikroTik e li salva in PostgreSQL
+Legge da /var/log/mikrotik/raw.log e popola la tabella network_logs
+"""
+
+import re
+import psycopg2
+from datetime import datetime
+from typing import Dict, Optional
+import os
+import time
+
+
+class SyslogParser:
+    """
+    Parser per log MikroTik in formato syslog
+    Estrae informazioni di rete e salva in database
+    """
+
+    def __init__(self, db_config: Dict[str, str]):
+        self.db_config = db_config
+        self.conn = None
+        self.cursor = None
+
+        # Pattern regex per parsare log MikroTik
+        # Formato: timestamp hostname tag: message
+        self.patterns = {
+            # Firewall connection
+            'firewall': re.compile(
+                r'(?P<action>accept|drop|reject).*'
+                r'src-address=(?P<src_ip>[\d.]+):(?P<src_port>\d+).*'
+                r'dst-address=(?P<dst_ip>[\d.]+):(?P<dst_port>\d+).*'
+                r'proto=(?P<proto>\w+).*'
+                r'(?:len=(?P<len>\d+))?'
+            ),
+            # Connection tracking
+            'connection': re.compile(
+                r'(?P<src_ip>[\d.]+):(?P<src_port>\d+)->(?P<dst_ip>[\d.]+):(?P<dst_port>\d+).*'
+                r'proto (?P<proto>\w+).*'
+                r'(?:packets: (?P<packets>\d+))?.*'
+                r'(?:bytes: (?P<bytes>\d+))?'
+            ),
+        }
+
+    def connect_db(self):
+        """Connessione al database PostgreSQL"""
+        try:
+            self.conn = psycopg2.connect(**self.db_config)
+            self.cursor = self.conn.cursor()
+            print("[INFO] Connesso a PostgreSQL")
+        except Exception as e:
+            print(f"[ERROR] Connessione database fallita: {e}")
+            raise
+
+    def disconnect_db(self):
+        """Chiusura connessione database"""
+        if self.cursor:
+            self.cursor.close()
+        if self.conn:
+            self.conn.close()
+        print("[INFO] Disconnesso da PostgreSQL")
+
+    def parse_log_line(self, line: str) -> Optional[Dict]:
+        """
+        Analizza una singola riga di log MikroTik
+        Returns: Dict con dati parsati o None se non parsabile
+        """
+        # Estrai timestamp, hostname, tag e messaggio
+        # Formato: Jan 15 10:30:45 router1 firewall,info: drop src-address=...
+        parts = line.split(None, 4)
+        if len(parts) < 5:
+            return None
+
+        month, day, time_str, hostname, message = parts
+
+        # Parse timestamp (usa anno corrente)
+        current_year = datetime.now().year
+        try:
+            timestamp = datetime.strptime(
+                f"{current_year} {month} {day} {time_str}",
+                "%Y %b %d %H:%M:%S"
+            )
+        except ValueError:
+            return None
+
+        # Prova pattern firewall
+        for pattern_name, pattern in self.patterns.items():
+            match = pattern.search(message)
+            if match:
+                data = match.groupdict()
+
+                # Aggiungi metadati
+                data['timestamp'] = timestamp
+                data['router_name'] = hostname
+                data['log_type'] = pattern_name
+                data['raw_message'] = message.strip()
+
+                # Converti numeri
+                for key in ['src_port', 'dst_port', 'len', 'packets', 'bytes']:
+                    if key in data and data[key]:
+                        data[key] = int(data[key])
+
+                return data
+
+        return None
+
+    def save_to_db(self, log_data: Dict):
+        """Salva log parsato nel database"""
+        try:
+            query = """
+                INSERT INTO network_logs
+                (timestamp, router_name, source_ip, source_port, destination_ip,
+                 destination_port, protocol, packet_length, action, raw_message)
+                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
+                ON CONFLICT DO NOTHING
+            """
+
+            self.cursor.execute(query, (
+                log_data.get('timestamp'),
+                log_data.get('router_name'),
+                log_data.get('src_ip'),
+                log_data.get('src_port'),
+                log_data.get('dst_ip'),
+                log_data.get('dst_port'),
+                log_data.get('proto', 'unknown').lower(),
+                log_data.get('len') or log_data.get('bytes'),
+                log_data.get('action', 'unknown'),
+                log_data.get('raw_message')
+            ))
+
+        except Exception as e:
+            print(f"[ERROR] Errore salvataggio log: {e}")
+            self.conn.rollback()
+
+    def process_log_file(self, log_file: str, follow: bool = False):
+        """
+        Processa file di log
+        follow: se True, segue il file come 'tail -f'
+        """
+        print(f"[INFO] Processando {log_file} (follow={follow})")
+
+        processed = 0
+        saved = 0
+
+        try:
+            with open(log_file, 'r') as f:
+                # Se follow, vai alla fine del file
+                if follow:
+                    f.seek(0, 2)  # Seek to end
+
+                while True:
+                    line = f.readline()
+
+                    if not line:
+                        if follow:
+                            time.sleep(0.1)  # Attendi nuove righe
+                            self.conn.commit()  # Commit batch
+                            continue
+                        else:
+                            break  # Fine file
+
+                    processed += 1
+
+                    # Parsa riga
+                    log_data = self.parse_log_line(line.strip())
+                    if log_data:
+                        self.save_to_db(log_data)
+                        saved += 1
+
+                    # Commit ogni 100 righe
+                    if processed % 100 == 0:
+                        self.conn.commit()
+                        print(f"[INFO] Processate {processed} righe, salvate {saved} log")
+
+        except KeyboardInterrupt:
+            print("\n[INFO] Interrotto dall'utente")
+        except Exception as e:
+            print(f"[ERROR] Errore processamento file: {e}")
+        finally:
+            self.conn.commit()
+            print(f"[INFO] Totale: {processed} righe processate, {saved} log salvati")
+
+
+def main():
+    """Main entry point"""
+    # Configurazione database da environment
+    db_config = {
+        'host': os.getenv('PGHOST', 'localhost'),
+        'port': os.getenv('PGPORT', '5432'),
+        'database': os.getenv('PGDATABASE', 'ids_database'),
+        'user': os.getenv('PGUSER', 'ids_user'),
+        'password': os.getenv('PGPASSWORD', 'ids_password_change_me')
+    }
+
+    # File log da processare
+    log_file = '/var/log/mikrotik/raw.log'
+
+    # Crea parser
+    parser = SyslogParser(db_config)
+
+    try:
+        # Connetti al database
+        parser.connect_db()
+
+        # Processa file in modalitΓ  follow (come tail -f)
+        parser.process_log_file(log_file, follow=True)
+
+    finally:
+        parser.disconnect_db()
+
+
+if __name__ == "__main__":
+    print("=== SYSLOG PARSER PER ROUTER MIKROTIK ===")
+    print("Premere Ctrl+C per interrompere\n")
+    main()
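+
+# Esempio d'uso indicativo per un test rapido del solo parsing (senza database),
+# da una shell Python con le dipendenze installate:
+#
+#   >>> p = SyslogParser(db_config={})
+#   >>> p.parse_log_line(
+#   ...     "Jan 15 10:30:45 router1 firewall,info: "
+#   ...     "drop src-address=203.0.113.45:54321 dst-address=192.168.1.10:80 proto=TCP len=60")
+#
+# In produzione lo script va invece avviato come descritto nella documentazione di deployment:
+#   cd /opt/ids/python_ml && python3.11 syslog_parser.py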