Compare commits
33 Commits
| SHA1 |
|---|
| 182d98de0d |
| a1be759431 |
| f404952e0e |
| 0a269a9032 |
| 1133ca356f |
| aa74340706 |
| 051c5ee4a5 |
| a15d4d660b |
| dee64495cd |
| 16d13d6bee |
| a4bf75394a |
| 58fb6476c5 |
| 1b47e08129 |
| 0298b4a790 |
| a311573d0c |
| 21ff8c0c4b |
| d966d26784 |
| 73ad653cb0 |
| 3574ff0274 |
| 0301a42825 |
| 278bc6bd61 |
| 3425521215 |
| c3a6f28434 |
| c0b2342c43 |
| 6ad718c51f |
| 505b7738bf |
| 2b24323f7f |
| 3e35032d79 |
| bb5d14823f |
| e6db06e597 |
| a08c4309a8 |
| 584c25381c |
| b31bad7d8b |
.replit (20 changed lines)
```diff
@@ -14,26 +14,6 @@ run = ["npm", "run", "start"]
 localPort = 5000
 externalPort = 80
-
-[[ports]]
-localPort = 40145
-externalPort = 4200
-
-[[ports]]
-localPort = 41303
-externalPort = 3002
-
-[[ports]]
-localPort = 43471
-externalPort = 3003
-
-[[ports]]
-localPort = 43803
-externalPort = 3000
-
-[[ports]]
-localPort = 45059
-externalPort = 3001
 
 [env]
 PORT = "5000"
```
New file, 51 lines:

```text
journalctl -u ids-list-fetcher -n 50 --no-pager
Jan 02 15:30:01 ids.alfacom.it ids-list-fetcher[9296]: Skipped (whitelisted): 0
Jan 02 15:30:01 ids.alfacom.it ids-list-fetcher[9296]: ============================================================
Jan 02 15:30:01 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 15:30:01 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
Jan 02 15:40:00 ids.alfacom.it systemd[1]: Starting IDS Public Lists Fetcher Service...
Jan 02 15:40:00 ids.alfacom.it ids-list-fetcher[9493]: ============================================================
Jan 02 15:40:00 ids.alfacom.it ids-list-fetcher[9493]: [2026-01-02 15:40:00] PUBLIC LISTS SYNC
Jan 02 15:40:00 ids.alfacom.it ids-list-fetcher[9493]: ============================================================
Jan 02 15:40:00 ids.alfacom.it ids-list-fetcher[9493]: Found 2 enabled lists
Jan 02 15:40:00 ids.alfacom.it ids-list-fetcher[9493]: [15:40:00] Downloading Spamhaus from https://www.spamhaus.org/drop/drop_v4.json...
Jan 02 15:40:00 ids.alfacom.it ids-list-fetcher[9493]: [15:40:00] Downloading AWS from https://ip-ranges.amazonaws.com/ip-ranges.json...
Jan 02 15:40:00 ids.alfacom.it ids-list-fetcher[9493]: [15:40:00] Parsing AWS...
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: [15:40:01] Found 9548 IPs, syncing to database...
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: [15:40:01] ✓ AWS: +0 -0 ~9511
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: [15:40:01] Parsing Spamhaus...
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: [15:40:01] Found 1468 IPs, syncing to database...
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: [15:40:01] ✓ Spamhaus: +0 -0 ~1464
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ============================================================
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: SYNC SUMMARY
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ============================================================
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: Success: 2/2
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: Errors: 0/2
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: Total IPs Added: 0
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: Total IPs Removed: 0
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ============================================================
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ============================================================
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: RUNNING MERGE LOGIC
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ============================================================
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ERROR:merge_logic:Failed to cleanup detections: operator does not exist: inet = text
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: LINE 9: d.source_ip::inet = wl.ip_inet
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ^
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: HINT: No operator matches the given name and argument types. You might need to add explicit type casts.
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ERROR:merge_logic:Failed to sync detections: operator does not exist: inet = text
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: LINE 29: bl.ip_inet = wl.ip_inet
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ^
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: HINT: No operator matches the given name and argument types. You might need to add explicit type casts.
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: Traceback (most recent call last):
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: File "/opt/ids/python_ml/merge_logic.py", line 264, in sync_public_blacklist_detections
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: cur.execute("""
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: psycopg2.errors.UndefinedFunction: operator does not exist: inet = text
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: LINE 29: bl.ip_inet = wl.ip_inet
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ^
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: HINT: No operator matches the given name and argument types. You might need to add explicit type casts.
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: Merge Logic Stats:
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: Created detections: 0
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: Cleaned invalid detections: 0
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: Skipped (whitelisted): 0
Jan 02 15:40:01 ids.alfacom.it ids-list-fetcher[9493]: ============================================================
Jan 02 15:40:01 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 15:40:01 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
```
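Both failures above are the same type mismatch: the queries compare an `inet` expression against `wl.ip_inet`, which the schema dump further down declares as `text`, so PostgreSQL finds no `inet = text` operator. The HINT points at explicit casts; a minimal sketch of that repair, assuming the query shape shown in the traceback (the actual statement in `merge_logic.py` is not part of this compare):

```python
# Hedged sketch of the cast fix for the cleanup query in merge_logic.py.
# Table and column names come from the error output above; everything
# else (function name, connection handling) is illustrative.
import psycopg2


def cleanup_whitelisted_detections(conn) -> None:
    """Drop detections whose source IP matches an active whitelist entry."""
    with conn.cursor() as cur:
        cur.execute("""
            DELETE FROM detections d
            USING whitelist wl
            WHERE wl.active
              AND wl.ip_inet IS NOT NULL
              AND d.source_ip::inet = wl.ip_inet::inet
        """)
    conn.commit()
```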
New file, 51 lines:

```text
journalctl -u ids-list-fetcher -n 50 --no-pager
Jan 02 17:10:02 ids.alfacom.it ids-list-fetcher[2139]: ============================================================
Jan 02 17:10:02 ids.alfacom.it ids-list-fetcher[2139]: ============================================================
Jan 02 17:10:02 ids.alfacom.it ids-list-fetcher[2139]: RUNNING MERGE LOGIC
Jan 02 17:10:02 ids.alfacom.it ids-list-fetcher[2139]: ============================================================
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: INFO:merge_logic:Bulk sync complete: {'created': 0, 'cleaned': 0, 'skipped_whitelisted': 0}
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: Merge Logic Stats:
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: Created detections: 0
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: Cleaned invalid detections: 0
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: Skipped (whitelisted): 0
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: ============================================================
Jan 02 17:10:12 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 17:10:12 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
Jan 02 17:12:35 ids.alfacom.it systemd[1]: Starting IDS Public Lists Fetcher Service...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [2026-01-02 17:12:35] PUBLIC LISTS SYNC
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: Found 4 enabled lists
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Downloading Spamhaus from https://www.spamhaus.org/drop/drop_v4.json...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Downloading AWS from https://ip-ranges.amazonaws.com/ip-ranges.json...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Downloading Google Cloud from https://www.gstatic.com/ipranges/cloud.json...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Downloading Google globali from https://www.gstatic.com/ipranges/goog.json...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Parsing AWS...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Found 9548 IPs, syncing to database...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] ✓ AWS: +0 -0 ~9548
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Parsing Google globali...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] ✗ Google globali: No valid IPs found in list
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Parsing Google Cloud...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] ✗ Google Cloud: No valid IPs found in list
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Parsing Spamhaus...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Found 1468 IPs, syncing to database...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] ✓ Spamhaus: +0 -0 ~1468
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: SYNC SUMMARY
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: Success: 2/4
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: Errors: 2/4
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: Total IPs Added: 0
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: Total IPs Removed: 0
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: RUNNING MERGE LOGIC
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: INFO:merge_logic:Bulk sync complete: {'created': 0, 'cleaned': 0, 'skipped_whitelisted': 0}
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: Merge Logic Stats:
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: Created detections: 0
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: Cleaned invalid detections: 0
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: Skipped (whitelisted): 0
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:45 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 17:12:45 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
```
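The merge logic now completes (`Bulk sync complete`), but both Google feeds fail with "No valid IPs found in list" while AWS parses fine. AWS's `ip-ranges.json` keeps its entries under `prefixes[].ip_prefix`; Google's `goog.json` and `cloud.json` use `ipv4Prefix`/`ipv6Prefix` keys instead, so a parser keyed only on the AWS field name sees nothing. A sketch of a parser that tolerates both layouts (function and variable names are illustrative, not from the repo):

```python
import ipaddress
import json

# Keys used by the two feed families: AWS ip-ranges.json vs the
# Google gstatic feeds (goog.json / cloud.json).
PREFIX_KEYS = ("ip_prefix", "ipv6_prefix", "ipv4Prefix", "ipv6Prefix")


def extract_prefixes(payload: str) -> list[str]:
    """Return all CIDR ranges found in an AWS- or Google-style feed."""
    data = json.loads(payload)
    entries = data.get("prefixes", []) + data.get("ipv6_prefixes", [])
    found = []
    for entry in entries:
        for key in PREFIX_KEYS:
            cidr = entry.get(key)
            if cidr:
                try:
                    ipaddress.ip_network(cidr, strict=False)
                    found.append(cidr)
                except ValueError:
                    pass  # skip malformed entries instead of failing the list
                break
    return found
```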
New file, 4 lines:

```text
curl -X POST http://localhost:8000/detect \
  -H "Content-Type: application/json" \
  -d '{"max_records": 5000, "hours_back": 1, "risk_threshold": 80, "auto_block": true}'
{"detections":[{"source_ip":"108.139.210.107","risk_score":98.55466848373413,"confidence_level":"high","action_recommendation":"auto_block","anomaly_type":"ddos","reason":"High connection rate: 403.7 conn/s","log_count":1211,"total_packets":1211,"total_bytes":2101702,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:54","confidence":95.0},{"source_ip":"216.58.209.54","risk_score":95.52801848493884,"confidence_level":"high","action_recommendation":"auto_block","anomaly_type":"brute_force","reason":"High connection rate: 184.7 conn/s","log_count":554,"total_packets":554,"total_bytes":782397,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:54","confidence":95.0},{"source_ip":"95.127.69.202","risk_score":93.58280514393482,"confidence_level":"medium","action_recommendation":"manual_review","anomaly_type":"brute_force","reason":"High connection rate: 93.7 conn/s","log_count":281,"total_packets":281,"total_bytes":369875,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:54","confidence":75.0},{"source_ip":"95.127.72.207","risk_score":92.50694363471318,"confidence_level":"medium","action_recommendation":"manual_review","anomaly_type":"brute_force","reason":"High connection rate: 76.3 conn/s","log_count":229,"total_packets":229,"total_bytes":293439,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:54","confidence":75.0},{"source_ip":"95.110.183.67","risk_score":86.42278405656512,"confidence_level":"medium","action_recommendation":"manual_review","anomaly_type":"brute_force","reason":"High connection rate: 153.0 conn/s","log_count":459,"total_packets":459,"total_bytes":20822,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:54","confidence":75.0},{"source_ip":"54.75.71.86","risk_score":83.42037059381207,"confidence_level":"medium","action_recommendation":"manual_review","anomaly_type":"brute_force","reason":"High connection rate: 58.0 conn/s","log_count":174,"total_packets":174,"total_bytes":25857,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:54","confidence":75.0},{"source_ip":"79.10.127.217","risk_score":82.32814469102843,"confidence_level":"medium","action_recommendation":"manual_review","anomaly_type":"brute_force","reason":"High connection rate: 70.0 conn/s","log_count":210,"total_packets":210,"total_bytes":18963,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:54","confidence":75.0},{"source_ip":"142.251.140.100","risk_score":76.61422108557721,"confidence_level":"medium","action_recommendation":"manual_review","anomaly_type":"botnet","reason":"Anomalous pattern detected (botnet)","log_count":16,"total_packets":16,"total_bytes":20056,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:53","confidence":75.0},{"source_ip":"142.250.181.161","risk_score":76.3802033958719,"confidence_level":"medium","action_recommendation":"manual_review","anomaly_type":"botnet","reason":"Anomalous pattern detected (botnet)","log_count":15,"total_packets":15,"total_bytes":5214,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:51","confidence":75.0},{"source_ip":"142.250.180.131","risk_score":72.7723405111559,"confidence_level":"medium","action_recommendation":"manual_review","anomaly_type":"suspicious","reason":"Anomalous pattern detected (suspicious)","log_count":8,"total_packets":8,"total_bytes":5320,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:53","confidence":75.0},{"source_ip":"157.240.231.60","risk_score":72.26853648050493,"confidence_level":"medium","action_recommendation":"manual_review","anomaly_type":"botnet","reason":"Anomalous pattern detected (botnet)","log_count":16,"total_packets":16,"total_bytes":4624,"first_seen":"2026-01-02T16:41:51","last_seen":"2026-01-02T16:41:54","confidence":75.0}],"total":11,"blocked":0,"message":"Trovate 11 anomalie"}[root@ids python_ml]#
```
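For scripting against the detector, the same call from Python; the endpoint, payload keys, and response fields are exactly the ones visible above, the rest is a sketch:

```python
import requests

# Mirrors the curl call above against the local ML detection API.
resp = requests.post(
    "http://localhost:8000/detect",
    json={"max_records": 5000, "hours_back": 1,
          "risk_threshold": 80, "auto_block": True},
    timeout=60,
)
resp.raise_for_status()
result = resp.json()

print(f"{result['total']} anomalies, {result['blocked']} blocked")
for d in result["detections"]:
    print(d["source_ip"], round(d["risk_score"], 1),
          d["anomaly_type"], d["action_recommendation"])
```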
New file, 51 lines:

```text
journalctl -u ids-list-fetcher -n 50 --no-pager
Jan 02 12:50:02 ids.alfacom.it ids-list-fetcher[5900]: ============================================================
Jan 02 12:50:02 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 12:50:02 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
Jan 02 12:54:56 ids.alfacom.it systemd[1]: Starting IDS Public Lists Fetcher Service...
Jan 02 12:54:56 ids.alfacom.it ids-list-fetcher[6290]: ============================================================
Jan 02 12:54:56 ids.alfacom.it ids-list-fetcher[6290]: [2026-01-02 12:54:56] PUBLIC LISTS SYNC
Jan 02 12:54:56 ids.alfacom.it ids-list-fetcher[6290]: ============================================================
Jan 02 12:54:56 ids.alfacom.it ids-list-fetcher[6290]: Found 2 enabled lists
Jan 02 12:54:56 ids.alfacom.it ids-list-fetcher[6290]: [12:54:56] Downloading Spamhaus from https://www.spamhaus.org/drop/drop_v4.json...
Jan 02 12:54:56 ids.alfacom.it ids-list-fetcher[6290]: [12:54:56] Downloading AWS from https://ip-ranges.amazonaws.com/ip-ranges.json...
Jan 02 12:54:56 ids.alfacom.it ids-list-fetcher[6290]: [12:54:56] Parsing AWS...
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: [12:54:57] Found 9548 IPs, syncing to database...
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: [12:54:57] ✓ AWS: +0 -0 ~9511
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: [12:54:57] Parsing Spamhaus...
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: [12:54:57] Found 1468 IPs, syncing to database...
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: [12:54:57] ✗ Spamhaus: ON CONFLICT DO UPDATE command cannot affect row a second time
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: HINT: Ensure that no rows proposed for insertion within the same command have duplicate constrained values.
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ============================================================
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: SYNC SUMMARY
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ============================================================
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: Success: 1/2
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: Errors: 1/2
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: Total IPs Added: 0
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: Total IPs Removed: 0
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ============================================================
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ============================================================
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: RUNNING MERGE LOGIC
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ============================================================
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ERROR:merge_logic:Failed to cleanup detections: operator does not exist: inet = text
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: LINE 9: d.source_ip::inet = wl.ip_inet
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ^
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: HINT: No operator matches the given name and argument types. You might need to add explicit type casts.
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ERROR:merge_logic:Failed to sync detections: operator does not exist: text <<= text
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: LINE 30: OR bl.ip_inet <<= wl.ip_inet
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ^
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: HINT: No operator matches the given name and argument types. You might need to add explicit type casts.
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: Traceback (most recent call last):
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: File "/opt/ids/python_ml/merge_logic.py", line 264, in sync_public_blacklist_detections
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: cur.execute("""
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: psycopg2.errors.UndefinedFunction: operator does not exist: text <<= text
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: LINE 30: OR bl.ip_inet <<= wl.ip_inet
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ^
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: HINT: No operator matches the given name and argument types. You might need to add explicit type casts.
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: Merge Logic Stats:
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: Created detections: 0
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: Cleaned invalid detections: 0
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: Skipped (whitelisted): 0
Jan 02 12:54:57 ids.alfacom.it ids-list-fetcher[6290]: ============================================================
Jan 02 12:54:57 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 12:54:57 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
```
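The Spamhaus failure here, "ON CONFLICT DO UPDATE command cannot affect row a second time", means the batch being upserted contains the same `(ip_address, list_id)` pair more than once (that pair is the unique key added in the schema below), so a single statement tries to update one row twice. Deduplicating the batch before the insert is the usual fix; a sketch assuming an `execute_values`-style bulk upsert (names are illustrative):

```python
from psycopg2.extras import execute_values


def upsert_blacklist_ips(cur, list_id: str, entries: list[dict]) -> None:
    """Bulk upsert deduplicated on the (ip_address, list_id) unique key."""
    # Collapse duplicates within the batch; last occurrence wins.
    unique = {e["ip_address"]: e for e in entries}
    rows = [(e["ip_address"], e.get("cidr_range"), list_id)
            for e in unique.values()]
    execute_values(cur, """
        INSERT INTO public_blacklist_ips (ip_address, cidr_range, list_id)
        VALUES %s
        ON CONFLICT (ip_address, list_id)
        DO UPDATE SET last_seen = now(), is_active = true
    """, rows)
```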
New file, 51 lines:

```text
journalctl -u ids-list-fetcher -n 50 --no-pager
Jan 02 16:11:31 ids.alfacom.it ids-list-fetcher[10401]: HINT: No operator matches the given name and argument types. You might need to add explicit type casts.
Jan 02 16:11:31 ids.alfacom.it ids-list-fetcher[10401]: Merge Logic Stats:
Jan 02 16:11:31 ids.alfacom.it ids-list-fetcher[10401]: Created detections: 0
Jan 02 16:11:31 ids.alfacom.it ids-list-fetcher[10401]: Cleaned invalid detections: 0
Jan 02 16:11:31 ids.alfacom.it ids-list-fetcher[10401]: Skipped (whitelisted): 0
Jan 02 16:11:31 ids.alfacom.it ids-list-fetcher[10401]: ============================================================
Jan 02 16:11:31 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 16:11:31 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
Jan 02 16:15:04 ids.alfacom.it systemd[1]: Starting IDS Public Lists Fetcher Service...
Jan 02 16:15:04 ids.alfacom.it ids-list-fetcher[10801]: ============================================================
Jan 02 16:15:04 ids.alfacom.it ids-list-fetcher[10801]: [2026-01-02 16:15:04] PUBLIC LISTS SYNC
Jan 02 16:15:04 ids.alfacom.it ids-list-fetcher[10801]: ============================================================
Jan 02 16:15:04 ids.alfacom.it ids-list-fetcher[10801]: Found 2 enabled lists
Jan 02 16:15:04 ids.alfacom.it ids-list-fetcher[10801]: [16:15:04] Downloading Spamhaus from https://www.spamhaus.org/drop/drop_v4.json...
Jan 02 16:15:04 ids.alfacom.it ids-list-fetcher[10801]: [16:15:04] Downloading AWS from https://ip-ranges.amazonaws.com/ip-ranges.json...
Jan 02 16:15:04 ids.alfacom.it ids-list-fetcher[10801]: [16:15:04] Parsing Spamhaus...
Jan 02 16:15:04 ids.alfacom.it ids-list-fetcher[10801]: [16:15:04] Found 1468 IPs, syncing to database...
Jan 02 16:15:04 ids.alfacom.it ids-list-fetcher[10801]: [16:15:04] ✓ Spamhaus: +0 -0 ~1468
Jan 02 16:15:04 ids.alfacom.it ids-list-fetcher[10801]: [16:15:04] Parsing AWS...
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: [16:15:05] Found 9548 IPs, syncing to database...
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: [16:15:05] ✓ AWS: +9548 -0 ~0
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: ============================================================
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: SYNC SUMMARY
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: ============================================================
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: Success: 2/2
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: Errors: 0/2
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: Total IPs Added: 9548
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: Total IPs Removed: 0
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: ============================================================
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: ============================================================
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: RUNNING MERGE LOGIC
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: ============================================================
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: ERROR:merge_logic:Failed to sync detections: column "risk_score" is of type numeric but expression is of type text
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: LINE 13: '75',
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: ^
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: HINT: You will need to rewrite or cast the expression.
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: Traceback (most recent call last):
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: File "/opt/ids/python_ml/merge_logic.py", line 264, in sync_public_blacklist_detections
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: cur.execute("""
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: psycopg2.errors.DatatypeMismatch: column "risk_score" is of type numeric but expression is of type text
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: LINE 13: '75',
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: ^
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: HINT: You will need to rewrite or cast the expression.
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: Merge Logic Stats:
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: Created detections: 0
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: Cleaned invalid detections: 0
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: Skipped (whitelisted): 0
Jan 02 16:15:05 ids.alfacom.it ids-list-fetcher[10801]: ============================================================
Jan 02 16:15:05 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 16:15:05 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
```
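This run's merge failure is a plain literal-typing mistake rather than a missing operator: the INSERT splices the quoted string `'75'` into the numeric `risk_score` column. Binding the value as a numeric parameter (or writing `75` unquoted) resolves it; a hedged excerpt under the same assumptions about the surrounding statement:

```python
# Hypothetical excerpt of the sync INSERT around merge_logic.py line 264:
# the confidence value is bound as a numeric parameter instead of being
# embedded as the quoted string '75'. Column names beyond those shown in
# the logs and the schema dump are assumptions.
cur.execute("""
    INSERT INTO detections (source_ip, anomaly_type, risk_score, detection_source)
    SELECT bl.ip_address, 'blacklist', %s, 'public_blacklist'
    FROM public_blacklist_ips bl
    WHERE bl.is_active
""", (75.0,))
```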
New file, 51 lines:

```text
journalctl -u ids-list-fetcher -n 50 --no-pager
Jan 02 12:30:01 ids.alfacom.it ids-list-fetcher[5571]: Cleaned invalid detections: 0
Jan 02 12:30:01 ids.alfacom.it ids-list-fetcher[5571]: Skipped (whitelisted): 0
Jan 02 12:30:01 ids.alfacom.it ids-list-fetcher[5571]: ============================================================
Jan 02 12:30:01 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 12:30:01 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
Jan 02 12:40:01 ids.alfacom.it systemd[1]: Starting IDS Public Lists Fetcher Service...
Jan 02 12:40:01 ids.alfacom.it ids-list-fetcher[5730]: ============================================================
Jan 02 12:40:01 ids.alfacom.it ids-list-fetcher[5730]: [2026-01-02 12:40:01] PUBLIC LISTS SYNC
Jan 02 12:40:01 ids.alfacom.it ids-list-fetcher[5730]: ============================================================
Jan 02 12:40:01 ids.alfacom.it ids-list-fetcher[5730]: Found 2 enabled lists
Jan 02 12:40:01 ids.alfacom.it ids-list-fetcher[5730]: [12:40:01] Downloading Spamhaus from https://www.spamhaus.org/drop/drop_v4.json...
Jan 02 12:40:01 ids.alfacom.it ids-list-fetcher[5730]: [12:40:01] Downloading AWS from https://ip-ranges.amazonaws.com/ip-ranges.json...
Jan 02 12:40:01 ids.alfacom.it ids-list-fetcher[5730]: [12:40:01] Parsing AWS...
Jan 02 12:40:01 ids.alfacom.it ids-list-fetcher[5730]: [12:40:01] Found 9548 IPs, syncing to database...
Jan 02 12:40:02 ids.alfacom.it ids-list-fetcher[5730]: [12:40:02] ✓ AWS: +9511 -0 ~0
Jan 02 12:40:02 ids.alfacom.it ids-list-fetcher[5730]: [12:40:02] Parsing Spamhaus...
Jan 02 12:40:02 ids.alfacom.it ids-list-fetcher[5730]: [12:40:02] ✗ Spamhaus: No valid IPs found in list
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ============================================================
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: SYNC SUMMARY
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ============================================================
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: Success: 1/2
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: Errors: 1/2
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: Total IPs Added: 9511
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: Total IPs Removed: 0
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ============================================================
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ============================================================
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: RUNNING MERGE LOGIC
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ============================================================
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ERROR:merge_logic:Failed to cleanup detections: operator does not exist: inet = text
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: LINE 9: d.source_ip::inet = wl.ip_inet
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ^
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: HINT: No operator matches the given name and argument types. You might need to add explicit type casts.
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ERROR:merge_logic:Failed to sync detections: operator does not exist: text <<= text
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: LINE 30: OR bl.ip_inet <<= wl.ip_inet
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ^
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: HINT: No operator matches the given name and argument types. You might need to add explicit type casts.
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: Traceback (most recent call last):
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: File "/opt/ids/python_ml/merge_logic.py", line 264, in sync_public_blacklist_detections
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: cur.execute("""
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: psycopg2.errors.UndefinedFunction: operator does not exist: text <<= text
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: LINE 30: OR bl.ip_inet <<= wl.ip_inet
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ^
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: HINT: No operator matches the given name and argument types. You might need to add explicit type casts.
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: Merge Logic Stats:
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: Created detections: 0
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: Cleaned invalid detections: 0
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: Skipped (whitelisted): 0
Jan 02 12:40:03 ids.alfacom.it ids-list-fetcher[5730]: ============================================================
Jan 02 12:40:03 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 12:40:03 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
```
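In this earlier run the parse problem hits Spamhaus itself: the published `drop_v4.json` is newline-delimited JSON (one object with a `cidr` field per line, plus a metadata record), not a single JSON document, so parsing the whole body at once yields no usable IPs. A hedged sketch of a line-oriented parse (the `cidr` field name follows the published DROP format; the rest is assumed):

```python
import json


def parse_spamhaus_drop(payload: str) -> list[str]:
    """Parse newline-delimited Spamhaus DROP JSON into CIDR strings."""
    cidrs = []
    for line in payload.splitlines():
        line = line.strip()
        if not line:
            continue
        try:
            record = json.loads(line)
        except json.JSONDecodeError:
            continue  # tolerate comment/garbage lines
        if "cidr" in record:  # skip the trailing metadata record
            cidrs.append(record["cidr"])
    return cidrs
```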
BIN attached_assets/immagine_1767353869328.png — new binary file, 42 KiB (binary file not shown).
Detections page component:

```diff
@@ -5,44 +5,81 @@ import { Button } from "@/components/ui/button";
 import { Input } from "@/components/ui/input";
 import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
 import { Slider } from "@/components/ui/slider";
-import { AlertTriangle, Search, Shield, Globe, MapPin, Building2, ShieldPlus } from "lucide-react";
+import { AlertTriangle, Search, Shield, Globe, MapPin, Building2, ShieldPlus, ShieldCheck, Unlock, ChevronLeft, ChevronRight } from "lucide-react";
 import { format } from "date-fns";
-import { useState } from "react";
+import { useState, useEffect, useMemo } from "react";
-import type { Detection } from "@shared/schema";
+import type { Detection, Whitelist } from "@shared/schema";
 import { getFlag } from "@/lib/country-flags";
 import { apiRequest, queryClient } from "@/lib/queryClient";
 import { useToast } from "@/hooks/use-toast";
 
+const ITEMS_PER_PAGE = 50;
+
+interface DetectionsResponse {
+  detections: Detection[];
+  total: number;
+}
+
 export default function Detections() {
-  const [searchQuery, setSearchQuery] = useState("");
+  const [searchInput, setSearchInput] = useState("");
+  const [debouncedSearch, setDebouncedSearch] = useState("");
   const [anomalyTypeFilter, setAnomalyTypeFilter] = useState<string>("all");
   const [minScore, setMinScore] = useState(0);
   const [maxScore, setMaxScore] = useState(100);
+  const [currentPage, setCurrentPage] = useState(1);
   const { toast } = useToast();
 
-  // Build query params
-  const queryParams = new URLSearchParams();
-  queryParams.set("limit", "5000");
-  if (anomalyTypeFilter !== "all") {
-    queryParams.set("anomalyType", anomalyTypeFilter);
-  }
-  if (minScore > 0) {
-    queryParams.set("minScore", minScore.toString());
-  }
-  if (maxScore < 100) {
-    queryParams.set("maxScore", maxScore.toString());
-  }
+  // Debounce search input
+  useEffect(() => {
+    const timer = setTimeout(() => {
+      setDebouncedSearch(searchInput);
+      setCurrentPage(1); // Reset to first page on search
+    }, 300);
+    return () => clearTimeout(timer);
+  }, [searchInput]);
+
+  // Reset page on filter change
+  useEffect(() => {
+    setCurrentPage(1);
+  }, [anomalyTypeFilter, minScore, maxScore]);
+
+  // Build query params with pagination and search
+  const queryParams = useMemo(() => {
+    const params = new URLSearchParams();
+    params.set("limit", ITEMS_PER_PAGE.toString());
+    params.set("offset", ((currentPage - 1) * ITEMS_PER_PAGE).toString());
+    if (anomalyTypeFilter !== "all") {
+      params.set("anomalyType", anomalyTypeFilter);
+    }
+    if (minScore > 0) {
+      params.set("minScore", minScore.toString());
+    }
+    if (maxScore < 100) {
+      params.set("maxScore", maxScore.toString());
+    }
+    if (debouncedSearch.trim()) {
+      params.set("search", debouncedSearch.trim());
+    }
+    return params.toString();
+  }, [currentPage, anomalyTypeFilter, minScore, maxScore, debouncedSearch]);
 
-  const { data: detections, isLoading } = useQuery<Detection[]>({
-    queryKey: ["/api/detections", anomalyTypeFilter, minScore, maxScore],
-    queryFn: () => fetch(`/api/detections?${queryParams.toString()}`).then(r => r.json()),
-    refetchInterval: 5000,
+  const { data, isLoading } = useQuery<DetectionsResponse>({
+    queryKey: ["/api/detections", currentPage, anomalyTypeFilter, minScore, maxScore, debouncedSearch],
+    queryFn: () => fetch(`/api/detections?${queryParams}`).then(r => r.json()),
+    refetchInterval: 10000,
   });
 
-  const filteredDetections = detections?.filter((d) =>
-    d.sourceIp.toLowerCase().includes(searchQuery.toLowerCase()) ||
-    d.anomalyType.toLowerCase().includes(searchQuery.toLowerCase())
-  );
+  const detections = data?.detections || [];
+  const totalCount = data?.total || 0;
+  const totalPages = Math.ceil(totalCount / ITEMS_PER_PAGE);
+
+  // Fetch whitelist to check if IP is already whitelisted
+  const { data: whitelistData } = useQuery<Whitelist[]>({
+    queryKey: ["/api/whitelist"],
+  });
+
+  // Create a Set of whitelisted IPs for fast lookup
+  const whitelistedIps = new Set(whitelistData?.map(w => w.ipAddress) || []);
 
   // Mutation per aggiungere a whitelist
   const addToWhitelistMutation = useMutation({
@@ -55,7 +92,7 @@ export default function Detections() {
     onSuccess: (_, detection) => {
       toast({
         title: "IP aggiunto alla whitelist",
-        description: `${detection.sourceIp} è stato aggiunto alla whitelist con successo.`,
+        description: `${detection.sourceIp} è stato aggiunto alla whitelist e sbloccato dai router.`,
       });
       queryClient.invalidateQueries({ queryKey: ["/api/whitelist"] });
       queryClient.invalidateQueries({ queryKey: ["/api/detections"] });
@@ -69,6 +106,29 @@ export default function Detections() {
     }
   });
 
+  // Mutation per sbloccare IP dai router
+  const unblockMutation = useMutation({
+    mutationFn: async (detection: Detection) => {
+      return await apiRequest("POST", "/api/unblock-ip", {
+        ipAddress: detection.sourceIp
+      });
+    },
+    onSuccess: (data: any, detection) => {
+      toast({
+        title: "IP sbloccato",
+        description: `${detection.sourceIp} è stato rimosso dalla blocklist di ${data.unblocked_from || 0} router.`,
+      });
+      queryClient.invalidateQueries({ queryKey: ["/api/detections"] });
+    },
+    onError: (error: any, detection) => {
+      toast({
+        title: "Errore sblocco",
+        description: error.message || `Impossibile sbloccare ${detection.sourceIp} dai router.`,
+        variant: "destructive",
+      });
+    }
+  });
+
   const getRiskBadge = (riskScore: string) => {
     const score = parseFloat(riskScore);
     if (score >= 85) return <Badge variant="destructive">CRITICO</Badge>;
@@ -106,9 +166,9 @@ export default function Detections() {
         <div className="relative flex-1 min-w-[200px]">
           <Search className="absolute left-3 top-1/2 -translate-y-1/2 h-4 w-4 text-muted-foreground" />
           <Input
-            placeholder="Cerca per IP o tipo anomalia..."
-            value={searchQuery}
-            onChange={(e) => setSearchQuery(e.target.value)}
+            placeholder="Cerca per IP, paese, organizzazione..."
+            value={searchInput}
+            onChange={(e) => setSearchInput(e.target.value)}
             className="pl-9"
             data-testid="input-search"
           />
@@ -160,9 +220,36 @@ export default function Detections() {
       {/* Detections List */}
       <Card data-testid="card-detections-list">
         <CardHeader>
-          <CardTitle className="flex items-center gap-2">
-            <AlertTriangle className="h-5 w-5" />
-            Rilevamenti ({filteredDetections?.length || 0})
+          <CardTitle className="flex items-center justify-between gap-2 flex-wrap">
+            <div className="flex items-center gap-2">
+              <AlertTriangle className="h-5 w-5" />
+              Rilevamenti ({totalCount})
+            </div>
+            {totalPages > 1 && (
+              <div className="flex items-center gap-2 text-sm font-normal">
+                <Button
+                  variant="outline"
+                  size="icon"
+                  onClick={() => setCurrentPage(p => Math.max(1, p - 1))}
+                  disabled={currentPage === 1}
+                  data-testid="button-prev-page"
+                >
+                  <ChevronLeft className="h-4 w-4" />
+                </Button>
+                <span data-testid="text-pagination">
+                  Pagina {currentPage} di {totalPages}
+                </span>
+                <Button
+                  variant="outline"
+                  size="icon"
+                  onClick={() => setCurrentPage(p => Math.min(totalPages, p + 1))}
+                  disabled={currentPage === totalPages}
+                  data-testid="button-next-page"
+                >
+                  <ChevronRight className="h-4 w-4" />
+                </Button>
+              </div>
+            )}
           </CardTitle>
         </CardHeader>
         <CardContent>
@@ -170,9 +257,9 @@ export default function Detections() {
           <div className="text-center py-8 text-muted-foreground" data-testid="text-loading">
             Caricamento...
           </div>
-        ) : filteredDetections && filteredDetections.length > 0 ? (
+        ) : detections.length > 0 ? (
           <div className="space-y-3">
-            {filteredDetections.map((detection) => (
+            {detections.map((detection) => (
               <div
                 key={detection.id}
                 className="p-4 rounded-lg border hover-elevate"
@@ -278,17 +365,44 @@ export default function Detections() {
                   </Badge>
                 )}
 
-                <Button
-                  variant="outline"
-                  size="sm"
-                  onClick={() => addToWhitelistMutation.mutate(detection)}
-                  disabled={addToWhitelistMutation.isPending}
-                  className="w-full"
-                  data-testid={`button-whitelist-${detection.id}`}
-                >
-                  <ShieldPlus className="h-3 w-3 mr-1" />
-                  Whitelist
-                </Button>
+                {whitelistedIps.has(detection.sourceIp) ? (
+                  <Button
+                    variant="outline"
+                    size="sm"
+                    disabled
+                    className="w-full bg-green-500/10 border-green-500 text-green-600 dark:text-green-400"
+                    data-testid={`button-whitelist-${detection.id}`}
+                  >
+                    <ShieldCheck className="h-3 w-3 mr-1" />
+                    In Whitelist
+                  </Button>
+                ) : (
+                  <Button
+                    variant="outline"
+                    size="sm"
+                    onClick={() => addToWhitelistMutation.mutate(detection)}
+                    disabled={addToWhitelistMutation.isPending}
+                    className="w-full"
+                    data-testid={`button-whitelist-${detection.id}`}
+                  >
+                    <ShieldPlus className="h-3 w-3 mr-1" />
+                    Whitelist
+                  </Button>
+                )}
+
+                {detection.blocked && (
+                  <Button
+                    variant="outline"
+                    size="sm"
+                    onClick={() => unblockMutation.mutate(detection)}
+                    disabled={unblockMutation.isPending}
+                    className="w-full"
+                    data-testid={`button-unblock-${detection.id}`}
+                  >
+                    <Unlock className="h-3 w-3 mr-1" />
+                    Sblocca Router
+                  </Button>
+                )}
               </div>
             </div>
           </div>
@@ -298,11 +412,40 @@ export default function Detections() {
           <div className="text-center py-12 text-muted-foreground" data-testid="text-no-results">
             <AlertTriangle className="h-12 w-12 mx-auto mb-2 opacity-50" />
             <p>Nessun rilevamento trovato</p>
-            {searchQuery && (
+            {debouncedSearch && (
               <p className="text-sm">Prova con un altro termine di ricerca</p>
             )}
           </div>
         )}
+
+        {/* Bottom pagination */}
+        {totalPages > 1 && detections.length > 0 && (
+          <div className="flex items-center justify-center gap-4 mt-6 pt-4 border-t">
+            <Button
+              variant="outline"
+              size="sm"
+              onClick={() => setCurrentPage(p => Math.max(1, p - 1))}
+              disabled={currentPage === 1}
+              data-testid="button-prev-page-bottom"
+            >
+              <ChevronLeft className="h-4 w-4 mr-1" />
+              Precedente
+            </Button>
+            <span className="text-sm text-muted-foreground" data-testid="text-pagination-bottom">
+              Pagina {currentPage} di {totalPages} ({totalCount} totali)
+            </span>
+            <Button
+              variant="outline"
+              size="sm"
+              onClick={() => setCurrentPage(p => Math.min(totalPages, p + 1))}
+              disabled={currentPage === totalPages}
+              data-testid="button-next-page-bottom"
+            >
+              Successiva
+              <ChevronRight className="h-4 w-4 ml-1" />
+            </Button>
+          </div>
+        )}
       </CardContent>
     </Card>
   </div>
```
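The search debounce above is inlined in a `useEffect`; the same pattern extracted into a reusable hook reads as follows (a sketch, not a hook that exists in this repo):

```typescript
import { useEffect, useState } from "react";

// Generic version of the inline debounce effect in the diff above: the
// returned value trails `value` by `delayMs`, so query keys built from
// it only change once the user pauses typing.
export function useDebouncedValue<T>(value: T, delayMs = 300): T {
  const [debounced, setDebounced] = useState(value);
  useEffect(() => {
    const timer = setTimeout(() => setDebounced(value), delayMs);
    return () => clearTimeout(timer);
  }, [value, delayMs]);
  return debounced;
}

// Usage sketch:
//   const debouncedSearch = useDebouncedValue(searchInput);
```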
Whitelist page component:

```diff
@@ -2,7 +2,7 @@ import { useQuery, useMutation } from "@tanstack/react-query";
 import { queryClient, apiRequest } from "@/lib/queryClient";
 import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
 import { Button } from "@/components/ui/button";
-import { Shield, Plus, Trash2, CheckCircle2, XCircle } from "lucide-react";
+import { Shield, Plus, Trash2, CheckCircle2, XCircle, Search } from "lucide-react";
 import { format } from "date-fns";
 import { useState } from "react";
 import { useForm } from "react-hook-form";
@@ -44,6 +44,7 @@ const whitelistFormSchema = insertWhitelistSchema.extend({
 export default function WhitelistPage() {
   const { toast } = useToast();
   const [isAddDialogOpen, setIsAddDialogOpen] = useState(false);
+  const [searchQuery, setSearchQuery] = useState("");
 
   const form = useForm<z.infer<typeof whitelistFormSchema>>({
     resolver: zodResolver(whitelistFormSchema),
@@ -59,6 +60,13 @@ export default function WhitelistPage() {
     queryKey: ["/api/whitelist"],
   });
 
+  // Filter whitelist based on search query
+  const filteredWhitelist = whitelist?.filter((item) =>
+    item.ipAddress.toLowerCase().includes(searchQuery.toLowerCase()) ||
+    item.reason?.toLowerCase().includes(searchQuery.toLowerCase()) ||
+    item.comment?.toLowerCase().includes(searchQuery.toLowerCase())
+  );
+
   const addMutation = useMutation({
     mutationFn: async (data: z.infer<typeof whitelistFormSchema>) => {
       return await apiRequest("POST", "/api/whitelist", data);
@@ -189,11 +197,27 @@ export default function WhitelistPage() {
       </Dialog>
     </div>
 
+    {/* Search Bar */}
+    <Card data-testid="card-search">
+      <CardContent className="pt-6">
+        <div className="relative">
+          <Search className="absolute left-3 top-1/2 -translate-y-1/2 h-4 w-4 text-muted-foreground" />
+          <Input
+            placeholder="Cerca per IP, motivo o note..."
+            value={searchQuery}
+            onChange={(e) => setSearchQuery(e.target.value)}
+            className="pl-9"
+            data-testid="input-search-whitelist"
+          />
+        </div>
+      </CardContent>
+    </Card>
+
     <Card data-testid="card-whitelist">
       <CardHeader>
         <CardTitle className="flex items-center gap-2">
           <Shield className="h-5 w-5" />
-          IP Protetti ({whitelist?.length || 0})
+          IP Protetti ({filteredWhitelist?.length || 0}{searchQuery && whitelist ? ` di ${whitelist.length}` : ''})
         </CardTitle>
       </CardHeader>
       <CardContent>
@@ -201,9 +225,9 @@ export default function WhitelistPage() {
         <div className="text-center py-8 text-muted-foreground" data-testid="text-loading">
           Caricamento...
         </div>
-      ) : whitelist && whitelist.length > 0 ? (
+      ) : filteredWhitelist && filteredWhitelist.length > 0 ? (
         <div className="space-y-3">
-          {whitelist.map((item) => (
+          {filteredWhitelist.map((item) => (
             <div
               key={item.id}
               className="p-4 rounded-lg border hover-elevate"
```
Migration runner script:

```diff
@@ -13,6 +13,7 @@ set -e
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 MIGRATIONS_DIR="$SCRIPT_DIR/migrations"
 IDS_DIR="$(dirname "$SCRIPT_DIR")"
+DEPLOYMENT_MIGRATIONS_DIR="$IDS_DIR/deployment/migrations"
 
 # Carica variabili ambiente ed esportale
 if [ -f "$IDS_DIR/.env" ]; then
@@ -79,9 +80,25 @@ echo -e "${CYAN}📊 Versione database corrente: ${YELLOW}${CURRENT_VERSION}${NC
 # STEP 3: Trova migrazioni da applicare
 # =============================================================================
 # Formato migrazioni: 001_description.sql, 002_another.sql, etc.
+# Cerca in ENTRAMBE le cartelle: database-schema/migrations E deployment/migrations
 MIGRATIONS_TO_APPLY=()
 
-for migration_file in $(find "$MIGRATIONS_DIR" -name "[0-9][0-9][0-9]_*.sql" | sort); do
+# Combina migrations da entrambe le cartelle e ordina per numero
+ALL_MIGRATIONS=""
+if [ -d "$MIGRATIONS_DIR" ]; then
+    ALL_MIGRATIONS+=$(find "$MIGRATIONS_DIR" -name "[0-9][0-9][0-9]_*.sql" 2>/dev/null || true)
+fi
+if [ -d "$DEPLOYMENT_MIGRATIONS_DIR" ]; then
+    if [ -n "$ALL_MIGRATIONS" ]; then
+        ALL_MIGRATIONS+=$'\n'
+    fi
+    ALL_MIGRATIONS+=$(find "$DEPLOYMENT_MIGRATIONS_DIR" -name "[0-9][0-9][0-9]_*.sql" 2>/dev/null || true)
+fi
+
+# Ordina le migrations per nome file (NNN_*.sql) estraendo il basename
+SORTED_MIGRATIONS=$(echo "$ALL_MIGRATIONS" | grep -v '^$' | while read f; do echo "$(basename "$f"):$f"; done | sort | cut -d':' -f2)
+
+for migration_file in $SORTED_MIGRATIONS; do
   MIGRATION_NAME=$(basename "$migration_file")
 
   # Estrai numero versione dal nome file (001, 002, etc.)
```
@@ -2,9 +2,9 @@
 -- PostgreSQL database dump
 --
 
-\restrict DRdWV6koNFM47SZOgSmGHaNCokL52rZoapBp1Sp7pjpmsyVuQ8dd8Z1Ne5UrhKB
+\restrict Jq3ohS02Qcz3l9bNbeQprTZolEFbFh84eEwk4en2HkAqc2Xojxrd4AFqHJvBETG
 
--- Dumped from database version 16.9 (415ebe8)
+-- Dumped from database version 16.11 (74c6bb6)
 -- Dumped by pg_dump version 16.10
 
 SET statement_timeout = 0;
@@ -45,7 +45,9 @@ CREATE TABLE public.detections (
     organization text,
     as_number text,
     as_name text,
-    isp text
+    isp text,
+    detection_source text DEFAULT 'ml_model'::text,
+    blacklist_id character varying
 );
 
 
@@ -96,6 +98,44 @@ CREATE TABLE public.network_logs (
 );
 
 
+--
+-- Name: public_blacklist_ips; Type: TABLE; Schema: public; Owner: -
+--
+
+CREATE TABLE public.public_blacklist_ips (
+    id character varying DEFAULT (gen_random_uuid())::text NOT NULL,
+    ip_address text NOT NULL,
+    cidr_range text,
+    ip_inet text,
+    cidr_inet text,
+    list_id character varying NOT NULL,
+    first_seen timestamp without time zone DEFAULT now() NOT NULL,
+    last_seen timestamp without time zone DEFAULT now() NOT NULL,
+    is_active boolean DEFAULT true NOT NULL
+);
+
+
+--
+-- Name: public_lists; Type: TABLE; Schema: public; Owner: -
+--
+
+CREATE TABLE public.public_lists (
+    id character varying DEFAULT (gen_random_uuid())::text NOT NULL,
+    name text NOT NULL,
+    type text NOT NULL,
+    url text NOT NULL,
+    enabled boolean DEFAULT true NOT NULL,
+    fetch_interval_minutes integer DEFAULT 10 NOT NULL,
+    last_fetch timestamp without time zone,
+    last_success timestamp without time zone,
+    total_ips integer DEFAULT 0 NOT NULL,
+    active_ips integer DEFAULT 0 NOT NULL,
+    error_count integer DEFAULT 0 NOT NULL,
+    last_error text,
+    created_at timestamp without time zone DEFAULT now() NOT NULL
+);
+
+
 --
 -- Name: routers; Type: TABLE; Schema: public; Owner: -
 --
@@ -153,7 +193,10 @@ CREATE TABLE public.whitelist (
     reason text,
     created_by text,
     active boolean DEFAULT true NOT NULL,
-    created_at timestamp without time zone DEFAULT now() NOT NULL
+    created_at timestamp without time zone DEFAULT now() NOT NULL,
+    source text DEFAULT 'manual'::text,
+    list_id character varying,
+    ip_inet text
 );
 
 
@@ -189,6 +232,30 @@ ALTER TABLE ONLY public.network_logs
     ADD CONSTRAINT network_logs_pkey PRIMARY KEY (id);
 
 
+--
+-- Name: public_blacklist_ips public_blacklist_ips_ip_address_list_id_key; Type: CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.public_blacklist_ips
+    ADD CONSTRAINT public_blacklist_ips_ip_address_list_id_key UNIQUE (ip_address, list_id);
+
+
+--
+-- Name: public_blacklist_ips public_blacklist_ips_pkey; Type: CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.public_blacklist_ips
+    ADD CONSTRAINT public_blacklist_ips_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: public_lists public_lists_pkey; Type: CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.public_lists
+    ADD CONSTRAINT public_lists_pkey PRIMARY KEY (id);
+
+
 --
 -- Name: routers routers_ip_address_unique; Type: CONSTRAINT; Schema: public; Owner: -
 --
@@ -308,9 +375,17 @@ ALTER TABLE ONLY public.network_logs
     ADD CONSTRAINT network_logs_router_id_routers_id_fk FOREIGN KEY (router_id) REFERENCES public.routers(id);
 
 
+--
+-- Name: public_blacklist_ips public_blacklist_ips_list_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.public_blacklist_ips
+    ADD CONSTRAINT public_blacklist_ips_list_id_fkey FOREIGN KEY (list_id) REFERENCES public.public_lists(id) ON DELETE CASCADE;
+
+
 --
 -- PostgreSQL database dump complete
 --
 
-\unrestrict DRdWV6koNFM47SZOgSmGHaNCokL52rZoapBp1Sp7pjpmsyVuQ8dd8Z1Ne5UrhKB
+\unrestrict Jq3ohS02Qcz3l9bNbeQprTZolEFbFh84eEwk4en2HkAqc2Xojxrd4AFqHJvBETG
deployment/install_list_fetcher.sh (new file, 105 lines)
@@ -0,0 +1,105 @@
+#!/bin/bash
+# =============================================================================
+# IDS - List Fetcher Service Installation
+# =============================================================================
+# Installs and configures the systemd service for the public lists fetcher
+# Run as ROOT: ./install_list_fetcher.sh
+# =============================================================================
+
+set -e
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+BLUE='\033[0;34m'
+NC='\033[0m'
+
+echo -e "${BLUE}"
+echo "╔═══════════════════════════════════════════════╗"
+echo "║       📋 IDS LIST FETCHER INSTALLATION        ║"
+echo "╚═══════════════════════════════════════════════╝"
+echo -e "${NC}"
+
+IDS_DIR="/opt/ids"
+SYSTEMD_DIR="/etc/systemd/system"
+
+# Check that we are running as root
+if [ "$EUID" -ne 0 ]; then
+    echo -e "${RED}❌ This script must be run as root${NC}"
+    echo -e "${YELLOW}   Run: sudo ./install_list_fetcher.sh${NC}"
+    exit 1
+fi
+
+# Check that the source files exist
+SERVICE_SRC="$IDS_DIR/deployment/systemd/ids-list-fetcher.service"
+TIMER_SRC="$IDS_DIR/deployment/systemd/ids-list-fetcher.timer"
+
+if [ ! -f "$SERVICE_SRC" ]; then
+    echo -e "${RED}❌ Service file not found: $SERVICE_SRC${NC}"
+    exit 1
+fi
+
+if [ ! -f "$TIMER_SRC" ]; then
+    echo -e "${RED}❌ Timer file not found: $TIMER_SRC${NC}"
+    exit 1
+fi
+
+# Check that the Python virtual environment exists
+VENV_PYTHON="$IDS_DIR/python_ml/venv/bin/python3"
+if [ ! -f "$VENV_PYTHON" ]; then
+    echo -e "${YELLOW}⚠️  Virtual environment not found, creating it...${NC}"
+    cd "$IDS_DIR/python_ml"
+    python3.11 -m venv venv
+    ./venv/bin/pip install --upgrade pip
+    ./venv/bin/pip install -r requirements.txt
+    echo -e "${GREEN}✅ Virtual environment created${NC}"
+fi
+
+# Check that run_fetcher.py exists
+FETCHER_SCRIPT="$IDS_DIR/python_ml/list_fetcher/run_fetcher.py"
+if [ ! -f "$FETCHER_SCRIPT" ]; then
+    echo -e "${RED}❌ Fetcher script not found: $FETCHER_SCRIPT${NC}"
+    exit 1
+fi
+
+# Copy systemd files
+echo -e "${BLUE}📦 Installing systemd files...${NC}"
+
+cp "$SERVICE_SRC" "$SYSTEMD_DIR/ids-list-fetcher.service"
+cp "$TIMER_SRC" "$SYSTEMD_DIR/ids-list-fetcher.timer"
+
+echo -e "${GREEN}   ✅ ids-list-fetcher.service installed${NC}"
+echo -e "${GREEN}   ✅ ids-list-fetcher.timer installed${NC}"
+
+# Reload systemd
+echo -e "${BLUE}🔄 Reloading systemd configuration...${NC}"
+systemctl daemon-reload
+echo -e "${GREEN}✅ Daemon reloaded${NC}"
+
+# Enable and start the timer
+echo -e "${BLUE}⏱️  Enabling timer (every 10 minutes)...${NC}"
+systemctl enable ids-list-fetcher.timer
+systemctl start ids-list-fetcher.timer
+echo -e "${GREEN}✅ Timer enabled and started${NC}"
+
+# Manual test run
+echo -e "${BLUE}🧪 Test run of the fetcher...${NC}"
+if systemctl start ids-list-fetcher.service; then
+    echo -e "${GREEN}✅ Fetcher ran successfully${NC}"
+else
+    echo -e "${YELLOW}⚠️  The first run may fail if no lists are configured${NC}"
+fi
+
+# Show status
+echo ""
+echo -e "${GREEN}╔═══════════════════════════════════════════════╗${NC}"
+echo -e "${GREEN}║         ✅ INSTALLATION COMPLETED             ║${NC}"
+echo -e "${GREEN}╚═══════════════════════════════════════════════╝${NC}"
+echo ""
+echo -e "${BLUE}📋 USEFUL COMMANDS:${NC}"
+echo -e "   • Timer status:   ${YELLOW}systemctl status ids-list-fetcher.timer${NC}"
+echo -e "   • Service status: ${YELLOW}systemctl status ids-list-fetcher.service${NC}"
+echo -e "   • Run manually:   ${YELLOW}systemctl start ids-list-fetcher.service${NC}"
+echo -e "   • View logs:      ${YELLOW}journalctl -u ids-list-fetcher -n 50${NC}"
+echo -e "   • Active timers:  ${YELLOW}systemctl list-timers | grep ids${NC}"
+echo ""
@@ -2,6 +2,7 @@
 -- Description: Adds blacklist/whitelist public sources with auto-sync support
 -- Author: IDS System
 -- Date: 2024-11-26
+-- NOTE: Fully idempotent - safe to run multiple times
 
 BEGIN;
 
@@ -26,8 +27,8 @@ CREATE TABLE IF NOT EXISTS public_lists (
     created_at TIMESTAMP NOT NULL DEFAULT NOW()
 );
 
-CREATE INDEX public_lists_type_idx ON public_lists(type);
-CREATE INDEX public_lists_enabled_idx ON public_lists(enabled);
+CREATE INDEX IF NOT EXISTS public_lists_type_idx ON public_lists(type);
+CREATE INDEX IF NOT EXISTS public_lists_enabled_idx ON public_lists(enabled);
 
 -- Public blacklist IPs from external sources
 CREATE TABLE IF NOT EXISTS public_blacklist_ips (
@@ -40,10 +41,20 @@ CREATE TABLE IF NOT EXISTS public_blacklist_ips (
     is_active BOOLEAN NOT NULL DEFAULT true
 );
 
-CREATE INDEX public_blacklist_ip_idx ON public_blacklist_ips(ip_address);
-CREATE INDEX public_blacklist_list_idx ON public_blacklist_ips(list_id);
-CREATE INDEX public_blacklist_active_idx ON public_blacklist_ips(is_active);
-CREATE UNIQUE INDEX public_blacklist_ip_list_key ON public_blacklist_ips(ip_address, list_id);
+CREATE INDEX IF NOT EXISTS public_blacklist_ip_idx ON public_blacklist_ips(ip_address);
+CREATE INDEX IF NOT EXISTS public_blacklist_list_idx ON public_blacklist_ips(list_id);
+CREATE INDEX IF NOT EXISTS public_blacklist_active_idx ON public_blacklist_ips(is_active);
+
+-- Create unique constraint only if not exists
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1 FROM pg_indexes
+        WHERE indexname = 'public_blacklist_ip_list_key'
+    ) THEN
+        CREATE UNIQUE INDEX public_blacklist_ip_list_key ON public_blacklist_ips(ip_address, list_id);
+    END IF;
+END $$;
 
 -- ============================================================================
 -- 2. ALTER EXISTING TABLES
@@ -93,7 +104,6 @@ END $$;
 -- 3. UPDATE SCHEMA VERSION
 -- ============================================================================
 
--- Insert new version record
 INSERT INTO schema_version (id, version, description)
 VALUES (1, 6, 'Add public lists integration (blacklist/whitelist sources)')
 ON CONFLICT (id) DO UPDATE
@@ -101,46 +111,6 @@ SET version = 6,
     description = 'Add public lists integration (blacklist/whitelist sources)',
     applied_at = NOW();
 
--- ============================================================================
--- 4. SEED DEFAULT PUBLIC LISTS (OPTIONAL - commented for manual execution)
--- ============================================================================
-
-/*
--- Blacklist sources
-INSERT INTO public_lists (name, type, url, enabled, fetch_interval_minutes) VALUES
-('Spamhaus DROP', 'blacklist', 'https://www.spamhaus.org/drop/drop.txt', true, 10),
-('Talos Intelligence', 'blacklist', 'https://talosintelligence.com/documents/ip-blacklist', false, 10);
-
--- Whitelist sources
-INSERT INTO public_lists (name, type, url, enabled, fetch_interval_minutes) VALUES
-('AWS IP Ranges', 'whitelist', 'https://ip-ranges.amazonaws.com/ip-ranges.json', true, 10),
-('Google Cloud IP Ranges', 'whitelist', 'https://www.gstatic.com/ipranges/cloud.json', true, 10),
-('Cloudflare IPs', 'whitelist', 'https://www.cloudflare.com/ips-v4', true, 10),
-('IANA Root Servers', 'whitelist', 'https://www.iana.org/domains/root/servers', true, 10),
-('NTP Pool', 'whitelist', 'https://www.ntppool.org/zone/@', false, 10);
-*/
-
 COMMIT;
 
--- ============================================================================
--- ROLLBACK PROCEDURE (if needed)
--- ============================================================================
-
-/*
-BEGIN;
-
--- Remove new columns from existing tables
-ALTER TABLE detections DROP COLUMN IF EXISTS detection_source;
-ALTER TABLE detections DROP COLUMN IF EXISTS blacklist_id;
-ALTER TABLE whitelist DROP COLUMN IF EXISTS source;
-ALTER TABLE whitelist DROP COLUMN IF EXISTS list_id;
-
--- Drop new tables (CASCADE removes FK constraints)
-DROP TABLE IF EXISTS public_blacklist_ips CASCADE;
-DROP TABLE IF EXISTS public_lists CASCADE;
-
--- Revert schema version
-UPDATE schema_version SET version = 5, description = 'Rollback from public lists integration' WHERE id = 1;
-
-COMMIT;
-*/
+SELECT 'Migration 006 completed successfully' as status;
@@ -1,13 +1,36 @@
 -- Migration 007: Add INET/CIDR support for proper network range matching
 -- Required for public lists integration (Spamhaus /24, AWS ranges, etc.)
 -- Date: 2025-11-26
+-- NOTE: Handles case where columns exist as TEXT type (from Drizzle)
 
 BEGIN;
 
--- Add INET/CIDR columns to public_blacklist_ips
+-- ============================================================================
+-- FIX: Drop TEXT columns and recreate as proper INET/CIDR types
+-- ============================================================================
+
+-- Check column type and fix if needed for public_blacklist_ips
+DO $$
+DECLARE
+    col_type text;
+BEGIN
+    -- Check ip_inet column type
+    SELECT data_type INTO col_type
+    FROM information_schema.columns
+    WHERE table_name = 'public_blacklist_ips' AND column_name = 'ip_inet';
+
+    IF col_type = 'text' THEN
+        -- Drop the wrong type columns
+        ALTER TABLE public_blacklist_ips DROP COLUMN IF EXISTS ip_inet;
+        ALTER TABLE public_blacklist_ips DROP COLUMN IF EXISTS cidr_inet;
+        RAISE NOTICE 'Dropped TEXT columns, will recreate as INET/CIDR';
+    END IF;
+END $$;
+
+-- Add INET/CIDR columns with correct types
 ALTER TABLE public_blacklist_ips
-ADD COLUMN ip_inet inet,
-ADD COLUMN cidr_inet cidr;
+ADD COLUMN IF NOT EXISTS ip_inet inet,
+ADD COLUMN IF NOT EXISTS cidr_inet cidr;
 
 -- Populate new columns from existing text data
 UPDATE public_blacklist_ips
@@ -15,33 +38,51 @@ SET ip_inet = ip_address::inet,
     cidr_inet = CASE
         WHEN cidr_range IS NOT NULL THEN cidr_range::cidr
         ELSE (ip_address || '/32')::cidr
-    END;
+    END
+WHERE ip_inet IS NULL OR cidr_inet IS NULL;
 
--- Create indexes for INET operators (critical for performance)
-CREATE INDEX public_blacklist_ip_inet_idx ON public_blacklist_ips USING gist(ip_inet inet_ops);
-CREATE INDEX public_blacklist_cidr_inet_idx ON public_blacklist_ips USING gist(cidr_inet inet_ops);
+-- Create GiST indexes for INET operators
+CREATE INDEX IF NOT EXISTS public_blacklist_ip_inet_idx ON public_blacklist_ips USING gist(ip_inet inet_ops);
+CREATE INDEX IF NOT EXISTS public_blacklist_cidr_inet_idx ON public_blacklist_ips USING gist(cidr_inet inet_ops);
 
--- Add INET column to whitelist for CIDR matching
+-- ============================================================================
+-- Fix whitelist table
+-- ============================================================================
+
+DO $$
+DECLARE
+    col_type text;
+BEGIN
+    SELECT data_type INTO col_type
+    FROM information_schema.columns
+    WHERE table_name = 'whitelist' AND column_name = 'ip_inet';
+
+    IF col_type = 'text' THEN
+        ALTER TABLE whitelist DROP COLUMN IF EXISTS ip_inet;
+        RAISE NOTICE 'Dropped TEXT column from whitelist, will recreate as INET';
+    END IF;
+END $$;
+
+-- Add INET column to whitelist
 ALTER TABLE whitelist
-ADD COLUMN ip_inet inet;
+ADD COLUMN IF NOT EXISTS ip_inet inet;
 
 -- Populate whitelist INET column
 UPDATE whitelist
 SET ip_inet = CASE
     WHEN ip_address ~ '/' THEN ip_address::inet
     ELSE ip_address::inet
-END;
+END
+WHERE ip_inet IS NULL;
 
 -- Create index for whitelist INET matching
-CREATE INDEX whitelist_ip_inet_idx ON whitelist USING gist(ip_inet inet_ops);
+CREATE INDEX IF NOT EXISTS whitelist_ip_inet_idx ON whitelist USING gist(ip_inet inet_ops);
 
 -- Update schema version
 UPDATE schema_version SET version = 7, applied_at = NOW() WHERE id = 1;
 
 COMMIT;
 
--- Verification queries
+-- Verification
 SELECT 'Migration 007 completed successfully' as status;
 SELECT version, applied_at FROM schema_version WHERE id = 1;
-SELECT COUNT(*) as blacklist_with_cidr FROM public_blacklist_ips WHERE cidr_inet IS NOT NULL;
-SELECT COUNT(*) as whitelist_with_inet FROM whitelist WHERE ip_inet IS NOT NULL;
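To illustrate what the INET/CIDR columns enable once this migration has run: containment lookups can use PostgreSQL's `<<=` ("is contained within or equals") operator, which the GiST indexes above accelerate. A hedged sketch with psycopg2; the DSN and the test address are assumptions, not repository code:

import psycopg2

conn = psycopg2.connect("dbname=ids")  # assumed local database name
with conn.cursor() as cur:
    # True containment match: exact IPs and covering CIDR ranges both hit.
    cur.execute(
        "SELECT COUNT(*) FROM public_blacklist_ips "
        "WHERE %s::inet <<= cidr_inet AND is_active = true",
        ("203.0.113.7",),  # TEST-NET-3 example address
    )
    print(cur.fetchone()[0])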
deployment/migrations/008_force_inet_types.sql (new file, 92 lines)
@@ -0,0 +1,92 @@
+-- Migration 008: Force INET/CIDR types (unconditional)
+-- Fixes issues where columns remained TEXT after conditional migration 007
+-- Date: 2026-01-02
+
+BEGIN;
+
+-- ============================================================================
+-- FORCE DROP AND RECREATE ALL INET COLUMNS
+-- This is unconditional - always executes regardless of current state
+-- ============================================================================
+
+-- Drop indexes first (if exist)
+DROP INDEX IF EXISTS public_blacklist_ip_inet_idx;
+DROP INDEX IF EXISTS public_blacklist_cidr_inet_idx;
+DROP INDEX IF EXISTS whitelist_ip_inet_idx;
+
+-- ============================================================================
+-- FIX public_blacklist_ips TABLE
+-- ============================================================================
+
+-- Drop columns unconditionally
+ALTER TABLE public_blacklist_ips DROP COLUMN IF EXISTS ip_inet;
+ALTER TABLE public_blacklist_ips DROP COLUMN IF EXISTS cidr_inet;
+
+-- Recreate with correct INET/CIDR types
+ALTER TABLE public_blacklist_ips ADD COLUMN ip_inet inet;
+ALTER TABLE public_blacklist_ips ADD COLUMN cidr_inet cidr;
+
+-- Populate from existing text data
+UPDATE public_blacklist_ips
+SET
+    ip_inet = CASE
+        WHEN ip_address ~ '/' THEN ip_address::inet
+        ELSE ip_address::inet
+    END,
+    cidr_inet = CASE
+        WHEN cidr_range IS NOT NULL AND cidr_range != '' THEN cidr_range::cidr
+        WHEN ip_address ~ '/' THEN ip_address::cidr
+        ELSE (ip_address || '/32')::cidr
+    END
+WHERE ip_inet IS NULL;
+
+-- Create GiST indexes for fast INET/CIDR containment operators
+CREATE INDEX public_blacklist_ip_inet_idx ON public_blacklist_ips USING gist(ip_inet inet_ops);
+CREATE INDEX public_blacklist_cidr_inet_idx ON public_blacklist_ips USING gist(cidr_inet inet_ops);
+
+-- ============================================================================
+-- FIX whitelist TABLE
+-- ============================================================================
+
+-- Drop column unconditionally
+ALTER TABLE whitelist DROP COLUMN IF EXISTS ip_inet;
+
+-- Recreate with correct INET type
+ALTER TABLE whitelist ADD COLUMN ip_inet inet;
+
+-- Populate from existing text data
+UPDATE whitelist
+SET ip_inet = CASE
+    WHEN ip_address ~ '/' THEN ip_address::inet
+    ELSE ip_address::inet
+END
+WHERE ip_inet IS NULL;
+
+-- Create index for whitelist
+CREATE INDEX whitelist_ip_inet_idx ON whitelist USING gist(ip_inet inet_ops);
+
+-- ============================================================================
+-- UPDATE SCHEMA VERSION
+-- ============================================================================
+
+UPDATE schema_version SET version = 8, applied_at = NOW() WHERE id = 1;
+
+COMMIT;
+
+-- ============================================================================
+-- VERIFICATION
+-- ============================================================================
+
+SELECT 'Migration 008 completed successfully' as status;
+SELECT version, applied_at FROM schema_version WHERE id = 1;
+
+-- Verify column types
+SELECT
+    table_name,
+    column_name,
+    data_type
+FROM information_schema.columns
+WHERE
+    (table_name = 'public_blacklist_ips' AND column_name IN ('ip_inet', 'cidr_inet'))
+    OR (table_name = 'whitelist' AND column_name = 'ip_inet')
+ORDER BY table_name, column_name;
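A quick stdlib sanity check of the containment semantics these columns exist for (illustrative only, not repository code): Python's ipaddress module agrees with PostgreSQL's inet operators on range membership, which plain TEXT comparison cannot express.

import ipaddress

# '1.2.3.4' is inside '1.2.3.0/24' as networks, not as strings:
assert ipaddress.ip_address("1.2.3.4") in ipaddress.ip_network("1.2.3.0/24")
assert not ("1.2.3.4" <= "1.2.3.0/24")  # lexicographic comparison is meaningless here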
deployment/migrations/009_add_microsoft_meta_lists.sql (new file, 33 lines)
@@ -0,0 +1,33 @@
+-- Migration 009: Add Microsoft Azure and Meta/Facebook public lists
+-- Date: 2026-01-02
+
+-- Microsoft Azure IP ranges (whitelist - cloud provider)
+INSERT INTO public_lists (name, url, type, format, enabled, description, fetch_interval)
+VALUES (
+    'Microsoft Azure',
+    'https://raw.githubusercontent.com/femueller/cloud-ip-ranges/master/microsoft-azure-ip-ranges.json',
+    'whitelist',
+    'json',
+    true,
+    'Microsoft Azure cloud IP ranges - auto-updated from Azure Service Tags',
+    3600
+) ON CONFLICT (name) DO UPDATE SET
+    url = EXCLUDED.url,
+    description = EXCLUDED.description;
+
+-- Meta/Facebook IP ranges (whitelist - major service provider)
+INSERT INTO public_lists (name, url, type, format, enabled, description, fetch_interval)
+VALUES (
+    'Meta (Facebook)',
+    'https://raw.githubusercontent.com/parseword/util-misc/master/block-facebook/facebook-ip-ranges.txt',
+    'whitelist',
+    'plain',
+    true,
+    'Meta/Facebook IP ranges (includes Instagram, WhatsApp, Oculus) from BGP AS32934/AS54115/AS63293',
+    3600
+) ON CONFLICT (name) DO UPDATE SET
+    url = EXCLUDED.url,
+    description = EXCLUDED.description;
+
+-- Verify insertion
+SELECT id, name, type, enabled, url FROM public_lists WHERE name IN ('Microsoft Azure', 'Meta (Facebook)');
@@ -158,6 +158,20 @@ if [ -f "./deployment/setup_rsyslog.sh" ]; then
     fi
 fi
 
+# Check and install the list-fetcher service if missing
+echo -e "\n${BLUE}📋 Checking list-fetcher service...${NC}"
+if ! systemctl list-unit-files | grep -q "ids-list-fetcher"; then
+    echo -e "${YELLOW}   ids-list-fetcher service not installed, installing...${NC}"
+    if [ -f "./deployment/install_list_fetcher.sh" ]; then
+        chmod +x ./deployment/install_list_fetcher.sh
+        ./deployment/install_list_fetcher.sh
+    else
+        echo -e "${RED}   ❌ install_list_fetcher.sh script not found${NC}"
+    fi
+else
+    echo -e "${GREEN}   ✅ ids-list-fetcher service already installed${NC}"
+fi
+
 # Restart services
 echo -e "\n${BLUE}🔄 Restarting services...${NC}"
 if [ -f "./deployment/restart_all.sh" ]; then
@@ -21,13 +21,15 @@ class ListParser:
     def normalize_cidr(ip_str: str) -> tuple[str, Optional[str]]:
         """
         Normalize IP/CIDR to (ip_address, cidr_range)
-        Example: '1.2.3.0/24' -> ('1.2.3.0', '1.2.3.0/24')
+        For CIDR ranges, use the full CIDR notation as ip_address to ensure uniqueness
+        Example: '1.2.3.0/24' -> ('1.2.3.0/24', '1.2.3.0/24')
                  '1.2.3.4' -> ('1.2.3.4', None)
         """
         try:
             network = ipaddress.ip_network(ip_str, strict=False)
             if '/' in ip_str:
-                return (str(network.network_address), str(network))
+                normalized_cidr = str(network)
+                return (normalized_cidr, normalized_cidr)
             else:
                 return (ip_str, None)
         except ValueError:
@@ -41,8 +43,8 @@ class SpamhausParser(ListParser):
     def parse(content: str) -> Set[tuple[str, Optional[str]]]:
         """
         Parse Spamhaus DROP format:
-        ; Comment lines start with semicolon
-        1.2.3.0/24 ; SBL12345
+        - NDJSON (new): {"cidr":"1.2.3.0/24","sblid":"SBL12345","rir":"apnic"}
+        - Text (old): 1.2.3.0/24 ; SBL12345
         """
         ips = set()
         lines = content.strip().split('\n')
@@ -54,7 +56,18 @@ class SpamhausParser(ListParser):
             if not line or line.startswith(';') or line.startswith('#'):
                 continue
 
-            # Extract IP/CIDR before comment
+            # Try NDJSON format first (new Spamhaus format)
+            if line.startswith('{'):
+                try:
+                    data = json.loads(line)
+                    cidr = data.get('cidr')
+                    if cidr and ListParser.validate_ip(cidr):
+                        ips.add(ListParser.normalize_cidr(cidr))
+                    continue
+                except json.JSONDecodeError:
+                    pass
+
+            # Fallback: old text format
             parts = line.split(';')
             if parts:
                 ip_part = parts[0].strip()
@@ -163,6 +176,70 @@ class GCPParser(ListParser):
         return ips
 
 
+class AzureParser(ListParser):
+    """Parser for Microsoft Azure IP ranges JSON (Service Tags format)"""
+
+    @staticmethod
+    def parse(content: str) -> Set[tuple[str, Optional[str]]]:
+        """
+        Parse Azure Service Tags JSON format:
+        {
+          "values": [
+            {
+              "name": "ActionGroup",
+              "properties": {
+                "addressPrefixes": ["1.2.3.0/24", "5.6.7.0/24"]
+              }
+            }
+          ]
+        }
+        """
+        ips = set()
+
+        try:
+            data = json.loads(content)
+
+            for value in data.get('values', []):
+                properties = value.get('properties', {})
+                prefixes = properties.get('addressPrefixes', [])
+
+                for prefix in prefixes:
+                    if prefix and ListParser.validate_ip(prefix):
+                        ips.add(ListParser.normalize_cidr(prefix))
+
+        except json.JSONDecodeError:
+            pass
+
+        return ips
+
+
+class MetaParser(ListParser):
+    """Parser for Meta/Facebook IP ranges (plain CIDR list from BGP)"""
+
+    @staticmethod
+    def parse(content: str) -> Set[tuple[str, Optional[str]]]:
+        """
+        Parse Meta format (plain CIDR list):
+        31.13.24.0/21
+        31.13.64.0/18
+        157.240.0.0/17
+        """
+        ips = set()
+        lines = content.strip().split('\n')
+
+        for line in lines:
+            line = line.strip()
+
+            # Skip empty lines and comments
+            if not line or line.startswith('#') or line.startswith('//'):
+                continue
+
+            if ListParser.validate_ip(line):
+                ips.add(ListParser.normalize_cidr(line))
+
+        return ips
+
+
 class CloudflareParser(ListParser):
     """Parser for Cloudflare IP list"""
 
@@ -250,6 +327,11 @@ PARSERS: Dict[str, type[ListParser]] = {
     'talos': TalosParser,
     'aws': AWSParser,
     'gcp': GCPParser,
+    'google': GCPParser,
+    'azure': AzureParser,
+    'microsoft': AzureParser,
+    'meta': MetaParser,
+    'facebook': MetaParser,
     'cloudflare': CloudflareParser,
     'iana': IANAParser,
     'ntp': NTPPoolParser,
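The behavioral change to normalize_cidr is easiest to see in isolation. A standalone sketch of the new contract using only the standard library (the real method lives on ListParser as a @staticmethod):

import ipaddress
from typing import Optional

def normalize_cidr(ip_str: str) -> tuple[str, Optional[str]]:
    # For CIDR input, the full normalized CIDR becomes ip_address too,
    # so '1.2.3.0/24' and the bare host '1.2.3.0' can no longer collide
    # on the (ip_address, list_id) uniqueness constraint.
    network = ipaddress.ip_network(ip_str, strict=False)
    if '/' in ip_str:
        normalized_cidr = str(network)
        return (normalized_cidr, normalized_cidr)
    return (ip_str, None)

assert normalize_cidr('1.2.3.0/24') == ('1.2.3.0/24', '1.2.3.0/24')
assert normalize_cidr('1.2.3.4') == ('1.2.3.4', None)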
@@ -169,17 +169,27 @@ class MergeLogic:
                 INSERT INTO detections (
                     source_ip,
                     risk_score,
+                    confidence,
                     anomaly_type,
+                    reason,
+                    log_count,
+                    first_seen,
+                    last_seen,
                     detection_source,
                     blacklist_id,
                     detected_at,
                     blocked
-                ) VALUES (%s, %s, %s, %s, %s, %s, %s)
+                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                 RETURNING id
             """, (
                 ip_address,
-                str(risk_score),
+                risk_score,  # numeric, not string
+                100.0,  # confidence
                 'public_blacklist',
+                'IP in public blacklist',
+                1,  # log_count
+                datetime.utcnow(),  # first_seen
+                datetime.utcnow(),  # last_seen
                 'public_blacklist',
                 blacklist_id,
                 datetime.utcnow(),
@@ -213,6 +223,7 @@ class MergeLogic:
         try:
             with conn.cursor() as cur:
                 # Delete detections for IPs in whitelist ranges (CIDR-aware)
+                # Cast both sides to inet explicitly for type safety
                 cur.execute("""
                     DELETE FROM detections d
                     WHERE d.detection_source = 'public_blacklist'
@@ -221,8 +232,8 @@ class MergeLogic:
                         WHERE wl.active = true
                         AND wl.ip_inet IS NOT NULL
                         AND (
-                            d.source_ip::inet = wl.ip_inet
-                            OR d.source_ip::inet <<= wl.ip_inet
+                            d.source_ip::inet = wl.ip_inet::inet
+                            OR d.source_ip::inet <<= wl.ip_inet::inet
                         )
                     )
                 """)
@@ -265,7 +276,12 @@ class MergeLogic:
                 INSERT INTO detections (
                     source_ip,
                     risk_score,
+                    confidence,
                     anomaly_type,
+                    reason,
+                    log_count,
+                    first_seen,
+                    last_seen,
                     detection_source,
                     blacklist_id,
                     detected_at,
@@ -273,8 +289,13 @@ class MergeLogic:
                 )
                 SELECT DISTINCT
                     bl.ip_address,
-                    '75',
+                    75::numeric,
+                    100::numeric,
                     'public_blacklist',
+                    'IP in public blacklist',
+                    1,
+                    NOW(),
+                    NOW(),
                     'public_blacklist',
                     bl.id,
                     NOW(),
@@ -283,14 +304,15 @@ class MergeLogic:
                 WHERE bl.is_active = true
                 AND bl.ip_inet IS NOT NULL
                 -- Priority 1: Exclude if in manual whitelist (highest priority)
+                -- Cast to inet explicitly for type safety
                 AND NOT EXISTS (
                     SELECT 1 FROM whitelist wl
                     WHERE wl.active = true
                     AND wl.source = 'manual'
                     AND wl.ip_inet IS NOT NULL
                     AND (
-                        bl.ip_inet = wl.ip_inet
-                        OR bl.ip_inet <<= wl.ip_inet
+                        bl.ip_inet::inet = wl.ip_inet::inet
+                        OR bl.ip_inet::inet <<= wl.ip_inet::inet
                     )
                 )
                 -- Priority 2: Exclude if in public whitelist
@@ -300,8 +322,8 @@ class MergeLogic:
                     AND wl.source != 'manual'
                     AND wl.ip_inet IS NOT NULL
                     AND (
-                        bl.ip_inet = wl.ip_inet
-                        OR bl.ip_inet <<= wl.ip_inet
+                        bl.ip_inet::inet = wl.ip_inet::inet
+                        OR bl.ip_inet::inet <<= wl.ip_inet::inet
                     )
                 )
                 -- Avoid duplicate detections
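The merge SQL above boils down to one rule: any active whitelist range, manual or public, suppresses a blacklist detection, and matching is CIDR-aware. A toy pure-Python model of that exclusion (illustrative only, not repository code):

import ipaddress

def suppressed(ip: str, whitelist_ranges: list[str]) -> bool:
    # Mirrors "bl.ip_inet = wl.ip_inet OR bl.ip_inet <<= wl.ip_inet".
    addr = ipaddress.ip_address(ip)
    return any(addr in ipaddress.ip_network(r, strict=False) for r in whitelist_ranges)

whitelist = ["10.0.0.0/8", "1.2.3.0/24"]
assert suppressed("1.2.3.4", whitelist)          # covered by 1.2.3.0/24 -> no detection
assert not suppressed("203.0.113.7", whitelist)  # not whitelisted -> detection created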
@@ -25,12 +25,12 @@ The IDS employs a React-based frontend for real-time monitoring, detection visua
 **Key Architectural Decisions & Features:**
 - **Log Collection & Processing**: MikroTik syslog data (UDP:514) is parsed by `syslog_parser.py` and stored in PostgreSQL with a 3-day retention policy. The parser includes auto-reconnect and error recovery mechanisms.
 - **Machine Learning**: An Isolation Forest model (sklearn.ensemble.IsolationForest) trained on 25 network log features performs real-time anomaly detection, assigning a risk score (0-100 across five risk levels). A hybrid ML detector (Isolation Forest + Ensemble Classifier with weighted voting) reduces false positives. The system supports weekly automatic retraining of models.
-- **Automated Blocking**: Critical IPs (score >= 80) are automatically blocked in parallel across configured MikroTik routers via their REST API.
+- **Automated Blocking**: Critical IPs (score >= 80) are automatically blocked in parallel across configured MikroTik routers via their REST API. **Auto-unblock on whitelist**: when an IP is added to the whitelist, it is automatically removed from all router blocklists. A manual unblock button is available on the Detections page.
 - **Public Lists Integration (v2.0.0 - CIDR Complete)**: Automatic fetcher syncs blacklist/whitelist feeds every 10 minutes (Spamhaus, Talos, AWS, GCP, Cloudflare, IANA, NTP Pool). **Full CIDR support** using PostgreSQL INET/CIDR types with `<<=` containment operators for network range matching. Priority-based merge logic: Manual whitelist > Public whitelist > Blacklist (CIDR-aware). Detections are created for blacklisted IPs/ranges (excluding whitelisted ranges). CRUD API + UI for list management. See `deployment/docs/PUBLIC_LISTS_V2_CIDR.md` for implementation details.
 - **Automatic Cleanup**: An hourly systemd timer (`cleanup_detections.py`) removes old detections (48h) and auto-unblocks IPs (2h).
 - **Service Monitoring & Management**: A dashboard provides real-time status (ML Backend, Database, Syslog Parser). API endpoints, secured with API key authentication and Systemd integration, allow for service management (start/stop/restart) of Python services.
 - **IP Geolocation**: Integration with `ip-api.com` enriches detection data with geographical and AS information, utilizing intelligent caching.
-- **Database Management**: PostgreSQL is used for all persistent data. An intelligent database versioning system ensures efficient SQL migrations (v7 with INET/CIDR columns for network range matching). Dual-mode database drivers (`@neondatabase/serverless` for Replit, `pg` for AlmaLinux) ensure environment compatibility.
+- **Database Management**: PostgreSQL is used for all persistent data. An intelligent database versioning system ensures efficient SQL migrations (v8 with forced INET/CIDR column types for network range matching). Migration 008 unconditionally recreates the INET columns to fix type mismatches. Dual-mode database drivers (`@neondatabase/serverless` for Replit, `pg` for AlmaLinux) ensure environment compatibility.
 - **Microservices**: Clear separation of concerns between the Python ML backend and the Node.js API backend.
 - **UI/UX**: Utilizes ShadCN UI for a modern component library and `react-hook-form` with Zod for robust form validation. Analytics dashboards provide visualizations of normal and attack traffic, including real-time and historical data.
server/routes.ts (190 changes)
@@ -77,18 +77,22 @@ export async function registerRoutes(app: Express): Promise<Server> {
   // Detections
   app.get("/api/detections", async (req, res) => {
     try {
-      const limit = req.query.limit ? parseInt(req.query.limit as string) : 500;
+      const limit = req.query.limit ? parseInt(req.query.limit as string) : 50;
+      const offset = req.query.offset ? parseInt(req.query.offset as string) : 0;
       const anomalyType = req.query.anomalyType as string | undefined;
       const minScore = req.query.minScore ? parseFloat(req.query.minScore as string) : undefined;
       const maxScore = req.query.maxScore ? parseFloat(req.query.maxScore as string) : undefined;
+      const search = req.query.search as string | undefined;
 
-      const detections = await storage.getAllDetections({
+      const result = await storage.getAllDetections({
        limit,
+        offset,
        anomalyType,
        minScore,
-        maxScore
+        maxScore,
+        search
      });
-      res.json(detections);
+      res.json(result);
     } catch (error) {
       console.error('[DB ERROR] Failed to fetch detections:', error);
       res.status(500).json({ error: "Failed to fetch detections" });
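A hedged usage sketch of the now-paginated endpoint; the host/port and the 'requests' dependency are assumptions for illustration, not repository code:

import requests

resp = requests.get(
    "http://localhost:5000/api/detections",
    params={"limit": 50, "offset": 50, "search": "ssh"},  # second page, filtered
)
payload = resp.json()
# The response shape changed from a bare array to { detections, total }.
print(payload["total"], len(payload["detections"]))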
@@ -130,12 +134,74 @@ export async function registerRoutes(app: Express): Promise<Server> {
     try {
       const validatedData = insertWhitelistSchema.parse(req.body);
       const item = await storage.createWhitelist(validatedData);
+
+      // Auto-unblock from routers when adding to whitelist
+      const mlBackendUrl = process.env.ML_BACKEND_URL || 'http://localhost:8000';
+      const mlApiKey = process.env.IDS_API_KEY;
+      try {
+        const headers: Record<string, string> = { 'Content-Type': 'application/json' };
+        if (mlApiKey) {
+          headers['X-API-Key'] = mlApiKey;
+        }
+        const unblockResponse = await fetch(`${mlBackendUrl}/unblock-ip`, {
+          method: 'POST',
+          headers,
+          body: JSON.stringify({ ip_address: validatedData.ipAddress })
+        });
+        if (unblockResponse.ok) {
+          const result = await unblockResponse.json();
+          console.log(`[WHITELIST] Auto-unblocked ${validatedData.ipAddress} from ${result.unblocked_from} routers`);
+        } else {
+          console.warn(`[WHITELIST] Failed to auto-unblock ${validatedData.ipAddress}: ${unblockResponse.status}`);
+        }
+      } catch (unblockError) {
+        // Don't fail if ML backend is unavailable
+        console.warn(`[WHITELIST] ML backend unavailable for auto-unblock: ${unblockError}`);
+      }
+
       res.json(item);
     } catch (error) {
       res.status(400).json({ error: "Invalid whitelist data" });
     }
   });
 
+  // Unblock IP from all routers (proxy to ML backend)
+  app.post("/api/unblock-ip", async (req, res) => {
+    try {
+      const { ipAddress, listName = "ddos_blocked" } = req.body;
+
+      if (!ipAddress) {
+        return res.status(400).json({ error: "IP address is required" });
+      }
+
+      const mlBackendUrl = process.env.ML_BACKEND_URL || 'http://localhost:8000';
+      const mlApiKey = process.env.IDS_API_KEY;
+      const headers: Record<string, string> = { 'Content-Type': 'application/json' };
+      if (mlApiKey) {
+        headers['X-API-Key'] = mlApiKey;
+      }
+
+      const response = await fetch(`${mlBackendUrl}/unblock-ip`, {
+        method: 'POST',
+        headers,
+        body: JSON.stringify({ ip_address: ipAddress, list_name: listName })
+      });
+
+      if (!response.ok) {
+        const errorText = await response.text();
+        console.error(`[UNBLOCK] ML backend error for ${ipAddress}: ${response.status} - ${errorText}`);
+        return res.status(response.status).json({ error: errorText || "Failed to unblock IP" });
+      }
+
+      const result = await response.json();
+      console.log(`[UNBLOCK] Successfully unblocked ${ipAddress} from ${result.unblocked_from || 0} routers`);
+      res.json(result);
+    } catch (error: any) {
+      console.error('[UNBLOCK] Error:', error);
+      res.status(500).json({ error: error.message || "Failed to unblock IP from routers" });
+    }
+  });
+
   app.delete("/api/whitelist/:id", async (req, res) => {
     try {
       const success = await storage.deleteWhitelist(req.params.id);
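A hedged usage sketch of the new unblock endpoint (same assumptions as above: local dev server, 'requests' used only for illustration):

import requests

resp = requests.post(
    "http://localhost:5000/api/unblock-ip",
    json={"ipAddress": "203.0.113.7", "listName": "ddos_blocked"},
)
# On success the ML backend reports how many routers the IP was removed from.
print(resp.status_code, resp.json())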
@@ -221,20 +287,107 @@ export async function registerRoutes(app: Express): Promise<Server> {
         return res.status(404).json({ error: "List not found" });
       }
 
-      const updated = await storage.updatePublicList(req.params.id, {
-        lastAttempt: new Date('1970-01-01T00:00:00Z'),
-        errorMessage: null,
-      });
+      console.log(`[SYNC] Starting sync for list: ${list.name} (${list.url})`);
+
+      // Fetch the list from URL
+      const response = await fetch(list.url, {
+        headers: {
+          'User-Agent': 'IDS-MikroTik-PublicListFetcher/2.0',
+          'Accept': 'application/json, text/plain, */*',
+        },
+        signal: AbortSignal.timeout(30000),
+      });
+
+      if (!response.ok) {
+        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+      }
+
+      const contentType = response.headers.get('content-type') || '';
+      const text = await response.text();
+
+      // Parse IPs based on content type
+      let ips: Array<{ip: string, cidr?: string}> = [];
+
+      if (contentType.includes('json') || list.url.endsWith('.json')) {
+        // JSON format (Spamhaus DROP v4 JSON)
+        try {
+          const data = JSON.parse(text);
+          if (Array.isArray(data)) {
+            for (const entry of data) {
+              if (entry.cidr) {
+                const [ip] = entry.cidr.split('/');
+                ips.push({ ip, cidr: entry.cidr });
+              } else if (entry.ip) {
+                ips.push({ ip: entry.ip, cidr: null as any });
+              }
+            }
+          }
+        } catch (e) {
+          console.error('[SYNC] Failed to parse JSON:', e);
+          throw new Error('Invalid JSON format');
+        }
+      } else {
+        // Plain text format (one IP/CIDR per line)
+        const lines = text.split('\n');
+        for (const line of lines) {
+          const trimmed = line.trim();
+          if (!trimmed || trimmed.startsWith('#') || trimmed.startsWith(';')) continue;
+
+          // Extract IP/CIDR from line
+          const match = trimmed.match(/^(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(\/\d{1,2})?/);
+          if (match) {
+            const ip = match[1];
+            const cidr = match[2] ? `${match[1]}${match[2]}` : null;
+            ips.push({ ip, cidr: cidr as any });
+          }
+        }
+      }
+
+      console.log(`[SYNC] Parsed ${ips.length} IPs from ${list.name}`);
+
+      // Save IPs to database
+      let added = 0;
+      let updated = 0;
+
+      for (const { ip, cidr } of ips) {
+        const result = await storage.upsertBlacklistIp(list.id, ip, cidr);
+        if (result.created) added++;
+        else updated++;
+      }
+
+      // Update list stats
+      await storage.updatePublicList(list.id, {
+        lastFetch: new Date(),
+        lastSuccess: new Date(),
+        totalIps: ips.length,
+        activeIps: ips.length,
+        errorCount: 0,
+        lastError: null,
+      });
+
+      console.log(`[SYNC] Completed: ${added} added, ${updated} updated for ${list.name}`);
 
       res.json({
         success: true,
-        message: "Manual sync triggered - list marked for immediate sync",
-        note: "Fetcher will sync this list on next cycle (max 10 minutes). Check logs: journalctl -u ids-list-fetcher -n 50",
-        list: updated
+        message: `Sync completed: ${ips.length} IPs processed`,
+        added,
+        updated,
+        total: ips.length,
       });
-    } catch (error) {
-      console.error('[API ERROR] Failed to trigger sync:', error);
-      res.status(500).json({ error: "Failed to trigger sync" });
+    } catch (error: any) {
+      console.error('[API ERROR] Failed to sync:', error);
+
+      // Update error count
+      const list = await storage.getPublicListById(req.params.id);
+      if (list) {
+        await storage.updatePublicList(req.params.id, {
+          errorCount: (list.errorCount || 0) + 1,
+          lastError: error.message,
+          lastFetch: new Date(),
+        });
+      }
+
+      res.status(500).json({ error: `Sync failed: ${error.message}` });
     }
   });
 
|
|||||||
app.get("/api/stats", async (req, res) => {
|
app.get("/api/stats", async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const routers = await storage.getAllRouters();
|
const routers = await storage.getAllRouters();
|
||||||
const detections = await storage.getAllDetections({ limit: 1000 });
|
const detectionsResult = await storage.getAllDetections({ limit: 1000 });
|
||||||
const recentLogs = await storage.getRecentLogs(1000);
|
const recentLogs = await storage.getRecentLogs(1000);
|
||||||
const whitelist = await storage.getAllWhitelist();
|
const whitelist = await storage.getAllWhitelist();
|
||||||
const latestTraining = await storage.getLatestTraining();
|
const latestTraining = await storage.getLatestTraining();
|
||||||
|
|
||||||
const blockedCount = detections.filter(d => d.blocked).length;
|
const detectionsList = detectionsResult.detections;
|
||||||
const criticalCount = detections.filter(d => parseFloat(d.riskScore) >= 85).length;
|
const blockedCount = detectionsList.filter(d => d.blocked).length;
|
||||||
const highCount = detections.filter(d => parseFloat(d.riskScore) >= 70 && parseFloat(d.riskScore) < 85).length;
|
const criticalCount = detectionsList.filter(d => parseFloat(d.riskScore) >= 85).length;
|
||||||
|
const highCount = detectionsList.filter(d => parseFloat(d.riskScore) >= 70 && parseFloat(d.riskScore) < 85).length;
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
routers: {
|
routers: {
|
||||||
@ -340,7 +494,7 @@ export async function registerRoutes(app: Express): Promise<Server> {
|
|||||||
enabled: routers.filter(r => r.enabled).length
|
enabled: routers.filter(r => r.enabled).length
|
||||||
},
|
},
|
||||||
detections: {
|
detections: {
|
||||||
total: detections.length,
|
total: detectionsResult.total,
|
||||||
blocked: blockedCount,
|
blocked: blockedCount,
|
||||||
critical: criticalCount,
|
critical: criticalCount,
|
||||||
high: highCount
|
high: highCount
|
||||||
|
|||||||
@ -43,10 +43,12 @@ export interface IStorage {
|
|||||||
// Detections
|
// Detections
|
||||||
getAllDetections(options: {
|
getAllDetections(options: {
|
||||||
limit?: number;
|
limit?: number;
|
||||||
|
offset?: number;
|
||||||
anomalyType?: string;
|
anomalyType?: string;
|
||||||
minScore?: number;
|
minScore?: number;
|
||||||
maxScore?: number;
|
maxScore?: number;
|
||||||
}): Promise<Detection[]>;
|
search?: string;
|
||||||
|
}): Promise<{ detections: Detection[]; total: number }>;
|
||||||
getDetectionByIp(sourceIp: string): Promise<Detection | undefined>;
|
getDetectionByIp(sourceIp: string): Promise<Detection | undefined>;
|
||||||
createDetection(detection: InsertDetection): Promise<Detection>;
|
createDetection(detection: InsertDetection): Promise<Detection>;
|
||||||
updateDetection(id: string, detection: Partial<InsertDetection>): Promise<Detection | undefined>;
|
updateDetection(id: string, detection: Partial<InsertDetection>): Promise<Detection | undefined>;
|
||||||
@ -99,6 +101,7 @@ export interface IStorage {
|
|||||||
totalIps: number;
|
totalIps: number;
|
||||||
overlapWithDetections: number;
|
overlapWithDetections: number;
|
||||||
}>;
|
}>;
|
||||||
|
upsertBlacklistIp(listId: string, ipAddress: string, cidrRange: string | null): Promise<{created: boolean}>;
|
||||||
|
|
||||||
// System
|
// System
|
||||||
testConnection(): Promise<boolean>;
|
testConnection(): Promise<boolean>;
|
||||||
@@ -173,11 +176,13 @@ export class DatabaseStorage implements IStorage {
   // Detections
   async getAllDetections(options: {
     limit?: number;
+    offset?: number;
     anomalyType?: string;
     minScore?: number;
     maxScore?: number;
-  }): Promise<Detection[]> {
-    const { limit = 5000, anomalyType, minScore, maxScore } = options;
+    search?: string;
+  }): Promise<{ detections: Detection[]; total: number }> {
+    const { limit = 50, offset = 0, anomalyType, minScore, maxScore, search } = options;

     // Build WHERE conditions
     const conditions = [];
@@ -195,17 +200,36 @@ export class DatabaseStorage implements IStorage {
       conditions.push(sql`${detections.riskScore}::numeric <= ${maxScore}`);
     }

-    const query = db
-      .select()
-      .from(detections)
-      .orderBy(desc(detections.detectedAt))
-      .limit(limit);
-
-    if (conditions.length > 0) {
-      return await query.where(and(...conditions));
-    }
-
-    return await query;
+    // Search by IP or anomaly type (case-insensitive)
+    if (search && search.trim()) {
+      const searchLower = search.trim().toLowerCase();
+      conditions.push(sql`(
+        LOWER(${detections.sourceIp}) LIKE ${'%' + searchLower + '%'} OR
+        LOWER(${detections.anomalyType}) LIKE ${'%' + searchLower + '%'} OR
+        LOWER(COALESCE(${detections.country}, '')) LIKE ${'%' + searchLower + '%'} OR
+        LOWER(COALESCE(${detections.organization}, '')) LIKE ${'%' + searchLower + '%'}
+      )`);
+    }
+
+    const whereClause = conditions.length > 0 ? and(...conditions) : undefined;
+
+    // Get total count for pagination
+    const countResult = await db
+      .select({ count: sql<number>`count(*)::int` })
+      .from(detections)
+      .where(whereClause);
+    const total = countResult[0]?.count || 0;
+
+    // Get paginated results
+    const results = await db
+      .select()
+      .from(detections)
+      .where(whereClause)
+      .orderBy(desc(detections.detectedAt))
+      .limit(limit)
+      .offset(offset);
+
+    return { detections: results, total };
   }

   async getDetectionByIp(sourceIp: string): Promise<Detection | undefined> {
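The compare view does not show the corresponding route change, but a handler that forwards query parameters to this method might look like the following sketch (the path, parsing and clamping are assumptions):

app.get("/api/detections", async (req, res) => {
  // Parse and clamp pagination params; defaults mirror the storage layer.
  const limit = Math.min(Number(req.query.limit) || 50, 500);
  const offset = Math.max(Number(req.query.offset) || 0, 0);
  const search = typeof req.query.search === "string" ? req.query.search : undefined;

  const result = await storage.getAllDetections({ limit, offset, search });
  res.json(result); // { detections: [...], total: n }
});

One design note: the count and the page run as two separate queries, so `total` can drift slightly if rows are inserted between them; for a dashboard this is usually acceptable.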
@@ -514,6 +538,49 @@ export class DatabaseStorage implements IStorage {
     };
   }

+  async upsertBlacklistIp(listId: string, ipAddress: string, cidrRange: string | null): Promise<{created: boolean}> {
+    try {
+      const existing = await db
+        .select()
+        .from(publicBlacklistIps)
+        .where(
+          and(
+            eq(publicBlacklistIps.listId, listId),
+            eq(publicBlacklistIps.ipAddress, ipAddress)
+          )
+        );
+
+      if (existing.length > 0) {
+        await db
+          .update(publicBlacklistIps)
+          .set({
+            lastSeen: new Date(),
+            isActive: true,
+            cidrRange: cidrRange,
+            ipInet: ipAddress,
+            cidrInet: cidrRange || `${ipAddress}/32`,
+          })
+          .where(eq(publicBlacklistIps.id, existing[0].id));
+        return { created: false };
+      } else {
+        await db.insert(publicBlacklistIps).values({
+          listId,
+          ipAddress,
+          cidrRange,
+          ipInet: ipAddress,
+          cidrInet: cidrRange || `${ipAddress}/32`,
+          isActive: true,
+          firstSeen: new Date(),
+          lastSeen: new Date(),
+        });
+        return { created: true };
+      }
+    } catch (error) {
+      console.error('[DB ERROR] Failed to upsert blacklist IP:', error);
+      throw error;
+    }
+  }
+
   async testConnection(): Promise<boolean> {
     try {
       await db.execute(sql`SELECT 1`);
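This is presumably what the list fetcher calls for each parsed entry; a sketch of such a sync loop with the added/updated tallies it could report (the loop itself is not part of this diff, and `parsedEntries`, `listId` and `listName` are illustrative):

let added = 0;
let updated = 0;

for (const entry of parsedEntries) {
  // entry.ip: bare address; entry.cidr: CIDR string, or null for single hosts
  const { created } = await storage.upsertBlacklistIp(listId, entry.ip, entry.cidr);
  if (created) added++; else updated++;
}

console.log(`✓ ${listName}: +${added} ~${updated}`);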
@@ -70,10 +70,12 @@ export const detections = pgTable("detections", {
 }));

 // Whitelist for trusted IPs
+// NOTE: ip_inet is INET type in production (managed by SQL migrations)
+// Drizzle lacks native INET support, so we use text() here
 export const whitelist = pgTable("whitelist", {
   id: varchar("id").primaryKey().default(sql`gen_random_uuid()`),
   ipAddress: text("ip_address").notNull().unique(),
-  ipInet: text("ip_inet"),
+  ipInet: text("ip_inet"), // Actually INET in production - see migration 008
   comment: text("comment"),
   reason: text("reason"),
   createdBy: text("created_by"),
@@ -156,12 +158,14 @@ export const publicLists = pgTable("public_lists", {
 }));

 // Public blacklist IPs from external sources
+// NOTE: ip_inet/cidr_inet are INET/CIDR types in production (managed by SQL migrations)
+// Drizzle lacks native INET/CIDR support, so we use text() here
 export const publicBlacklistIps = pgTable("public_blacklist_ips", {
   id: varchar("id").primaryKey().default(sql`gen_random_uuid()`),
   ipAddress: text("ip_address").notNull(),
   cidrRange: text("cidr_range"),
-  ipInet: text("ip_inet"),
-  cidrInet: text("cidr_inet"),
+  ipInet: text("ip_inet"), // Actually INET in production - see migration 008
+  cidrInet: text("cidr_inet"), // Actually CIDR in production - see migration 008
   listId: varchar("list_id").notNull().references(() => publicLists.id, { onDelete: 'cascade' }),
   firstSeen: timestamp("first_seen").defaultNow().notNull(),
   lastSeen: timestamp("last_seen").defaultNow().notNull(),
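Because the columns are declared as text() in Drizzle but are INET/CIDR in production, queries that want Postgres network semantics can cast explicitly inside a sql template. A sketch of a containment check (a hypothetical helper, not in this diff; it assumes the production column types described in the NOTE comments above):

import { and, eq, sql } from "drizzle-orm";

// Does any active blacklist entry contain the given address?
// The ::cidr/::inet casts are no-ops once the columns really are CIDR/INET;
// Postgres's >>= operator means "contains or equals".
async function isBlacklisted(ip: string): Promise<boolean> {
  const rows = await db
    .select({ id: publicBlacklistIps.id })
    .from(publicBlacklistIps)
    .where(and(
      eq(publicBlacklistIps.isActive, true),
      sql`${publicBlacklistIps.cidrInet}::cidr >>= ${ip}::inet`
    ))
    .limit(1);
  return rows.length > 0;
}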
148
version.json
@@ -1,7 +1,79 @@
 {
-  "version": "1.0.91",
-  "lastUpdate": "2025-11-26T15:29:20.606Z",
+  "version": "1.0.103",
+  "lastUpdate": "2026-01-02T16:33:13.545Z",
   "changelog": [
+    {
+      "version": "1.0.103",
+      "date": "2026-01-02",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.103"
+    },
+    {
+      "version": "1.0.102",
+      "date": "2026-01-02",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.102"
+    },
+    {
+      "version": "1.0.101",
+      "date": "2026-01-02",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.101"
+    },
+    {
+      "version": "1.0.100",
+      "date": "2026-01-02",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.100"
+    },
+    {
+      "version": "1.0.99",
+      "date": "2026-01-02",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.99"
+    },
+    {
+      "version": "1.0.98",
+      "date": "2026-01-02",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.98"
+    },
+    {
+      "version": "1.0.97",
+      "date": "2026-01-02",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.97"
+    },
+    {
+      "version": "1.0.96",
+      "date": "2026-01-02",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.96"
+    },
+    {
+      "version": "1.0.95",
+      "date": "2025-11-27",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.95"
+    },
+    {
+      "version": "1.0.94",
+      "date": "2025-11-27",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.94"
+    },
+    {
+      "version": "1.0.93",
+      "date": "2025-11-27",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.93"
+    },
+    {
+      "version": "1.0.92",
+      "date": "2025-11-27",
+      "type": "patch",
+      "description": "Automatic deployment v1.0.92"
+    },
     {
       "version": "1.0.91",
       "date": "2025-11-26",
@@ -229,78 +301,6 @@
       "date": "2025-11-24",
       "type": "patch",
       "description": "Automatic deployment v1.0.54"
-    },
-    {
-      "version": "1.0.53",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.53"
-    },
-    {
-      "version": "1.0.52",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.52"
-    },
-    {
-      "version": "1.0.51",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.51"
-    },
-    {
-      "version": "1.0.50",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.50"
-    },
-    {
-      "version": "1.0.49",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.49"
-    },
-    {
-      "version": "1.0.48",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.48"
-    },
-    {
-      "version": "1.0.47",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.47"
-    },
-    {
-      "version": "1.0.46",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.46"
-    },
-    {
-      "version": "1.0.45",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.45"
-    },
-    {
-      "version": "1.0.44",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.44"
-    },
-    {
-      "version": "1.0.43",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.43"
-    },
-    {
-      "version": "1.0.42",
-      "date": "2025-11-24",
-      "type": "patch",
-      "description": "Automatic deployment v1.0.42"
     }
   ]
 }