Add functionality to manage and sync public blacklists and whitelists

Integrates external public IP lists for enhanced threat detection and whitelisting capabilities, including API endpoints, database schema changes, and a new fetching service.

Replit-Commit-Author: Agent
Replit-Commit-Session-Id: 7a657272-55ba-4a79-9a2e-f1ed9bc7a528
Replit-Commit-Checkpoint-Type: full_checkpoint
Replit-Commit-Event-Id: b1366669-0ccd-493e-9e06-4e4168e2fa3b
Replit-Commit-Screenshot-Url: https://storage.googleapis.com/screenshot-production-us-central1/449cf7c4-c97a-45ae-8234-e5c5b8d6a84f/7a657272-55ba-4a79-9a2e-f1ed9bc7a528/QKzTQQy
marco370 2025-11-26 09:21:43 +00:00
parent 24966154d6
commit 77874c83bf
17 changed files with 2242 additions and 4 deletions

View File

@@ -4,13 +4,14 @@ import { QueryClientProvider } from "@tanstack/react-query";
import { Toaster } from "@/components/ui/toaster";
import { TooltipProvider } from "@/components/ui/tooltip";
import { SidebarProvider, Sidebar, SidebarContent, SidebarGroup, SidebarGroupContent, SidebarGroupLabel, SidebarMenu, SidebarMenuButton, SidebarMenuItem, SidebarTrigger } from "@/components/ui/sidebar";
import { LayoutDashboard, AlertTriangle, Server, Shield, Brain, Menu, Activity, BarChart3, TrendingUp } from "lucide-react";
import { LayoutDashboard, AlertTriangle, Server, Shield, Brain, Menu, Activity, BarChart3, TrendingUp, List } from "lucide-react";
import Dashboard from "@/pages/Dashboard";
import Detections from "@/pages/Detections";
import DashboardLive from "@/pages/DashboardLive";
import AnalyticsHistory from "@/pages/AnalyticsHistory";
import Routers from "@/pages/Routers";
import Whitelist from "@/pages/Whitelist";
import PublicLists from "@/pages/PublicLists";
import Training from "@/pages/Training";
import Services from "@/pages/Services";
import NotFound from "@/pages/not-found";
@@ -23,6 +24,7 @@ const menuItems = [
{ title: "Training ML", url: "/training", icon: Brain },
{ title: "Router", url: "/routers", icon: Server },
{ title: "Whitelist", url: "/whitelist", icon: Shield },
{ title: "Liste Pubbliche", url: "/public-lists", icon: List },
{ title: "Servizi", url: "/services", icon: TrendingUp },
];
@@ -62,6 +64,7 @@ function Router() {
<Route path="/training" component={Training} />
<Route path="/routers" component={Routers} />
<Route path="/whitelist" component={Whitelist} />
<Route path="/public-lists" component={PublicLists} />
<Route path="/services" component={Services} />
<Route component={NotFound} />
</Switch>

View File

@@ -0,0 +1,382 @@
import { useQuery, useMutation } from "@tanstack/react-query";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Button } from "@/components/ui/button";
import { Badge } from "@/components/ui/badge";
import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from "@/components/ui/table";
import { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle, DialogTrigger } from "@/components/ui/dialog";
import { Form, FormControl, FormField, FormItem, FormLabel, FormMessage } from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { Switch } from "@/components/ui/switch";
import { useForm } from "react-hook-form";
import { zodResolver } from "@hookform/resolvers/zod";
import { z } from "zod";
import { RefreshCw, Plus, Trash2, Edit, CheckCircle2, XCircle, AlertTriangle, Clock } from "lucide-react";
import { apiRequest, queryClient } from "@/lib/queryClient";
import { useToast } from "@/hooks/use-toast";
import { formatDistanceToNow } from "date-fns";
import { it } from "date-fns/locale";
import { useState } from "react";
const listFormSchema = z.object({
name: z.string().min(1, "Nome richiesto"),
type: z.enum(["blacklist", "whitelist"], {
required_error: "Tipo richiesto",
}),
url: z.string().url("URL non valida"),
enabled: z.boolean().default(true),
fetchIntervalMinutes: z.number().min(1).max(1440).default(10),
});
type ListFormValues = z.infer<typeof listFormSchema>;
export default function PublicLists() {
const { toast } = useToast();
const [isAddDialogOpen, setIsAddDialogOpen] = useState(false);
const [editingList, setEditingList] = useState<any>(null);
const { data: lists, isLoading } = useQuery({
queryKey: ["/api/public-lists"],
});
const form = useForm<ListFormValues>({
resolver: zodResolver(listFormSchema),
defaultValues: {
name: "",
type: "blacklist",
url: "",
enabled: true,
fetchIntervalMinutes: 10,
},
});
const createMutation = useMutation({
mutationFn: (data: ListFormValues) =>
apiRequest("/api/public-lists", {
method: "POST",
body: JSON.stringify(data),
}),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: ["/api/public-lists"] });
toast({
title: "Lista creata",
description: "La lista è stata aggiunta con successo",
});
setIsAddDialogOpen(false);
form.reset();
},
onError: (error: any) => {
toast({
title: "Errore",
description: error.message || "Impossibile creare la lista",
variant: "destructive",
});
},
});
const updateMutation = useMutation({
mutationFn: ({ id, data }: { id: string; data: Partial<ListFormValues> }) =>
apiRequest(`/api/public-lists/${id}`, {
method: "PATCH",
body: JSON.stringify(data),
}),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: ["/api/public-lists"] });
toast({
title: "Lista aggiornata",
description: "Le modifiche sono state salvate",
});
setEditingList(null);
},
});
const deleteMutation = useMutation({
mutationFn: (id: string) =>
apiRequest(`/api/public-lists/${id}`, {
method: "DELETE",
}),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: ["/api/public-lists"] });
toast({
title: "Lista eliminata",
description: "La lista è stata rimossa",
});
},
onError: (error: any) => {
toast({
title: "Errore",
description: error.message || "Impossibile eliminare la lista",
variant: "destructive",
});
},
});
const syncMutation = useMutation({
mutationFn: (id: string) =>
apiRequest(`/api/public-lists/${id}/sync`, {
method: "POST",
}),
onSuccess: () => {
toast({
title: "Sync avviato",
description: "La sincronizzazione manuale è stata richiesta",
});
},
});
const toggleEnabled = (id: string, enabled: boolean) => {
updateMutation.mutate({ id, data: { enabled } });
};
const onSubmit = (data: ListFormValues) => {
createMutation.mutate(data);
};
const getStatusBadge = (list: any) => {
if (!list.enabled) {
return <Badge variant="outline" className="gap-1"><XCircle className="w-3 h-3" />Disabilitata</Badge>;
}
if (list.errorCount > 5) {
return <Badge variant="destructive" className="gap-1"><AlertTriangle className="w-3 h-3" />Errori</Badge>;
}
if (list.lastSuccess) {
return <Badge variant="default" className="gap-1 bg-green-600"><CheckCircle2 className="w-3 h-3" />OK</Badge>;
}
return <Badge variant="secondary" className="gap-1"><Clock className="w-3 h-3" />In attesa</Badge>;
};
const getTypeBadge = (type: string) => {
if (type === "blacklist") {
return <Badge variant="destructive">Blacklist</Badge>;
}
return <Badge variant="default" className="bg-blue-600">Whitelist</Badge>;
};
if (isLoading) {
return (
<div className="p-6">
<Card>
<CardHeader>
<CardTitle>Caricamento...</CardTitle>
</CardHeader>
</Card>
</div>
);
}
return (
<div className="p-6 space-y-6">
<div className="flex items-center justify-between">
<div>
<h1 className="text-3xl font-bold">Liste Pubbliche</h1>
<p className="text-muted-foreground mt-2">
Gestione sorgenti blacklist e whitelist esterne (aggiornamento ogni 10 minuti)
</p>
</div>
<Dialog open={isAddDialogOpen} onOpenChange={setIsAddDialogOpen}>
<DialogTrigger asChild>
<Button data-testid="button-add-list">
<Plus className="w-4 h-4 mr-2" />
Aggiungi Lista
</Button>
</DialogTrigger>
<DialogContent className="max-w-2xl">
<DialogHeader>
<DialogTitle>Aggiungi Lista Pubblica</DialogTitle>
<DialogDescription>
Configura una nuova sorgente blacklist o whitelist
</DialogDescription>
</DialogHeader>
<Form {...form}>
<form onSubmit={form.handleSubmit(onSubmit)} className="space-y-4">
<FormField
control={form.control}
name="name"
render={({ field }) => (
<FormItem>
<FormLabel>Nome</FormLabel>
<FormControl>
<Input placeholder="es. Spamhaus DROP" {...field} data-testid="input-list-name" />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="type"
render={({ field }) => (
<FormItem>
<FormLabel>Tipo</FormLabel>
<Select onValueChange={field.onChange} defaultValue={field.value}>
<FormControl>
<SelectTrigger data-testid="select-list-type">
<SelectValue placeholder="Seleziona tipo" />
</SelectTrigger>
</FormControl>
<SelectContent>
<SelectItem value="blacklist">Blacklist</SelectItem>
<SelectItem value="whitelist">Whitelist</SelectItem>
</SelectContent>
</Select>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="url"
render={({ field }) => (
<FormItem>
<FormLabel>URL</FormLabel>
<FormControl>
<Input placeholder="https://example.com/list.txt" {...field} data-testid="input-list-url" />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="fetchIntervalMinutes"
render={({ field }) => (
<FormItem>
<FormLabel>Intervallo Sync (minuti)</FormLabel>
<FormControl>
<Input
type="number"
{...field}
onChange={(e) => field.onChange(parseInt(e.target.value))}
data-testid="input-list-interval"
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="enabled"
render={({ field }) => (
<FormItem className="flex items-center justify-between">
<FormLabel>Abilitata</FormLabel>
<FormControl>
<Switch
checked={field.value}
onCheckedChange={field.onChange}
data-testid="switch-list-enabled"
/>
</FormControl>
</FormItem>
)}
/>
<div className="flex justify-end gap-2 pt-4">
<Button type="button" variant="outline" onClick={() => setIsAddDialogOpen(false)}>
Annulla
</Button>
<Button type="submit" disabled={createMutation.isPending} data-testid="button-save-list">
{createMutation.isPending ? "Salvataggio..." : "Salva"}
</Button>
</div>
</form>
</Form>
</DialogContent>
</Dialog>
</div>
<Card>
<CardHeader>
<CardTitle>Sorgenti Configurate</CardTitle>
<CardDescription>
{lists?.length || 0} liste configurate
</CardDescription>
</CardHeader>
<CardContent>
<Table>
<TableHeader>
<TableRow>
<TableHead>Nome</TableHead>
<TableHead>Tipo</TableHead>
<TableHead>Stato</TableHead>
<TableHead>IP Totali</TableHead>
<TableHead>IP Attivi</TableHead>
<TableHead>Ultimo Sync</TableHead>
<TableHead className="text-right">Azioni</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{lists?.map((list: any) => (
<TableRow key={list.id} data-testid={`row-list-${list.id}`}>
<TableCell className="font-medium">
<div>
<div>{list.name}</div>
<div className="text-xs text-muted-foreground truncate max-w-xs">
{list.url}
</div>
</div>
</TableCell>
<TableCell>{getTypeBadge(list.type)}</TableCell>
<TableCell>{getStatusBadge(list)}</TableCell>
<TableCell data-testid={`text-total-ips-${list.id}`}>{list.totalIps?.toLocaleString() || 0}</TableCell>
<TableCell data-testid={`text-active-ips-${list.id}`}>{list.activeIps?.toLocaleString() || 0}</TableCell>
<TableCell>
{list.lastSuccess ? (
<span className="text-sm">
{formatDistanceToNow(new Date(list.lastSuccess), {
addSuffix: true,
locale: it,
})}
</span>
) : (
<span className="text-sm text-muted-foreground">Mai</span>
)}
</TableCell>
<TableCell className="text-right">
<div className="flex items-center justify-end gap-2">
<Switch
checked={list.enabled}
onCheckedChange={(checked) => toggleEnabled(list.id, checked)}
data-testid={`switch-enable-${list.id}`}
/>
<Button
variant="outline"
size="icon"
onClick={() => syncMutation.mutate(list.id)}
disabled={syncMutation.isPending}
data-testid={`button-sync-${list.id}`}
>
<RefreshCw className="w-4 h-4" />
</Button>
<Button
variant="destructive"
size="icon"
onClick={() => {
if (confirm(`Eliminare la lista "${list.name}"?`)) {
deleteMutation.mutate(list.id);
}
}}
data-testid={`button-delete-${list.id}`}
>
<Trash2 className="w-4 h-4" />
</Button>
</div>
</TableCell>
</TableRow>
))}
{(!lists || lists.length === 0) && (
<TableRow>
<TableCell colSpan={7} className="text-center text-muted-foreground py-8">
Nessuna lista configurata. Aggiungi la prima lista.
</TableCell>
</TableRow>
)}
</TableBody>
</Table>
</CardContent>
</Card>
</div>
);
}

View File

@@ -0,0 +1,146 @@
-- Migration 006: Add Public Lists Integration
-- Description: Adds blacklist/whitelist public sources with auto-sync support
-- Author: IDS System
-- Date: 2025-11-26
BEGIN;
-- ============================================================================
-- 1. CREATE NEW TABLES
-- ============================================================================
-- Public threat/whitelist sources configuration
CREATE TABLE IF NOT EXISTS public_lists (
id VARCHAR PRIMARY KEY DEFAULT gen_random_uuid(),
name TEXT NOT NULL,
type TEXT NOT NULL CHECK (type IN ('blacklist', 'whitelist')),
url TEXT NOT NULL,
enabled BOOLEAN NOT NULL DEFAULT true,
fetch_interval_minutes INTEGER NOT NULL DEFAULT 10,
last_fetch TIMESTAMP,
last_success TIMESTAMP,
total_ips INTEGER NOT NULL DEFAULT 0,
active_ips INTEGER NOT NULL DEFAULT 0,
error_count INTEGER NOT NULL DEFAULT 0,
last_error TEXT,
created_at TIMESTAMP NOT NULL DEFAULT NOW()
);
CREATE INDEX IF NOT EXISTS public_lists_type_idx ON public_lists(type);
CREATE INDEX IF NOT EXISTS public_lists_enabled_idx ON public_lists(enabled);
-- Public blacklist IPs from external sources
CREATE TABLE IF NOT EXISTS public_blacklist_ips (
id VARCHAR PRIMARY KEY DEFAULT gen_random_uuid(),
ip_address TEXT NOT NULL,
cidr_range TEXT,
list_id VARCHAR NOT NULL REFERENCES public_lists(id) ON DELETE CASCADE,
first_seen TIMESTAMP NOT NULL DEFAULT NOW(),
last_seen TIMESTAMP NOT NULL DEFAULT NOW(),
is_active BOOLEAN NOT NULL DEFAULT true
);
CREATE INDEX IF NOT EXISTS public_blacklist_ip_idx ON public_blacklist_ips(ip_address);
CREATE INDEX IF NOT EXISTS public_blacklist_list_idx ON public_blacklist_ips(list_id);
CREATE INDEX IF NOT EXISTS public_blacklist_active_idx ON public_blacklist_ips(is_active);
CREATE UNIQUE INDEX IF NOT EXISTS public_blacklist_ip_list_key ON public_blacklist_ips(ip_address, list_id);
-- ============================================================================
-- 2. ALTER EXISTING TABLES
-- ============================================================================
-- Extend detections table with public list source tracking
ALTER TABLE detections
ADD COLUMN IF NOT EXISTS detection_source TEXT NOT NULL DEFAULT 'ml_model',
ADD COLUMN IF NOT EXISTS blacklist_id VARCHAR;
CREATE INDEX IF NOT EXISTS detection_source_idx ON detections(detection_source);
-- Add check constraint for valid detection sources
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint
WHERE conname = 'detections_source_check'
) THEN
ALTER TABLE detections
ADD CONSTRAINT detections_source_check
CHECK (detection_source IN ('ml_model', 'public_blacklist', 'hybrid'));
END IF;
END $$;
-- Extend whitelist table with source tracking
ALTER TABLE whitelist
ADD COLUMN IF NOT EXISTS source TEXT NOT NULL DEFAULT 'manual',
ADD COLUMN IF NOT EXISTS list_id VARCHAR;
CREATE INDEX IF NOT EXISTS whitelist_source_idx ON whitelist(source);
-- Add check constraint for valid whitelist sources
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint
WHERE conname = 'whitelist_source_check'
) THEN
ALTER TABLE whitelist
ADD CONSTRAINT whitelist_source_check
CHECK (source IN ('manual', 'aws', 'gcp', 'cloudflare', 'iana', 'ntp', 'other'));
END IF;
END $$;
-- ============================================================================
-- 3. UPDATE SCHEMA VERSION
-- ============================================================================
-- Insert new version record
INSERT INTO schema_version (id, version, description)
VALUES (1, 6, 'Add public lists integration (blacklist/whitelist sources)')
ON CONFLICT (id) DO UPDATE
SET version = 6,
description = 'Add public lists integration (blacklist/whitelist sources)',
applied_at = NOW();
-- ============================================================================
-- 4. SEED DEFAULT PUBLIC LISTS (OPTIONAL - commented for manual execution)
-- ============================================================================
/*
-- Blacklist sources
INSERT INTO public_lists (name, type, url, enabled, fetch_interval_minutes) VALUES
('Spamhaus DROP', 'blacklist', 'https://www.spamhaus.org/drop/drop.txt', true, 10),
('Talos Intelligence', 'blacklist', 'https://talosintelligence.com/documents/ip-blacklist', false, 10);
-- Whitelist sources
INSERT INTO public_lists (name, type, url, enabled, fetch_interval_minutes) VALUES
('AWS IP Ranges', 'whitelist', 'https://ip-ranges.amazonaws.com/ip-ranges.json', true, 10),
('Google Cloud IP Ranges', 'whitelist', 'https://www.gstatic.com/ipranges/cloud.json', true, 10),
('Cloudflare IPs', 'whitelist', 'https://www.cloudflare.com/ips-v4', true, 10),
('IANA Root Servers', 'whitelist', 'https://www.iana.org/domains/root/servers', true, 10),
('NTP Pool', 'whitelist', 'https://www.ntppool.org/zone/@', false, 10);
*/
COMMIT;
-- ============================================================================
-- ROLLBACK PROCEDURE (if needed)
-- ============================================================================
/*
BEGIN;
-- Remove new columns from existing tables
ALTER TABLE detections DROP COLUMN IF EXISTS detection_source;
ALTER TABLE detections DROP COLUMN IF EXISTS blacklist_id;
ALTER TABLE whitelist DROP COLUMN IF EXISTS source;
ALTER TABLE whitelist DROP COLUMN IF EXISTS list_id;
-- Drop new tables (CASCADE removes FK constraints)
DROP TABLE IF EXISTS public_blacklist_ips CASCADE;
DROP TABLE IF EXISTS public_lists CASCADE;
-- Revert schema version
UPDATE schema_version SET version = 5, description = 'Rollback from public lists integration' WHERE id = 1;
COMMIT;
*/

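A minimal sketch of applying this migration from Python, since psycopg2 is already used by the fetcher. The file path and the DATABASE_URL environment variable are assumptions about your layout:

import os
import psycopg2

def apply_migration(path: str = "migrations/006_public_lists.sql") -> None:
    """Run the migration script; it opens and commits its own transaction."""
    with open(path, encoding="utf-8") as f:
        sql = f.read()
    conn = psycopg2.connect(os.environ["DATABASE_URL"])
    conn.autocommit = True  # defer transaction control to the script's BEGIN/COMMIT
    try:
        with conn.cursor() as cur:
            cur.execute(sql)
    finally:
        conn.close()

if __name__ == "__main__":
    apply_migration()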
View File

@@ -0,0 +1,29 @@
[Unit]
Description=IDS Public Lists Fetcher Service
Documentation=https://github.com/yourorg/ids
After=network.target postgresql.service
[Service]
Type=oneshot
User=root
WorkingDirectory=/opt/ids/python_ml
Environment="PYTHONUNBUFFERED=1"
EnvironmentFile=/opt/ids/.env
# Run list fetcher with virtual environment
ExecStart=/opt/ids/python_ml/venv/bin/python3 /opt/ids/python_ml/list_fetcher/run_fetcher.py
# Logging
StandardOutput=journal
StandardError=journal
SyslogIdentifier=ids-list-fetcher
# Security settings
PrivateTmp=true
NoNewPrivileges=true
# Restart policy
Restart=no
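# Scheduling is handled by the companion 10-minute timer unit; this oneshot service runs once per activation.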
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,13 @@
[Unit]
Description=IDS Public Lists Fetcher Timer (every 10 minutes)
Documentation=https://github.com/yourorg/ids
[Timer]
# Run every 10 minutes
OnCalendar=*:0/10
OnBootSec=2min
AccuracySec=1min
Persistent=true
[Install]
WantedBy=timers.target
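# Enable with: systemctl enable --now ids-list-fetcher.timer (assuming the unit file keeps this name)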

main.py Normal file (6 lines)
View File

@@ -0,0 +1,6 @@
def main():
    print("Hello from repl-nix-workspace!")

if __name__ == "__main__":
    main()

pyproject.toml Normal file (8 lines)
View File

@@ -0,0 +1,8 @@
[project]
name = "repl-nix-workspace"
version = "0.1.0"
description = "Add your description here"
requires-python = ">=3.11"
dependencies = [
"httpx>=0.28.1",
]

View File

@@ -0,0 +1,2 @@
# Public Lists Fetcher Module
# Handles download, parsing, and sync of public blacklist/whitelist sources

View File

@@ -0,0 +1,391 @@
import asyncio
import httpx
from datetime import datetime
from typing import Dict, List, Set, Tuple, Optional
import psycopg2
from psycopg2.extras import execute_values
import os
import sys

# Add parent directory to path for imports
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from list_fetcher.parsers import parse_list


class ListFetcher:
    """Fetches and synchronizes public IP lists"""

    def __init__(self, database_url: str):
        self.database_url = database_url
        self.timeout = 30.0
        self.max_retries = 3

    def get_db_connection(self):
        """Create database connection"""
        return psycopg2.connect(self.database_url)

    async def fetch_url(self, url: str) -> Optional[str]:
        """Download content from URL with retry logic"""
        async with httpx.AsyncClient(timeout=self.timeout, follow_redirects=True) as client:
            for attempt in range(self.max_retries):
                try:
                    response = await client.get(url)
                    response.raise_for_status()
                    return response.text
                except httpx.HTTPError as e:
                    if attempt == self.max_retries - 1:
                        raise Exception(f"HTTP error after {self.max_retries} attempts: {e}")
                    await asyncio.sleep(2 ** attempt)  # Exponential backoff
                except Exception as e:
                    if attempt == self.max_retries - 1:
                        raise Exception(f"Download failed: {e}")
                    await asyncio.sleep(2 ** attempt)
        return None

    def get_enabled_lists(self) -> List[Dict]:
        """Get all enabled public lists from database"""
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("""
                    SELECT id, name, type, url, fetch_interval_minutes
                    FROM public_lists
                    WHERE enabled = true
                    ORDER BY type, name
                """)
                if cur.description is None:
                    return []
                columns = [desc[0] for desc in cur.description]
                return [dict(zip(columns, row)) for row in cur.fetchall()]
        finally:
            conn.close()

    def get_existing_ips(self, list_id: str, list_type: str) -> Set[str]:
        """Get existing IPs for a list from database"""
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                if list_type == 'blacklist':
                    cur.execute("""
                        SELECT ip_address
                        FROM public_blacklist_ips
                        WHERE list_id = %s AND is_active = true
                    """, (list_id,))
                else:  # whitelist
                    cur.execute("""
                        SELECT ip_address
                        FROM whitelist
                        WHERE list_id = %s AND active = true
                    """, (list_id,))
                return {row[0] for row in cur.fetchall()}
        finally:
            conn.close()

    def sync_blacklist_ips(self, list_id: str, new_ips: Set[Tuple[str, Optional[str]]]):
        """Sync blacklist IPs: add new, mark inactive old ones"""
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                # Get existing IPs
                existing = self.get_existing_ips(list_id, 'blacklist')
                new_ip_addresses = {ip for ip, _ in new_ips}

                # Calculate diff
                to_add = new_ip_addresses - existing
                to_deactivate = existing - new_ip_addresses
                to_update = existing & new_ip_addresses

                # Mark old IPs as inactive
                if to_deactivate:
                    cur.execute("""
                        UPDATE public_blacklist_ips
                        SET is_active = false
                        WHERE list_id = %s AND ip_address = ANY(%s)
                    """, (list_id, list(to_deactivate)))

                # Update last_seen for existing active IPs
                if to_update:
                    cur.execute("""
                        UPDATE public_blacklist_ips
                        SET last_seen = NOW()
                        WHERE list_id = %s AND ip_address = ANY(%s)
                    """, (list_id, list(to_update)))

                # Add new IPs
                if to_add:
                    values = []
                    for ip, cidr in new_ips:
                        if ip in to_add:
                            values.append((ip, cidr, list_id))
                    execute_values(cur, """
                        INSERT INTO public_blacklist_ips (ip_address, cidr_range, list_id)
                        VALUES %s
                        ON CONFLICT (ip_address, list_id) DO UPDATE
                        SET is_active = true, last_seen = NOW()
                    """, values)

                # Update list stats
                cur.execute("""
                    UPDATE public_lists
                    SET total_ips = %s,
                        active_ips = %s,
                        last_success = NOW()
                    WHERE id = %s
                """, (len(new_ip_addresses), len(new_ip_addresses), list_id))

                conn.commit()
                return len(to_add), len(to_deactivate), len(to_update)
        except Exception as e:
            conn.rollback()
            raise e
        finally:
            conn.close()

    def sync_whitelist_ips(self, list_id: str, list_name: str, new_ips: Set[Tuple[str, Optional[str]]]):
        """Sync whitelist IPs: add new, deactivate old ones"""
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                # Get existing IPs
                existing = self.get_existing_ips(list_id, 'whitelist')
                new_ip_addresses = {ip for ip, _ in new_ips}

                # Calculate diff
                to_add = new_ip_addresses - existing
                to_deactivate = existing - new_ip_addresses
                to_update = existing & new_ip_addresses

                # Determine source name from list name
                source = 'other'
                list_lower = list_name.lower()
                if 'aws' in list_lower:
                    source = 'aws'
                elif 'gcp' in list_lower or 'google' in list_lower:
                    source = 'gcp'
                elif 'cloudflare' in list_lower:
                    source = 'cloudflare'
                elif 'iana' in list_lower:
                    source = 'iana'
                elif 'ntp' in list_lower:
                    source = 'ntp'

                # Mark old IPs as inactive
                if to_deactivate:
                    cur.execute("""
                        UPDATE whitelist
                        SET active = false
                        WHERE list_id = %s AND ip_address = ANY(%s)
                    """, (list_id, list(to_deactivate)))

                # Add new IPs
                if to_add:
                    values = []
                    for ip, cidr in new_ips:
                        if ip in to_add:
                            comment = f"Auto-imported from {list_name}"
                            if cidr:
                                comment += f" (CIDR: {cidr})"
                            values.append((ip, comment, source, list_id))
                    execute_values(cur, """
                        INSERT INTO whitelist (ip_address, comment, source, list_id)
                        VALUES %s
                        ON CONFLICT (ip_address) DO UPDATE
                        SET active = true, source = EXCLUDED.source, list_id = EXCLUDED.list_id
                    """, values)

                # Update list stats
                cur.execute("""
                    UPDATE public_lists
                    SET total_ips = %s,
                        active_ips = %s,
                        last_success = NOW()
                    WHERE id = %s
                """, (len(new_ip_addresses), len(new_ip_addresses), list_id))

                conn.commit()
                return len(to_add), len(to_deactivate), len(to_update)
        except Exception as e:
            conn.rollback()
            raise e
        finally:
            conn.close()

    async def fetch_and_sync_list(self, list_config: Dict) -> Dict:
        """Fetch and sync a single list"""
        list_id = list_config['id']
        list_name = list_config['name']
        list_type = list_config['type']
        url = list_config['url']

        result = {
            'list_id': list_id,
            'list_name': list_name,
            'success': False,
            'added': 0,
            'removed': 0,
            'updated': 0,
            'error': None
        }

        conn = self.get_db_connection()
        try:
            # Update last_fetch timestamp
            with conn.cursor() as cur:
                cur.execute("""
                    UPDATE public_lists
                    SET last_fetch = NOW()
                    WHERE id = %s
                """, (list_id,))
                conn.commit()

            # Download content
            print(f"[{datetime.now().strftime('%H:%M:%S')}] Downloading {list_name} from {url}...")
            content = await self.fetch_url(url)
            if not content:
                raise Exception("Empty response from server")

            # Parse IPs
            print(f"[{datetime.now().strftime('%H:%M:%S')}] Parsing {list_name}...")
            ips = parse_list(list_name, content)
            if not ips:
                raise Exception("No valid IPs found in list")

            print(f"[{datetime.now().strftime('%H:%M:%S')}] Found {len(ips)} IPs, syncing to database...")

            # Sync to database
            if list_type == 'blacklist':
                added, removed, updated = self.sync_blacklist_ips(list_id, ips)
            else:
                added, removed, updated = self.sync_whitelist_ips(list_id, list_name, ips)

            result.update({
                'success': True,
                'added': added,
                'removed': removed,
                'updated': updated
            })
            print(f"[{datetime.now().strftime('%H:%M:%S')}] ✓ {list_name}: +{added} -{removed} ~{updated}")

            # Reset error count on success
            with conn.cursor() as cur:
                cur.execute("""
                    UPDATE public_lists
                    SET error_count = 0, last_error = NULL
                    WHERE id = %s
                """, (list_id,))
                conn.commit()
        except Exception as e:
            error_msg = str(e)
            result['error'] = error_msg
            print(f"[{datetime.now().strftime('%H:%M:%S')}] ✗ {list_name}: {error_msg}")

            # Increment error count
            with conn.cursor() as cur:
                cur.execute("""
                    UPDATE public_lists
                    SET error_count = error_count + 1,
                        last_error = %s
                    WHERE id = %s
                """, (error_msg[:500], list_id))
                conn.commit()
        finally:
            conn.close()
        return result

    async def fetch_all_lists(self) -> List[Dict]:
        """Fetch and sync all enabled lists"""
        print(f"\n{'='*60}")
        print(f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] PUBLIC LISTS SYNC")
        print(f"{'='*60}\n")

        # Get enabled lists
        lists = self.get_enabled_lists()
        if not lists:
            print("No enabled lists found")
            return []
        print(f"Found {len(lists)} enabled lists\n")

        # Fetch all lists in parallel
        tasks = [self.fetch_and_sync_list(list_config) for list_config in lists]
        results = await asyncio.gather(*tasks, return_exceptions=True)

        # Summary
        print(f"\n{'='*60}")
        print("SYNC SUMMARY")
        print(f"{'='*60}")
        success_count = sum(1 for r in results if isinstance(r, dict) and r.get('success'))
        error_count = len(results) - success_count
        total_added = sum(r.get('added', 0) for r in results if isinstance(r, dict))
        total_removed = sum(r.get('removed', 0) for r in results if isinstance(r, dict))
        print(f"Success: {success_count}/{len(results)}")
        print(f"Errors: {error_count}/{len(results)}")
        print(f"Total IPs Added: {total_added}")
        print(f"Total IPs Removed: {total_removed}")
        print(f"{'='*60}\n")
        return [r for r in results if isinstance(r, dict)]


async def main():
    """Main entry point for list fetcher"""
    database_url = os.getenv('DATABASE_URL')
    if not database_url:
        print("ERROR: DATABASE_URL environment variable not set")
        return 1

    fetcher = ListFetcher(database_url)
    try:
        # Fetch and sync all lists
        await fetcher.fetch_all_lists()

        # Run merge logic to sync detections with blacklist/whitelist priority
        print("\n" + "="*60)
        print("RUNNING MERGE LOGIC")
        print("="*60 + "\n")

        # Import merge logic lazily (avoid circular imports)
        from pathlib import Path
        merge_logic_path = Path(__file__).parent.parent
        sys.path.insert(0, str(merge_logic_path))
        from merge_logic import MergeLogic

        merge = MergeLogic(database_url)
        stats = merge.sync_public_blacklist_detections()
        print(f"\nMerge Logic Stats:")
        print(f"  Created detections: {stats['created']}")
        print(f"  Cleaned invalid detections: {stats['cleaned']}")
        print(f"  Skipped (whitelisted): {stats['skipped_whitelisted']}")
        print("="*60 + "\n")
        return 0
    except Exception as e:
        print(f"FATAL ERROR: {e}")
        import traceback
        traceback.print_exc()
        return 1


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)

View File

@@ -0,0 +1,280 @@
import re
import json
from typing import Dict, Set, Optional
import ipaddress


class ListParser:
    """Base parser for public IP lists"""

    @staticmethod
    def validate_ip(ip_str: str) -> bool:
        """Validate IP address or CIDR range"""
        try:
            ipaddress.ip_network(ip_str, strict=False)
            return True
        except ValueError:
            return False

    @staticmethod
    def normalize_cidr(ip_str: str) -> tuple[str, Optional[str]]:
        """
        Normalize IP/CIDR to (ip_address, cidr_range)
        Example: '1.2.3.0/24' -> ('1.2.3.0', '1.2.3.0/24')
                 '1.2.3.4'    -> ('1.2.3.4', None)
        """
        try:
            network = ipaddress.ip_network(ip_str, strict=False)
            if '/' in ip_str:
                return (str(network.network_address), str(network))
            else:
                return (ip_str, None)
        except ValueError:
            return (ip_str, None)


class SpamhausParser(ListParser):
    """Parser for Spamhaus DROP list"""

    @staticmethod
    def parse(content: str) -> Set[tuple[str, Optional[str]]]:
        """
        Parse Spamhaus DROP format:
        ; Comment lines start with semicolon
        1.2.3.0/24 ; SBL12345
        """
        ips = set()
        lines = content.strip().split('\n')
        for line in lines:
            line = line.strip()
            # Skip comments and empty lines
            if not line or line.startswith(';') or line.startswith('#'):
                continue
            # Extract IP/CIDR before comment
            parts = line.split(';')
            if parts:
                ip_part = parts[0].strip()
                if ip_part and ListParser.validate_ip(ip_part):
                    ips.add(ListParser.normalize_cidr(ip_part))
        return ips


class TalosParser(ListParser):
    """Parser for Talos Intelligence blacklist"""

    @staticmethod
    def parse(content: str) -> Set[tuple[str, Optional[str]]]:
        """
        Parse Talos format (plain IP list):
        1.2.3.4
        5.6.7.0/24
        """
        ips = set()
        lines = content.strip().split('\n')
        for line in lines:
            line = line.strip()
            # Skip comments and empty lines
            if not line or line.startswith('#') or line.startswith('//'):
                continue
            # Validate and add
            if ListParser.validate_ip(line):
                ips.add(ListParser.normalize_cidr(line))
        return ips


class AWSParser(ListParser):
    """Parser for AWS IP ranges JSON"""

    @staticmethod
    def parse(content: str) -> Set[tuple[str, Optional[str]]]:
        """
        Parse AWS JSON format:
        {
            "prefixes": [
                {"ip_prefix": "1.2.3.0/24", "region": "us-east-1", "service": "EC2"}
            ]
        }
        """
        ips = set()
        try:
            data = json.loads(content)
            # IPv4 prefixes
            for prefix in data.get('prefixes', []):
                ip_prefix = prefix.get('ip_prefix')
                if ip_prefix and ListParser.validate_ip(ip_prefix):
                    ips.add(ListParser.normalize_cidr(ip_prefix))
            # IPv6 prefixes (optional)
            for prefix in data.get('ipv6_prefixes', []):
                ipv6_prefix = prefix.get('ipv6_prefix')
                if ipv6_prefix and ListParser.validate_ip(ipv6_prefix):
                    ips.add(ListParser.normalize_cidr(ipv6_prefix))
        except json.JSONDecodeError:
            pass
        return ips


class GCPParser(ListParser):
    """Parser for Google Cloud IP ranges JSON"""

    @staticmethod
    def parse(content: str) -> Set[tuple[str, Optional[str]]]:
        """
        Parse GCP JSON format:
        {
            "prefixes": [
                {"ipv4Prefix": "1.2.3.0/24"},
                {"ipv6Prefix": "2001:db8::/32"}
            ]
        }
        """
        ips = set()
        try:
            data = json.loads(content)
            for prefix in data.get('prefixes', []):
                # IPv4
                ipv4 = prefix.get('ipv4Prefix')
                if ipv4 and ListParser.validate_ip(ipv4):
                    ips.add(ListParser.normalize_cidr(ipv4))
                # IPv6
                ipv6 = prefix.get('ipv6Prefix')
                if ipv6 and ListParser.validate_ip(ipv6):
                    ips.add(ListParser.normalize_cidr(ipv6))
        except json.JSONDecodeError:
            pass
        return ips


class CloudflareParser(ListParser):
    """Parser for Cloudflare IP list"""

    @staticmethod
    def parse(content: str) -> Set[tuple[str, Optional[str]]]:
        """
        Parse Cloudflare format (plain CIDR list):
        1.2.3.0/24
        5.6.7.0/24
        """
        ips = set()
        lines = content.strip().split('\n')
        for line in lines:
            line = line.strip()
            # Skip empty lines and comments
            if not line or line.startswith('#'):
                continue
            if ListParser.validate_ip(line):
                ips.add(ListParser.normalize_cidr(line))
        return ips


class IANAParser(ListParser):
    """Parser for IANA Root Servers"""

    @staticmethod
    def parse(content: str) -> Set[tuple[str, Optional[str]]]:
        """
        Parse IANA root servers (extract IPs from HTML/text)
        Look for IPv4 addresses in format XXX.XXX.XXX.XXX
        """
        ips = set()
        # Regex for IPv4 addresses
        ipv4_pattern = r'\b(?:[0-9]{1,3}\.){3}[0-9]{1,3}\b'
        matches = re.findall(ipv4_pattern, content)
        for ip in matches:
            if ListParser.validate_ip(ip):
                ips.add(ListParser.normalize_cidr(ip))
        return ips


class NTPPoolParser(ListParser):
    """Parser for NTP Pool servers"""

    @staticmethod
    def parse(content: str) -> Set[tuple[str, Optional[str]]]:
        """
        Parse NTP pool format (plain IP list or JSON)
        Tries multiple formats
        """
        ips = set()
        # Try JSON first
        try:
            data = json.loads(content)
            if isinstance(data, list):
                for item in data:
                    if isinstance(item, str) and ListParser.validate_ip(item):
                        ips.add(ListParser.normalize_cidr(item))
                    elif isinstance(item, dict):
                        ip = item.get('ip') or item.get('address')
                        if ip and ListParser.validate_ip(ip):
                            ips.add(ListParser.normalize_cidr(ip))
        except json.JSONDecodeError:
            # Fallback to plain text parsing
            lines = content.strip().split('\n')
            for line in lines:
                line = line.strip()
                if line and ListParser.validate_ip(line):
                    ips.add(ListParser.normalize_cidr(line))
        return ips


# Parser registry
PARSERS: Dict[str, type[ListParser]] = {
    'spamhaus': SpamhausParser,
    'talos': TalosParser,
    'aws': AWSParser,
    'gcp': GCPParser,
    'cloudflare': CloudflareParser,
    'iana': IANAParser,
    'ntp': NTPPoolParser,
}


def get_parser(list_name: str) -> type[ListParser]:
    """Get parser by list name (case-insensitive match)"""
    list_name_lower = list_name.lower()
    for key, parser in PARSERS.items():
        if key in list_name_lower:
            return parser
    # Default fallback: plain text parser
    return TalosParser


def parse_list(list_name: str, content: str) -> Set[tuple[str, Optional[str]]]:
    """
    Parse list content using appropriate parser
    Returns set of (ip_address, cidr_range) tuples
    """
    parser_class = get_parser(list_name)
    return parser_class().parse(content)

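As a quick offline sanity check, the parser registry can be exercised without any network or database access; the payload below is fabricated sample data in Spamhaus DROP format:

from list_fetcher.parsers import parse_list

sample = """\
; Spamhaus DROP List - fabricated sample
192.0.2.0/24 ; SBL12345
198.51.100.7 ; SBL67890
not-an-ip ; skipped by validate_ip
"""

ips = parse_list("Spamhaus DROP", sample)
# normalize_cidr keeps the CIDR alongside its network address
assert ("192.0.2.0", "192.0.2.0/24") in ips
assert ("198.51.100.7", None) in ips
assert len(ips) == 2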
View File

@@ -0,0 +1,17 @@
#!/usr/bin/env python3
"""
IDS List Fetcher Runner
Fetches and syncs public blacklist/whitelist sources every 10 minutes
"""
import asyncio
import sys
import os
# Add parent directory to path
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from list_fetcher.fetcher import main
if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)

View File

@@ -0,0 +1,174 @@
#!/usr/bin/env python3
"""
Seed default public lists into database
Run after migration 006 to populate initial lists
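Usage: pass --dry-run to preview the entries, --sync to run an initial fetch after seeding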
"""
import psycopg2
import os
import sys
import argparse
import asyncio

# Add parent directory to path
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from list_fetcher.fetcher import ListFetcher

DEFAULT_LISTS = [
    # Blacklists
    {
        'name': 'Spamhaus DROP',
        'type': 'blacklist',
        'url': 'https://www.spamhaus.org/drop/drop.txt',
        'enabled': True,
        'fetch_interval_minutes': 10
    },
    {
        'name': 'Talos Intelligence IP Blacklist',
        'type': 'blacklist',
        'url': 'https://talosintelligence.com/documents/ip-blacklist',
        'enabled': False,  # Disabled by default - verify URL first
        'fetch_interval_minutes': 10
    },
    # Whitelists
    {
        'name': 'AWS IP Ranges',
        'type': 'whitelist',
        'url': 'https://ip-ranges.amazonaws.com/ip-ranges.json',
        'enabled': True,
        'fetch_interval_minutes': 10
    },
    {
        'name': 'Google Cloud IP Ranges',
        'type': 'whitelist',
        'url': 'https://www.gstatic.com/ipranges/cloud.json',
        'enabled': True,
        'fetch_interval_minutes': 10
    },
    {
        'name': 'Cloudflare IPv4',
        'type': 'whitelist',
        'url': 'https://www.cloudflare.com/ips-v4',
        'enabled': True,
        'fetch_interval_minutes': 10
    },
    {
        'name': 'IANA Root Servers',
        'type': 'whitelist',
        'url': 'https://www.iana.org/domains/root/servers',
        'enabled': True,
        'fetch_interval_minutes': 10
    },
    {
        'name': 'NTP Pool Servers',
        'type': 'whitelist',
        'url': 'https://www.ntppool.org/zone/@',
        'enabled': False,  # Disabled by default - zone parameter needed
        'fetch_interval_minutes': 10
    }
]


def seed_lists(database_url: str, dry_run: bool = False):
    """Insert default lists into database"""
    conn = psycopg2.connect(database_url)
    try:
        with conn.cursor() as cur:
            # Check if lists already exist
            cur.execute("SELECT COUNT(*) FROM public_lists")
            result = cur.fetchone()
            existing_count = result[0] if result else 0
            if existing_count > 0 and not dry_run:
                print(f"⚠️ Warning: {existing_count} lists already exist in database")
                response = input("Continue and add default lists? (y/n): ")
                if response.lower() != 'y':
                    print("Aborted")
                    return 0

            print(f"\n{'='*60}")
            print("SEEDING DEFAULT PUBLIC LISTS")
            print(f"{'='*60}\n")

            for list_config in DEFAULT_LISTS:
                if dry_run:
                    status = "✓ ENABLED" if list_config['enabled'] else "○ DISABLED"
                    print(f"{status} {list_config['type'].upper()}: {list_config['name']}")
                    print(f"   URL: {list_config['url']}")
                    print()
                else:
                    cur.execute("""
                        INSERT INTO public_lists (name, type, url, enabled, fetch_interval_minutes)
                        VALUES (%s, %s, %s, %s, %s)
                        RETURNING id, name
                    """, (
                        list_config['name'],
                        list_config['type'],
                        list_config['url'],
                        list_config['enabled'],
                        list_config['fetch_interval_minutes']
                    ))
                    result = cur.fetchone()
                    if result:
                        list_id, list_name = result
                        status = "✓" if list_config['enabled'] else "○"
                        print(f"{status} Added: {list_name} (ID: {list_id})")

            if not dry_run:
                conn.commit()
                print(f"\n✓ Successfully seeded {len(DEFAULT_LISTS)} lists")
                print(f"{'='*60}\n")
            else:
                print(f"\n{'='*60}")
                print(f"DRY RUN: Would seed {len(DEFAULT_LISTS)} lists")
                print(f"{'='*60}\n")
    except Exception as e:
        conn.rollback()
        print(f"✗ Error: {e}")
        import traceback
        traceback.print_exc()
        return 1
    finally:
        conn.close()
    return 0


async def sync_lists(database_url: str):
    """Run initial sync of all enabled lists"""
    print("\nRunning initial sync of enabled lists...\n")
    fetcher = ListFetcher(database_url)
    await fetcher.fetch_all_lists()


def main():
    parser = argparse.ArgumentParser(description='Seed default public lists')
    parser.add_argument('--dry-run', action='store_true', help='Show what would be added without inserting')
    parser.add_argument('--sync', action='store_true', help='Run initial sync after seeding')
    args = parser.parse_args()

    database_url = os.getenv('DATABASE_URL')
    if not database_url:
        print("ERROR: DATABASE_URL environment variable not set")
        return 1

    # Seed lists
    exit_code = seed_lists(database_url, dry_run=args.dry_run)
    if exit_code != 0:
        return exit_code

    # Optionally sync
    if args.sync and not args.dry_run:
        asyncio.run(sync_lists(database_url))
    return 0


if __name__ == "__main__":
    sys.exit(main())

python_ml/merge_logic.py Normal file (351 lines)
View File

@@ -0,0 +1,351 @@
#!/usr/bin/env python3
"""
Merge Logic for Public Lists Integration
Implements priority: Manual Whitelist > Public Whitelist > Public Blacklist
"""
import os
import sys
import psycopg2
from typing import Dict, Set, Optional
from datetime import datetime
import logging
import ipaddress

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def ip_matches_cidr(ip_address: str, cidr_range: Optional[str]) -> bool:
    """
    Check if IP address falls inside a CIDR range.
    Returns False when cidr_range is None: exact matching is handled by the
    caller, so a missing range must never match every IP.
    """
    if not cidr_range:
        return False
    try:
        ip = ipaddress.ip_address(ip_address)
        network = ipaddress.ip_network(cidr_range, strict=False)
        return ip in network
    except (ValueError, TypeError):
        logger.warning(f"Invalid IP/CIDR: {ip_address}/{cidr_range}")
        return False


class MergeLogic:
    """
    Handles merge logic between manual entries and public lists
    Priority: Manual whitelist > Public whitelist > Public blacklist
    """

    def __init__(self, database_url: str):
        self.database_url = database_url

    def get_db_connection(self):
        """Create database connection"""
        return psycopg2.connect(self.database_url)

    def get_all_whitelisted_ips(self) -> Set[str]:
        """
        Get all whitelisted IPs (manual + public)
        Manual whitelist has higher priority than public whitelist
        """
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("""
                    SELECT DISTINCT ip_address
                    FROM whitelist
                    WHERE active = true
                """)
                return {row[0] for row in cur.fetchall()}
        finally:
            conn.close()

    def get_public_blacklist_ips(self) -> Set[str]:
        """Get all active public blacklist IPs"""
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("""
                    SELECT DISTINCT ip_address
                    FROM public_blacklist_ips
                    WHERE is_active = true
                """)
                return {row[0] for row in cur.fetchall()}
        finally:
            conn.close()

    def should_block_ip(self, ip_address: str) -> tuple[bool, str]:
        """
        Determine if IP should be blocked based on merge logic
        Returns: (should_block, reason)
        Priority:
        1. Manual whitelist (exact or CIDR) -> DON'T block (highest priority)
        2. Public whitelist (exact or CIDR) -> DON'T block
        3. Public blacklist (exact or CIDR) -> DO block
        4. Not in any list -> DON'T block (only ML decides)
        """
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                # Check manual whitelist (highest priority) - exact + CIDR matching
                cur.execute("""
                    SELECT ip_address FROM whitelist
                    WHERE active = true
                      AND source = 'manual'
                """)
                for (wl_ip,) in cur.fetchall():
                    # Entries stored in CIDR notation match as ranges
                    wl_cidr = wl_ip if '/' in wl_ip else None
                    if wl_ip == ip_address or ip_matches_cidr(ip_address, wl_cidr):
                        return (False, "manual_whitelist")

                # Check public whitelist (any source except 'manual') - exact + CIDR
                cur.execute("""
                    SELECT ip_address FROM whitelist
                    WHERE active = true
                      AND source != 'manual'
                """)
                for (wl_ip,) in cur.fetchall():
                    wl_cidr = wl_ip if '/' in wl_ip else None
                    if wl_ip == ip_address or ip_matches_cidr(ip_address, wl_cidr):
                        return (False, "public_whitelist")

                # Check public blacklist - exact + CIDR matching
                cur.execute("""
                    SELECT id, ip_address, cidr_range FROM public_blacklist_ips
                    WHERE is_active = true
                """)
                for bl_id, bl_ip, bl_cidr in cur.fetchall():
                    # Match exact IP or check if IP is in CIDR range
                    if bl_ip == ip_address or ip_matches_cidr(ip_address, bl_cidr):
                        return (True, f"public_blacklist:{bl_id}")

                # Not in any list
                return (False, "not_listed")
        finally:
            conn.close()

    def create_detection_from_blacklist(
        self,
        ip_address: str,
        blacklist_id: str,
        risk_score: int = 75
    ) -> Optional[str]:
        """
        Create detection record for public blacklist IP
        Only if not whitelisted (priority check)
        """
        should_block, reason = self.should_block_ip(ip_address)
        if not should_block:
            logger.info(f"IP {ip_address} not blocked - reason: {reason}")
            return None

        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                # Check if detection already exists
                cur.execute("""
                    SELECT id FROM detections
                    WHERE source_ip = %s
                      AND detection_source = 'public_blacklist'
                    LIMIT 1
                """, (ip_address,))
                existing = cur.fetchone()
                if existing:
                    logger.info(f"Detection already exists for {ip_address}")
                    return existing[0]

                # Create new detection
                cur.execute("""
                    INSERT INTO detections (
                        source_ip,
                        risk_score,
                        anomaly_type,
                        detection_source,
                        blacklist_id,
                        detected_at,
                        blocked
                    ) VALUES (%s, %s, %s, %s, %s, %s, %s)
                    RETURNING id
                """, (
                    ip_address,
                    str(risk_score),
                    'public_blacklist',
                    'public_blacklist',
                    blacklist_id,
                    datetime.utcnow(),
                    False  # Will be blocked by auto-block service if risk_score >= 80
                ))
                result = cur.fetchone()
                if not result:
                    logger.error(f"Failed to get detection ID after insert for {ip_address}")
                    return None
                detection_id = result[0]
                conn.commit()
                logger.info(f"Created detection {detection_id} for blacklisted IP {ip_address}")
                return detection_id
        except Exception as e:
            conn.rollback()
            logger.error(f"Failed to create detection for {ip_address}: {e}")
            return None
        finally:
            conn.close()

    def cleanup_invalid_detections(self) -> int:
        """
        Remove detections for IPs that are now whitelisted
        Respects priority: manual/public whitelist overrides blacklist
        """
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                # Delete detections for whitelisted IPs
                cur.execute("""
                    DELETE FROM detections
                    WHERE detection_source = 'public_blacklist'
                      AND source_ip IN (
                          SELECT ip_address FROM whitelist WHERE active = true
                      )
                """)
                deleted = cur.rowcount
                conn.commit()
                if deleted > 0:
                    logger.info(f"Cleaned up {deleted} detections for whitelisted IPs")
                return deleted
        except Exception as e:
            conn.rollback()
            logger.error(f"Failed to cleanup detections: {e}")
            return 0
        finally:
            conn.close()

    def sync_public_blacklist_detections(self) -> Dict[str, int]:
        """
        Sync detections with current public blacklist state using BULK operations
        Creates detections for blacklisted IPs (if not whitelisted)
        Removes detections for IPs no longer blacklisted or now whitelisted
        """
        stats = {
            'created': 0,
            'cleaned': 0,
            'skipped_whitelisted': 0
        }
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                # Cleanup whitelisted IPs first (priority)
                stats['cleaned'] = self.cleanup_invalid_detections()

                # Bulk create detections for blacklisted IPs (excluding whitelisted)
                # Uses PostgreSQL INET/CIDR operators for proper CIDR range matching
                # Critical for performance with 186M+ rows (single query vs O(N) loops)
                cur.execute("""
                    WITH blacklisted_ranges AS (
                        -- Active blacklist entries (CIDR ranges come from e.g. Spamhaus /24)
                        SELECT
                            bl.id AS blacklist_id,
                            bl.ip_address
                        FROM public_blacklist_ips bl
                        WHERE bl.is_active = true
                    ),
                    whitelisted_ranges AS (
                        -- Whitelist entries (manual + public); ::inet keeps any CIDR mask
                        SELECT ip_address::inet AS ip_range
                        FROM whitelist
                        WHERE active = true
                    )
                    INSERT INTO detections (
                        source_ip,
                        risk_score,
                        anomaly_type,
                        detection_source,
                        blacklist_id,
                        detected_at,
                        blocked
                    )
                    SELECT DISTINCT
                        bl.ip_address,
                        '75',
                        'public_blacklist',
                        'public_blacklist',
                        bl.blacklist_id,
                        NOW(),
                        false
                    FROM blacklisted_ranges bl
                    -- Exclude if IP is in any whitelist range (manual or public)
                    -- Priority: Manual whitelist > Public whitelist > Blacklist
                    WHERE NOT EXISTS (
                        SELECT 1 FROM whitelisted_ranges wl
                        WHERE bl.ip_address::inet <<= wl.ip_range
                    )
                    -- Avoid duplicate detections
                    AND NOT EXISTS (
                        SELECT 1 FROM detections d
                        WHERE d.source_ip = bl.ip_address
                          AND d.detection_source = 'public_blacklist'
                    )
                    RETURNING id
                """)
                created_ids = cur.fetchall()
                stats['created'] = len(created_ids)
                conn.commit()
                logger.info(f"Bulk sync complete: {stats}")
                return stats
        except Exception as e:
            conn.rollback()
            logger.error(f"Failed to sync detections: {e}")
            import traceback
            traceback.print_exc()
            return stats
        finally:
            conn.close()


def main():
    """Run merge logic sync"""
    database_url = os.environ.get('DATABASE_URL')
    if not database_url:
        logger.error("DATABASE_URL environment variable not set")
        return 1

    merge = MergeLogic(database_url)
    stats = merge.sync_public_blacklist_detections()
    print(f"\n{'='*60}")
    print("MERGE LOGIC SYNC COMPLETED")
    print(f"{'='*60}")
    print(f"Created detections: {stats['created']}")
    print(f"Cleaned invalid detections: {stats['cleaned']}")
    print(f"Skipped (whitelisted): {stats['skipped_whitelisted']}")
    print(f"{'='*60}\n")
    return 0


if __name__ == "__main__":
    sys.exit(main())

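The CIDR helper can be checked in isolation, with no database involved; note that a missing range deliberately never matches, since exact comparison happens at the call site:

from merge_logic import ip_matches_cidr

assert ip_matches_cidr("10.0.0.5", "10.0.0.0/24") is True    # inside the range
assert ip_matches_cidr("10.0.1.5", "10.0.0.0/24") is False   # outside the range
assert ip_matches_cidr("10.0.0.5", None) is False            # exact match is the caller's job
assert ip_matches_cidr("garbage", "10.0.0.0/24") is False    # invalid input logs a warning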
View File

@@ -1,7 +1,7 @@
import type { Express } from "express";
import { createServer, type Server } from "http";
import { storage } from "./storage";
import { insertRouterSchema, insertDetectionSchema, insertWhitelistSchema, networkAnalytics, routers } from "@shared/schema";
import { insertRouterSchema, insertDetectionSchema, insertWhitelistSchema, insertPublicListSchema, networkAnalytics, routers } from "@shared/schema";
import { db } from "./db";
import { desc, eq } from "drizzle-orm";
@@ -148,6 +148,127 @@ export async function registerRoutes(app: Express): Promise<Server> {
}
});
// Public Lists
app.get("/api/public-lists", async (req, res) => {
try {
const lists = await storage.getAllPublicLists();
res.json(lists);
} catch (error) {
console.error('[DB ERROR] Failed to fetch public lists:', error);
res.status(500).json({ error: "Failed to fetch public lists" });
}
});
app.get("/api/public-lists/:id", async (req, res) => {
try {
const list = await storage.getPublicListById(req.params.id);
if (!list) {
return res.status(404).json({ error: "List not found" });
}
res.json(list);
} catch (error) {
res.status(500).json({ error: "Failed to fetch list" });
}
});
app.post("/api/public-lists", async (req, res) => {
try {
const validatedData = insertPublicListSchema.parse(req.body);
const list = await storage.createPublicList(validatedData);
res.json(list);
} catch (error: any) {
console.error('[API ERROR] Failed to create public list:', error);
if (error.name === 'ZodError') {
return res.status(400).json({ error: "Invalid list data", details: error.errors });
}
res.status(400).json({ error: "Invalid list data" });
}
});
app.patch("/api/public-lists/:id", async (req, res) => {
try {
const validatedData = insertPublicListSchema.partial().parse(req.body);
const list = await storage.updatePublicList(req.params.id, validatedData);
if (!list) {
return res.status(404).json({ error: "List not found" });
}
res.json(list);
} catch (error: any) {
console.error('[API ERROR] Failed to update public list:', error);
if (error.name === 'ZodError') {
return res.status(400).json({ error: "Invalid list data", details: error.errors });
}
res.status(400).json({ error: "Invalid list data" });
}
});
app.delete("/api/public-lists/:id", async (req, res) => {
try {
const success = await storage.deletePublicList(req.params.id);
if (!success) {
return res.status(404).json({ error: "List not found" });
}
res.json({ success: true });
} catch (error) {
res.status(500).json({ error: "Failed to delete list" });
}
});
app.post("/api/public-lists/:id/sync", async (req, res) => {
try {
const list = await storage.getPublicListById(req.params.id);
if (!list) {
return res.status(404).json({ error: "List not found" });
}
const updated = await storage.updatePublicList(req.params.id, {
  lastFetch: new Date('1970-01-01T00:00:00Z'), // epoch marks the list stale so the fetcher picks it up next cycle
  lastError: null,
});
res.json({
success: true,
message: "Manual sync triggered - list marked for immediate sync",
note: "Fetcher will sync this list on next cycle (max 10 minutes). Check logs: journalctl -u ids-list-fetcher -n 50",
list: updated
});
} catch (error) {
console.error('[API ERROR] Failed to trigger sync:', error);
res.status(500).json({ error: "Failed to trigger sync" });
}
});
// Public Blacklist IPs
app.get("/api/public-blacklist", async (req, res) => {
try {
const limit = parseInt(req.query.limit as string) || 1000;
const listId = req.query.listId as string | undefined;
const ipAddress = req.query.ipAddress as string | undefined;
const isActive = req.query.isActive === 'true';
const ips = await storage.getPublicBlacklistIps({
limit,
listId,
ipAddress,
isActive: req.query.isActive !== undefined ? isActive : undefined,
});
res.json(ips);
} catch (error) {
console.error('[DB ERROR] Failed to fetch blacklist IPs:', error);
res.status(500).json({ error: "Failed to fetch blacklist IPs" });
}
});
app.get("/api/public-blacklist/stats", async (req, res) => {
try {
const stats = await storage.getPublicBlacklistStats();
res.json(stats);
} catch (error) {
console.error('[DB ERROR] Failed to fetch blacklist stats:', error);
res.status(500).json({ error: "Failed to fetch stats" });
}
});
// Training History
app.get("/api/training-history", async (req, res) => {
try {

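A sketch of driving the new endpoints end to end with httpx (declared in pyproject.toml); the base URL and payload values are illustrative, so adjust them to your deployment:

import httpx

BASE_URL = "http://localhost:5000"  # assumption: wherever the Express app listens

with httpx.Client(base_url=BASE_URL, timeout=10.0) as client:
    # Create a source; the body is validated server-side by insertPublicListSchema
    resp = client.post("/api/public-lists", json={
        "name": "Spamhaus DROP",
        "type": "blacklist",
        "url": "https://www.spamhaus.org/drop/drop.txt",
        "enabled": True,
        "fetchIntervalMinutes": 10,
    })
    resp.raise_for_status()
    created = resp.json()

    # Mark the list for immediate sync on the fetcher's next cycle
    client.post(f"/api/public-lists/{created['id']}/sync").raise_for_status()

    # Aggregate stats across blacklist sources
    print(client.get("/api/public-blacklist/stats").json())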
View File

@@ -5,6 +5,8 @@ import {
whitelist,
trainingHistory,
networkAnalytics,
publicLists,
publicBlacklistIps,
type Router,
type InsertRouter,
type NetworkLog,
@@ -16,6 +18,10 @@ import {
type TrainingHistory,
type InsertTrainingHistory,
type NetworkAnalytics,
type PublicList,
type InsertPublicList,
type PublicBlacklistIp,
type InsertPublicBlacklistIp,
} from "@shared/schema";
import { db } from "./db";
import { eq, desc, and, gte, sql, inArray } from "drizzle-orm";
@@ -74,6 +80,26 @@ export interface IStorage {
recentDetections: Detection[];
}>;
// Public Lists
getAllPublicLists(): Promise<PublicList[]>;
getPublicListById(id: string): Promise<PublicList | undefined>;
createPublicList(list: InsertPublicList): Promise<PublicList>;
updatePublicList(id: string, list: Partial<InsertPublicList>): Promise<PublicList | undefined>;
deletePublicList(id: string): Promise<boolean>;
// Public Blacklist IPs
getPublicBlacklistIps(options: {
limit?: number;
listId?: string;
ipAddress?: string;
isActive?: boolean;
}): Promise<PublicBlacklistIp[]>;
getPublicBlacklistStats(): Promise<{
totalLists: number;
totalIps: number;
overlapWithDetections: number;
}>;
// System
testConnection(): Promise<boolean>;
}
@@ -387,6 +413,107 @@ export class DatabaseStorage implements IStorage {
};
}
// Public Lists
async getAllPublicLists(): Promise<PublicList[]> {
return await db.select().from(publicLists).orderBy(desc(publicLists.createdAt));
}
async getPublicListById(id: string): Promise<PublicList | undefined> {
const [list] = await db.select().from(publicLists).where(eq(publicLists.id, id));
return list || undefined;
}
async createPublicList(insertList: InsertPublicList): Promise<PublicList> {
const [list] = await db.insert(publicLists).values(insertList).returning();
return list;
}
async updatePublicList(id: string, updateData: Partial<InsertPublicList>): Promise<PublicList | undefined> {
const [list] = await db
.update(publicLists)
.set(updateData)
.where(eq(publicLists.id, id))
.returning();
return list || undefined;
}
async deletePublicList(id: string): Promise<boolean> {
const result = await db.delete(publicLists).where(eq(publicLists.id, id));
return result.rowCount !== null && result.rowCount > 0;
}
// Public Blacklist IPs
async getPublicBlacklistIps(options: {
limit?: number;
listId?: string;
ipAddress?: string;
isActive?: boolean;
}): Promise<PublicBlacklistIp[]> {
const { limit = 1000, listId, ipAddress, isActive } = options;
const conditions = [];
if (listId) {
conditions.push(eq(publicBlacklistIps.listId, listId));
}
if (ipAddress) {
conditions.push(eq(publicBlacklistIps.ipAddress, ipAddress));
}
if (isActive !== undefined) {
conditions.push(eq(publicBlacklistIps.isActive, isActive));
}
// Apply filters before ordering/limiting; where() accepts undefined when no filters are set
return await db
  .select()
  .from(publicBlacklistIps)
  .where(conditions.length > 0 ? and(...conditions) : undefined)
  .orderBy(desc(publicBlacklistIps.lastSeen))
  .limit(limit);
}
async getPublicBlacklistStats(): Promise<{
totalLists: number;
totalIps: number;
overlapWithDetections: number;
}> {
const lists = await db.select().from(publicLists).where(eq(publicLists.type, 'blacklist'));
const totalLists = lists.length;
const [{ count: totalIps }] = await db
.select({ count: sql<number>`count(*)::int` })
.from(publicBlacklistIps)
.where(eq(publicBlacklistIps.isActive, true));
const [{ count: overlapWithDetections }] = await db
.select({ count: sql<number>`count(distinct ${detections.sourceIp})::int` })
.from(detections)
.innerJoin(publicBlacklistIps, eq(detections.sourceIp, publicBlacklistIps.ipAddress))
.where(
and(
eq(publicBlacklistIps.isActive, true),
eq(detections.detectionSource, 'public_blacklist'),
sql`NOT EXISTS (
SELECT 1 FROM ${whitelist}
WHERE ${whitelist.ipAddress} = ${detections.sourceIp}
AND ${whitelist.active} = true
)`
)
);
return {
totalLists,
totalIps: totalIps || 0,
overlapWithDetections: overlapWithDetections || 0,
};
}
async testConnection(): Promise<boolean> {
try {
await db.execute(sql`SELECT 1`);

View File

@@ -58,11 +58,15 @@ export const detections = pgTable("detections", {
asNumber: text("as_number"),
asName: text("as_name"),
isp: text("isp"),
// Public lists integration
detectionSource: text("detection_source").notNull().default("ml_model"),
blacklistId: varchar("blacklist_id").references(() => publicBlacklistIps.id, { onDelete: 'set null' }),
}, (table) => ({
sourceIpIdx: index("detection_source_ip_idx").on(table.sourceIp),
riskScoreIdx: index("risk_score_idx").on(table.riskScore),
detectedAtIdx: index("detected_at_idx").on(table.detectedAt),
countryIdx: index("country_idx").on(table.country),
detectionSourceIdx: index("detection_source_idx").on(table.detectionSource),
}));
// Whitelist per IP fidati
@@ -74,7 +78,12 @@ export const whitelist = pgTable("whitelist", {
createdBy: text("created_by"),
active: boolean("active").notNull().default(true),
createdAt: timestamp("created_at").defaultNow().notNull(),
});
// Public lists integration
source: text("source").notNull().default("manual"),
listId: varchar("list_id").references(() => publicLists.id, { onDelete: 'set null' }),
}, (table) => ({
sourceIdx: index("whitelist_source_idx").on(table.source),
}));
// ML Training history
export const trainingHistory = pgTable("training_history", {
@ -125,6 +134,42 @@ export const networkAnalytics = pgTable("network_analytics", {
dateHourUnique: unique("network_analytics_date_hour_key").on(table.date, table.hour),
}));
// Public threat/whitelist sources
export const publicLists = pgTable("public_lists", {
id: varchar("id").primaryKey().default(sql`gen_random_uuid()`),
name: text("name").notNull(),
type: text("type").notNull(),
url: text("url").notNull(),
enabled: boolean("enabled").notNull().default(true),
fetchIntervalMinutes: integer("fetch_interval_minutes").notNull().default(10),
lastFetch: timestamp("last_fetch"),
lastSuccess: timestamp("last_success"),
totalIps: integer("total_ips").notNull().default(0),
activeIps: integer("active_ips").notNull().default(0),
errorCount: integer("error_count").notNull().default(0),
lastError: text("last_error"),
createdAt: timestamp("created_at").defaultNow().notNull(),
}, (table) => ({
typeIdx: index("public_lists_type_idx").on(table.type),
enabledIdx: index("public_lists_enabled_idx").on(table.enabled),
}));
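// Illustrative seed, not shipped in this commit. Assumes a `db` handle like
// the one used in the storage layer; the URL is just a well-known example of
// a plain-text IP feed.
export async function seedExampleBlacklist() {
  await db.insert(publicLists).values({
    name: "Spamhaus DROP",
    type: "blacklist",
    url: "https://www.spamhaus.org/drop/drop.txt",
    fetchIntervalMinutes: 60,
  });
}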
// Public blacklist IPs from external sources
export const publicBlacklistIps = pgTable("public_blacklist_ips", {
id: varchar("id").primaryKey().default(sql`gen_random_uuid()`),
ipAddress: text("ip_address").notNull(),
cidrRange: text("cidr_range"),
listId: varchar("list_id").notNull().references(() => publicLists.id, { onDelete: 'cascade' }),
firstSeen: timestamp("first_seen").defaultNow().notNull(),
lastSeen: timestamp("last_seen").defaultNow().notNull(),
isActive: boolean("is_active").notNull().default(true),
}, (table) => ({
ipAddressIdx: index("public_blacklist_ip_idx").on(table.ipAddress),
listIdIdx: index("public_blacklist_list_idx").on(table.listId),
isActiveIdx: index("public_blacklist_active_idx").on(table.isActive),
ipListUnique: unique("public_blacklist_ip_list_key").on(table.ipAddress, table.listId),
}));
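// Sketch of the sync step a fetcher could build on the unique
// (ip_address, list_id) key above: an idempotent upsert that refreshes
// last_seen and reactivates returning IPs. Assumes a `db` handle; the actual
// fetching service in this commit may differ.
export async function upsertFetchedIps(listId: string, ipAddresses: string[]) {
  if (ipAddresses.length === 0) return;
  await db
    .insert(publicBlacklistIps)
    .values(ipAddresses.map((ipAddress) => ({ ipAddress, listId })))
    .onConflictDoUpdate({
      target: [publicBlacklistIps.ipAddress, publicBlacklistIps.listId],
      set: { lastSeen: sql`now()`, isActive: true },
    });
}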
// Schema version tracking for database migrations
export const schemaVersion = pgTable("schema_version", {
id: integer("id").primaryKey().default(1),
@ -138,7 +183,30 @@ export const routersRelations = relations(routers, ({ many }) => ({
logs: many(networkLogs),
}));
// Removed router relation (no longer a foreign key)
export const publicListsRelations = relations(publicLists, ({ many }) => ({
blacklistIps: many(publicBlacklistIps),
}));
export const publicBlacklistIpsRelations = relations(publicBlacklistIps, ({ one }) => ({
list: one(publicLists, {
fields: [publicBlacklistIps.listId],
references: [publicLists.id],
}),
}));
export const whitelistRelations = relations(whitelist, ({ one }) => ({
list: one(publicLists, {
fields: [whitelist.listId],
references: [publicLists.id],
}),
}));
export const detectionsRelations = relations(detections, ({ one }) => ({
blacklist: one(publicBlacklistIps, {
fields: [detections.blacklistId],
references: [publicBlacklistIps.id],
}),
}));
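// With these relations passed to drizzle() at init (e.g.
// db = drizzle(client, { schema })), the relational query API can hydrate a
// list together with its IPs. A sketch, assuming that setup:
export async function getListsWithIps() {
  return db.query.publicLists.findMany({
    with: { blacklistIps: true },
  });
}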
// Insert schemas
export const insertRouterSchema = createInsertSchema(routers).omit({
@ -176,6 +244,19 @@ export const insertNetworkAnalyticsSchema = createInsertSchema(networkAnalytics)
createdAt: true,
});
export const insertPublicListSchema = createInsertSchema(publicLists).omit({
id: true,
createdAt: true,
lastFetch: true,
lastSuccess: true,
});
export const insertPublicBlacklistIpSchema = createInsertSchema(publicBlacklistIps).omit({
id: true,
firstSeen: true,
lastSeen: true,
});
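// Validation sketch, not part of this commit: how a route could gate writes
// with the insert schema. `app` and `storage` are assumed Express-style
// stand-ins for the server's actual wiring.
app.post("/api/public-lists", async (req, res) => {
  const parsed = insertPublicListSchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ errors: parsed.error.flatten() });
  }
  const created = await storage.createPublicList(parsed.data);
  res.status(201).json(created);
});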
// Types
export type Router = typeof routers.$inferSelect;
export type InsertRouter = z.infer<typeof insertRouterSchema>;
@ -197,3 +278,9 @@ export type InsertSchemaVersion = z.infer<typeof insertSchemaVersionSchema>;
export type NetworkAnalytics = typeof networkAnalytics.$inferSelect;
export type InsertNetworkAnalytics = z.infer<typeof insertNetworkAnalyticsSchema>;
export type PublicList = typeof publicLists.$inferSelect;
export type InsertPublicList = z.infer<typeof insertPublicListSchema>;
export type PublicBlacklistIp = typeof publicBlacklistIps.$inferSelect;
export type InsertPublicBlacklistIp = z.infer<typeof insertPublicBlacklistIpSchema>;

uv.lock Normal file
View File

@ -0,0 +1,101 @@
version = 1
revision = 3
requires-python = ">=3.11"
[[package]]
name = "anyio"
version = "4.11.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" },
]
[[package]]
name = "certifi"
version = "2025.11.12"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" },
]
[[package]]
name = "h11"
version = "0.16.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
]
[[package]]
name = "httpcore"
version = "1.0.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
]
[[package]]
name = "httpx"
version = "0.28.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "certifi" },
{ name = "httpcore" },
{ name = "idna" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
]
[[package]]
name = "idna"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]
[[package]]
name = "repl-nix-workspace"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "httpx" },
]
[package.metadata]
requires-dist = [{ name = "httpx", specifier = ">=0.28.1" }]
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
[[package]]
name = "typing-extensions"
version = "4.15.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]