Compare commits

...

2 Commits

Author SHA1 Message Date
Marco Lanzara
1133ca356f 🚀 Release v1.0.101
- Type: patch
- Database schema: database-schema/schema.sql (structure only)
- Date: 2026-01-02 16:14:47
2026-01-02 16:14:47 +00:00
marco370
aa74340706 Implement pagination and server-side search for detection records
Update the client to handle pagination and a debounced search input, and refactor the server API routes and storage layer to support offset, limit, and search queries on detection records.

Replit-Commit-Author: Agent
Replit-Commit-Session-Id: 7a657272-55ba-4a79-9a2e-f1ed9bc7a528
Replit-Commit-Checkpoint-Type: full_checkpoint
Replit-Commit-Event-Id: 0ad70992-7e31-48d6-8e52-b2442cc2a623
Replit-Commit-Screenshot-Url: https://storage.googleapis.com/screenshot-production-us-central1/449cf7c4-c97a-45ae-8234-e5c5b8d6a84f/7a657272-55ba-4a79-9a2e-f1ed9bc7a528/C6BdLIt
2026-01-02 16:07:07 +00:00
6 changed files with 228 additions and 64 deletions
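
The pagination commit (aa74340706) changes GET /api/detections from returning a bare array to a { detections, total } envelope driven by limit, offset, and search query parameters. Below is a minimal TypeScript sketch of how a client might consume it; the DetectionSummary fields are a trimmed, illustrative subset of the Detection schema, and the helper is a sketch rather than the project's actual client code.

// Hedged sketch: calling the paginated detections endpoint from commit
// aa74340706. Field and parameter names mirror the diffs below; this is
// illustrative only, not the project's client implementation.
interface DetectionSummary {
  id: string;
  sourceIp: string;
  anomalyType: string;
  riskScore: string;
}

interface DetectionsPage {
  detections: DetectionSummary[];
  total: number;
}

const ITEMS_PER_PAGE = 50;

async function fetchDetectionsPage(page: number, search = ""): Promise<DetectionsPage> {
  const params = new URLSearchParams({
    limit: ITEMS_PER_PAGE.toString(),
    offset: ((page - 1) * ITEMS_PER_PAGE).toString(),
  });
  if (search.trim()) params.set("search", search.trim());
  const res = await fetch(`/api/detections?${params}`);
  if (!res.ok) throw new Error(`GET /api/detections failed: ${res.status}`);
  return res.json();
}

// Usage: total pages are derived from the server-reported total.
// const { total } = await fetchDetectionsPage(1, "spamhaus");
// const totalPages = Math.ceil(total / ITEMS_PER_PAGE);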

View File

@@ -0,0 +1,51 @@
journalctl -u ids-list-fetcher -n 50 --no-pager
Jan 02 17:10:02 ids.alfacom.it ids-list-fetcher[2139]: ============================================================
Jan 02 17:10:02 ids.alfacom.it ids-list-fetcher[2139]: ============================================================
Jan 02 17:10:02 ids.alfacom.it ids-list-fetcher[2139]: RUNNING MERGE LOGIC
Jan 02 17:10:02 ids.alfacom.it ids-list-fetcher[2139]: ============================================================
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: INFO:merge_logic:Bulk sync complete: {'created': 0, 'cleaned': 0, 'skipped_whitelisted': 0}
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: Merge Logic Stats:
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: Created detections: 0
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: Cleaned invalid detections: 0
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: Skipped (whitelisted): 0
Jan 02 17:10:12 ids.alfacom.it ids-list-fetcher[2139]: ============================================================
Jan 02 17:10:12 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 17:10:12 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.
Jan 02 17:12:35 ids.alfacom.it systemd[1]: Starting IDS Public Lists Fetcher Service...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [2026-01-02 17:12:35] PUBLIC LISTS SYNC
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: Found 4 enabled lists
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Downloading Spamhaus from https://www.spamhaus.org/drop/drop_v4.json...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Downloading AWS from https://ip-ranges.amazonaws.com/ip-ranges.json...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Downloading Google Cloud from https://www.gstatic.com/ipranges/cloud.json...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Downloading Google globali from https://www.gstatic.com/ipranges/goog.json...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Parsing AWS...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Found 9548 IPs, syncing to database...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] ✓ AWS: +0 -0 ~9548
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Parsing Google globali...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] ✗ Google globali: No valid IPs found in list
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Parsing Google Cloud...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] ✗ Google Cloud: No valid IPs found in list
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Parsing Spamhaus...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] Found 1468 IPs, syncing to database...
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: [17:12:35] ✓ Spamhaus: +0 -0 ~1468
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: SYNC SUMMARY
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: Success: 2/4
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: Errors: 2/4
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: Total IPs Added: 0
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: Total IPs Removed: 0
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: RUNNING MERGE LOGIC
Jan 02 17:12:35 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: INFO:merge_logic:Bulk sync complete: {'created': 0, 'cleaned': 0, 'skipped_whitelisted': 0}
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: Merge Logic Stats:
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: Created detections: 0
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: Cleaned invalid detections: 0
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: Skipped (whitelisted): 0
Jan 02 17:12:45 ids.alfacom.it ids-list-fetcher[2279]: ============================================================
Jan 02 17:12:45 ids.alfacom.it systemd[1]: ids-list-fetcher.service: Deactivated successfully.
Jan 02 17:12:45 ids.alfacom.it systemd[1]: Finished IDS Public Lists Fetcher Service.

View File

@@ -5,40 +5,74 @@ import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { Slider } from "@/components/ui/slider";
import { AlertTriangle, Search, Shield, Globe, MapPin, Building2, ShieldPlus, ShieldCheck, Unlock } from "lucide-react";
import { AlertTriangle, Search, Shield, Globe, MapPin, Building2, ShieldPlus, ShieldCheck, Unlock, ChevronLeft, ChevronRight } from "lucide-react";
import { format } from "date-fns";
import { useState } from "react";
import { useState, useEffect, useMemo } from "react";
import type { Detection, Whitelist } from "@shared/schema";
import { getFlag } from "@/lib/country-flags";
import { apiRequest, queryClient } from "@/lib/queryClient";
import { useToast } from "@/hooks/use-toast";
const ITEMS_PER_PAGE = 50;
interface DetectionsResponse {
detections: Detection[];
total: number;
}
export default function Detections() {
const [searchQuery, setSearchQuery] = useState("");
const [searchInput, setSearchInput] = useState("");
const [debouncedSearch, setDebouncedSearch] = useState("");
const [anomalyTypeFilter, setAnomalyTypeFilter] = useState<string>("all");
const [minScore, setMinScore] = useState(0);
const [maxScore, setMaxScore] = useState(100);
const [currentPage, setCurrentPage] = useState(1);
const { toast } = useToast();
// Build query params
const queryParams = new URLSearchParams();
queryParams.set("limit", "5000");
// Debounce search input
useEffect(() => {
const timer = setTimeout(() => {
setDebouncedSearch(searchInput);
setCurrentPage(1); // Reset to first page on search
}, 300);
return () => clearTimeout(timer);
}, [searchInput]);
// Reset page on filter change
useEffect(() => {
setCurrentPage(1);
}, [anomalyTypeFilter, minScore, maxScore]);
// Build query params with pagination and search
const queryParams = useMemo(() => {
const params = new URLSearchParams();
params.set("limit", ITEMS_PER_PAGE.toString());
params.set("offset", ((currentPage - 1) * ITEMS_PER_PAGE).toString());
if (anomalyTypeFilter !== "all") {
queryParams.set("anomalyType", anomalyTypeFilter);
params.set("anomalyType", anomalyTypeFilter);
}
if (minScore > 0) {
queryParams.set("minScore", minScore.toString());
params.set("minScore", minScore.toString());
}
if (maxScore < 100) {
queryParams.set("maxScore", maxScore.toString());
params.set("maxScore", maxScore.toString());
}
if (debouncedSearch.trim()) {
params.set("search", debouncedSearch.trim());
}
return params.toString();
}, [currentPage, anomalyTypeFilter, minScore, maxScore, debouncedSearch]);
const { data: detections, isLoading } = useQuery<Detection[]>({
queryKey: ["/api/detections", anomalyTypeFilter, minScore, maxScore],
queryFn: () => fetch(`/api/detections?${queryParams.toString()}`).then(r => r.json()),
refetchInterval: 5000,
const { data, isLoading } = useQuery<DetectionsResponse>({
queryKey: ["/api/detections", currentPage, anomalyTypeFilter, minScore, maxScore, debouncedSearch],
queryFn: () => fetch(`/api/detections?${queryParams}`).then(r => r.json()),
refetchInterval: 10000,
});
const detections = data?.detections || [];
const totalCount = data?.total || 0;
const totalPages = Math.ceil(totalCount / ITEMS_PER_PAGE);
// Fetch whitelist to check if IP is already whitelisted
const { data: whitelistData } = useQuery<Whitelist[]>({
queryKey: ["/api/whitelist"],
@@ -47,11 +81,6 @@ export default function Detections() {
// Create a Set of whitelisted IPs for fast lookup
const whitelistedIps = new Set(whitelistData?.map(w => w.ipAddress) || []);
const filteredDetections = detections?.filter((d) =>
d.sourceIp.toLowerCase().includes(searchQuery.toLowerCase()) ||
d.anomalyType.toLowerCase().includes(searchQuery.toLowerCase())
);
// Mutation to add an IP to the whitelist
const addToWhitelistMutation = useMutation({
mutationFn: async (detection: Detection) => {
@@ -137,9 +166,9 @@ export default function Detections() {
<div className="relative flex-1 min-w-[200px]">
<Search className="absolute left-3 top-1/2 -translate-y-1/2 h-4 w-4 text-muted-foreground" />
<Input
placeholder="Cerca per IP o tipo anomalia..."
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
placeholder="Cerca per IP, paese, organizzazione..."
value={searchInput}
onChange={(e) => setSearchInput(e.target.value)}
className="pl-9"
data-testid="input-search"
/>
@@ -191,9 +220,36 @@ export default function Detections() {
{/* Detections List */}
<Card data-testid="card-detections-list">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<CardTitle className="flex items-center justify-between gap-2 flex-wrap">
<div className="flex items-center gap-2">
<AlertTriangle className="h-5 w-5" />
Rilevamenti ({filteredDetections?.length || 0})
Rilevamenti ({totalCount})
</div>
{totalPages > 1 && (
<div className="flex items-center gap-2 text-sm font-normal">
<Button
variant="outline"
size="icon"
onClick={() => setCurrentPage(p => Math.max(1, p - 1))}
disabled={currentPage === 1}
data-testid="button-prev-page"
>
<ChevronLeft className="h-4 w-4" />
</Button>
<span data-testid="text-pagination">
Pagina {currentPage} di {totalPages}
</span>
<Button
variant="outline"
size="icon"
onClick={() => setCurrentPage(p => Math.min(totalPages, p + 1))}
disabled={currentPage === totalPages}
data-testid="button-next-page"
>
<ChevronRight className="h-4 w-4" />
</Button>
</div>
)}
</CardTitle>
</CardHeader>
<CardContent>
@@ -201,9 +257,9 @@ export default function Detections() {
<div className="text-center py-8 text-muted-foreground" data-testid="text-loading">
Caricamento...
</div>
) : filteredDetections && filteredDetections.length > 0 ? (
) : detections.length > 0 ? (
<div className="space-y-3">
{filteredDetections.map((detection) => (
{detections.map((detection) => (
<div
key={detection.id}
className="p-4 rounded-lg border hover-elevate"
@@ -356,11 +412,40 @@ export default function Detections() {
<div className="text-center py-12 text-muted-foreground" data-testid="text-no-results">
<AlertTriangle className="h-12 w-12 mx-auto mb-2 opacity-50" />
<p>Nessun rilevamento trovato</p>
{searchQuery && (
{debouncedSearch && (
<p className="text-sm">Prova con un altro termine di ricerca</p>
)}
</div>
)}
{/* Bottom pagination */}
{totalPages > 1 && detections.length > 0 && (
<div className="flex items-center justify-center gap-4 mt-6 pt-4 border-t">
<Button
variant="outline"
size="sm"
onClick={() => setCurrentPage(p => Math.max(1, p - 1))}
disabled={currentPage === 1}
data-testid="button-prev-page-bottom"
>
<ChevronLeft className="h-4 w-4 mr-1" />
Precedente
</Button>
<span className="text-sm text-muted-foreground" data-testid="text-pagination-bottom">
Pagina {currentPage} di {totalPages} ({totalCount} totali)
</span>
<Button
variant="outline"
size="sm"
onClick={() => setCurrentPage(p => Math.min(totalPages, p + 1))}
disabled={currentPage === totalPages}
data-testid="button-next-page-bottom"
>
Successiva
<ChevronRight className="h-4 w-4 ml-1" />
</Button>
</div>
)}
</CardContent>
</Card>
</div>

View File

@@ -2,7 +2,7 @@
-- PostgreSQL database dump
--
\restrict 0WksqiXSxEbKkimrOffHTFz303y80NXUjCIEjcTgyodl4SsmTQnolXeqWX5mUy4
\restrict tgCSU0lsazWYhvZDtJSySVv0XWsanBDacMCTKBwtKYs25ogGpWpmf5QkuPPQcwM
-- Dumped from database version 16.11 (74c6bb6)
-- Dumped by pg_dump version 16.10
@@ -387,5 +387,5 @@ ALTER TABLE ONLY public.public_blacklist_ips
-- PostgreSQL database dump complete
--
\unrestrict 0WksqiXSxEbKkimrOffHTFz303y80NXUjCIEjcTgyodl4SsmTQnolXeqWX5mUy4
\unrestrict tgCSU0lsazWYhvZDtJSySVv0XWsanBDacMCTKBwtKYs25ogGpWpmf5QkuPPQcwM

View File

@@ -77,18 +77,22 @@ export async function registerRoutes(app: Express): Promise<Server> {
// Detections
app.get("/api/detections", async (req, res) => {
try {
const limit = req.query.limit ? parseInt(req.query.limit as string) : 500;
const limit = req.query.limit ? parseInt(req.query.limit as string) : 50;
const offset = req.query.offset ? parseInt(req.query.offset as string) : 0;
const anomalyType = req.query.anomalyType as string | undefined;
const minScore = req.query.minScore ? parseFloat(req.query.minScore as string) : undefined;
const maxScore = req.query.maxScore ? parseFloat(req.query.maxScore as string) : undefined;
const search = req.query.search as string | undefined;
const detections = await storage.getAllDetections({
const result = await storage.getAllDetections({
limit,
offset,
anomalyType,
minScore,
maxScore
maxScore,
search
});
res.json(detections);
res.json(result);
} catch (error) {
console.error('[DB ERROR] Failed to fetch detections:', error);
res.status(500).json({ error: "Failed to fetch detections" });
@@ -474,14 +478,15 @@ export async function registerRoutes(app: Express): Promise<Server> {
app.get("/api/stats", async (req, res) => {
try {
const routers = await storage.getAllRouters();
const detections = await storage.getAllDetections({ limit: 1000 });
const detectionsResult = await storage.getAllDetections({ limit: 1000 });
const recentLogs = await storage.getRecentLogs(1000);
const whitelist = await storage.getAllWhitelist();
const latestTraining = await storage.getLatestTraining();
const blockedCount = detections.filter(d => d.blocked).length;
const criticalCount = detections.filter(d => parseFloat(d.riskScore) >= 85).length;
const highCount = detections.filter(d => parseFloat(d.riskScore) >= 70 && parseFloat(d.riskScore) < 85).length;
const detectionsList = detectionsResult.detections;
const blockedCount = detectionsList.filter(d => d.blocked).length;
const criticalCount = detectionsList.filter(d => parseFloat(d.riskScore) >= 85).length;
const highCount = detectionsList.filter(d => parseFloat(d.riskScore) >= 70 && parseFloat(d.riskScore) < 85).length;
res.json({
routers: {
@@ -489,7 +494,7 @@ export async function registerRoutes(app: Express): Promise<Server> {
enabled: routers.filter(r => r.enabled).length
},
detections: {
total: detections.length,
total: detectionsResult.total,
blocked: blockedCount,
critical: criticalCount,
high: highCount

View File

@@ -43,10 +43,12 @@ export interface IStorage {
// Detections
getAllDetections(options: {
limit?: number;
offset?: number;
anomalyType?: string;
minScore?: number;
maxScore?: number;
}): Promise<Detection[]>;
search?: string;
}): Promise<{ detections: Detection[]; total: number }>;
getDetectionByIp(sourceIp: string): Promise<Detection | undefined>;
createDetection(detection: InsertDetection): Promise<Detection>;
updateDetection(id: string, detection: Partial<InsertDetection>): Promise<Detection | undefined>;
@@ -174,11 +176,13 @@ export class DatabaseStorage implements IStorage {
// Detections
async getAllDetections(options: {
limit?: number;
offset?: number;
anomalyType?: string;
minScore?: number;
maxScore?: number;
}): Promise<Detection[]> {
const { limit = 5000, anomalyType, minScore, maxScore } = options;
search?: string;
}): Promise<{ detections: Detection[]; total: number }> {
const { limit = 50, offset = 0, anomalyType, minScore, maxScore, search } = options;
// Build WHERE conditions
const conditions = [];
@@ -196,17 +200,36 @@
conditions.push(sql`${detections.riskScore}::numeric <= ${maxScore}`);
}
const query = db
.select()
.from(detections)
.orderBy(desc(detections.detectedAt))
.limit(limit);
if (conditions.length > 0) {
return await query.where(and(...conditions));
// Search by IP, anomaly type, country, or organization (case-insensitive)
if (search && search.trim()) {
const searchLower = search.trim().toLowerCase();
conditions.push(sql`(
LOWER(${detections.sourceIp}) LIKE ${'%' + searchLower + '%'} OR
LOWER(${detections.anomalyType}) LIKE ${'%' + searchLower + '%'} OR
LOWER(COALESCE(${detections.country}, '')) LIKE ${'%' + searchLower + '%'} OR
LOWER(COALESCE(${detections.organization}, '')) LIKE ${'%' + searchLower + '%'}
)`);
}
return await query;
const whereClause = conditions.length > 0 ? and(...conditions) : undefined;
// Get total count for pagination
const countResult = await db
.select({ count: sql<number>`count(*)::int` })
.from(detections)
.where(whereClause);
const total = countResult[0]?.count || 0;
// Get paginated results
const results = await db
.select()
.from(detections)
.where(whereClause)
.orderBy(desc(detections.detectedAt))
.limit(limit)
.offset(offset);
return { detections: results, total };
}
async getDetectionByIp(sourceIp: string): Promise<Detection | undefined> {
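
A hypothetical call-site sketch for the revised getAllDetections contract follows; the storage import path is an assumption, while the option names and the { detections, total } return shape come from the hunk above.

// Hypothetical usage sketch, not part of the commit. Assumes a storage
// instance exported from "./storage", as used in the routes diff above.
import { storage } from "./storage";

const PAGE_SIZE = 50;

async function listDetectionsPage(page: number, search?: string) {
  const { detections, total } = await storage.getAllDetections({
    limit: PAGE_SIZE,
    offset: (page - 1) * PAGE_SIZE,
    search,
  });
  // The server-reported total drives the pagination controls on the client.
  return { detections, totalPages: Math.ceil(total / PAGE_SIZE) };
}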

View File

@@ -1,7 +1,13 @@
{
"version": "1.0.100",
"lastUpdate": "2026-01-02T15:51:11.271Z",
"version": "1.0.101",
"lastUpdate": "2026-01-02T16:14:47.174Z",
"changelog": [
{
"version": "1.0.101",
"date": "2026-01-02",
"type": "patch",
"description": "Deployment automatico v1.0.101"
},
{
"version": "1.0.100",
"date": "2026-01-02",
@@ -295,12 +301,6 @@
"date": "2025-11-24",
"type": "patch",
"description": "Deployment automatico v1.0.52"
},
{
"version": "1.0.51",
"date": "2025-11-24",
"type": "patch",
"description": "Deployment automatico v1.0.51"
}
]
}