Implement list synchronization by fetching and saving IP addresses
Adds IP fetching logic to server routes and implements upsert functionality for blacklist IPs in the database storage layer.

Replit-Commit-Author: Agent
Replit-Commit-Session-Id: 7a657272-55ba-4a79-9a2e-f1ed9bc7a528
Replit-Commit-Checkpoint-Type: full_checkpoint
Replit-Commit-Event-Id: 822e4068-5dab-436d-95b7-523678751e11
Replit-Commit-Screenshot-Url: https://storage.googleapis.com/screenshot-production-us-central1/449cf7c4-c97a-45ae-8234-e5c5b8d6a84f/7a657272-55ba-4a79-9a2e-f1ed9bc7a528/zauptjn
parent 4754cfd98a
commit b31bad7d8b

.replit | 8
@@ -14,14 +14,14 @@ run = ["npm", "run", "start"]
 localPort = 5000
 externalPort = 80
 
-[[ports]]
-localPort = 40145
-externalPort = 4200
-
 [[ports]]
 localPort = 41303
 externalPort = 3002
 
+[[ports]]
+localPort = 43175
+externalPort = 4200
+
 [[ports]]
 localPort = 43471
 externalPort = 3003
server/routes.ts | 105
@@ -221,20 +221,107 @@ export async function registerRoutes(app: Express): Promise<Server> {
         return res.status(404).json({ error: "List not found" });
       }
 
-      const updated = await storage.updatePublicList(req.params.id, {
-        lastAttempt: new Date('1970-01-01T00:00:00Z'),
-        errorMessage: null,
+      console.log(`[SYNC] Starting sync for list: ${list.name} (${list.url})`);
+
+      // Fetch the list from URL
+      const response = await fetch(list.url, {
+        headers: {
+          'User-Agent': 'IDS-MikroTik-PublicListFetcher/2.0',
+          'Accept': 'application/json, text/plain, */*',
+        },
+        signal: AbortSignal.timeout(30000),
       });
 
+      if (!response.ok) {
+        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+      }
+
+      const contentType = response.headers.get('content-type') || '';
+      const text = await response.text();
+
+      // Parse IPs based on content type
+      let ips: Array<{ip: string, cidr?: string}> = [];
+
+      if (contentType.includes('json') || list.url.endsWith('.json')) {
+        // JSON format (Spamhaus DROP v4 JSON)
+        try {
+          const data = JSON.parse(text);
+          if (Array.isArray(data)) {
+            for (const entry of data) {
+              if (entry.cidr) {
+                const [ip] = entry.cidr.split('/');
+                ips.push({ ip, cidr: entry.cidr });
+              } else if (entry.ip) {
+                ips.push({ ip: entry.ip, cidr: null as any });
+              }
+            }
+          }
+        } catch (e) {
+          console.error('[SYNC] Failed to parse JSON:', e);
+          throw new Error('Invalid JSON format');
+        }
+      } else {
+        // Plain text format (one IP/CIDR per line)
+        const lines = text.split('\n');
+        for (const line of lines) {
+          const trimmed = line.trim();
+          if (!trimmed || trimmed.startsWith('#') || trimmed.startsWith(';')) continue;
+
+          // Extract IP/CIDR from line
+          const match = trimmed.match(/^(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(\/\d{1,2})?/);
+          if (match) {
+            const ip = match[1];
+            const cidr = match[2] ? `${match[1]}${match[2]}` : null;
+            ips.push({ ip, cidr: cidr as any });
+          }
+        }
+      }
+
+      console.log(`[SYNC] Parsed ${ips.length} IPs from ${list.name}`);
+
+      // Save IPs to database
+      let added = 0;
+      let updated = 0;
+
+      for (const { ip, cidr } of ips) {
+        const result = await storage.upsertBlacklistIp(list.id, ip, cidr);
+        if (result.created) added++;
+        else updated++;
+      }
+
+      // Update list stats
+      await storage.updatePublicList(list.id, {
+        lastFetch: new Date(),
+        lastSuccess: new Date(),
+        totalIps: ips.length,
+        activeIps: ips.length,
+        errorCount: 0,
+        lastError: null,
+      });
+
+      console.log(`[SYNC] Completed: ${added} added, ${updated} updated for ${list.name}`);
+
       res.json({
         success: true,
-        message: "Manual sync triggered - list marked for immediate sync",
-        note: "Fetcher will sync this list on next cycle (max 10 minutes). Check logs: journalctl -u ids-list-fetcher -n 50",
-        list: updated
+        message: `Sync completed: ${ips.length} IPs processed`,
+        added,
+        updated,
+        total: ips.length,
       });
-    } catch (error) {
-      console.error('[API ERROR] Failed to trigger sync:', error);
-      res.status(500).json({ error: "Failed to trigger sync" });
+    } catch (error: any) {
+      console.error('[API ERROR] Failed to sync:', error);
+
+      // Update error count
+      const list = await storage.getPublicListById(req.params.id);
+      if (list) {
+        await storage.updatePublicList(req.params.id, {
+          errorCount: (list.errorCount || 0) + 1,
+          lastError: error.message,
+          lastFetch: new Date(),
+        });
+      }
+
+      res.status(500).json({ error: `Sync failed: ${error.message}` });
     }
   });
 
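A quick illustration of the plain-text parsing branch above (not part of the commit): comment lines starting with '#' or ';' are skipped, and each remaining line is matched against the IPv4/CIDR regex from the diff. The sample input and the helper name parsePlainText below are hypothetical; only the skip rules and the regex come from the committed code.

// Sketch of the plain-text parsing branch; sample data and helper name are illustrative only.
const sample = [
  '; Spamhaus DROP List',
  '# last updated: example',
  '1.10.16.0/20 ; SBL256894',
  '223.254.0.0/16',
  'not an ip line',
].join('\n');

function parsePlainText(text: string): Array<{ ip: string; cidr: string | null }> {
  const ips: Array<{ ip: string; cidr: string | null }> = [];
  for (const line of text.split('\n')) {
    const trimmed = line.trim();
    // Skip empty lines and comments, as in the committed handler
    if (!trimmed || trimmed.startsWith('#') || trimmed.startsWith(';')) continue;
    const match = trimmed.match(/^(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(\/\d{1,2})?/);
    if (match) {
      ips.push({ ip: match[1], cidr: match[2] ? `${match[1]}${match[2]}` : null });
    }
  }
  return ips;
}

// parsePlainText(sample) yields:
//   [ { ip: '1.10.16.0', cidr: '1.10.16.0/20' },
//     { ip: '223.254.0.0', cidr: '223.254.0.0/16' } ]

Note that the regex is a format check only: it does not validate octet ranges, so a line such as '999.1.1.1' would also be accepted.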
@@ -99,6 +99,7 @@ export interface IStorage {
     totalIps: number;
     overlapWithDetections: number;
   }>;
+  upsertBlacklistIp(listId: string, ipAddress: string, cidrRange: string | null): Promise<{created: boolean}>;
 
   // System
   testConnection(): Promise<boolean>;
@@ -514,6 +515,49 @@ export class DatabaseStorage implements IStorage {
     };
   }
 
+  async upsertBlacklistIp(listId: string, ipAddress: string, cidrRange: string | null): Promise<{created: boolean}> {
+    try {
+      const existing = await db
+        .select()
+        .from(publicBlacklistIps)
+        .where(
+          and(
+            eq(publicBlacklistIps.listId, listId),
+            eq(publicBlacklistIps.ipAddress, ipAddress)
+          )
+        );
+
+      if (existing.length > 0) {
+        await db
+          .update(publicBlacklistIps)
+          .set({
+            lastSeen: new Date(),
+            isActive: true,
+            cidrRange: cidrRange,
+            ipInet: ipAddress,
+            cidrInet: cidrRange || `${ipAddress}/32`,
+          })
+          .where(eq(publicBlacklistIps.id, existing[0].id));
+        return { created: false };
+      } else {
+        await db.insert(publicBlacklistIps).values({
+          listId,
+          ipAddress,
+          cidrRange,
+          ipInet: ipAddress,
+          cidrInet: cidrRange || `${ipAddress}/32`,
+          isActive: true,
+          firstSeen: new Date(),
+          lastSeen: new Date(),
+        });
+        return { created: true };
+      }
+    } catch (error) {
+      console.error('[DB ERROR] Failed to upsert blacklist IP:', error);
+      throw error;
+    }
+  }
+
   async testConnection(): Promise<boolean> {
     try {
       await db.execute(sql`SELECT 1`);
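The committed upsert does a SELECT followed by an UPDATE or INSERT, which keeps the added/updated counters simple but costs two round trips per IP and can race if two syncs touch the same list concurrently. A minimal sketch of an atomic alternative using Drizzle's onConflictDoUpdate follows. It is not part of this commit: it assumes the db and publicBlacklistIps objects already used in this module plus a unique index on (listId, ipAddress) that is not shown in this diff, and unlike the committed version it does not report whether the row was newly created, which is one reason the two-step form may have been preferred.

// Hypothetical alternative, not part of this commit: single-statement upsert.
// Assumes a unique index on (listId, ipAddress) and the module's db/publicBlacklistIps.
async function upsertBlacklistIpAtomic(
  listId: string,
  ipAddress: string,
  cidrRange: string | null,
): Promise<void> {
  await db
    .insert(publicBlacklistIps)
    .values({
      listId,
      ipAddress,
      cidrRange,
      ipInet: ipAddress,
      cidrInet: cidrRange || `${ipAddress}/32`,
      isActive: true,
      firstSeen: new Date(),
      lastSeen: new Date(),
    })
    .onConflictDoUpdate({
      // On a duplicate (listId, ipAddress) pair, refresh the existing row instead
      target: [publicBlacklistIps.listId, publicBlacklistIps.ipAddress],
      set: {
        lastSeen: new Date(),
        isActive: true,
        cidrRange,
        ipInet: ipAddress,
        cidrInet: cidrRange || `${ipAddress}/32`,
      },
    });
}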