diff --git a/collector.py b/collector.py
new file mode 100644
index 0000000..5a2981b
--- /dev/null
+++ b/collector.py
@@ -0,0 +1,110 @@
+import requests
+import os
+from datetime import datetime
+from dotenv import load_dotenv
+
+load_dotenv()
+
+from modules.parse import (
+    run_bgp_curl_command,
+    run_ipv4_route_summary_curl_command,
+    run_ipv6_route_summary_curl_command
+)
+from modules.bgp import parse_bgp_data, parse_route_summary
+from modules.database import (
+    init_db, add_peer_stats, cleanup_old_data,
+    add_total_routes_stats
+)
+
+BGP_JSON_URL = "http://127.0.0.1:5000/bgp/json"
+
+def fetch_bgp_data():
+    try:
+        response = requests.get(BGP_JSON_URL, timeout=15)
+        response.raise_for_status()
+        return response.json()
+    except requests.RequestException:
+        return None
+
+def process_and_store_data(bgp_data, ipv4_summary_raw, ipv6_summary_raw):
+    now = datetime.utcnow().isoformat()
+
+    stats_to_add = []
+    if bgp_data:
+        for version, peers in [('ipv4', bgp_data.get('ipv4_peers', [])), ('ipv6', bgp_data.get('ipv6_peers', []))]:
+            for peer in peers:
+                try:
+                    prefixes_received = int(peer.get('state_pfx_rcd', 0))
+                except (ValueError, TypeError):
+                    prefixes_received = 0
+                try:
+                    prefixes_sent = int(peer.get('prefix_sent', 0))
+                except (ValueError, TypeError):
+                    prefixes_sent = 0
+
+                stats_to_add.append({
+                    "timestamp": now,
+                    "neighbor_address": peer['neighbor'],
+                    "ip_version": version,
+                    "asn": int(peer.get('as_number', 0)),
+                    "description": peer.get('description', ''),
+                    "prefixes_received": prefixes_received,
+                    "prefixes_sent": prefixes_sent
+                })
+
+    if stats_to_add:
+        add_peer_stats(stats_to_add)
+        print(f" -> {len(stats_to_add)} peer records saved.")
+    else:
+        print(" -> No peer data found to save.")
+
+    ipv4_summary = parse_route_summary(ipv4_summary_raw)
+    ipv6_summary = parse_route_summary(ipv6_summary_raw)
+
+    total_routes_to_add = [
+        {"timestamp": now, "ip_version": "ipv4", "total_routes": ipv4_summary["routes"], "total_fib": ipv4_summary["fib"]},
+        {"timestamp": now, "ip_version": "ipv6", "total_routes": ipv6_summary["routes"], "total_fib": ipv6_summary["fib"]}
+    ]
+    add_total_routes_stats(total_routes_to_add)
+    print(f" -> IPv4 total: {ipv4_summary['routes']}, IPv6 total: {ipv6_summary['routes']} saved.")
+
+if __name__ == "__main__":
+    print(f"--- Collector Script Started: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} ---")
+    init_db()
+
+    all_ok = True
+
+    print("[TASK] Fetching BGP peer data...", end='', flush=True)
+    bgp_data = fetch_bgp_data()
+    if not bgp_data or not bgp_data.get('ipv4_peers'):
+        print("Error")
+        all_ok = False
+    else:
+        print("Success")
+
+    print("[TASK] Fetching IPv4 Route Summary...", end='', flush=True)
+    ipv4_summary_raw = run_ipv4_route_summary_curl_command()
+    if not ipv4_summary_raw or not ipv4_summary_raw.get('data') or 'Totals' not in ipv4_summary_raw.get('data', ''):
+        print("Error")
+        all_ok = False
+    else:
+        print("Success")
+
+    print("[TASK] Fetching IPv6 Route Summary...", end='', flush=True)
+    ipv6_summary_raw = run_ipv6_route_summary_curl_command()
+    if not ipv6_summary_raw or not ipv6_summary_raw.get('data') or 'Totals' not in ipv6_summary_raw.get('data', ''):
+        print("Error")
+        all_ok = False
+    else:
+        print("Success")
+
+    if all_ok:
+        print("[DB] Processing and saving data...")
+        process_and_store_data(bgp_data, ipv4_summary_raw, ipv6_summary_raw)
+    else:
+        print("[WARN] Processing skipped due to earlier errors.")
+
+    print("[DB] Cleaning up old database records...")
+    cleanup_old_data()
+
+    print("--- Collector Script Complete ---")
\ No newline at end of file