# Router-tools/modules/database.py — SQLite persistence for BGP peer and route statistics.

import os
import sqlite3
from datetime import datetime, timedelta, timezone

from dotenv import load_dotenv
load_dotenv()
DB_PATH = os.getenv("BGP_STATS_DB_PATH", "bgp_stats.db")
RETENTION_DAYS = int(os.getenv("BGP_STATS_RETENTION_DAYS", 30))
def get_db_connection():
os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
conn = sqlite3.connect(DB_PATH)
conn.row_factory = sqlite3.Row
return conn
def init_db():
conn = get_db_connection()
cursor = conn.cursor()
cursor.execute('''
CREATE TABLE IF NOT EXISTS peer_history (
timestamp TEXT NOT NULL,
neighbor_address TEXT NOT NULL,
ip_version TEXT NOT NULL,
asn INTEGER,
description TEXT,
prefixes_received INTEGER,
prefixes_sent INTEGER,
PRIMARY KEY (timestamp, neighbor_address)
)
''')
cursor.execute('''
CREATE TABLE IF NOT EXISTS total_routes_history (
timestamp TEXT NOT NULL,
ip_version TEXT NOT NULL,
total_routes INTEGER,
total_fib INTEGER,
PRIMARY KEY (timestamp, ip_version)
)
''')
conn.commit()
conn.close()
def add_peer_stats(stats_list):
conn = get_db_connection()
cursor = conn.cursor()
try:
cursor.executemany('''
INSERT OR IGNORE INTO peer_history
(timestamp, neighbor_address, ip_version, asn, description, prefixes_received, prefixes_sent)
VALUES (:timestamp, :neighbor_address, :ip_version, :asn, :description, :prefixes_received, :prefixes_sent)
''', stats_list)
conn.commit()
finally:
conn.close()
def get_peer_history(neighbor_address, start_date, end_date):
conn = get_db_connection()
cursor = conn.cursor()
cursor.execute('''
SELECT timestamp, prefixes_received, prefixes_sent
FROM peer_history
WHERE neighbor_address = ? AND timestamp BETWEEN ? AND ?
ORDER BY timestamp ASC
''', (neighbor_address, start_date, end_date))
rows = cursor.fetchall()
conn.close()
return [dict(row) for row in rows]
def add_total_routes_stats(stats_list):
conn = get_db_connection()
cursor = conn.cursor()
try:
cursor.executemany('''
INSERT OR IGNORE INTO total_routes_history
(timestamp, ip_version, total_routes, total_fib)
VALUES (:timestamp, :ip_version, :total_routes, :total_fib)
''', stats_list)
conn.commit()
finally:
conn.close()
def get_total_routes_history(start_date, end_date):
conn = get_db_connection()
cursor = conn.cursor()
cursor.execute('''
SELECT timestamp, ip_version, total_routes
FROM total_routes_history
WHERE timestamp BETWEEN ? AND ?
ORDER BY timestamp ASC
''', (start_date, end_date))
rows = cursor.fetchall()
conn.close()
return [dict(row) for row in rows]
def get_unique_peers():
conn = get_db_connection()
cursor = conn.cursor()
query = '''
SELECT neighbor_address, ip_version, asn, description
FROM (
SELECT *, ROW_NUMBER() OVER(PARTITION BY neighbor_address ORDER BY timestamp DESC) as rn
FROM peer_history
)
WHERE rn = 1
ORDER BY ip_version, neighbor_address
'''
cursor.execute(query)
rows = cursor.fetchall()
conn.close()
return [dict(row) for row in rows]
def cleanup_old_data():
conn = get_db_connection()
cursor = conn.cursor()
cutoff_date = (datetime.utcnow() - timedelta(days=RETENTION_DAYS)).isoformat()
cursor.execute("DELETE FROM peer_history WHERE timestamp < ?", (cutoff_date,))
deleted_peers = cursor.rowcount
cursor.execute("DELETE FROM total_routes_history WHERE timestamp < ?", (cutoff_date,))
deleted_totals = cursor.rowcount
if deleted_peers > 0 or deleted_totals > 0:
conn.commit()
cursor.execute("VACUUM")
print(f" -> {deleted_peers} peer records and {deleted_totals} total records cleaned.")
else:
print(" -> No old records found to clean up.")
conn.commit()
conn.close()