"""SQLite-backed storage for BGP peer and total-route statistics history."""
import os
import sqlite3
from datetime import datetime, timedelta, timezone

from dotenv import load_dotenv
load_dotenv()
|
|
|
|
DB_PATH = os.getenv("BGP_STATS_DB_PATH", "bgp_stats.db")
|
|
RETENTION_DAYS = int(os.getenv("BGP_STATS_RETENTION_DAYS", 30))
|
|
|
|
def get_db_connection():
|
|
os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
|
|
conn = sqlite3.connect(DB_PATH)
|
|
conn.row_factory = sqlite3.Row
|
|
return conn
|
|
|
|
def init_db():
|
|
conn = get_db_connection()
|
|
cursor = conn.cursor()
|
|
cursor.execute('''
|
|
CREATE TABLE IF NOT EXISTS peer_history (
|
|
timestamp TEXT NOT NULL,
|
|
neighbor_address TEXT NOT NULL,
|
|
ip_version TEXT NOT NULL,
|
|
asn INTEGER,
|
|
description TEXT,
|
|
prefixes_received INTEGER,
|
|
prefixes_sent INTEGER,
|
|
PRIMARY KEY (timestamp, neighbor_address)
|
|
)
|
|
''')
|
|
cursor.execute('''
|
|
CREATE TABLE IF NOT EXISTS total_routes_history (
|
|
timestamp TEXT NOT NULL,
|
|
ip_version TEXT NOT NULL,
|
|
total_routes INTEGER,
|
|
total_fib INTEGER,
|
|
PRIMARY KEY (timestamp, ip_version)
|
|
)
|
|
''')
|
|
conn.commit()
|
|
conn.close()
|
|
|
|
def add_peer_stats(stats_list):
|
|
conn = get_db_connection()
|
|
cursor = conn.cursor()
|
|
try:
|
|
cursor.executemany('''
|
|
INSERT OR IGNORE INTO peer_history
|
|
(timestamp, neighbor_address, ip_version, asn, description, prefixes_received, prefixes_sent)
|
|
VALUES (:timestamp, :neighbor_address, :ip_version, :asn, :description, :prefixes_received, :prefixes_sent)
|
|
''', stats_list)
|
|
conn.commit()
|
|
finally:
|
|
conn.close()
|
|
|
|
def get_peer_history(neighbor_address, start_date, end_date, interval_minutes=10):
|
|
conn = get_db_connection()
|
|
cursor = conn.cursor()
|
|
|
|
interval_seconds = interval_minutes * 60
|
|
outlier_threshold_received = 10
|
|
|
|
query = f'''
|
|
SELECT
|
|
MAX(timestamp) as timestamp,
|
|
CAST(AVG(CASE
|
|
WHEN prefixes_received > ? THEN prefixes_received
|
|
ELSE NULL
|
|
END) AS INTEGER) as prefixes_received,
|
|
CAST(AVG(prefixes_sent) AS INTEGER) as prefixes_sent
|
|
FROM peer_history
|
|
WHERE neighbor_address = ? AND timestamp BETWEEN ? AND ?
|
|
GROUP BY
|
|
CAST(strftime('%s', timestamp) / ? AS INTEGER)
|
|
ORDER BY timestamp ASC
|
|
'''
|
|
|
|
cursor.execute(query, (outlier_threshold_received, neighbor_address, start_date, end_date, interval_seconds))
|
|
rows = cursor.fetchall()
|
|
conn.close()
|
|
return [dict(row) for row in rows]
|
|
|
|
def add_total_routes_stats(stats_list):
|
|
conn = get_db_connection()
|
|
cursor = conn.cursor()
|
|
try:
|
|
cursor.executemany('''
|
|
INSERT OR IGNORE INTO total_routes_history
|
|
(timestamp, ip_version, total_routes, total_fib)
|
|
VALUES (:timestamp, :ip_version, :total_routes, :total_fib)
|
|
''', stats_list)
|
|
conn.commit()
|
|
finally:
|
|
conn.close()
|
|
|
|
def get_total_routes_history(start_date, end_date, interval_minutes=10):
|
|
conn = get_db_connection()
|
|
cursor = conn.cursor()
|
|
|
|
interval_seconds = interval_minutes * 60
|
|
outlier_threshold = 10000
|
|
|
|
query = f'''
|
|
SELECT
|
|
MAX(timestamp) as timestamp,
|
|
ip_version,
|
|
CAST(AVG(CASE
|
|
WHEN total_routes > ? THEN total_routes
|
|
ELSE NULL
|
|
END) AS INTEGER) as total_routes
|
|
FROM total_routes_history
|
|
WHERE timestamp BETWEEN ? AND ?
|
|
GROUP BY
|
|
ip_version,
|
|
CAST(strftime('%s', timestamp) / ? AS INTEGER)
|
|
ORDER BY timestamp ASC
|
|
'''
|
|
|
|
cursor.execute(query, (outlier_threshold, start_date, end_date, interval_seconds))
|
|
rows = cursor.fetchall()
|
|
conn.close()
|
|
return [dict(row) for row in rows]
|
|
|
|
def get_unique_peers():
|
|
conn = get_db_connection()
|
|
cursor = conn.cursor()
|
|
query = '''
|
|
SELECT neighbor_address, ip_version, asn, description
|
|
FROM (
|
|
SELECT *, ROW_NUMBER() OVER(PARTITION BY neighbor_address ORDER BY timestamp DESC) as rn
|
|
FROM peer_history
|
|
)
|
|
WHERE rn = 1
|
|
ORDER BY ip_version, neighbor_address
|
|
'''
|
|
cursor.execute(query)
|
|
rows = cursor.fetchall()
|
|
conn.close()
|
|
return [dict(row) for row in rows]
|
|
|
|
def cleanup_old_data():
|
|
conn = get_db_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cutoff_date = (datetime.utcnow() - timedelta(days=RETENTION_DAYS)).isoformat()
|
|
|
|
cursor.execute("DELETE FROM peer_history WHERE timestamp < ?", (cutoff_date,))
|
|
deleted_peers = cursor.rowcount
|
|
|
|
cursor.execute("DELETE FROM total_routes_history WHERE timestamp < ?", (cutoff_date,))
|
|
deleted_totals = cursor.rowcount
|
|
|
|
if deleted_peers > 0 or deleted_totals > 0:
|
|
conn.commit()
|
|
cursor.execute("VACUUM")
|
|
print(f" -> {deleted_peers} peer records and {deleted_totals} total records cleaned.")
|
|
else:
|
|
print(" -> No old records found to clean up.")
|
|
|
|
conn.commit()
|
|
conn.close() |