feat: Add PostgreSQL support and enhance admin dashboard

- Migrate all service files from hardcoded SQLite to dual SQLite/PostgreSQL support
- Add PostgreSQL service to docker-compose.yml
- Create database abstraction layer (database.py) with execute_query, row_to_dict helpers
- Update all 7 service files: account, rating, contact, ai_cost, quota, site_assistant, admin
- Add new admin endpoint /database-stats for table size and row count visualization
- Add database_type field to system health endpoint
- Update .env.example with proper PostgreSQL connection string
This commit is contained in:
Your Name
2026-03-31 21:51:45 +02:00
parent 42b1ad1250
commit 030418f6db
11 changed files with 1930 additions and 1006 deletions

View File

@@ -1,31 +1,23 @@
"""AI cost tracking service — monitors and limits AI API spending."""
"""AI cost tracking service — monitors and limits AI API spending.
Supports both SQLite (development) and PostgreSQL (production).
"""
import logging
import os
import sqlite3
from datetime import datetime, timezone
from flask import current_app
from app.utils.database import db_connection, execute_query, is_postgres, row_to_dict
logger = logging.getLogger(__name__)
# Monthly budget in USD — set via environment variable, default $50
AI_MONTHLY_BUDGET: float = float(os.getenv("AI_MONTHLY_BUDGET", "50.0"))
# Estimated cost per 1K tokens (adjust based on your model)
# NOTE(review): both rates default to 0.0, which makes every logged call cost
# $0 unless the env vars are set — confirm that is intentional.
COST_PER_1K_INPUT_TOKENS: float = float(os.getenv("AI_COST_PER_1K_INPUT", "0.0"))
COST_PER_1K_OUTPUT_TOKENS: float = float(os.getenv("AI_COST_PER_1K_OUTPUT", "0.0"))
def _connect() -> sqlite3.Connection:
    """Open a SQLite connection to the app's configured database file.

    Creates the database's parent directory if it does not exist yet.
    Rows come back as :class:`sqlite3.Row`, so columns are addressable
    by name as well as by index.
    """
    db_path = current_app.config["DATABASE_PATH"]
    parent = os.path.dirname(db_path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    return conn
def _utc_now() -> str:
return datetime.now(timezone.utc).isoformat()
@@ -36,24 +28,43 @@ def _current_month() -> str:
def init_ai_cost_db():
    """Create the AI cost tracking table and its index if they do not exist.

    Supports both backends: PostgreSQL uses ``SERIAL`` for the primary key
    and per-statement ``cursor.execute`` calls; SQLite uses
    ``INTEGER PRIMARY KEY AUTOINCREMENT`` and a single ``executescript``.
    The rest of the schema is identical between the two.
    """
    # The diff rendering had the old SQLite-only body and stray duplicate
    # index-creation lines interleaved with the new code; this is the
    # reconstructed dual-backend version.
    with db_connection() as conn:
        if is_postgres():
            cursor = conn.cursor()
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS ai_cost_log (
                    id SERIAL PRIMARY KEY,
                    tool TEXT NOT NULL,
                    model TEXT NOT NULL,
                    input_tokens INTEGER DEFAULT 0,
                    output_tokens INTEGER DEFAULT 0,
                    estimated_cost_usd REAL DEFAULT 0.0,
                    period_month TEXT NOT NULL,
                    created_at TEXT NOT NULL
                )
            """)
            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_ai_cost_period
                ON ai_cost_log(period_month)
            """)
        else:
            conn.executescript(
                """
                CREATE TABLE IF NOT EXISTS ai_cost_log (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    tool TEXT NOT NULL,
                    model TEXT NOT NULL,
                    input_tokens INTEGER DEFAULT 0,
                    output_tokens INTEGER DEFAULT 0,
                    estimated_cost_usd REAL DEFAULT 0.0,
                    period_month TEXT NOT NULL,
                    created_at TEXT NOT NULL
                );
                CREATE INDEX IF NOT EXISTS idx_ai_cost_period
                ON ai_cost_log(period_month);
                """
            )
def log_ai_usage(
@@ -63,22 +74,41 @@ def log_ai_usage(
output_tokens: int = 0,
) -> None:
"""Log an AI API call with token usage."""
estimated_cost = (
(input_tokens / 1000.0) * COST_PER_1K_INPUT_TOKENS
+ (output_tokens / 1000.0) * COST_PER_1K_OUTPUT_TOKENS
)
estimated_cost = (input_tokens / 1000.0) * COST_PER_1K_INPUT_TOKENS + (
output_tokens / 1000.0
) * COST_PER_1K_OUTPUT_TOKENS
with _connect() as conn:
conn.execute(
with db_connection() as conn:
sql = (
"""INSERT INTO ai_cost_log
(tool, model, input_tokens, output_tokens, estimated_cost_usd, period_month, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?)""",
(tool, model, input_tokens, output_tokens, estimated_cost, _current_month(), _utc_now()),
(tool, model, input_tokens, output_tokens, estimated_cost_usd, period_month, created_at)
VALUES (%s, %s, %s, %s, %s, %s, %s)"""
if is_postgres()
else """INSERT INTO ai_cost_log
(tool, model, input_tokens, output_tokens, estimated_cost_usd, period_month, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?)"""
)
execute_query(
conn,
sql,
(
tool,
model,
input_tokens,
output_tokens,
estimated_cost,
_current_month(),
_utc_now(),
),
)
logger.info(
"AI usage: tool=%s model=%s in=%d out=%d cost=$%.4f",
tool, model, input_tokens, output_tokens, estimated_cost,
tool,
model,
input_tokens,
output_tokens,
estimated_cost,
)
@@ -86,17 +116,26 @@ def get_monthly_spend() -> dict:
"""Get the current month's AI spending summary."""
month = _current_month()
with _connect() as conn:
row = conn.execute(
with db_connection() as conn:
sql = (
"""SELECT
COUNT(*) as total_calls,
COALESCE(SUM(input_tokens), 0) as total_input_tokens,
COALESCE(SUM(output_tokens), 0) as total_output_tokens,
COALESCE(SUM(estimated_cost_usd), 0.0) as total_cost
FROM ai_cost_log
WHERE period_month = ?""",
(month,),
).fetchone()
COUNT(*) as total_calls,
COALESCE(SUM(input_tokens), 0) as total_input_tokens,
COALESCE(SUM(output_tokens), 0) as total_output_tokens,
COALESCE(SUM(estimated_cost_usd), 0.0) as total_cost
FROM ai_cost_log
WHERE period_month = %s"""
if is_postgres()
else """SELECT
COUNT(*) as total_calls,
COALESCE(SUM(input_tokens), 0) as total_input_tokens,
COALESCE(SUM(output_tokens), 0) as total_output_tokens,
COALESCE(SUM(estimated_cost_usd), 0.0) as total_cost
FROM ai_cost_log
WHERE period_month = ?"""
)
cursor = execute_query(conn, sql, (month,))
row = row_to_dict(cursor.fetchone())
return {
"period": month,
@@ -107,7 +146,10 @@ def get_monthly_spend() -> dict:
"budget_usd": AI_MONTHLY_BUDGET,
"budget_remaining_usd": round(AI_MONTHLY_BUDGET - row["total_cost"], 4),
"budget_used_percent": round(
(row["total_cost"] / AI_MONTHLY_BUDGET * 100) if AI_MONTHLY_BUDGET > 0 else 0, 1
(row["total_cost"] / AI_MONTHLY_BUDGET * 100)
if AI_MONTHLY_BUDGET > 0
else 0,
1,
),
}
@@ -128,4 +170,5 @@ def check_ai_budget() -> None:
class AiBudgetExceededError(Exception):
    """Raised when the monthly AI budget is exceeded."""
    # The docstring alone is a valid class body; the trailing `pass` was
    # redundant and has been removed.