feat: track user interest in paid plans and enhance admin analytics
- Added tracking for user interest in the pro plan on the PricingPage. - Implemented auto-retry for CSRF token failures in API response interceptor. - Introduced new interfaces and API functions for enhanced admin analytics, including ratings, tool usage, user stats, plan interest, and system health.
This commit is contained in:
@@ -5,9 +5,15 @@ from app.extensions import limiter
|
||||
from app.services.account_service import get_user_by_id, is_user_admin, set_user_role, update_user_plan
|
||||
from app.services.admin_service import (
|
||||
get_admin_overview,
|
||||
get_admin_ratings_detail,
|
||||
get_admin_system_health,
|
||||
get_admin_tool_analytics,
|
||||
get_admin_user_registration_stats,
|
||||
get_plan_interest_summary,
|
||||
list_admin_contacts,
|
||||
list_admin_users,
|
||||
mark_admin_contact_read,
|
||||
record_plan_interest_click,
|
||||
)
|
||||
from app.services.ai_cost_service import get_monthly_spend
|
||||
from app.utils.auth import get_current_user_id
|
||||
@@ -155,3 +161,89 @@ def ai_cost_dashboard():
|
||||
|
||||
spend = get_monthly_spend()
|
||||
return jsonify(spend), 200
|
||||
|
||||
|
||||
@admin_bp.route("/ratings", methods=["GET"])
|
||||
@limiter.limit("60/hour")
|
||||
def admin_ratings_route():
|
||||
"""Return detailed ratings and reviews for admin inspection."""
|
||||
auth_error = _require_admin_session()
|
||||
if auth_error:
|
||||
return auth_error
|
||||
|
||||
try:
|
||||
page = max(1, int(request.args.get("page", 1)))
|
||||
except ValueError:
|
||||
page = 1
|
||||
|
||||
try:
|
||||
per_page = max(1, min(int(request.args.get("per_page", 20)), 100))
|
||||
except ValueError:
|
||||
per_page = 20
|
||||
|
||||
tool_filter = request.args.get("tool", "").strip()
|
||||
|
||||
return jsonify(get_admin_ratings_detail(page=page, per_page=per_page, tool_filter=tool_filter)), 200
|
||||
|
||||
|
||||
@admin_bp.route("/tool-analytics", methods=["GET"])
|
||||
@limiter.limit("60/hour")
|
||||
def admin_tool_analytics_route():
|
||||
"""Return detailed per-tool usage analytics."""
|
||||
auth_error = _require_admin_session()
|
||||
if auth_error:
|
||||
return auth_error
|
||||
|
||||
return jsonify(get_admin_tool_analytics()), 200
|
||||
|
||||
|
||||
@admin_bp.route("/user-stats", methods=["GET"])
|
||||
@limiter.limit("60/hour")
|
||||
def admin_user_stats_route():
|
||||
"""Return user registration trends and breakdown."""
|
||||
auth_error = _require_admin_session()
|
||||
if auth_error:
|
||||
return auth_error
|
||||
|
||||
return jsonify(get_admin_user_registration_stats()), 200
|
||||
|
||||
|
||||
@admin_bp.route("/plan-interest", methods=["GET"])
|
||||
@limiter.limit("60/hour")
|
||||
def admin_plan_interest_route():
|
||||
"""Return paid plan click interest summary."""
|
||||
auth_error = _require_admin_session()
|
||||
if auth_error:
|
||||
return auth_error
|
||||
|
||||
return jsonify(get_plan_interest_summary()), 200
|
||||
|
||||
|
||||
@admin_bp.route("/system-health", methods=["GET"])
|
||||
@limiter.limit("60/hour")
|
||||
def admin_system_health_route():
|
||||
"""Return system health indicators."""
|
||||
auth_error = _require_admin_session()
|
||||
if auth_error:
|
||||
return auth_error
|
||||
|
||||
return jsonify(get_admin_system_health()), 200
|
||||
|
||||
|
||||
@admin_bp.route("/plan-interest/record", methods=["POST"])
|
||||
@limiter.limit("30/hour")
|
||||
def record_plan_interest_route():
|
||||
"""Record a click on a paid plan button — public endpoint."""
|
||||
data = request.get_json(silent=True) or {}
|
||||
plan = str(data.get("plan", "pro")).strip().lower()
|
||||
billing = str(data.get("billing", "monthly")).strip().lower()
|
||||
|
||||
if plan not in ("pro",):
|
||||
plan = "pro"
|
||||
if billing not in ("monthly", "yearly"):
|
||||
billing = "monthly"
|
||||
|
||||
user_id = get_current_user_id()
|
||||
record_plan_interest_click(user_id=user_id, plan=plan, billing=billing)
|
||||
|
||||
return jsonify({"message": "Interest recorded."}), 200
|
||||
|
||||
@@ -285,4 +285,382 @@ def list_admin_contacts(page: int = 1, per_page: int = 20) -> dict:
|
||||
|
||||
|
||||
def mark_admin_contact_read(message_id: int) -> bool:
    """Mark the given admin contact message as read.

    Thin delegation to the storage-layer ``mark_read`` helper; returns
    its boolean result (True when the message existed and was updated —
    presumed from the signature; confirm against ``mark_read``).
    """
    # Fix: the block previously contained a second, unreachable
    # `return mark_read(message_id)` line (dead code) — removed.
    return mark_read(message_id)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Enhanced admin analytics
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _ensure_plan_interest_table():
    """Idempotently create the plan_interest_clicks table and its index."""
    table_ddl = """
        CREATE TABLE IF NOT EXISTS plan_interest_clicks (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            user_id INTEGER,
            plan TEXT NOT NULL,
            billing TEXT NOT NULL DEFAULT 'monthly',
            created_at TEXT NOT NULL
        )
    """
    # Index on created_at supports the time-window aggregates and the
    # "most recent clicks" ordering used by the summary queries.
    index_ddl = (
        "CREATE INDEX IF NOT EXISTS idx_plan_interest_created "
        "ON plan_interest_clicks(created_at)"
    )
    with _connect() as conn:
        conn.execute(table_ddl)
        conn.execute(index_ddl)
|
||||
|
||||
|
||||
def record_plan_interest_click(user_id: int | None, plan: str, billing: str = "monthly") -> None:
    """Persist a single pricing-plan button click.

    ``user_id`` may be None for anonymous visitors. Timestamps are stored
    as ISO-8601 UTC strings so lexicographic comparison matches
    chronological order.
    """
    _ensure_plan_interest_table()
    recorded_at = datetime.now(timezone.utc).isoformat()
    row = (user_id, plan, billing, recorded_at)
    with _connect() as conn:
        conn.execute(
            "INSERT INTO plan_interest_clicks (user_id, plan, billing, created_at) VALUES (?, ?, ?, ?)",
            row,
        )
|
||||
|
||||
|
||||
def get_plan_interest_summary() -> dict:
    """Return summary of paid plan button clicks.

    Aggregates the plan_interest_clicks table into:
      - total / unique-user / 7-day / 30-day click counts,
      - a per (plan, billing) breakdown,
      - the 20 most recent clicks, joined against users for the email.
    """
    # The table is created lazily, so guarantee it exists before querying.
    _ensure_plan_interest_table()
    # ISO-8601 UTC cutoffs; created_at is stored in the same format, so a
    # lexicographic >= comparison matches chronological order.
    cutoff_7d = (datetime.now(timezone.utc) - timedelta(days=7)).isoformat()
    cutoff_30d = (datetime.now(timezone.utc) - timedelta(days=30)).isoformat()

    with _connect() as conn:
        # Single-row aggregate: overall totals plus windowed counts.
        total_row = conn.execute(
            """
            SELECT
                COUNT(*) AS total_clicks,
                COUNT(DISTINCT user_id) AS unique_users,
                COALESCE(SUM(CASE WHEN created_at >= ? THEN 1 ELSE 0 END), 0) AS clicks_last_7d,
                COALESCE(SUM(CASE WHEN created_at >= ? THEN 1 ELSE 0 END), 0) AS clicks_last_30d
            FROM plan_interest_clicks
            """,
            (cutoff_7d, cutoff_30d),
        ).fetchone()

        # Click counts per (plan, billing) pair, busiest combination first.
        by_plan_rows = conn.execute(
            """
            SELECT plan, billing, COUNT(*) AS clicks
            FROM plan_interest_clicks
            GROUP BY plan, billing
            ORDER BY clicks DESC
            """
        ).fetchall()

        # Latest 20 clicks; LEFT JOIN keeps anonymous clicks
        # (NULL user_id → NULL email) in the feed.
        recent_rows = conn.execute(
            """
            SELECT
                plan_interest_clicks.id,
                plan_interest_clicks.user_id,
                plan_interest_clicks.plan,
                plan_interest_clicks.billing,
                plan_interest_clicks.created_at,
                users.email
            FROM plan_interest_clicks
            LEFT JOIN users ON users.id = plan_interest_clicks.user_id
            ORDER BY plan_interest_clicks.created_at DESC
            LIMIT 20
            """
        ).fetchall()

    return {
        # total_row should always exist for an aggregate query; the guards
        # keep the payload well-formed regardless.
        "total_clicks": int(total_row["total_clicks"]) if total_row else 0,
        "unique_users": int(total_row["unique_users"]) if total_row else 0,
        "clicks_last_7d": int(total_row["clicks_last_7d"]) if total_row else 0,
        "clicks_last_30d": int(total_row["clicks_last_30d"]) if total_row else 0,
        "by_plan": [
            {"plan": row["plan"], "billing": row["billing"], "clicks": int(row["clicks"])}
            for row in by_plan_rows
        ],
        "recent": [
            {
                "id": row["id"],
                "user_id": row["user_id"],
                "email": row["email"],
                "plan": row["plan"],
                "billing": row["billing"],
                "created_at": row["created_at"],
            }
            for row in recent_rows
        ],
    }
|
||||
|
||||
|
||||
def get_admin_ratings_detail(page: int = 1, per_page: int = 20, tool_filter: str = "") -> dict:
    """Return detailed ratings list with feedback for the admin dashboard.

    Args:
        page: 1-based page number (clamped to >= 1).
        per_page: page size (clamped to 1..100).
        tool_filter: when non-empty, restrict rows to this exact tool name.

    Returns:
        dict with paginated ``items``, the echoed ``page``/``per_page``,
        the filtered ``total`` row count, and per-tool ``tool_summaries``
        (count, average, positive = rating >= 4, negative = rating <= 2).
    """
    # Defensive clamping: the route layer already sanitises these, but the
    # service stays safe when called directly.
    safe_page = max(1, page)
    safe_per_page = max(1, min(per_page, 100))
    offset = (safe_page - 1) * safe_per_page

    with _connect() as conn:
        # Total count (honours the optional tool filter so pagination
        # metadata matches the item query below).
        count_sql = "SELECT COUNT(*) AS total FROM tool_ratings"
        count_params: list[object] = []
        if tool_filter:
            count_sql += " WHERE tool = ?"
            count_params.append(tool_filter)

        total_row = conn.execute(count_sql, tuple(count_params)).fetchone()

        # Paginated ratings, newest first. SQL is assembled with fixed
        # fragments only; user input is always bound via placeholders.
        sql = """
            SELECT id, tool, rating, feedback, tag, fingerprint, created_at
            FROM tool_ratings
        """
        params: list[object] = []
        if tool_filter:
            sql += " WHERE tool = ?"
            params.append(tool_filter)
        sql += " ORDER BY created_at DESC LIMIT ? OFFSET ?"
        params.extend([safe_per_page, offset])

        rows = conn.execute(sql, tuple(params)).fetchall()

        # Per-tool summary — intentionally unfiltered so the dashboard can
        # show the full landscape alongside a filtered list.
        summary_rows = conn.execute(
            """
            SELECT
                tool,
                COUNT(*) AS count,
                COALESCE(AVG(rating), 0) AS average,
                COALESCE(SUM(CASE WHEN rating >= 4 THEN 1 ELSE 0 END), 0) AS positive,
                COALESCE(SUM(CASE WHEN rating <= 2 THEN 1 ELSE 0 END), 0) AS negative
            FROM tool_ratings
            GROUP BY tool
            ORDER BY count DESC
            """
        ).fetchall()

    return {
        "items": [
            {
                "id": row["id"],
                "tool": row["tool"],
                "rating": int(row["rating"]),
                # Normalise NULLs to empty strings for the JSON payload.
                "feedback": row["feedback"] or "",
                "tag": row["tag"] or "",
                "created_at": row["created_at"],
            }
            for row in rows
        ],
        "page": safe_page,
        "per_page": safe_per_page,
        "total": int(total_row["total"]) if total_row else 0,
        "tool_summaries": [
            {
                "tool": row["tool"],
                "count": int(row["count"]),
                "average": round(row["average"], 1),
                "positive": int(row["positive"]),
                "negative": int(row["negative"]),
            }
            for row in summary_rows
        ],
    }
|
||||
|
||||
|
||||
def get_admin_tool_analytics() -> dict:
    """Return detailed per-tool usage analytics for the admin dashboard.

    Built from the file_history table:
      - ``tools``: per-tool lifetime totals, completion/failure split,
        success rate, 24h/7d/30d windows, and distinct-user counts,
      - ``daily_usage``: per-day totals for the last 30 days,
      - ``common_errors``: top 15 failure signatures in the last 30 days.
    """
    # ISO-8601 UTC cutoffs; created_at uses the same format, so string
    # comparison matches chronological order.
    cutoff_24h = (datetime.now(timezone.utc) - timedelta(days=1)).isoformat()
    cutoff_7d = (datetime.now(timezone.utc) - timedelta(days=7)).isoformat()
    cutoff_30d = (datetime.now(timezone.utc) - timedelta(days=30)).isoformat()

    with _connect() as conn:
        # Per-tool detailed stats (lifetime plus rolling windows).
        tool_rows = conn.execute(
            """
            SELECT
                tool,
                COUNT(*) AS total_runs,
                COALESCE(SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END), 0) AS completed,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed,
                COALESCE(SUM(CASE WHEN created_at >= ? THEN 1 ELSE 0 END), 0) AS runs_24h,
                COALESCE(SUM(CASE WHEN created_at >= ? THEN 1 ELSE 0 END), 0) AS runs_7d,
                COALESCE(SUM(CASE WHEN created_at >= ? THEN 1 ELSE 0 END), 0) AS runs_30d,
                COUNT(DISTINCT user_id) AS unique_users
            FROM file_history
            GROUP BY tool
            ORDER BY total_runs DESC
            """,
            (cutoff_24h, cutoff_7d, cutoff_30d),
        ).fetchall()

        # Daily usage for the last 30 days, oldest day first (chart-ready).
        daily_rows = conn.execute(
            """
            SELECT
                DATE(created_at) AS day,
                COUNT(*) AS total,
                COALESCE(SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END), 0) AS completed,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed
            FROM file_history
            WHERE created_at >= ?
            GROUP BY DATE(created_at)
            ORDER BY day ASC
            """,
            (cutoff_30d,),
        ).fetchall()

        # Most common errors: failures grouped by identical metadata_json.
        # NOTE(review): grouping on the raw JSON string means two failures
        # with the same error but different metadata count separately.
        error_rows = conn.execute(
            """
            SELECT
                tool,
                metadata_json,
                COUNT(*) AS occurrences
            FROM file_history
            WHERE status = 'failed' AND created_at >= ?
            GROUP BY tool, metadata_json
            ORDER BY occurrences DESC
            LIMIT 15
            """,
            (cutoff_30d,),
        ).fetchall()

    return {
        "tools": [
            {
                "tool": row["tool"],
                "total_runs": int(row["total_runs"]),
                "completed": int(row["completed"]),
                "failed": int(row["failed"]),
                # Percentage with one decimal; guarded against division by zero.
                "success_rate": round((int(row["completed"]) / int(row["total_runs"])) * 100, 1) if int(row["total_runs"]) > 0 else 0,
                "runs_24h": int(row["runs_24h"]),
                "runs_7d": int(row["runs_7d"]),
                "runs_30d": int(row["runs_30d"]),
                "unique_users": int(row["unique_users"]),
            }
            for row in tool_rows
        ],
        "daily_usage": [
            {
                "day": row["day"],
                "total": int(row["total"]),
                "completed": int(row["completed"]),
                "failed": int(row["failed"]),
            }
            for row in daily_rows
        ],
        "common_errors": [
            {
                "tool": row["tool"],
                # _parse_metadata decodes metadata_json; falls back to a
                # generic label when no "error" key is present.
                "error": _parse_metadata(row["metadata_json"]).get("error", "Unknown error"),
                "occurrences": int(row["occurrences"]),
            }
            for row in error_rows
        ],
    }
|
||||
|
||||
|
||||
def get_admin_user_registration_stats() -> dict:
    """Return user registration trends and breakdown.

    Built from the users table (joined against file_history for activity):
      - overall totals plus 7-day / 30-day signup windows,
      - pro vs free plan split,
      - per-day registrations for the last 30 days,
      - top 10 most active users by task count.
    """
    # ISO-8601 UTC cutoffs; created_at is stored in the same format, so
    # string comparison matches chronological order.
    cutoff_7d = (datetime.now(timezone.utc) - timedelta(days=7)).isoformat()
    cutoff_30d = (datetime.now(timezone.utc) - timedelta(days=30)).isoformat()

    with _connect() as conn:
        # Single-row aggregate over the whole users table.
        # NOTE(review): assumes plan values are exactly 'pro'/'free';
        # other values would fall outside both counts — confirm schema.
        totals_row = conn.execute(
            """
            SELECT
                COUNT(*) AS total,
                COALESCE(SUM(CASE WHEN created_at >= ? THEN 1 ELSE 0 END), 0) AS last_7d,
                COALESCE(SUM(CASE WHEN created_at >= ? THEN 1 ELSE 0 END), 0) AS last_30d,
                COALESCE(SUM(CASE WHEN plan = 'pro' THEN 1 ELSE 0 END), 0) AS pro_count,
                COALESCE(SUM(CASE WHEN plan = 'free' THEN 1 ELSE 0 END), 0) AS free_count
            FROM users
            """,
            (cutoff_7d, cutoff_30d),
        ).fetchone()

        # Daily registrations for the last 30 days, oldest first.
        daily_rows = conn.execute(
            """
            SELECT DATE(created_at) AS day, COUNT(*) AS registrations
            FROM users
            WHERE created_at >= ?
            GROUP BY DATE(created_at)
            ORDER BY day ASC
            """,
            (cutoff_30d,),
        ).fetchall()

        # Most active users (by task count). Inner JOIN excludes users
        # with zero tasks by construction.
        active_rows = conn.execute(
            """
            SELECT
                users.id,
                users.email,
                users.plan,
                users.created_at,
                COUNT(file_history.id) AS total_tasks
            FROM users
            JOIN file_history ON file_history.user_id = users.id
            GROUP BY users.id
            ORDER BY total_tasks DESC
            LIMIT 10
            """
        ).fetchall()

    return {
        "total_users": int(totals_row["total"]) if totals_row else 0,
        "new_last_7d": int(totals_row["last_7d"]) if totals_row else 0,
        "new_last_30d": int(totals_row["last_30d"]) if totals_row else 0,
        "pro_users": int(totals_row["pro_count"]) if totals_row else 0,
        "free_users": int(totals_row["free_count"]) if totals_row else 0,
        "daily_registrations": [
            {"day": row["day"], "count": int(row["registrations"])}
            for row in daily_rows
        ],
        "most_active_users": [
            {
                "id": row["id"],
                "email": row["email"],
                "plan": row["plan"],
                "created_at": row["created_at"],
                "total_tasks": int(row["total_tasks"]),
            }
            for row in active_rows
        ],
    }
|
||||
|
||||
|
||||
def get_admin_system_health() -> dict:
    """Return system health indicators for the admin dashboard.

    Combines AI configuration/budget state, the last-hour task error rate
    from file_history, and the on-disk SQLite database size.
    """
    # Local import — NOTE(review): presumably avoids a circular import at
    # module load time; confirm against the package import graph.
    from app.services.openrouter_config_service import get_openrouter_settings

    ai_cost_summary = get_monthly_spend()
    settings = get_openrouter_settings()

    with _connect() as conn:
        # Recent error rate (last 1h); cutoff is an ISO-8601 UTC string
        # matching the created_at storage format.
        cutoff_1h = (datetime.now(timezone.utc) - timedelta(hours=1)).isoformat()
        error_row = conn.execute(
            """
            SELECT
                COUNT(*) AS total,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed
            FROM file_history
            WHERE created_at >= ?
            """,
            (cutoff_1h,),
        ).fetchone()

    # DB size on disk in MB; 0 when the file does not exist (e.g. tests).
    db_path = current_app.config["DATABASE_PATH"]
    db_size_mb = round(os.path.getsize(db_path) / (1024 * 1024), 2) if os.path.exists(db_path) else 0

    error_total = int(error_row["total"]) if error_row else 0
    error_failed = int(error_row["failed"]) if error_row else 0

    return {
        # A placeholder/empty key means AI features are unconfigured.
        "ai_configured": bool(settings.api_key),
        "ai_model": settings.model,
        "ai_budget_used_percent": ai_cost_summary["budget_used_percent"],
        # Percentage with one decimal; guarded against division by zero.
        "error_rate_1h": round((error_failed / error_total) * 100, 1) if error_total > 0 else 0,
        "tasks_last_1h": error_total,
        "failures_last_1h": error_failed,
        "database_size_mb": db_size_mb,
    }
|
||||
@@ -55,8 +55,9 @@ def _call_openrouter(
|
||||
settings = get_openrouter_settings()
|
||||
|
||||
if not settings.api_key:
|
||||
logger.error("OPENROUTER_API_KEY is not set or is a placeholder value.")
|
||||
raise PdfAiError(
|
||||
"AI service is not configured. Set OPENROUTER_API_KEY in the application configuration."
|
||||
"AI features are temporarily unavailable. Our team has been notified."
|
||||
)
|
||||
|
||||
messages = [
|
||||
@@ -79,9 +80,40 @@ def _call_openrouter(
|
||||
},
|
||||
timeout=60,
|
||||
)
|
||||
|
||||
if response.status_code == 401:
|
||||
logger.error("OpenRouter API key is invalid or expired (401).")
|
||||
raise PdfAiError(
|
||||
"AI features are temporarily unavailable due to a configuration issue. Our team has been notified."
|
||||
)
|
||||
|
||||
if response.status_code == 402:
|
||||
logger.error("OpenRouter account has insufficient credits (402).")
|
||||
raise PdfAiError(
|
||||
"AI processing credits have been exhausted. Please try again later."
|
||||
)
|
||||
|
||||
if response.status_code == 429:
|
||||
logger.warning("OpenRouter rate limit reached (429).")
|
||||
raise PdfAiError(
|
||||
"AI service is experiencing high demand. Please wait a moment and try again."
|
||||
)
|
||||
|
||||
if response.status_code >= 500:
|
||||
logger.error("OpenRouter server error (%s).", response.status_code)
|
||||
raise PdfAiError(
|
||||
"AI service provider is experiencing issues. Please try again shortly."
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
|
||||
# Handle model-level errors returned inside a 200 response
|
||||
if data.get("error"):
|
||||
error_msg = data["error"].get("message", "") if isinstance(data["error"], dict) else str(data["error"])
|
||||
logger.error("OpenRouter returned an error payload: %s", error_msg)
|
||||
raise PdfAiError("AI service encountered an issue. Please try again.")
|
||||
|
||||
reply = (
|
||||
data.get("choices", [{}])[0]
|
||||
.get("message", {})
|
||||
@@ -107,10 +139,15 @@ def _call_openrouter(
|
||||
|
||||
return reply
|
||||
|
||||
except PdfAiError:
|
||||
raise
|
||||
except requests.exceptions.Timeout:
|
||||
raise PdfAiError("AI service timed out. Please try again.")
|
||||
except requests.exceptions.ConnectionError:
|
||||
logger.error("Cannot connect to OpenRouter API at %s", settings.base_url)
|
||||
raise PdfAiError("AI service is unreachable. Please try again shortly.")
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"OpenRouter API error: {e}")
|
||||
logger.error("OpenRouter API error: %s", e)
|
||||
raise PdfAiError("AI service is temporarily unavailable.")
|
||||
|
||||
|
||||
|
||||
@@ -246,7 +246,8 @@ def stream_site_assistant_chat(
|
||||
check_ai_budget()
|
||||
settings = get_openrouter_settings()
|
||||
if not settings.api_key:
|
||||
raise RuntimeError("OPENROUTER_API_KEY is not configured for the application.")
|
||||
logger.error("OPENROUTER_API_KEY is not set — assistant AI unavailable.")
|
||||
raise RuntimeError("AI assistant is temporarily unavailable. Please try again later.")
|
||||
|
||||
response_model = settings.model
|
||||
messages = _build_ai_messages(
|
||||
@@ -436,7 +437,8 @@ def _request_ai_reply(
|
||||
settings = get_openrouter_settings()
|
||||
|
||||
if not settings.api_key:
|
||||
raise RuntimeError("OPENROUTER_API_KEY is not configured for the application.")
|
||||
logger.error("OPENROUTER_API_KEY is not set — assistant AI unavailable.")
|
||||
raise RuntimeError("AI assistant is temporarily unavailable. Please try again later.")
|
||||
|
||||
messages = _build_ai_messages(
|
||||
message=message,
|
||||
|
||||
Reference in New Issue
Block a user