ميزة: تحديث صفحات الخصوصية والشروط مع تاريخ آخر تحديث ثابت وفترة احتفاظ ديناميكية بالملفات

ميزة: إضافة خدمة تحليلات لتكامل Google Analytics

اختبار: تحديث اختبارات خدمة واجهة برمجة التطبيقات (API) لتعكس تغييرات نقاط النهاية

إصلاح: تعديل خدمة واجهة برمجة التطبيقات (API) لدعم تحميل ملفات متعددة ومصادقة المستخدم

ميزة: تطبيق مخزن مصادقة باستخدام Zustand لإدارة المستخدمين

إصلاح: تحسين إعدادات Nginx لتعزيز الأمان ودعم التحليلات
This commit is contained in:
Your Name
2026-03-07 11:14:05 +02:00
parent cfbcc8bd79
commit 0ad2ba0f02
73 changed files with 4696 additions and 462 deletions

View File

@@ -0,0 +1,517 @@
"""User accounts, API keys, history, and usage storage using SQLite."""
import hashlib
import json
import logging
import os
import secrets
import sqlite3
from datetime import datetime, timezone
from flask import current_app
from werkzeug.security import check_password_hash, generate_password_hash
logger = logging.getLogger(__name__)
# Supported subscription tiers; normalize_plan() coerces anything else to "free".
VALID_PLANS = {"free", "pro"}
def _utc_now() -> str:
    """Return the current UTC time as an ISO-8601 timestamp string."""
    now = datetime.now(timezone.utc)
    return now.isoformat()
def get_current_period_month() -> str:
    """Return the active usage period as a "YYYY-MM" string (UTC)."""
    today = datetime.now(timezone.utc)
    return f"{today.year:04d}-{today.month:02d}"
def normalize_plan(plan: str | None) -> str:
"""Normalize plan values to the supported set."""
return "pro" if plan == "pro" else "free"
def _connect() -> sqlite3.Connection:
    """Open the configured SQLite database, creating its directory if needed.

    Rows are returned as sqlite3.Row so columns can be read by name, and
    foreign-key enforcement is switched on for this connection.
    """
    db_path = current_app.config["DATABASE_PATH"]
    parent_dir = os.path.dirname(db_path)
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    conn.execute("PRAGMA foreign_keys = ON")
    return conn
def _column_exists(conn: sqlite3.Connection, table_name: str, column_name: str) -> bool:
    """Return True when *table_name* already has a column named *column_name*."""
    # PRAGMA statements cannot take bound parameters; table_name is an
    # internal identifier here, never user input.
    info_rows = conn.execute(f"PRAGMA table_info({table_name})").fetchall()
    existing = {entry["name"] for entry in info_rows}
    return column_name in existing
def _serialize_user(row: sqlite3.Row | None) -> dict | None:
    """Project a users row onto the public, API-safe shape (or None).

    The password hash is intentionally never included; the stored plan is
    normalized before being exposed.
    """
    if row is None:
        return None
    serialized = dict(
        id=row["id"],
        email=row["email"],
        plan=normalize_plan(row["plan"]),
        created_at=row["created_at"],
    )
    return serialized
def _serialize_api_key(row: sqlite3.Row) -> dict:
    """Project an api_keys row onto the public shape (no hash, no raw secret)."""
    fields = ("id", "name", "key_prefix", "last_used_at", "revoked_at", "created_at")
    return {field: row[field] for field in fields}
def _normalize_email(email: str) -> str:
    """Trim and lower-case an email so lookups and uniqueness are case-insensitive."""
    cleaned = email.strip()
    return cleaned.lower()
def _hash_api_key(raw_key: str) -> str:
    """Return the hex SHA-256 digest used to store and look up an API key."""
    digest = hashlib.sha256()
    digest.update(raw_key.encode("utf-8"))
    return digest.hexdigest()
def init_account_db():
    """Initialize user, history, API key, and usage tables if they do not exist.

    Idempotent: all DDL uses IF NOT EXISTS, and lightweight in-place
    migrations add the `plan` / `updated_at` columns to user tables created
    by older versions of the schema.
    """
    with _connect() as conn:
        conn.executescript(
            """
            CREATE TABLE IF NOT EXISTS users (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                email TEXT NOT NULL UNIQUE,
                password_hash TEXT NOT NULL,
                plan TEXT NOT NULL DEFAULT 'free',
                created_at TEXT NOT NULL,
                updated_at TEXT NOT NULL
            );
            CREATE TABLE IF NOT EXISTS file_history (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                user_id INTEGER NOT NULL,
                tool TEXT NOT NULL,
                original_filename TEXT,
                output_filename TEXT,
                status TEXT NOT NULL,
                download_url TEXT,
                metadata_json TEXT NOT NULL DEFAULT '{}',
                created_at TEXT NOT NULL,
                FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
            );
            CREATE TABLE IF NOT EXISTS api_keys (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                user_id INTEGER NOT NULL,
                name TEXT NOT NULL,
                key_prefix TEXT NOT NULL,
                key_hash TEXT NOT NULL UNIQUE,
                last_used_at TEXT,
                revoked_at TEXT,
                created_at TEXT NOT NULL,
                FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
            );
            CREATE TABLE IF NOT EXISTS usage_events (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                user_id INTEGER NOT NULL,
                api_key_id INTEGER,
                source TEXT NOT NULL,
                tool TEXT NOT NULL,
                task_id TEXT NOT NULL,
                event_type TEXT NOT NULL,
                created_at TEXT NOT NULL,
                period_month TEXT NOT NULL,
                FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
                FOREIGN KEY (api_key_id) REFERENCES api_keys(id) ON DELETE CASCADE
            );
            CREATE INDEX IF NOT EXISTS idx_file_history_user_created
                ON file_history(user_id, created_at DESC);
            CREATE INDEX IF NOT EXISTS idx_api_keys_user_created
                ON api_keys(user_id, created_at DESC);
            CREATE INDEX IF NOT EXISTS idx_usage_events_user_source_period_event
                ON usage_events(user_id, source, period_month, event_type);
            CREATE INDEX IF NOT EXISTS idx_usage_events_task_lookup
                ON usage_events(user_id, source, task_id, event_type);
            """
        )
        # Migrations for databases created before these columns existed;
        # ALTER TABLE would fail if run against the fresh schema above, so
        # each is guarded by a column-existence check.
        if not _column_exists(conn, "users", "plan"):
            conn.execute(
                "ALTER TABLE users ADD COLUMN plan TEXT NOT NULL DEFAULT 'free'"
            )
        if not _column_exists(conn, "users", "updated_at"):
            conn.execute(
                "ALTER TABLE users ADD COLUMN updated_at TEXT NOT NULL DEFAULT ''"
            )
def create_user(email: str, password: str) -> dict:
    """Insert a new user on the free plan and return the public record.

    The email is normalized before insertion; the password is stored only
    as a werkzeug hash.

    Raises:
        ValueError: If the normalized email is already registered.
    """
    normalized = _normalize_email(email)
    now = _utc_now()
    try:
        with _connect() as conn:
            inserted = conn.execute(
                "INSERT INTO users (email, password_hash, plan, created_at, updated_at) "
                "VALUES (?, ?, 'free', ?, ?)",
                (normalized, generate_password_hash(password), now, now),
            )
            row = conn.execute(
                "SELECT id, email, plan, created_at FROM users WHERE id = ?",
                (inserted.lastrowid,),
            ).fetchone()
    except sqlite3.IntegrityError as exc:
        # UNIQUE constraint on users.email.
        raise ValueError("An account with this email already exists.") from exc
    return _serialize_user(row) or {}
def authenticate_user(email: str, password: str) -> dict | None:
    """Return the public user record for valid credentials, else None."""
    normalized = _normalize_email(email)
    with _connect() as conn:
        row = conn.execute(
            "SELECT * FROM users WHERE email = ?",
            (normalized,),
        ).fetchone()
    if row is None:
        return None
    if not check_password_hash(row["password_hash"], password):
        return None
    return _serialize_user(row)
def get_user_by_id(user_id: int) -> dict | None:
    """Look up one user by primary key and return the public record (or None)."""
    query = "SELECT id, email, plan, created_at FROM users WHERE id = ?"
    with _connect() as conn:
        row = conn.execute(query, (user_id,)).fetchone()
    return _serialize_user(row)
def update_user_plan(user_id: int, plan: str) -> dict | None:
    """Set one user's plan and return the refreshed public record.

    Args:
        user_id: Primary key of the user to update.
        plan: One of VALID_PLANS ("free" or "pro").

    Returns:
        The public user record, or None when no such user exists.

    Raises:
        ValueError: If *plan* is not a supported tier.
    """
    # Validate the raw value BEFORE normalizing: normalize_plan() coerces any
    # unknown value to "free", so a post-normalization membership check could
    # never fail and invalid plans were silently downgraded instead of
    # raising the intended ValueError.
    if plan not in VALID_PLANS:
        raise ValueError("Invalid plan.")
    normalized_plan = normalize_plan(plan)
    with _connect() as conn:
        conn.execute(
            """
            UPDATE users
            SET plan = ?, updated_at = ?
            WHERE id = ?
            """,
            (normalized_plan, _utc_now(), user_id),
        )
        row = conn.execute(
            "SELECT id, email, plan, created_at FROM users WHERE id = ?",
            (user_id,),
        ).fetchone()
    return _serialize_user(row)
def create_api_key(user_id: int, name: str) -> dict:
    """Create one API key for a user.

    Returns the public record plus a "raw_key" entry holding the secret,
    which is only available at creation time — the database keeps just its
    SHA-256 hash and a display prefix.

    Raises:
        ValueError: If the name is empty or longer than 100 characters.
    """
    label = name.strip()
    if not label:
        raise ValueError("API key name is required.")
    if len(label) > 100:
        raise ValueError("API key name must be 100 characters or less.")
    raw_key = f"spdf_{secrets.token_urlsafe(32)}"
    created_at = _utc_now()
    with _connect() as conn:
        cursor = conn.execute(
            "INSERT INTO api_keys (user_id, name, key_prefix, key_hash, created_at) "
            "VALUES (?, ?, ?, ?, ?)",
            (user_id, label, raw_key[:16], _hash_api_key(raw_key), created_at),
        )
        row = conn.execute(
            "SELECT id, name, key_prefix, last_used_at, revoked_at, created_at "
            "FROM api_keys WHERE id = ?",
            (cursor.lastrowid,),
        ).fetchone()
    record = _serialize_api_key(row)
    record["raw_key"] = raw_key
    return record
def list_api_keys(user_id: int) -> list[dict]:
    """Return every API key (active and revoked) for one user, newest first."""
    query = (
        "SELECT id, name, key_prefix, last_used_at, revoked_at, created_at "
        "FROM api_keys WHERE user_id = ? ORDER BY created_at DESC"
    )
    with _connect() as conn:
        rows = conn.execute(query, (user_id,)).fetchall()
    return [_serialize_api_key(row) for row in rows]
def revoke_api_key(user_id: int, key_id: int) -> bool:
    """Mark one API key revoked.

    Returns False when the key does not exist, is owned by another user,
    or was already revoked.
    """
    with _connect() as conn:
        result = conn.execute(
            "UPDATE api_keys SET revoked_at = ? "
            "WHERE id = ? AND user_id = ? AND revoked_at IS NULL",
            (_utc_now(), key_id, user_id),
        )
        return result.rowcount > 0
def get_api_key_actor(raw_key: str) -> dict | None:
    """Resolve one raw API key into the owning active user context.

    Returns None for an empty, unknown, or revoked key. On success the
    key's last_used_at column is stamped with the current time as a side
    effect, and the returned dict reflects that new timestamp.
    """
    if not raw_key:
        return None
    # Keys are stored hashed; look up by digest of the trimmed secret.
    key_hash = _hash_api_key(raw_key.strip())
    now = _utc_now()
    with _connect() as conn:
        row = conn.execute(
            """
            SELECT
                api_keys.id AS api_key_id,
                api_keys.user_id,
                api_keys.name,
                api_keys.key_prefix,
                api_keys.last_used_at,
                users.email,
                users.plan,
                users.created_at
            FROM api_keys
            INNER JOIN users ON users.id = api_keys.user_id
            WHERE api_keys.key_hash = ? AND api_keys.revoked_at IS NULL
            """,
            (key_hash,),
        ).fetchone()
        if row is None:
            return None
        # Record usage on the same connection so it commits with the lookup.
        conn.execute(
            "UPDATE api_keys SET last_used_at = ? WHERE id = ?",
            (now, row["api_key_id"]),
        )
    return {
        "api_key_id": row["api_key_id"],
        "user_id": row["user_id"],
        "email": row["email"],
        "plan": normalize_plan(row["plan"]),
        "created_at": row["created_at"],
        "name": row["name"],
        "key_prefix": row["key_prefix"],
        # Reflect the stamp just written, not the stale column value.
        "last_used_at": now,
    }
def record_file_history(
    user_id: int,
    tool: str,
    original_filename: str | None,
    output_filename: str | None,
    status: str,
    download_url: str | None,
    metadata: dict | None = None,
):
    """Insert one file_history row for an authenticated user's job.

    *metadata* defaults to an empty dict and is stored as a JSON string.
    """
    payload = (
        user_id,
        tool,
        original_filename,
        output_filename,
        status,
        download_url,
        json.dumps(metadata or {}, ensure_ascii=True),
        _utc_now(),
    )
    with _connect() as conn:
        conn.execute(
            "INSERT INTO file_history ("
            " user_id, tool, original_filename, output_filename,"
            " status, download_url, metadata_json, created_at"
            ") VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
            payload,
        )
def record_task_history(
    user_id: int | None,
    tool: str,
    original_filename: str | None,
    result: dict,
):
    """Persist task results when the request belongs to an authenticated user.

    Top-level result keys become history metadata, except routing fields
    (status/download_url/filename, stored in their own columns) and large
    list payloads, which are collapsed to a "<key>_count" integer.
    Persistence failures are logged and never raised.
    """
    if user_id is None:
        return
    column_fields = {"status", "download_url", "filename"}
    list_fields = {"procedures", "flowcharts", "pages"}
    metadata = {}
    for key, value in result.items():
        if key in column_fields:
            continue
        if key in list_fields and isinstance(value, list):
            metadata[f"{key}_count"] = len(value)
        else:
            metadata[key] = value
    try:
        record_file_history(
            user_id=user_id,
            tool=tool,
            original_filename=original_filename,
            output_filename=result.get("filename"),
            status=result.get("status", "completed"),
            download_url=result.get("download_url"),
            metadata=metadata,
        )
    except Exception:
        # Best-effort: history writes must never break the task pipeline.
        logger.exception("Failed to persist task history for tool=%s", tool)
def list_file_history(user_id: int, limit: int = 50) -> list[dict]:
    """Return up to *limit* most recent file-history entries for one user.

    The stored metadata_json column is decoded back into a dict under the
    "metadata" key.
    """
    with _connect() as conn:
        rows = conn.execute(
            "SELECT id, tool, original_filename, output_filename, status,"
            " download_url, metadata_json, created_at"
            " FROM file_history WHERE user_id = ?"
            " ORDER BY created_at DESC LIMIT ?",
            (user_id, limit),
        ).fetchall()
    entries = []
    for row in rows:
        entry = {
            field: row[field]
            for field in (
                "id",
                "tool",
                "original_filename",
                "output_filename",
                "status",
                "download_url",
            )
        }
        entry["metadata"] = json.loads(row["metadata_json"] or "{}")
        entry["created_at"] = row["created_at"]
        entries.append(entry)
    return entries
def record_usage_event(
    user_id: int | None,
    source: str,
    tool: str,
    task_id: str,
    event_type: str,
    api_key_id: int | None = None,
):
    """Insert one usage_events row; no-op for anonymous (user_id is None) actors."""
    if user_id is None:
        return
    values = (
        user_id,
        api_key_id,
        source,
        tool,
        task_id,
        event_type,
        _utc_now(),
        get_current_period_month(),
    )
    with _connect() as conn:
        conn.execute(
            "INSERT INTO usage_events ("
            " user_id, api_key_id, source, tool, task_id,"
            " event_type, created_at, period_month"
            ") VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
            values,
        )
def count_usage_events(
    user_id: int,
    source: str,
    event_type: str = "accepted",
    period_month: str | None = None,
) -> int:
    """Count usage events for one user, source, period, and event type.

    *period_month* defaults to the current UTC period when omitted.
    """
    period = period_month or get_current_period_month()
    with _connect() as conn:
        row = conn.execute(
            "SELECT COUNT(*) AS count FROM usage_events"
            " WHERE user_id = ? AND source = ? AND event_type = ? AND period_month = ?",
            (user_id, source, event_type, period),
        ).fetchone()
    if row is None:
        return 0
    return int(row["count"])
def has_task_access(user_id: int, source: str, task_id: str) -> bool:
    """Report whether this user previously had *task_id* accepted for *source*."""
    with _connect() as conn:
        hit = conn.execute(
            "SELECT 1 FROM usage_events"
            " WHERE user_id = ? AND source = ? AND task_id = ?"
            " AND event_type = 'accepted' LIMIT 1",
            (user_id, source, task_id),
        ).fetchone()
    return hit is not None

View File

@@ -0,0 +1,227 @@
"""Plan entitlements, actor resolution, and quota enforcement."""
from dataclasses import dataclass
from flask import current_app, request
from app.services.account_service import (
count_usage_events,
get_api_key_actor,
get_user_by_id,
get_current_period_month,
has_task_access,
normalize_plan,
record_usage_event,
)
from app.utils.auth import get_current_user_id, logout_user_session
from app.utils.file_validator import validate_file
FREE_PLAN = "free"
PRO_PLAN = "pro"
# Monthly accepted-task caps per traffic source; anonymous web actors are
# uncapped (get_web_quota_limit returns None for them).
FREE_WEB_MONTHLY_LIMIT = 50
PRO_WEB_MONTHLY_LIMIT = 500
# API access is Pro-only, so only a Pro API cap exists.
PRO_API_MONTHLY_LIMIT = 1000
# Default number of file-history rows returned per plan.
FREE_HISTORY_LIMIT = 25
PRO_HISTORY_LIMIT = 250
# Homepage "smart upload" size caps in megabytes, surfaced to the frontend.
FREE_HOMEPAGE_LIMIT_MB = 50
PRO_HOMEPAGE_LIMIT_MB = 100
@dataclass(frozen=True)
class ActorContext:
    """Resolved access context for one incoming request."""
    # Traffic surface: "web" or "api".
    source: str
    # "anonymous", "session", or "api_key".
    actor_type: str
    # None only for anonymous web actors.
    user_id: int | None
    # Normalized plan value ("free" or "pro").
    plan: str
    # Set only when the actor authenticated with an API key.
    api_key_id: int | None = None
class PolicyError(Exception):
    """A request failed access or quota policy validation.

    Attributes:
        message: Human-readable explanation, also used as str(exc).
        status_code: HTTP status the caller should respond with (default 400).
    """

    def __init__(self, message: str, status_code: int = 400):
        super().__init__(message)
        self.message = message
        self.status_code = status_code
def get_history_limit(plan: str) -> int:
    """Return the default file-history row limit for one plan."""
    if normalize_plan(plan) == PRO_PLAN:
        return PRO_HISTORY_LIMIT
    return FREE_HISTORY_LIMIT
def get_web_quota_limit(plan: str, actor_type: str) -> int | None:
    """Return the monthly accepted-task cap for one web actor.

    Anonymous actors are uncapped here (None).
    """
    if actor_type == "anonymous":
        return None
    if normalize_plan(plan) == PRO_PLAN:
        return PRO_WEB_MONTHLY_LIMIT
    return FREE_WEB_MONTHLY_LIMIT
def get_api_quota_limit(plan: str) -> int | None:
    """Return the monthly API cap for Pro users; None means no API entitlement."""
    if normalize_plan(plan) == PRO_PLAN:
        return PRO_API_MONTHLY_LIMIT
    return None
def ads_enabled(plan: str, actor_type: str) -> bool:
    """Return whether ads should display: everyone except authenticated Pro users."""
    # De Morgan form of: not (authenticated and pro).
    return actor_type == "anonymous" or normalize_plan(plan) != PRO_PLAN
def get_effective_file_size_limits_bytes(plan: str) -> dict[str, int]:
    """Return backend upload byte limits; the Pro plan doubles every base limit."""
    base_limits = current_app.config["FILE_SIZE_LIMITS"]
    if normalize_plan(plan) == PRO_PLAN:
        return {name: limit * 2 for name, limit in base_limits.items()}
    # Copy so callers cannot mutate the shared config mapping.
    return dict(base_limits)
def get_effective_file_size_limits_mb(plan: str) -> dict[str, int]:
    """Return frontend-friendly upload limits in MB, keyed by UI category."""
    mb = 1024 * 1024
    byte_limits = get_effective_file_size_limits_bytes(plan)
    is_pro = normalize_plan(plan) == PRO_PLAN
    return {
        "pdf": byte_limits["pdf"] // mb,
        "word": byte_limits["docx"] // mb,
        "image": byte_limits["png"] // mb,
        "video": byte_limits["mp4"] // mb,
        "homepageSmartUpload": PRO_HOMEPAGE_LIMIT_MB if is_pro else FREE_HOMEPAGE_LIMIT_MB,
    }
def get_usage_summary_for_user(user_id: int, plan: str) -> dict:
    """Return usage/quota summary for one authenticated user.

    Includes the normalized plan, current period, ad and history
    entitlements, effective upload limits, and accepted-task usage against
    both web and API quotas for the current month.
    """
    normalized_plan = normalize_plan(plan)
    current_period = get_current_period_month()
    web_used = count_usage_events(
        user_id, "web", event_type="accepted", period_month=current_period
    )
    api_used = count_usage_events(
        user_id, "api", event_type="accepted", period_month=current_period
    )
    return {
        "plan": normalized_plan,
        "period_month": current_period,
        # "session" actor type: this summary is only built for logged-in users.
        "ads_enabled": ads_enabled(normalized_plan, "session"),
        "history_limit": get_history_limit(normalized_plan),
        "file_limits_mb": get_effective_file_size_limits_mb(normalized_plan),
        "web_quota": {
            "used": web_used,
            "limit": get_web_quota_limit(normalized_plan, "session"),
        },
        "api_quota": {
            "used": api_used,
            "limit": get_api_quota_limit(normalized_plan),
        },
    }
def resolve_web_actor() -> ActorContext:
    """Resolve the active web actor from session state.

    Falls back to an anonymous free actor when no user is logged in, or
    when the session points at a user that no longer exists (the stale
    session is cleared in that case).
    """
    anonymous = ActorContext(
        source="web", actor_type="anonymous", user_id=None, plan=FREE_PLAN
    )
    user_id = get_current_user_id()
    if user_id is None:
        return anonymous
    user = get_user_by_id(user_id)
    if user is None:
        logout_user_session()
        return anonymous
    return ActorContext(
        source="web",
        actor_type="session",
        user_id=user["id"],
        plan=normalize_plan(user["plan"]),
    )
def resolve_api_actor() -> ActorContext:
    """Resolve the active B2B API actor from the X-API-Key header.

    Raises:
        PolicyError: 401 when the header is missing, or the key is unknown
            or revoked; 403 when the key's owner is not on the Pro plan.
    """
    raw_key = request.headers.get("X-API-Key", "").strip()
    if not raw_key:
        raise PolicyError("X-API-Key header is required.", 401)
    actor = get_api_key_actor(raw_key)
    if actor is None:
        raise PolicyError("Invalid or revoked API key.", 401)
    plan = normalize_plan(actor["plan"])
    # API access is a Pro-only entitlement.
    if plan != PRO_PLAN:
        raise PolicyError("API access requires an active Pro plan.", 403)
    return ActorContext(
        source="api",
        actor_type="api_key",
        user_id=actor["user_id"],
        plan=plan,
        api_key_id=actor["api_key_id"],
    )
def validate_actor_file(file_storage, allowed_types: list[str], actor: ActorContext):
    """Validate one uploaded file using the actor's plan-aware size limits."""
    overrides = get_effective_file_size_limits_bytes(actor.plan)
    return validate_file(
        file_storage,
        allowed_types=allowed_types,
        size_limit_overrides=overrides,
    )
def assert_quota_available(actor: ActorContext):
    """Ensure an actor still has accepted-task quota for the current month.

    Raises:
        PolicyError: 429 when the monthly cap is exhausted; 403 when a
            non-web actor has no API entitlement (non-Pro plan).
    """
    # Anonymous actors are not metered.
    if actor.user_id is None:
        return
    if actor.source == "web":
        limit = get_web_quota_limit(actor.plan, actor.actor_type)
        if limit is None:
            return
        # count_usage_events defaults period_month to the current month.
        used = count_usage_events(actor.user_id, "web", event_type="accepted")
        if used >= limit:
            # Different upgrade messaging for Pro vs free users.
            if normalize_plan(actor.plan) == PRO_PLAN:
                raise PolicyError("Your monthly Pro web quota has been reached.", 429)
            raise PolicyError(
                "Your monthly free plan limit has been reached. Upgrade to Pro for higher limits.",
                429,
            )
        return
    # Any non-web source is treated as API traffic.
    limit = get_api_quota_limit(actor.plan)
    if limit is None:
        raise PolicyError("API access requires an active Pro plan.", 403)
    used = count_usage_events(actor.user_id, "api", event_type="accepted")
    if used >= limit:
        raise PolicyError("Your monthly API quota has been reached.", 429)
def record_accepted_usage(actor: ActorContext, tool: str, celery_task_id: str):
    """Record one "accepted" usage event after the task was dispatched successfully."""
    record_usage_event(
        actor.user_id,
        actor.source,
        tool,
        celery_task_id,
        "accepted",
        api_key_id=actor.api_key_id,
    )
def build_task_tracking_kwargs(actor: ActorContext) -> dict:
    """Return the Celery kwargs required for task-side usage tracking."""
    tracking = dict(
        user_id=actor.user_id,
        usage_source=actor.source,
        api_key_id=actor.api_key_id,
    )
    return tracking
def assert_api_task_access(actor: ActorContext, task_id: str):
    """Raise a 404 PolicyError unless this API actor owns the polled task.

    404 (rather than 403) avoids confirming the existence of other users'
    task ids.
    """
    if actor.user_id is None:
        raise PolicyError("Task not found.", 404)
    if not has_task_access(actor.user_id, "api", task_id):
        raise PolicyError("Task not found.", 404)

View File

@@ -0,0 +1,29 @@
"""Shared helpers for task completion tracking."""
from app.services.account_service import record_task_history, record_usage_event
def finalize_task_tracking(
    *,
    user_id: int | None,
    tool: str,
    original_filename: str | None,
    result: dict,
    usage_source: str,
    api_key_id: int | None,
    celery_task_id: str | None,
):
    """Persist task history plus a completed/failed usage lifecycle event.

    History is always attempted; the usage event is skipped for anonymous
    actors (user_id is None) or when the Celery task id is missing.
    """
    record_task_history(user_id, tool, original_filename, result)
    if user_id is None or not celery_task_id:
        return
    succeeded = result.get("status") == "completed"
    record_usage_event(
        user_id=user_id,
        source=usage_source,
        tool=tool,
        task_id=celery_task_id,
        event_type="completed" if succeeded else "failed",
        api_key_id=api_key_id,
    )