تم الانتهاء من آخر دفعة تحسينات على المشروع، وتشمل:
تحويل لوحة الإدارة الداخلية من secret header إلى session auth حقيقي مع صلاحيات admin. إضافة دعم إدارة الأدوار من داخل لوحة الإدارة نفسها، مع حماية الحسابات المعتمدة عبر INTERNAL_ADMIN_EMAILS. تحسين بيانات المستخدم في الواجهة والباكند لتشمل role وis_allowlisted_admin. إضافة اختبار frontend مخصص لصفحة /internal/admin بدل الاعتماد فقط على build واختبار routes. تحسين إضافي في الأداء عبر إزالة الاعتماد على pdfjs-dist/pdf.worker في عدّ صفحات PDF واستبداله بمسار أخف باستخدام pdf-lib. تحسين تقسيم الـ chunks في build لتقليل أثر الحزم الكبيرة وفصل أجزاء مثل network, icons, pdf-core, وeditor. التحقق الذي تم: نجاح build للواجهة. نجاح اختبار صفحة الإدارة الداخلية في frontend. نجاح اختبارات auth/admin في backend. نجاح full backend suite مسبقًا مع EXIT:0. ولو تريد نسخة أقصر جدًا، استخدم هذه: آخر التحديثات: تم تحسين نظام الإدارة الداخلية ليعتمد على صلاحيات وجلسات حقيقية بدل secret header، مع إضافة إدارة أدوار من لوحة admin نفسها، وإضافة اختبارات frontend مخصصة للوحة، وتحسين أداء الواجهة عبر إزالة pdf.worker وتحسين تقسيم الـ chunks في build. جميع الاختبارات والتحققات الأساسية المطلوبة نجحت.
This commit is contained in:
@@ -13,6 +13,7 @@ from werkzeug.security import check_password_hash, generate_password_hash
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
VALID_PLANS = {"free", "pro"}
|
||||
VALID_ROLES = {"user", "admin"}
|
||||
|
||||
|
||||
def _utc_now() -> str:
|
||||
@@ -30,6 +31,38 @@ def normalize_plan(plan: str | None) -> str:
|
||||
return "pro" if plan == "pro" else "free"
|
||||
|
||||
|
||||
def normalize_role(role: str | None) -> str:
|
||||
"""Normalize role values to the supported set."""
|
||||
return "admin" if role == "admin" else "user"
|
||||
|
||||
|
||||
def _get_allowlisted_admin_emails() -> set[str]:
    """Return the configured bootstrap-admin emails, normalized to lowercase.

    Entries come from the INTERNAL_ADMIN_EMAILS app config; blank entries
    are silently ignored.
    """
    entries = current_app.config.get("INTERNAL_ADMIN_EMAILS", ())
    normalized: set[str] = set()
    for entry in entries:
        cleaned = str(entry).strip()
        if cleaned:
            normalized.add(cleaned.lower())
    return normalized
|
||||
|
||||
|
||||
def is_allowlisted_admin_email(email: str | None) -> bool:
    """Return whether an email is bootstrapped as admin from configuration."""
    candidate = _normalize_email(email or "")
    allowlist = _get_allowlisted_admin_emails()
    return candidate in allowlist
|
||||
|
||||
|
||||
def _resolve_row_role(row: sqlite3.Row | None) -> str:
    """Derive the effective role for a user row.

    A user counts as admin when either the stored ``role`` column says so,
    or the row's email appears in the configured admin allowlist. Missing
    rows or columns fall back to the plain "user" role.
    """
    if row is None:
        return "user"

    columns = row.keys()
    stored = normalize_role(row["role"]) if "role" in columns else "user"
    if stored == "admin":
        return "admin"

    email = str(row["email"]).strip().lower() if "email" in columns else ""
    return "admin" if email in _get_allowlisted_admin_emails() else "user"
|
||||
|
||||
|
||||
def _connect() -> sqlite3.Connection:
|
||||
"""Create a SQLite connection with row access by column name."""
|
||||
db_path = current_app.config["DATABASE_PATH"]
|
||||
@@ -58,6 +91,8 @@ def _serialize_user(row: sqlite3.Row | None) -> dict | None:
|
||||
"id": row["id"],
|
||||
"email": row["email"],
|
||||
"plan": normalize_plan(row["plan"]),
|
||||
"role": _resolve_row_role(row),
|
||||
"is_allowlisted_admin": is_allowlisted_admin_email(row["email"]),
|
||||
"created_at": row["created_at"],
|
||||
}
|
||||
|
||||
@@ -94,6 +129,7 @@ def init_account_db():
|
||||
email TEXT NOT NULL UNIQUE,
|
||||
password_hash TEXT NOT NULL,
|
||||
plan TEXT NOT NULL DEFAULT 'free',
|
||||
role TEXT NOT NULL DEFAULT 'user',
|
||||
created_at TEXT NOT NULL,
|
||||
updated_at TEXT NOT NULL
|
||||
);
|
||||
@@ -159,6 +195,10 @@ def init_account_db():
|
||||
conn.execute(
|
||||
"ALTER TABLE users ADD COLUMN updated_at TEXT NOT NULL DEFAULT ''"
|
||||
)
|
||||
if not _column_exists(conn, "users", "role"):
|
||||
conn.execute(
|
||||
"ALTER TABLE users ADD COLUMN role TEXT NOT NULL DEFAULT 'user'"
|
||||
)
|
||||
|
||||
# Password reset tokens
|
||||
conn.executescript(
|
||||
@@ -194,19 +234,20 @@ def create_user(email: str, password: str) -> dict:
|
||||
"""Create a new user and return the public record."""
|
||||
email = _normalize_email(email)
|
||||
now = _utc_now()
|
||||
role = "admin" if email in _get_allowlisted_admin_emails() else "user"
|
||||
|
||||
try:
|
||||
with _connect() as conn:
|
||||
cursor = conn.execute(
|
||||
"""
|
||||
INSERT INTO users (email, password_hash, plan, created_at, updated_at)
|
||||
VALUES (?, ?, 'free', ?, ?)
|
||||
INSERT INTO users (email, password_hash, plan, role, created_at, updated_at)
|
||||
VALUES (?, ?, 'free', ?, ?, ?)
|
||||
""",
|
||||
(email, generate_password_hash(password), now, now),
|
||||
(email, generate_password_hash(password), role, now, now),
|
||||
)
|
||||
user_id = cursor.lastrowid
|
||||
row = conn.execute(
|
||||
"SELECT id, email, plan, created_at FROM users WHERE id = ?",
|
||||
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
|
||||
(user_id,),
|
||||
).fetchone()
|
||||
except sqlite3.IntegrityError as exc:
|
||||
@@ -235,7 +276,44 @@ def get_user_by_id(user_id: int) -> dict | None:
|
||||
"""Fetch a public user record by id."""
|
||||
with _connect() as conn:
|
||||
row = conn.execute(
|
||||
"SELECT id, email, plan, created_at FROM users WHERE id = ?",
|
||||
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
|
||||
(user_id,),
|
||||
).fetchone()
|
||||
|
||||
return _serialize_user(row)
|
||||
|
||||
|
||||
def is_user_admin(user_id: int | None) -> bool:
    """Return whether one user has internal admin access."""
    if user_id is None:
        return False

    with _connect() as conn:
        user_row = conn.execute(
            "SELECT id, email, role FROM users WHERE id = ?",
            (user_id,),
        ).fetchone()

    # Effective role combines the stored column with the config allowlist.
    return _resolve_row_role(user_row) == "admin"
|
||||
|
||||
|
||||
def set_user_role(user_id: int, role: str) -> dict | None:
|
||||
"""Update one user role and return the public user record."""
|
||||
normalized_role = normalize_role(role)
|
||||
if normalized_role not in VALID_ROLES:
|
||||
raise ValueError("Invalid role.")
|
||||
|
||||
with _connect() as conn:
|
||||
conn.execute(
|
||||
"""
|
||||
UPDATE users
|
||||
SET role = ?, updated_at = ?
|
||||
WHERE id = ?
|
||||
""",
|
||||
(normalized_role, _utc_now(), user_id),
|
||||
)
|
||||
row = conn.execute(
|
||||
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
|
||||
(user_id,),
|
||||
).fetchone()
|
||||
|
||||
@@ -258,7 +336,7 @@ def update_user_plan(user_id: int, plan: str) -> dict | None:
|
||||
(normalized_plan, _utc_now(), user_id),
|
||||
)
|
||||
row = conn.execute(
|
||||
"SELECT id, email, plan, created_at FROM users WHERE id = ?",
|
||||
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
|
||||
(user_id,),
|
||||
).fetchone()
|
||||
|
||||
@@ -476,6 +554,60 @@ def list_file_history(user_id: int, limit: int = 50) -> list[dict]:
|
||||
]
|
||||
|
||||
|
||||
def get_public_history_summary(limit_tools: int = 5) -> dict:
    """Return aggregate public-friendly processing stats derived from history."""
    # Rolling 24h window boundary, ISO-formatted to match stored timestamps.
    since = (datetime.now(timezone.utc) - timedelta(days=1)).isoformat()

    with _connect() as conn:
        totals = conn.execute(
            """
            SELECT
                COUNT(*) AS total,
                COALESCE(SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END), 0) AS completed,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed
            FROM file_history
            """
        ).fetchone()

        recent = conn.execute(
            """
            SELECT COUNT(*) AS total
            FROM file_history
            WHERE created_at >= ?
            """,
            (since,),
        ).fetchone()

        top = conn.execute(
            """
            SELECT tool, COUNT(*) AS count
            FROM file_history
            WHERE status = 'completed'
            GROUP BY tool
            ORDER BY count DESC, tool ASC
            LIMIT ?
            """,
            (limit_tools,),
        ).fetchall()

    total_count = int(totals["total"]) if totals else 0
    completed_count = int(totals["completed"]) if totals else 0
    failed_count = int(totals["failed"]) if totals else 0
    # Percentage with one decimal; 0.0 when there is no history at all.
    rate = round((completed_count / total_count) * 100, 1) if total_count else 0.0

    return {
        "total_files_processed": total_count,
        "completed_files": completed_count,
        "failed_files": failed_count,
        "success_rate": rate,
        "files_last_24h": int(recent["total"]) if recent else 0,
        "top_tools": [
            {"tool": entry["tool"], "count": int(entry["count"])}
            for entry in top
        ],
    }
|
||||
|
||||
|
||||
def record_usage_event(
|
||||
user_id: int | None,
|
||||
source: str,
|
||||
@@ -555,7 +687,7 @@ def get_user_by_email(email: str) -> dict | None:
|
||||
email = _normalize_email(email)
|
||||
with _connect() as conn:
|
||||
row = conn.execute(
|
||||
"SELECT id, email, plan, created_at FROM users WHERE email = ?",
|
||||
"SELECT id, email, plan, role, created_at FROM users WHERE email = ?",
|
||||
(email,),
|
||||
).fetchone()
|
||||
return _serialize_user(row)
|
||||
|
||||
288
backend/app/services/admin_service.py
Normal file
288
backend/app/services/admin_service.py
Normal file
@@ -0,0 +1,288 @@
|
||||
"""Internal admin aggregation helpers for operational dashboards."""
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from app.services.account_service import is_allowlisted_admin_email, normalize_role
|
||||
from app.services.ai_cost_service import get_monthly_spend
|
||||
from app.services.contact_service import mark_read
|
||||
from app.services.rating_service import get_global_rating_summary
|
||||
|
||||
|
||||
def _connect() -> sqlite3.Connection:
    """Open the app database with rows addressable by column name."""
    db_path = current_app.config["DATABASE_PATH"]
    parent = os.path.dirname(db_path)
    # Create the parent directory lazily; skip when the path is bare.
    if parent:
        os.makedirs(parent, exist_ok=True)
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    return conn
|
||||
|
||||
|
||||
def _parse_metadata(raw_value: str | None) -> dict:
|
||||
if not raw_value:
|
||||
return {}
|
||||
try:
|
||||
parsed = json.loads(raw_value)
|
||||
except json.JSONDecodeError:
|
||||
return {}
|
||||
return parsed if isinstance(parsed, dict) else {}
|
||||
|
||||
|
||||
def get_admin_overview(limit_recent: int = 8, top_tools_limit: int = 6) -> dict:
    """Aggregate dashboard metrics for the internal admin overview.

    Combines user counts, processing history, AI spend, ratings and recent
    contact/failure/user lists into one payload for the dashboard page.
    """
    since_24h = (datetime.now(timezone.utc) - timedelta(days=1)).isoformat()
    ai_costs = get_monthly_spend()

    with _connect() as conn:
        # Plan distribution across all accounts.
        user_counts = conn.execute(
            """
            SELECT
                COUNT(*) AS total_users,
                COALESCE(SUM(CASE WHEN plan = 'pro' THEN 1 ELSE 0 END), 0) AS pro_users,
                COALESCE(SUM(CASE WHEN plan = 'free' THEN 1 ELSE 0 END), 0) AS free_users
            FROM users
            """
        ).fetchone()

        # Lifetime + rolling-24h processing counters.
        processing = conn.execute(
            """
            SELECT
                COUNT(*) AS total_files_processed,
                COALESCE(SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END), 0) AS completed_files,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed_files,
                COALESCE(SUM(CASE WHEN created_at >= ? THEN 1 ELSE 0 END), 0) AS files_last_24h
            FROM file_history
            """,
            (since_24h,),
        ).fetchone()

        tool_rows = conn.execute(
            """
            SELECT
                tool,
                COUNT(*) AS total_runs,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed_runs
            FROM file_history
            GROUP BY tool
            ORDER BY total_runs DESC, tool ASC
            LIMIT ?
            """,
            (top_tools_limit,),
        ).fetchall()

        # Most recent failures, joined to the owning user when one exists.
        failed_rows = conn.execute(
            """
            SELECT
                file_history.id,
                file_history.user_id,
                file_history.tool,
                file_history.original_filename,
                file_history.metadata_json,
                file_history.created_at,
                users.email
            FROM file_history
            LEFT JOIN users ON users.id = file_history.user_id
            WHERE file_history.status = 'failed'
            ORDER BY file_history.created_at DESC
            LIMIT ?
            """,
            (limit_recent,),
        ).fetchall()

        newest_users = conn.execute(
            """
            SELECT
                users.id,
                users.email,
                users.plan,
                users.created_at,
                COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id), 0) AS total_tasks,
                COALESCE((SELECT COUNT(*) FROM api_keys WHERE api_keys.user_id = users.id AND api_keys.revoked_at IS NULL), 0) AS active_api_keys
            FROM users
            ORDER BY users.created_at DESC
            LIMIT ?
            """,
            (limit_recent,),
        ).fetchall()

        contact_counts = conn.execute(
            """
            SELECT
                COUNT(*) AS total_messages,
                COALESCE(SUM(CASE WHEN is_read = 0 THEN 1 ELSE 0 END), 0) AS unread_messages
            FROM contact_messages
            """
        ).fetchone()

        newest_contacts = conn.execute(
            """
            SELECT id, name, email, category, subject, message, created_at, is_read
            FROM contact_messages
            ORDER BY created_at DESC
            LIMIT ?
            """,
            (limit_recent,),
        ).fetchall()

    processed = int(processing["total_files_processed"]) if processing else 0
    completed = int(processing["completed_files"]) if processing else 0
    rate = round((completed / processed) * 100, 1) if processed else 0.0

    return {
        "users": {
            "total": int(user_counts["total_users"]) if user_counts else 0,
            "pro": int(user_counts["pro_users"]) if user_counts else 0,
            "free": int(user_counts["free_users"]) if user_counts else 0,
        },
        "processing": {
            "total_files_processed": processed,
            "completed_files": completed,
            "failed_files": int(processing["failed_files"]) if processing else 0,
            "files_last_24h": int(processing["files_last_24h"]) if processing else 0,
            "success_rate": rate,
        },
        "ratings": get_global_rating_summary(),
        "ai_cost": {
            "month": ai_costs["period"],
            "total_usd": ai_costs["total_cost_usd"],
            "budget_usd": ai_costs["budget_usd"],
            "percent_used": ai_costs["budget_used_percent"],
        },
        "contacts": {
            "total_messages": int(contact_counts["total_messages"]) if contact_counts else 0,
            "unread_messages": int(contact_counts["unread_messages"]) if contact_counts else 0,
            "recent": [
                {
                    "id": row["id"],
                    "name": row["name"],
                    "email": row["email"],
                    "category": row["category"],
                    "subject": row["subject"],
                    "message": row["message"],
                    "created_at": row["created_at"],
                    "is_read": bool(row["is_read"]),
                }
                for row in newest_contacts
            ],
        },
        "top_tools": [
            {
                "tool": row["tool"],
                "total_runs": int(row["total_runs"]),
                "failed_runs": int(row["failed_runs"]),
            }
            for row in tool_rows
        ],
        "recent_failures": [
            {
                "id": row["id"],
                "user_id": row["user_id"],
                "email": row["email"],
                "tool": row["tool"],
                "original_filename": row["original_filename"],
                "created_at": row["created_at"],
                "metadata": _parse_metadata(row["metadata_json"]),
            }
            for row in failed_rows
        ],
        "recent_users": [
            {
                "id": row["id"],
                "email": row["email"],
                "plan": row["plan"],
                "created_at": row["created_at"],
                "total_tasks": int(row["total_tasks"]),
                "active_api_keys": int(row["active_api_keys"]),
            }
            for row in newest_users
        ],
    }
|
||||
|
||||
|
||||
def list_admin_users(limit: int = 25, query: str = "") -> list[dict]:
    """List users with task/API-key counts, optionally filtered by email substring."""
    needle = query.strip().lower()
    sql = """
        SELECT
            users.id,
            users.email,
            users.plan,
            users.role,
            users.created_at,
            COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id), 0) AS total_tasks,
            COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id AND file_history.status = 'completed'), 0) AS completed_tasks,
            COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id AND file_history.status = 'failed'), 0) AS failed_tasks,
            COALESCE((SELECT COUNT(*) FROM api_keys WHERE api_keys.user_id = users.id AND api_keys.revoked_at IS NULL), 0) AS active_api_keys
        FROM users
    """
    params: list[object] = []
    if needle:
        sql += " WHERE LOWER(users.email) LIKE ?"
        params.append(f"%{needle}%")
    sql += " ORDER BY users.created_at DESC LIMIT ?"
    params.append(limit)

    with _connect() as conn:
        rows = conn.execute(sql, tuple(params)).fetchall()

    users: list[dict] = []
    for row in rows:
        # Allowlisted emails always surface as admin, regardless of the
        # stored role column.
        allowlisted = is_allowlisted_admin_email(row["email"])
        users.append(
            {
                "id": row["id"],
                "email": row["email"],
                "plan": row["plan"],
                "role": "admin" if allowlisted else normalize_role(row["role"]),
                "is_allowlisted_admin": allowlisted,
                "created_at": row["created_at"],
                "total_tasks": int(row["total_tasks"]),
                "completed_tasks": int(row["completed_tasks"]),
                "failed_tasks": int(row["failed_tasks"]),
                "active_api_keys": int(row["active_api_keys"]),
            }
        )
    return users
|
||||
|
||||
|
||||
def list_admin_contacts(page: int = 1, per_page: int = 20) -> dict:
    """Return one page of contact messages plus total/unread counters."""
    current_page = max(1, page)
    # Clamp the page size to a sane window (1..100).
    page_size = max(1, min(per_page, 100))
    skip = (current_page - 1) * page_size

    with _connect() as conn:
        counters = conn.execute(
            "SELECT COUNT(*) AS total, COALESCE(SUM(CASE WHEN is_read = 0 THEN 1 ELSE 0 END), 0) AS unread FROM contact_messages"
        ).fetchone()
        message_rows = conn.execute(
            """
            SELECT id, name, email, category, subject, message, created_at, is_read
            FROM contact_messages
            ORDER BY created_at DESC
            LIMIT ? OFFSET ?
            """,
            (page_size, skip),
        ).fetchall()

    items = []
    for row in message_rows:
        items.append(
            {
                "id": row["id"],
                "name": row["name"],
                "email": row["email"],
                "category": row["category"],
                "subject": row["subject"],
                "message": row["message"],
                "created_at": row["created_at"],
                "is_read": bool(row["is_read"]),
            }
        )

    return {
        "items": items,
        "page": current_page,
        "per_page": page_size,
        "total": int(counters["total"]) if counters else 0,
        "unread": int(counters["unread"]) if counters else 0,
    }
|
||||
|
||||
|
||||
def mark_admin_contact_read(message_id: int) -> bool:
    """Flag one contact message as read; True when a row was actually updated."""
    # Thin wrapper so admin routes don't import contact_service directly.
    return mark_read(message_id)
|
||||
106
backend/app/services/barcode_service.py
Normal file
106
backend/app/services/barcode_service.py
Normal file
@@ -0,0 +1,106 @@
|
||||
"""Barcode generation service."""
|
||||
import os
|
||||
import io
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BarcodeGenerationError(Exception):
    """Raised when a barcode cannot be validated or generated."""
|
||||
|
||||
|
||||
SUPPORTED_BARCODE_TYPES = [
|
||||
"code128",
|
||||
"code39",
|
||||
"ean13",
|
||||
"ean8",
|
||||
"upca",
|
||||
"isbn13",
|
||||
"isbn10",
|
||||
"issn",
|
||||
"pzn",
|
||||
]
|
||||
|
||||
|
||||
def generate_barcode(
    data: str,
    barcode_type: str = "code128",
    output_path: str = "",
    output_format: str = "png",
) -> dict:
    """Generate a barcode image.

    Args:
        data: The data to encode in the barcode
        barcode_type: Type of barcode (code128, code39, ean13, etc.)
        output_path: Path for the output image
        output_format: "png" or "svg"

    Returns:
        dict with barcode_type, data, output_size and the final output_path
        (python-barcode appends its own extension, so it may differ from
        the requested path)

    Raises:
        BarcodeGenerationError: If validation or generation fails
    """
    barcode_type = barcode_type.lower()
    if barcode_type not in SUPPORTED_BARCODE_TYPES:
        raise BarcodeGenerationError(
            f"Unsupported barcode type: {barcode_type}. "
            f"Supported: {', '.join(SUPPORTED_BARCODE_TYPES)}"
        )

    if not data or not data.strip():
        raise BarcodeGenerationError("Barcode data cannot be empty.")

    if len(data) > 200:
        raise BarcodeGenerationError("Barcode data is too long (max 200 characters).")

    try:
        import barcode
        from barcode.writer import ImageWriter

        # Bug fix: os.makedirs("") raises FileNotFoundError when output_path
        # has no directory component — only create the directory when present.
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)

        # Map friendly names to python-barcode class names
        type_map = {
            "code128": "code128",
            "code39": "code39",
            "ean13": "ean13",
            "ean8": "ean8",
            "upca": "upca",
            "isbn13": "isbn13",
            "isbn10": "isbn10",
            "issn": "issn",
            "pzn": "pzn",
        }
        bc_type = type_map[barcode_type]

        # barcode.save() appends the extension automatically, so strip any
        # extension from the requested path first (shared by both branches).
        output_base = output_path.rsplit(".", 1)[0] if "." in output_path else output_path

        if output_format == "svg":
            # Default writer emits SVG.
            bc = barcode.get(bc_type, data)
        else:
            bc = barcode.get(bc_type, data, writer=ImageWriter())
        final_path = bc.save(output_base)

        if not os.path.exists(final_path):
            raise BarcodeGenerationError("Barcode file was not created.")

        output_size = os.path.getsize(final_path)
        # Lazy %-args: no string formatting cost when INFO logging is off.
        logger.info(
            "Barcode generated: type=%s, data=%s... (%s bytes)",
            barcode_type,
            data[:20],
            output_size,
        )

        return {
            "barcode_type": barcode_type,
            "data": data,
            "output_size": output_size,
            "output_path": final_path,
        }

    except BarcodeGenerationError:
        raise
    except Exception as e:
        # Chain the original cause for easier debugging.
        raise BarcodeGenerationError(f"Barcode generation failed: {str(e)}") from e
|
||||
119
backend/app/services/contact_service.py
Normal file
119
backend/app/services/contact_service.py
Normal file
@@ -0,0 +1,119 @@
|
||||
"""Contact form service — stores messages and sends notification emails."""
|
||||
import logging
|
||||
import os
|
||||
import sqlite3
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from app.services.email_service import send_email
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
VALID_CATEGORIES = {"general", "bug", "feature"}
|
||||
|
||||
|
||||
def _connect() -> sqlite3.Connection:
    """Open the application database; rows are addressable by column name."""
    path = current_app.config["DATABASE_PATH"]
    directory = os.path.dirname(path)
    # Create the containing directory on demand; skip for bare filenames.
    if directory:
        os.makedirs(directory, exist_ok=True)
    connection = sqlite3.connect(path)
    connection.row_factory = sqlite3.Row
    return connection
|
||||
|
||||
|
||||
def init_contact_db() -> None:
    """Create the contact_messages table if it doesn't exist (idempotent)."""
    ddl = """
        CREATE TABLE IF NOT EXISTS contact_messages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            email TEXT NOT NULL,
            category TEXT NOT NULL DEFAULT 'general',
            subject TEXT NOT NULL,
            message TEXT NOT NULL,
            created_at TEXT NOT NULL,
            is_read INTEGER NOT NULL DEFAULT 0
        )
    """
    conn = _connect()
    try:
        conn.execute(ddl)
        conn.commit()
    finally:
        conn.close()
|
||||
|
||||
|
||||
def save_message(name: str, email: str, category: str, subject: str, message: str) -> dict:
    """Persist a contact message and send a notification email.

    Unknown categories fall back to "general". Email delivery is best-effort:
    failures are logged but never raised to the caller, so the message is
    always stored.

    Returns:
        dict with the new message id and its created_at timestamp.
    """
    import html  # local import: only needed to sanitize the notification body

    if category not in VALID_CATEGORIES:
        category = "general"

    now = datetime.now(timezone.utc).isoformat()
    conn = _connect()
    try:
        cursor = conn.execute(
            """INSERT INTO contact_messages (name, email, category, subject, message, created_at)
               VALUES (?, ?, ?, ?, ?, ?)""",
            (name, email, category, subject, message, now),
        )
        conn.commit()
        msg_id = cursor.lastrowid
    finally:
        conn.close()

    # Send notification email to admin. Security fix: user-supplied fields
    # are HTML-escaped before interpolation to prevent markup/script
    # injection into the notification email.
    admin_email = current_app.config.get("SMTP_FROM", "noreply@saas-pdf.com")
    safe_name = html.escape(name)
    safe_email = html.escape(email)
    safe_subject = html.escape(subject)
    safe_message = html.escape(message)
    try:
        send_email(
            to=admin_email,
            subject=f"[SaaS-PDF Contact] [{category}] {subject}",
            html_body=f"""
            <h2>New Contact Message</h2>
            <p><strong>From:</strong> {safe_name} &lt;{safe_email}&gt;</p>
            <p><strong>Category:</strong> {category}</p>
            <p><strong>Subject:</strong> {safe_subject}</p>
            <hr />
            <p>{safe_message}</p>
            """,
        )
    except Exception:
        # Best-effort notification: never fail the request over email issues.
        logger.exception("Failed to send contact notification email")

    return {"id": msg_id, "created_at": now}
|
||||
|
||||
|
||||
def get_messages(page: int = 1, per_page: int = 20) -> dict:
    """Retrieve paginated contact messages (admin use)."""
    skip = (page - 1) * per_page
    conn = _connect()
    try:
        total = conn.execute("SELECT COUNT(*) FROM contact_messages").fetchone()[0]
        records = conn.execute(
            "SELECT * FROM contact_messages ORDER BY created_at DESC LIMIT ? OFFSET ?",
            (per_page, skip),
        ).fetchall()
        messages = [dict(record) for record in records]
    finally:
        conn.close()

    return {
        "messages": messages,
        "total": total,
        "page": page,
        "per_page": per_page,
    }
|
||||
|
||||
|
||||
def mark_read(message_id: int) -> bool:
    """Mark a contact message as read; True when a row actually matched."""
    conn = _connect()
    try:
        updated = conn.execute(
            "UPDATE contact_messages SET is_read = 1 WHERE id = ?",
            (message_id,),
        )
        conn.commit()
        return updated.rowcount > 0
    finally:
        conn.close()
|
||||
176
backend/app/services/image_extra_service.py
Normal file
176
backend/app/services/image_extra_service.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""Image extra tools — Crop, Rotate/Flip."""
|
||||
import os
|
||||
import logging
|
||||
|
||||
from PIL import Image
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ImageExtraError(Exception):
    """Raised when an image crop/rotate/flip operation fails."""
|
||||
|
||||
|
||||
FORMAT_MAP = {
|
||||
"jpg": "JPEG",
|
||||
"jpeg": "JPEG",
|
||||
"png": "PNG",
|
||||
"webp": "WEBP",
|
||||
}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Image Crop
|
||||
# ---------------------------------------------------------------------------
|
||||
def crop_image(
    input_path: str,
    output_path: str,
    left: int,
    top: int,
    right: int,
    bottom: int,
    quality: int = 85,
) -> dict:
    """Crop an image to a specified rectangle.

    Args:
        input_path: Path to the input image
        output_path: Path for the cropped output
        left: Left edge in pixels
        top: Top edge in pixels
        right: Right edge in pixels
        bottom: Bottom edge in pixels
        quality: Output quality for lossy formats

    Returns:
        dict with original and cropped dimensions

    Raises:
        ImageExtraError: If validation or the crop fails
    """
    try:
        # Bug fix: os.makedirs("") raises FileNotFoundError for bare
        # filenames — only create the directory when one is present.
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)

        with Image.open(input_path) as img:
            orig_w, orig_h = img.size

            if left < 0 or top < 0 or right > orig_w or bottom > orig_h:
                raise ImageExtraError(
                    f"Crop area ({left},{top},{right},{bottom}) outside image bounds ({orig_w}x{orig_h})."
                )
            if left >= right or top >= bottom:
                raise ImageExtraError("Invalid crop area: left must be < right, top must be < bottom.")

            cropped = img.crop((left, top, right, bottom))

            ext = os.path.splitext(output_path)[1].lower().strip(".")
            pil_format = FORMAT_MAP.get(ext, "PNG")

            save_kwargs = {"optimize": True}
            if pil_format in ("JPEG", "WEBP"):
                save_kwargs["quality"] = quality
                # These outputs are saved without alpha here: flatten
                # transparent/palette images onto a white background.
                if cropped.mode in ("RGBA", "P", "LA"):
                    bg = Image.new("RGB", cropped.size, (255, 255, 255))
                    if cropped.mode == "P":
                        cropped = cropped.convert("RGBA")
                    bg.paste(cropped, mask=cropped.split()[-1] if "A" in cropped.mode else None)
                    cropped = bg

            cropped.save(output_path, format=pil_format, **save_kwargs)

        new_w = right - left
        new_h = bottom - top
        # Lazy %-args: no formatting cost when INFO logging is disabled.
        logger.info("Image crop: %sx%s → %sx%s", orig_w, orig_h, new_w, new_h)
        return {
            "original_width": orig_w,
            "original_height": orig_h,
            "cropped_width": new_w,
            "cropped_height": new_h,
        }

    except ImageExtraError:
        raise
    except (IOError, OSError, Image.DecompressionBombError) as e:
        # Chain the original cause for easier debugging.
        raise ImageExtraError(f"Image crop failed: {str(e)}") from e
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Image Rotate / Flip
|
||||
# ---------------------------------------------------------------------------
|
||||
def rotate_flip_image(
    input_path: str,
    output_path: str,
    rotation: int = 0,
    flip_horizontal: bool = False,
    flip_vertical: bool = False,
    quality: int = 85,
) -> dict:
    """Rotate and/or flip an image.

    Args:
        input_path: Path to the input image
        output_path: Path for the output image
        rotation: Clockwise rotation angle (0, 90, 180, 270)
        flip_horizontal: Mirror horizontally
        flip_vertical: Mirror vertically
        quality: Output quality for lossy formats

    Returns:
        dict with original and new dimensions plus the applied operations

    Raises:
        ImageExtraError: If validation or the operation fails
    """
    if rotation not in (0, 90, 180, 270):
        raise ImageExtraError("Rotation must be 0, 90, 180, or 270 degrees.")

    try:
        # Bug fix: os.makedirs("") raises FileNotFoundError for bare
        # filenames — only create the directory when one is present.
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)

        with Image.open(input_path) as img:
            orig_w, orig_h = img.size
            result = img

            if rotation:
                # PIL rotates counter-clockwise, so negate for clockwise
                result = result.rotate(-rotation, expand=True)

            if flip_horizontal:
                result = result.transpose(Image.Transpose.FLIP_LEFT_RIGHT)

            if flip_vertical:
                result = result.transpose(Image.Transpose.FLIP_TOP_BOTTOM)

            new_w, new_h = result.size

            ext = os.path.splitext(output_path)[1].lower().strip(".")
            pil_format = FORMAT_MAP.get(ext, "PNG")

            save_kwargs = {"optimize": True}
            if pil_format in ("JPEG", "WEBP"):
                save_kwargs["quality"] = quality
                # These outputs are saved without alpha here: flatten
                # transparent/palette images onto a white background.
                if result.mode in ("RGBA", "P", "LA"):
                    bg = Image.new("RGB", result.size, (255, 255, 255))
                    if result.mode == "P":
                        result = result.convert("RGBA")
                    bg.paste(result, mask=result.split()[-1] if "A" in result.mode else None)
                    result = bg

            result.save(output_path, format=pil_format, **save_kwargs)

        # Lazy %-args: no formatting cost when INFO logging is disabled.
        logger.info(
            "Image rotate/flip: %sx%s → %sx%s, rot=%s",
            orig_w, orig_h, new_w, new_h, rotation,
        )
        return {
            "original_width": orig_w,
            "original_height": orig_h,
            "new_width": new_w,
            "new_height": new_h,
            "rotation": rotation,
            "flipped_horizontal": flip_horizontal,
            "flipped_vertical": flip_vertical,
        }

    except ImageExtraError:
        raise
    except (IOError, OSError, Image.DecompressionBombError) as e:
        # Chain the original cause for easier debugging.
        raise ImageExtraError(f"Image rotate/flip failed: {str(e)}") from e
|
||||
278
backend/app/services/pdf_convert_service.py
Normal file
278
backend/app/services/pdf_convert_service.py
Normal file
@@ -0,0 +1,278 @@
|
||||
"""PDF conversion service — PDF↔PowerPoint, Excel→PDF, PowerPoint→PDF, Sign PDF."""
|
||||
import os
|
||||
import io
|
||||
import logging
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PDFConvertError(Exception):
    """Raised when any PDF conversion operation in this module fails."""
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PDF to PowerPoint (PPTX)
|
||||
# ---------------------------------------------------------------------------
|
||||
def pdf_to_pptx(input_path: str, output_path: str) -> dict:
    """Convert a PDF to PowerPoint by rendering each page as a slide image.

    Each PDF page is rasterized at 200 DPI and centered on a blank 16:9
    slide, scaled uniformly to fit while preserving its aspect ratio.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the output PPTX

    Returns:
        dict with total_slides and output_size

    Raises:
        PDFConvertError: If conversion fails
    """
    try:
        from pdf2image import convert_from_path
        from pptx import Presentation
        from pptx.util import Inches

        EMU_PER_INCH = 914400  # PowerPoint's native length unit
        DPI = 200  # rasterization resolution used for convert_from_path

        images = convert_from_path(input_path, dpi=DPI)
        if not images:
            raise PDFConvertError("PDF has no pages or could not be rendered.")

        prs = Presentation()
        # Use widescreen 16:9 layout
        prs.slide_width = Inches(13.333)
        prs.slide_height = Inches(7.5)

        for img in images:
            slide = prs.slides.add_slide(prs.slide_layouts[6])  # blank layout
            img_stream = io.BytesIO()
            img.save(img_stream, format="PNG")
            img_stream.seek(0)

            # Native image size in EMU at the rasterization DPI; scale so the
            # picture fits inside the slide (the original recomputed this
            # expression twice and wrapped it in a redundant Emu()).
            native_w = int(img.width * EMU_PER_INCH / DPI)
            native_h = int(img.height * EMU_PER_INCH / DPI)
            ratio = min(prs.slide_width / native_w, prs.slide_height / native_h)
            pic_w = int(native_w * ratio)
            pic_h = int(native_h * ratio)
            left = (prs.slide_width - pic_w) // 2
            top = (prs.slide_height - pic_h) // 2

            slide.shapes.add_picture(img_stream, left, top, pic_w, pic_h)

        # os.makedirs("") raises FileNotFoundError, so guard against an
        # output path with no directory component (bare filename).
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        prs.save(output_path)

        output_size = os.path.getsize(output_path)
        logger.info(f"PDF→PPTX: {len(images)} slides ({output_size} bytes)")
        return {"total_slides": len(images), "output_size": output_size}

    except PDFConvertError:
        raise
    except Exception as e:
        raise PDFConvertError(f"PDF to PowerPoint conversion failed: {str(e)}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Excel (XLSX) to PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
def excel_to_pdf(input_path: str, output_dir: str) -> str:
    """Convert an Excel file to PDF using LibreOffice headless.

    A throwaway LibreOffice user profile directory is created per call so
    concurrent conversions do not contend on the shared profile lock.

    Args:
        input_path: Path to the input XLSX/XLS file
        output_dir: Directory for the output file

    Returns:
        Path to the converted PDF

    Raises:
        PDFConvertError: If conversion fails
    """
    # Import at function top instead of inside `finally` (original placement),
    # keeping the cleanup path free of import statements.
    import shutil

    os.makedirs(output_dir, exist_ok=True)
    user_install_dir = tempfile.mkdtemp(prefix="lo_excel2pdf_")

    cmd = [
        "soffice",
        "--headless",
        "--norestore",
        f"-env:UserInstallation=file://{user_install_dir}",
        "--convert-to", "pdf",
        "--outdir", output_dir,
        input_path,
    ]

    try:
        result = subprocess.run(
            cmd, capture_output=True, text=True, timeout=120,
            # Override HOME so LibreOffice never touches the real profile.
            env={**os.environ, "HOME": user_install_dir},
        )

        input_basename = os.path.splitext(os.path.basename(input_path))[0]
        output_path = os.path.join(output_dir, f"{input_basename}.pdf")

        # LibreOffice can exit non-zero while still producing output, so the
        # presence of a non-empty file is the primary success signal.
        if os.path.exists(output_path) and os.path.getsize(output_path) > 0:
            logger.info(f"Excel→PDF conversion successful: {output_path}")
            return output_path

        if result.returncode != 0:
            stderr = result.stderr or ""
            # javaldx warnings are benign noise; strip them from the message.
            real_errors = [
                line for line in stderr.strip().splitlines()
                if not line.startswith("Warning: failed to launch javaldx")
            ]
            error_msg = "\n".join(real_errors) if real_errors else stderr
            raise PDFConvertError(f"Conversion failed: {error_msg or 'Unknown error'}")

        raise PDFConvertError("Output file was not created.")

    except subprocess.TimeoutExpired:
        raise PDFConvertError("Conversion timed out. File may be too large.")
    except FileNotFoundError:
        raise PDFConvertError("LibreOffice is not installed on the server.")
    finally:
        shutil.rmtree(user_install_dir, ignore_errors=True)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PowerPoint (PPTX) to PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
def pptx_to_pdf(input_path: str, output_dir: str) -> str:
    """Convert a PowerPoint file to PDF using LibreOffice headless.

    A throwaway LibreOffice user profile directory is created per call so
    concurrent conversions do not contend on the shared profile lock.

    Args:
        input_path: Path to the input PPTX/PPT file
        output_dir: Directory for the output file

    Returns:
        Path to the converted PDF

    Raises:
        PDFConvertError: If conversion fails
    """
    # Import at function top instead of inside `finally` (original placement),
    # keeping the cleanup path free of import statements.
    import shutil

    os.makedirs(output_dir, exist_ok=True)
    user_install_dir = tempfile.mkdtemp(prefix="lo_pptx2pdf_")

    cmd = [
        "soffice",
        "--headless",
        "--norestore",
        f"-env:UserInstallation=file://{user_install_dir}",
        "--convert-to", "pdf",
        "--outdir", output_dir,
        input_path,
    ]

    try:
        result = subprocess.run(
            cmd, capture_output=True, text=True, timeout=120,
            # Override HOME so LibreOffice never touches the real profile.
            env={**os.environ, "HOME": user_install_dir},
        )

        input_basename = os.path.splitext(os.path.basename(input_path))[0]
        output_path = os.path.join(output_dir, f"{input_basename}.pdf")

        # LibreOffice can exit non-zero while still producing output, so the
        # presence of a non-empty file is the primary success signal.
        if os.path.exists(output_path) and os.path.getsize(output_path) > 0:
            logger.info(f"PPTX→PDF conversion successful: {output_path}")
            return output_path

        if result.returncode != 0:
            stderr = result.stderr or ""
            # javaldx warnings are benign noise; strip them from the message.
            real_errors = [
                line for line in stderr.strip().splitlines()
                if not line.startswith("Warning: failed to launch javaldx")
            ]
            error_msg = "\n".join(real_errors) if real_errors else stderr
            raise PDFConvertError(f"Conversion failed: {error_msg or 'Unknown error'}")

        raise PDFConvertError("Output file was not created.")

    except subprocess.TimeoutExpired:
        raise PDFConvertError("Conversion timed out. File may be too large.")
    except FileNotFoundError:
        raise PDFConvertError("LibreOffice is not installed on the server.")
    finally:
        shutil.rmtree(user_install_dir, ignore_errors=True)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Sign PDF (overlay signature image on a page)
|
||||
# ---------------------------------------------------------------------------
|
||||
def sign_pdf(
    input_path: str,
    signature_path: str,
    output_path: str,
    page: int = 0,
    x: float = 100,
    y: float = 100,
    width: float = 200,
    height: float = 80,
) -> dict:
    """Overlay a signature image onto a PDF page.

    Args:
        input_path: Path to the input PDF
        signature_path: Path to the signature image (PNG with transparency)
        output_path: Path for the signed output PDF
        page: 0-based page index to place signature
        x: X coordinate (points from left)
        y: Y coordinate (points from bottom)
        width: Signature width in points
        height: Signature height in points

    Returns:
        dict with total_pages, output_size, and signed_page

    Raises:
        PDFConvertError: If signing fails
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter
        from reportlab.pdfgen import canvas as rl_canvas
        from reportlab.lib.utils import ImageReader

        reader = PdfReader(input_path)
        total_pages = len(reader.pages)
        if total_pages == 0:
            raise PDFConvertError("PDF has no pages.")
        if page < 0 or page >= total_pages:
            raise PDFConvertError(f"Page {page + 1} does not exist (PDF has {total_pages} pages).")

        target_page = reader.pages[page]
        page_box = target_page.mediabox
        page_width = float(page_box.width)
        page_height = float(page_box.height)
        # NOTE(review): coordinates assume the mediabox origin is (0, 0);
        # pages with a shifted origin would misplace the signature — confirm.

        # Build a one-page overlay PDF containing only the signature image.
        overlay_stream = io.BytesIO()
        c = rl_canvas.Canvas(overlay_stream, pagesize=(page_width, page_height))
        sig_img = ImageReader(signature_path)
        # mask="auto" preserves the PNG alpha channel as transparency.
        c.drawImage(sig_img, x, y, width=width, height=height, mask="auto")
        c.save()
        overlay_stream.seek(0)

        overlay_reader = PdfReader(overlay_stream)
        overlay_page = overlay_reader.pages[0]

        writer = PdfWriter()
        for i, pg in enumerate(reader.pages):
            if i == page:
                pg.merge_page(overlay_page)
            writer.add_page(pg)

        # os.makedirs("") raises FileNotFoundError, so guard against an
        # output path with no directory component (bare filename).
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)

        output_size = os.path.getsize(output_path)
        logger.info(f"Sign PDF: signature on page {page + 1} ({output_size} bytes)")
        return {"total_pages": total_pages, "output_size": output_size, "signed_page": page + 1}

    except PDFConvertError:
        raise
    except Exception as e:
        raise PDFConvertError(f"Failed to sign PDF: {str(e)}")
|
||||
316
backend/app/services/pdf_extra_service.py
Normal file
316
backend/app/services/pdf_extra_service.py
Normal file
@@ -0,0 +1,316 @@
|
||||
"""Extended PDF tools — Crop, Flatten, Repair, Metadata Editor."""
|
||||
import os
|
||||
import io
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PDFExtraError(Exception):
    """Raised when one of the extended PDF tools fails."""
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Crop PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
def crop_pdf(
    input_path: str,
    output_path: str,
    margin_left: float = 0,
    margin_right: float = 0,
    margin_top: float = 0,
    margin_bottom: float = 0,
    pages: str = "all",
) -> dict:
    """Crop margins from PDF pages.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the cropped output
        margin_left/right/top/bottom: Points to crop from each side
        pages: "all" or comma-separated page numbers (1-based)

    Returns:
        dict with total_pages, cropped_pages, and output_size

    Raises:
        PDFExtraError: If cropping fails (including margins that would
        leave no page area)
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter

        reader = PdfReader(input_path)
        writer = PdfWriter()
        total_pages = len(reader.pages)

        if total_pages == 0:
            raise PDFExtraError("PDF has no pages.")

        target_indices = _parse_pages(pages, total_pages)

        for i, page in enumerate(reader.pages):
            if i in target_indices:
                box = page.mediabox
                new_llx = float(box.lower_left[0]) + margin_left
                new_lly = float(box.lower_left[1]) + margin_bottom
                new_urx = float(box.upper_right[0]) - margin_right
                new_ury = float(box.upper_right[1]) - margin_top
                # Reject margins that would collapse or invert the page box;
                # previously this silently produced an invalid geometry.
                if new_llx >= new_urx or new_lly >= new_ury:
                    raise PDFExtraError(
                        f"Margins are too large for page {i + 1}: nothing would remain."
                    )
                box.lower_left = (new_llx, new_lly)
                box.upper_right = (new_urx, new_ury)
                page.mediabox = box
                page.cropbox = box
            writer.add_page(page)

        # os.makedirs("") raises FileNotFoundError, so guard against an
        # output path with no directory component (bare filename).
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)

        output_size = os.path.getsize(output_path)
        logger.info(f"Crop PDF: {len(target_indices)} pages cropped ({output_size} bytes)")
        return {
            "total_pages": total_pages,
            "cropped_pages": len(target_indices),
            "output_size": output_size,
        }

    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to crop PDF: {str(e)}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Flatten PDF (remove interactive form fields, annotations)
|
||||
# ---------------------------------------------------------------------------
|
||||
def flatten_pdf(input_path: str, output_path: str) -> dict:
    """Flatten a PDF by stripping its interactive elements.

    Removes page annotations and the document-level AcroForm so the output
    contains no interactive form fields, links, or comments. Note: pages are
    not re-rendered — field values that exist only as annotation appearance
    streams are removed along with the annotations, not burned into the page.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the flattened output

    Returns:
        dict with total_pages and output_size

    Raises:
        PDFExtraError: If flatten fails
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter

        reader = PdfReader(input_path)
        writer = PdfWriter()
        total_pages = len(reader.pages)

        if total_pages == 0:
            raise PDFExtraError("PDF has no pages.")

        for page in reader.pages:
            # Dropping /Annots removes widgets, links, and comments.
            if "/Annots" in page:
                del page["/Annots"]
            writer.add_page(page)

        # Remove AcroForm (interactive forms) at document level.
        if "/AcroForm" in writer._root_object:
            del writer._root_object["/AcroForm"]

        # os.makedirs("") raises FileNotFoundError, so guard against an
        # output path with no directory component (bare filename).
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)

        output_size = os.path.getsize(output_path)
        logger.info(f"Flatten PDF: {total_pages} pages ({output_size} bytes)")
        return {"total_pages": total_pages, "output_size": output_size}

    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to flatten PDF: {str(e)}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Repair PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
def repair_pdf(input_path: str, output_path: str) -> dict:
    """Attempt to repair a damaged PDF by re-writing it.

    Reads with strict=False so the parser tolerates minor structural damage,
    copies every recoverable page into a fresh file, and skips pages that
    cannot be copied.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the repaired output

    Returns:
        dict with total_pages, recovered_pages, output_size, and repaired flag

    Raises:
        PDFExtraError: If repair fails
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter
        from PyPDF2.errors import PdfReadError

        try:
            # strict=False lets the parser recover from minor corruption.
            reader = PdfReader(input_path, strict=False)
        except PdfReadError as e:
            raise PDFExtraError(f"Cannot read PDF — file may be severely corrupted: {str(e)}")

        writer = PdfWriter()
        total_pages = len(reader.pages)

        if total_pages == 0:
            raise PDFExtraError("PDF has no recoverable pages.")

        recovered = 0
        for i, page in enumerate(reader.pages):
            try:
                writer.add_page(page)
                recovered += 1
            except Exception:
                # Best-effort: an unreadable page is skipped, not fatal.
                logger.warning(f"Repair: skipped unrecoverable page {i + 1}")

        if recovered == 0:
            raise PDFExtraError("No pages could be recovered from the PDF.")

        # Copy metadata if available; a failure here must not abort the repair.
        try:
            if reader.metadata:
                writer.add_metadata(reader.metadata)
        except Exception:
            pass

        # os.makedirs("") raises FileNotFoundError, so guard against an
        # output path with no directory component (bare filename).
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)

        output_size = os.path.getsize(output_path)
        logger.info(f"Repair PDF: {recovered}/{total_pages} pages recovered ({output_size} bytes)")
        return {
            "total_pages": total_pages,
            "recovered_pages": recovered,
            "output_size": output_size,
            "repaired": True,
        }

    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to repair PDF: {str(e)}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PDF Metadata Editor
|
||||
# ---------------------------------------------------------------------------
|
||||
def edit_pdf_metadata(
    input_path: str,
    output_path: str,
    title: str | None = None,
    author: str | None = None,
    subject: str | None = None,
    keywords: str | None = None,
    creator: str | None = None,
) -> dict:
    """Edit PDF metadata fields.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the output PDF
        title/author/subject/keywords/creator: New metadata values (None = keep existing)

    Returns:
        dict with updated metadata and output_size

    Raises:
        PDFExtraError: If metadata edit fails
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter

        reader = PdfReader(input_path)
        writer = PdfWriter()

        for page in reader.pages:
            writer.add_page(page)

        # Build metadata dict of only the fields the caller wants changed.
        metadata = {}
        if title is not None:
            metadata["/Title"] = title
        if author is not None:
            metadata["/Author"] = author
        if subject is not None:
            metadata["/Subject"] = subject
        if keywords is not None:
            metadata["/Keywords"] = keywords
        if creator is not None:
            metadata["/Creator"] = creator

        if not metadata:
            raise PDFExtraError("At least one metadata field must be provided.")

        # Carry over the existing metadata first so unspecified fields are
        # actually kept (the documented "None = keep existing" contract —
        # previously they were dropped), then overlay the caller's changes.
        try:
            if reader.metadata:
                writer.add_metadata(reader.metadata)
        except Exception:
            pass
        writer.add_metadata(metadata)

        # os.makedirs("") raises FileNotFoundError, so guard against an
        # output path with no directory component (bare filename).
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)

        output_size = os.path.getsize(output_path)

        # Read back to confirm what actually landed in the file.
        current_meta = {}
        try:
            r2 = PdfReader(output_path)
            if r2.metadata:
                current_meta = {
                    "title": r2.metadata.get("/Title", ""),
                    "author": r2.metadata.get("/Author", ""),
                    "subject": r2.metadata.get("/Subject", ""),
                    "keywords": r2.metadata.get("/Keywords", ""),
                    "creator": r2.metadata.get("/Creator", ""),
                }
        except Exception:
            pass

        logger.info(f"Edit metadata: updated {len(metadata)} fields ({output_size} bytes)")
        return {
            "total_pages": len(reader.pages),
            "output_size": output_size,
            "metadata": current_meta,
        }

    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to edit PDF metadata: {str(e)}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
def _parse_pages(pages_spec: str, total_pages: int) -> set[int]:
    """Parse a page specification into a set of 0-based page indices.

    Accepts "all" (case-insensitive), single numbers, and N-M ranges,
    comma-separated. Out-of-range entries and malformed tokens are ignored.
    """
    if pages_spec.strip().lower() == "all":
        return set(range(total_pages))

    selected: set[int] = set()
    for token in pages_spec.split(","):
        token = token.strip()
        try:
            if "-" in token:
                lo, hi = token.split("-", 1)
                first = max(1, int(lo))
                last = min(total_pages, int(hi))
                # Convert the 1-based inclusive range to 0-based indices.
                selected.update(range(first - 1, last))
            else:
                num = int(token)
                if 1 <= num <= total_pages:
                    selected.add(num - 1)
        except ValueError:
            # Non-numeric token — skip it, matching the lenient contract.
            continue
    return selected
|
||||
@@ -135,3 +135,21 @@ def get_all_ratings_summary() -> list[dict]:
|
||||
}
|
||||
for row in rows
|
||||
]
|
||||
|
||||
|
||||
def get_global_rating_summary() -> dict:
    """Return aggregate rating stats across all rated tools."""
    with _connect() as conn:
        row = conn.execute(
            """
            SELECT
                COUNT(*) AS count,
                COALESCE(AVG(rating), 0) AS average
            FROM tool_ratings
            """
        ).fetchone()

    # Defensive: fetchone() should always yield one row for an aggregate,
    # but fall back to zeros rather than crash if it does not.
    if row is None:
        return {"rating_count": 0, "average_rating": 0.0}
    return {
        "rating_count": int(row["count"]),
        "average_rating": round(row["average"], 1),
    }
|
||||
|
||||
220
backend/app/services/stripe_service.py
Normal file
220
backend/app/services/stripe_service.py
Normal file
@@ -0,0 +1,220 @@
|
||||
"""Stripe payment service — checkout sessions, webhooks, and subscription management."""
|
||||
import logging
|
||||
import os
|
||||
|
||||
import stripe
|
||||
from flask import current_app
|
||||
|
||||
from app.services.account_service import update_user_plan, get_user_by_id, _connect, _utc_now
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _init_stripe():
    """Configure stripe with the app's secret key."""
    # Re-reads the key on every call so config changes take effect.
    secret_key = current_app.config.get("STRIPE_SECRET_KEY", "")
    stripe.api_key = secret_key
|
||||
|
||||
|
||||
def _ensure_stripe_columns():
    """Add stripe_customer_id and stripe_subscription_id columns if missing."""
    conn = _connect()
    try:
        existing = {row["name"] for row in conn.execute("PRAGMA table_info(users)").fetchall()}
        if "stripe_customer_id" not in existing:
            conn.execute("ALTER TABLE users ADD COLUMN stripe_customer_id TEXT")
        if "stripe_subscription_id" not in existing:
            conn.execute("ALTER TABLE users ADD COLUMN stripe_subscription_id TEXT")
        conn.commit()
    finally:
        conn.close()
|
||||
|
||||
|
||||
def init_stripe_db():
    """Initialize stripe-related DB columns."""
    # Delegates to the column-migration helper; safe to call repeatedly.
    _ensure_stripe_columns()
|
||||
|
||||
|
||||
def _get_or_create_customer(user_id: int) -> str:
    """Get existing Stripe customer or create one.

    NOTE(review): two concurrent calls for the same user could each create
    a customer — presumably acceptable at current traffic; confirm.
    """
    _init_stripe()

    conn = _connect()
    try:
        user_row = conn.execute(
            "SELECT email, stripe_customer_id FROM users WHERE id = ?",
            (user_id,),
        ).fetchone()
    finally:
        conn.close()

    if user_row is None:
        raise ValueError("User not found.")

    existing_id = user_row["stripe_customer_id"]
    if existing_id:
        return existing_id

    # No customer on record yet — create one and persist its id.
    new_customer = stripe.Customer.create(
        email=user_row["email"],
        metadata={"user_id": str(user_id)},
    )

    conn = _connect()
    try:
        conn.execute(
            "UPDATE users SET stripe_customer_id = ?, updated_at = ? WHERE id = ?",
            (new_customer.id, _utc_now(), user_id),
        )
        conn.commit()
    finally:
        conn.close()

    return new_customer.id
|
||||
|
||||
|
||||
def create_checkout_session(user_id: int, price_id: str, success_url: str, cancel_url: str) -> str:
    """Create a Stripe Checkout Session and return the URL."""
    _init_stripe()
    customer = _get_or_create_customer(user_id)

    checkout = stripe.checkout.Session.create(
        customer=customer,
        payment_method_types=["card"],
        line_items=[{"price": price_id, "quantity": 1}],
        mode="subscription",
        success_url=success_url,
        cancel_url=cancel_url,
        # user_id in metadata lets the webhook map the session back to a user.
        metadata={"user_id": str(user_id)},
    )
    return checkout.url
|
||||
|
||||
|
||||
def create_portal_session(user_id: int, return_url: str) -> str:
    """Create a Stripe Customer Portal session for managing subscriptions."""
    _init_stripe()
    customer = _get_or_create_customer(user_id)

    portal = stripe.billing_portal.Session.create(
        customer=customer,
        return_url=return_url,
    )
    return portal.url
|
||||
|
||||
|
||||
def handle_webhook_event(payload: bytes, sig_header: str) -> dict:
    """Process a Stripe webhook event. Returns a status dict."""
    webhook_secret = current_app.config.get("STRIPE_WEBHOOK_SECRET", "")
    if not webhook_secret:
        logger.warning("STRIPE_WEBHOOK_SECRET not configured — ignoring webhook.")
        return {"status": "ignored", "reason": "no webhook secret"}

    try:
        event = stripe.Webhook.construct_event(payload, sig_header, webhook_secret)
    except stripe.SignatureVerificationError:
        logger.warning("Stripe webhook signature verification failed.")
        return {"status": "error", "reason": "signature_failed"}
    except ValueError:
        logger.warning("Invalid Stripe webhook payload.")
        return {"status": "error", "reason": "invalid_payload"}

    event_type = event["type"]
    data_object = event["data"]["object"]

    # Dispatch to the matching handler; unknown event types are acknowledged
    # without action so Stripe does not keep retrying them.
    handlers = {
        "checkout.session.completed": _handle_checkout_completed,
        "customer.subscription.updated": _handle_subscription_updated,
        "customer.subscription.deleted": _handle_subscription_deleted,
        "invoice.payment_failed": _handle_payment_failed,
    }
    handler = handlers.get(event_type)
    if handler is not None:
        handler(data_object)

    return {"status": "ok", "event_type": event_type}
|
||||
|
||||
|
||||
def _find_user_by_customer_id(customer_id: str) -> dict | None:
    """Find user by Stripe customer ID; None when no row matches."""
    conn = _connect()
    try:
        match = conn.execute(
            "SELECT id, email, plan, created_at FROM users WHERE stripe_customer_id = ?",
            (customer_id,),
        ).fetchone()
    finally:
        conn.close()

    if match is None:
        return None
    return dict(match)
|
||||
|
||||
|
||||
def _handle_checkout_completed(session: dict):
    """Handle successful checkout — activate Pro plan.

    Prefers the user_id stored in the session metadata; falls back to
    matching the Stripe customer id against the users table.
    """
    customer_id = session.get("customer")
    subscription_id = session.get("subscription")
    user_id = session.get("metadata", {}).get("user_id")

    def _activate_pro(uid: int, log_msg: str):
        # Single place for the plan upgrade — previously this UPDATE was
        # duplicated verbatim in both lookup branches.
        conn = _connect()
        try:
            conn.execute(
                "UPDATE users SET plan = 'pro', stripe_subscription_id = ?, updated_at = ? WHERE id = ?",
                (subscription_id, _utc_now(), uid),
            )
            conn.commit()
        finally:
            conn.close()
        logger.info(log_msg, uid)

    if user_id:
        _activate_pro(int(user_id), "User %s upgraded to Pro via checkout.")
    elif customer_id:
        user = _find_user_by_customer_id(customer_id)
        if user:
            _activate_pro(user["id"], "User %s upgraded to Pro via checkout (customer match).")
|
||||
|
||||
|
||||
def _handle_subscription_updated(subscription: dict):
    """Handle subscription changes (upgrade/downgrade)."""
    status = subscription.get("status")
    user = _find_user_by_customer_id(subscription.get("customer"))
    if user is None:
        return

    uid = user["id"]
    if status in ("active", "trialing"):
        # Paying (or trialing) subscriber — ensure the Pro plan is on.
        update_user_plan(uid, "pro")
        logger.info("User %s subscription active — Pro plan.", uid)
    elif status in ("past_due", "unpaid"):
        # Plan is left untouched for now; just record the billing trouble.
        logger.warning("User %s subscription %s.", uid, status)
    elif status in ("canceled", "incomplete_expired"):
        update_user_plan(uid, "free")
        logger.info("User %s subscription ended — Free plan.", uid)
|
||||
|
||||
|
||||
def _handle_subscription_deleted(subscription: dict):
    """Handle subscription cancellation."""
    user = _find_user_by_customer_id(subscription.get("customer"))
    if not user:
        return

    # Downgrade the plan, then clear the stored subscription reference.
    update_user_plan(user["id"], "free")
    conn = _connect()
    try:
        conn.execute(
            "UPDATE users SET stripe_subscription_id = NULL, updated_at = ? WHERE id = ?",
            (_utc_now(), user["id"]),
        )
        conn.commit()
    finally:
        conn.close()
    logger.info("User %s subscription deleted — downgraded to Free.", user["id"])
|
||||
|
||||
|
||||
def _handle_payment_failed(invoice: dict):
    """Log payment failures."""
    customer = invoice.get("customer")
    affected = _find_user_by_customer_id(customer)
    if affected:
        logger.warning("Payment failed for user %s (customer %s).", affected["id"], customer)
|
||||
Reference in New Issue
Block a user