تم الانتهاء من آخر دفعة تحسينات على المشروع، وتشمل:

تحويل لوحة الإدارة الداخلية من secret header إلى session auth حقيقي مع صلاحيات admin.
إضافة دعم إدارة الأدوار من داخل لوحة الإدارة نفسها، مع حماية الحسابات المعتمدة عبر INTERNAL_ADMIN_EMAILS.
تحسين بيانات المستخدم في الواجهة والباكند لتشمل role وis_allowlisted_admin.
إضافة اختبار frontend مخصص لصفحة /internal/admin بدل الاعتماد فقط على build واختبار routes.
تحسين إضافي في الأداء عبر إزالة الاعتماد على pdfjs-dist/pdf.worker في عدّ صفحات PDF واستبداله بمسار أخف باستخدام pdf-lib.
تحسين تقسيم الـ chunks في build لتقليل أثر الحزم الكبيرة وفصل أجزاء مثل network, icons, pdf-core, وeditor.
التحقق الذي تم:

نجاح build للواجهة.
نجاح اختبار صفحة الإدارة الداخلية في frontend.
نجاح اختبارات auth/admin في backend.
نجاح full backend suite مسبقًا مع EXIT:0.
ولو تريد نسخة أقصر جدًا، استخدم هذه:

آخر التحديثات:
تم تحسين نظام الإدارة الداخلية ليعتمد على صلاحيات وجلسات حقيقية بدل secret header، مع إضافة إدارة أدوار من لوحة admin نفسها، وإضافة اختبارات frontend مخصصة للوحة، وتحسين أداء الواجهة عبر إزالة pdf.worker وتحسين تقسيم الـ chunks في build. جميع الاختبارات والتحققات الأساسية المطلوبة نجحت.
This commit is contained in:
Your Name
2026-03-16 13:50:45 +02:00
parent b5d97324a9
commit 957d37838c
85 changed files with 9915 additions and 119 deletions

View File

@@ -9,6 +9,29 @@ from app.services.account_service import init_account_db
from app.services.rating_service import init_ratings_db
from app.services.ai_cost_service import init_ai_cost_db
from app.services.site_assistant_service import init_site_assistant_db
from app.services.contact_service import init_contact_db
from app.services.stripe_service import init_stripe_db
def _init_sentry(app):
"""Initialize Sentry error monitoring if DSN is configured."""
dsn = app.config.get("SENTRY_DSN", "")
if not dsn:
return
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.celery import CeleryIntegration
sentry_sdk.init(
dsn=dsn,
environment=app.config.get("SENTRY_ENVIRONMENT", "development"),
integrations=[FlaskIntegration(), CeleryIntegration()],
traces_sample_rate=0.1,
send_default_pii=False,
)
except ImportError:
app.logger.warning("sentry-sdk not installed — monitoring disabled.")
def create_app(config_name=None):
@@ -19,6 +42,9 @@ def create_app(config_name=None):
app = Flask(__name__)
app.config.from_object(config[config_name])
# Initialize Sentry early
_init_sentry(app)
# Create upload/output/database directories
os.makedirs(app.config["UPLOAD_FOLDER"], exist_ok=True)
os.makedirs(app.config["OUTPUT_FOLDER"], exist_ok=True)
@@ -79,6 +105,8 @@ def create_app(config_name=None):
init_ratings_db()
init_ai_cost_db()
init_site_assistant_db()
init_contact_db()
init_stripe_db()
# Register blueprints
from app.routes.health import health_bp
@@ -106,6 +134,13 @@ def create_app(config_name=None):
from app.routes.pdf_ai import pdf_ai_bp
from app.routes.rating import rating_bp
from app.routes.assistant import assistant_bp
from app.routes.contact import contact_bp
from app.routes.stripe import stripe_bp
from app.routes.stats import stats_bp
from app.routes.pdf_convert import pdf_convert_bp
from app.routes.pdf_extra import pdf_extra_bp
from app.routes.image_extra import image_extra_bp
from app.routes.barcode import barcode_bp
app.register_blueprint(health_bp, url_prefix="/api")
app.register_blueprint(auth_bp, url_prefix="/api/auth")
@@ -132,5 +167,12 @@ def create_app(config_name=None):
app.register_blueprint(pdf_ai_bp, url_prefix="/api/pdf-ai")
app.register_blueprint(rating_bp, url_prefix="/api/ratings")
app.register_blueprint(assistant_bp, url_prefix="/api/assistant")
app.register_blueprint(contact_bp, url_prefix="/api/contact")
app.register_blueprint(stripe_bp, url_prefix="/api/stripe")
app.register_blueprint(stats_bp, url_prefix="/api/stats")
app.register_blueprint(pdf_convert_bp, url_prefix="/api/convert")
app.register_blueprint(pdf_extra_bp, url_prefix="/api/pdf-tools")
app.register_blueprint(image_extra_bp, url_prefix="/api/image")
app.register_blueprint(barcode_bp, url_prefix="/api/barcode")
return app

View File

@@ -40,6 +40,10 @@ def init_celery(app):
"app.tasks.qrcode_tasks.*": {"queue": "default"},
"app.tasks.html_to_pdf_tasks.*": {"queue": "convert"},
"app.tasks.pdf_ai_tasks.*": {"queue": "default"},
"app.tasks.pdf_convert_tasks.*": {"queue": "convert"},
"app.tasks.pdf_extra_tasks.*": {"queue": "pdf_tools"},
"app.tasks.image_extra_tasks.*": {"queue": "image"},
"app.tasks.barcode_tasks.*": {"queue": "default"},
}
# Celery Beat — periodic tasks

View File

@@ -1,27 +1,100 @@
"""Internal admin endpoints secured by INTERNAL_ADMIN_SECRET."""
from flask import Blueprint, current_app, jsonify, request
"""Internal admin endpoints secured by authenticated admin sessions."""
from flask import Blueprint, jsonify, request
from app.extensions import limiter
from app.services.account_service import get_user_by_id, update_user_plan
from app.services.account_service import get_user_by_id, is_user_admin, set_user_role, update_user_plan
from app.services.admin_service import (
get_admin_overview,
list_admin_contacts,
list_admin_users,
mark_admin_contact_read,
)
from app.services.ai_cost_service import get_monthly_spend
from app.utils.auth import get_current_user_id
admin_bp = Blueprint("admin", __name__)
def _check_admin_secret() -> bool:
    """Return True when the X-Admin-Secret request header matches the configured secret.

    Legacy header-based check; an empty configured secret disables it entirely.
    """
    expected = current_app.config.get("INTERNAL_ADMIN_SECRET", "")
    provided = request.headers.get("X-Admin-Secret", "")
    return bool(expected) and provided == expected
def _require_admin_session():
    """Return a (response, status) error pair unless the caller is a logged-in admin.

    Returns None when the request may proceed.
    """
    user_id = get_current_user_id()
    if user_id is None:
        # No session at all.
        return jsonify({"error": "Authentication required."}), 401
    if is_user_admin(user_id):
        return None  # Authorized.
    return jsonify({"error": "Admin access required."}), 403
@admin_bp.route("/overview", methods=["GET"])
@limiter.limit("60/hour")
def admin_overview_route():
    """Serve the internal admin dashboard overview payload."""
    denied = _require_admin_session()
    if denied is not None:
        return denied
    return jsonify(get_admin_overview()), 200
@admin_bp.route("/users", methods=["GET"])
@limiter.limit("60/hour")
def admin_users_route():
    """List recent users plus usage summaries for the admin dashboard.

    Query params: 'query' (search filter), 'limit' (1..100, default 25).
    """
    denied = _require_admin_session()
    if denied is not None:
        return denied
    query = request.args.get("query", "")
    try:
        requested = int(request.args.get("limit", 25))
    except ValueError:
        requested = 25
    else:
        # Clamp to the supported window of 1..100 rows.
        requested = min(max(requested, 1), 100)
    return jsonify({"items": list_admin_users(limit=requested, query=query)}), 200
@admin_bp.route("/contacts", methods=["GET"])
@limiter.limit("60/hour")
def admin_contacts_route():
    """Serve one page of contact messages for the admin dashboard.

    Query params: 'page' (>= 1) and 'per_page' (1..100, default 20).
    """
    denied = _require_admin_session()
    if denied is not None:
        return denied

    def _int_arg(name, default):
        # Any non-integer query value falls back to the default.
        try:
            return int(request.args.get(name, default))
        except ValueError:
            return default

    page = max(1, _int_arg("page", 1))
    per_page = max(1, min(_int_arg("per_page", 20), 100))
    return jsonify(list_admin_contacts(page=page, per_page=per_page)), 200
@admin_bp.route("/contacts/<int:message_id>/read", methods=["POST"])
@limiter.limit("120/hour")
def admin_contacts_mark_read_route(message_id: int):
    """Flag a single contact message as read; 404 when it does not exist."""
    denied = _require_admin_session()
    if denied is not None:
        return denied
    if mark_admin_contact_read(message_id):
        return jsonify({"message": "Message marked as read."}), 200
    return jsonify({"error": "Message not found."}), 404
@admin_bp.route("/users/<int:user_id>/plan", methods=["POST"])
@limiter.limit("30/hour")
def update_plan_route(user_id: int):
"""Change the plan for one user — secured by X-Admin-Secret header."""
if not _check_admin_secret():
return jsonify({"error": "Unauthorized."}), 401
"""Change the plan for one user — admin session required."""
auth_error = _require_admin_session()
if auth_error:
return auth_error
data = request.get_json(silent=True) or {}
plan = str(data.get("plan", "")).strip().lower()
@@ -40,12 +113,45 @@ def update_plan_route(user_id: int):
return jsonify({"message": "Plan updated.", "user": updated}), 200
@admin_bp.route("/users/<int:user_id>/role", methods=["POST"])
@limiter.limit("30/hour")
def update_role_route(user_id: int):
    """Change the role for one user — admin session required.

    Body: JSON {"role": "user"|"admin"}.
    Returns 400 for invalid roles, allowlisted targets, or self-demotion;
    404 when the target user does not exist.
    """
    auth_error = _require_admin_session()
    if auth_error:
        return auth_error
    actor_user_id = get_current_user_id()
    data = request.get_json(silent=True) or {}
    role = str(data.get("role", "")).strip().lower()
    if role not in ("user", "admin"):
        return jsonify({"error": "Role must be 'user' or 'admin'."}), 400
    user = get_user_by_id(user_id)
    if user is None:
        return jsonify({"error": "User not found."}), 404
    # Allowlisted admins are controlled by config (INTERNAL_ADMIN_EMAILS),
    # never by this endpoint.
    if bool(user.get("is_allowlisted_admin")):
        return jsonify({"error": "Allowlisted admin access is managed by INTERNAL_ADMIN_EMAILS."}), 400
    # Guard against an admin locking themselves out by demoting their own account.
    if actor_user_id == user_id and role != "admin":
        return jsonify({"error": "You cannot remove your own admin role."}), 400
    try:
        updated = set_user_role(user_id, role)
    except ValueError as exc:
        # set_user_role rejects invalid transitions with ValueError.
        return jsonify({"error": str(exc)}), 400
    return jsonify({"message": "Role updated.", "user": updated}), 200
@admin_bp.route("/ai-cost", methods=["GET"])
@limiter.limit("60/hour")
def ai_cost_dashboard():
    """Return the current month's AI spending summary.

    Fix: the legacy X-Admin-Secret header gate was left stacked on top of the
    new session-based gate, so this route demanded BOTH the old header and an
    admin session. Every other route in this module now uses only
    _require_admin_session(); this one does the same.
    """
    auth_error = _require_admin_session()
    if auth_error:
        return auth_error
    spend = get_monthly_spend()
    return jsonify(spend), 200

View File

@@ -0,0 +1,70 @@
"""Routes for barcode generation."""
from flask import Blueprint, request, jsonify
from app.extensions import limiter
from app.services.policy_service import (
assert_quota_available,
build_task_tracking_kwargs,
PolicyError,
record_accepted_usage,
resolve_web_actor,
)
from app.services.barcode_service import SUPPORTED_BARCODE_TYPES
from app.tasks.barcode_tasks import generate_barcode_task
from app.utils.sanitizer import generate_safe_path
barcode_bp = Blueprint("barcode", __name__)
@barcode_bp.route("/generate", methods=["POST"])
@limiter.limit("20/minute")
def generate_barcode_route():
    """Queue asynchronous generation of a barcode image.

    Accepts JSON or form data:
    - 'data': String to encode (required, max 200 characters)
    - 'type' (optional): Barcode type (default: code128)
    - 'format' (optional): "png" or "svg" (default: png)
    """
    # JSON and form submissions expose the same .get interface.
    source = request.get_json() if request.is_json else request.form
    data = source.get("data", "").strip()
    barcode_type = source.get("type", "code128").lower()
    output_format = source.get("format", "png").lower()

    if not data:
        return jsonify({"error": "Barcode data is required."}), 400
    if len(data) > 200:
        return jsonify({"error": "Barcode data is too long (max 200 characters)."}), 400
    if barcode_type not in SUPPORTED_BARCODE_TYPES:
        return jsonify({
            "error": f"Unsupported barcode type. Supported: {', '.join(SUPPORTED_BARCODE_TYPES)}"
        }), 400
    if output_format not in ("png", "svg"):
        output_format = "png"  # Unknown formats silently fall back to PNG.

    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code

    task_id, _ = generate_safe_path("tmp", folder_type="upload")
    task = generate_barcode_task.delay(
        data, barcode_type, task_id, output_format,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "barcode", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Barcode generation started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202

View File

@@ -0,0 +1,43 @@
"""Contact form routes."""
import logging
import re
from flask import Blueprint, jsonify, request
from app.extensions import limiter
from app.services.contact_service import save_message
logger = logging.getLogger(__name__)
contact_bp = Blueprint("contact", __name__)
EMAIL_RE = re.compile(r"^[a-zA-Z0-9._%+\-]+@[a-zA-Z0-9.\-]+\.[a-zA-Z]{2,}$")
@contact_bp.route("/submit", methods=["POST"])
@limiter.limit("5/hour", override_defaults=True)
def submit_contact():
    """Validate and persist a contact form submission.

    Returns 201 with the saved-message payload, or 400 with the full list of
    validation errors (first one duplicated under 'error').
    """
    payload = request.get_json(silent=True) or {}

    def _field(key, default=""):
        # Treat missing and null values alike, trimming surrounding whitespace.
        return (payload.get(key) or default).strip()

    name = _field("name")
    email = _field("email")
    category = _field("category", "general")
    subject = _field("subject")
    message = _field("message")

    errors = []
    if not name or len(name) > 200:
        errors.append("Name is required (max 200 characters).")
    if not email or not EMAIL_RE.match(email):
        errors.append("A valid email address is required.")
    if not subject or len(subject) > 500:
        errors.append("Subject is required (max 500 characters).")
    if not message or len(message) > 5000:
        errors.append("Message is required (max 5000 characters).")
    if errors:
        return jsonify({"error": errors[0], "errors": errors}), 400

    result = save_message(name, email, category, subject, message)
    return jsonify({"message": "Message sent successfully.", **result}), 201

View File

@@ -0,0 +1,147 @@
"""Routes for image extra tools — Crop, Rotate/Flip."""
from flask import Blueprint, request, jsonify
from app.extensions import limiter
from app.services.policy_service import (
assert_quota_available,
build_task_tracking_kwargs,
PolicyError,
record_accepted_usage,
resolve_web_actor,
validate_actor_file,
)
from app.utils.file_validator import FileValidationError
from app.utils.sanitizer import generate_safe_path
from app.tasks.image_extra_tasks import crop_image_task, rotate_flip_image_task
image_extra_bp = Blueprint("image_extra", __name__)
ALLOWED_IMAGE_TYPES = ["png", "jpg", "jpeg", "webp"]
# ---------------------------------------------------------------------------
# Image Crop — POST /api/image/crop
# ---------------------------------------------------------------------------
@image_extra_bp.route("/crop", methods=["POST"])
@limiter.limit("10/minute")
def crop_image_route():
    """Queue an asynchronous crop of an uploaded image.

    Accepts: multipart/form-data with:
    - 'file': Image file
    - 'left', 'top', 'right', 'bottom': Crop rectangle in pixels
    - 'quality' (optional): output quality 1..100 (default: 85)
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    file = request.files["file"]

    # All four edges default to 0 and must parse as integers.
    try:
        box = {edge: int(request.form.get(edge, 0))
               for edge in ("left", "top", "right", "bottom")}
    except (ValueError, TypeError):
        return jsonify({"error": "Crop dimensions must be integers."}), 400
    if box["right"] <= box["left"] or box["bottom"] <= box["top"]:
        return jsonify({"error": "Invalid crop area: right > left and bottom > top required."}), 400

    # Clamp quality into 1..100; non-numeric input falls back to 85.
    try:
        quality = max(1, min(100, int(request.form.get("quality", 85))))
    except ValueError:
        quality = 85

    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code
    try:
        original_filename, ext = validate_actor_file(
            file, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
        )
    except FileValidationError as err:
        return jsonify({"error": err.message}), err.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    file.save(input_path)
    task = crop_image_task.delay(
        input_path, task_id, original_filename,
        box["left"], box["top"], box["right"], box["bottom"], quality,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "image-crop", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Cropping started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
# ---------------------------------------------------------------------------
# Image Rotate/Flip — POST /api/image/rotate-flip
# ---------------------------------------------------------------------------
@image_extra_bp.route("/rotate-flip", methods=["POST"])
@limiter.limit("10/minute")
def rotate_flip_image_route():
    """Rotate and/or flip an image.

    Accepts: multipart/form-data with:
    - 'file': Image file
    - 'rotation' (optional): 0, 90, 180, or 270 (default: 0)
    - 'flip_horizontal' (optional): "true"/"false" (default: false)
    - 'flip_vertical' (optional): "true"/"false" (default: false)
    - 'quality' (optional): output quality 1..100 (default: 85)
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    file = request.files["file"]
    # Non-numeric rotation silently falls back to 0 (no rotation).
    try:
        rotation = int(request.form.get("rotation", 0))
    except ValueError:
        rotation = 0
    if rotation not in (0, 90, 180, 270):
        return jsonify({"error": "Rotation must be 0, 90, 180, or 270 degrees."}), 400
    flip_horizontal = request.form.get("flip_horizontal", "false").lower() == "true"
    flip_vertical = request.form.get("flip_vertical", "false").lower() == "true"
    # Reject no-op requests: at least one transformation must be selected.
    if rotation == 0 and not flip_horizontal and not flip_vertical:
        return jsonify({"error": "At least one transformation is required."}), 400
    # Clamp quality into 1..100; non-numeric input falls back to 85.
    try:
        quality = max(1, min(100, int(request.form.get("quality", 85))))
    except ValueError:
        quality = 85
    actor = resolve_web_actor()
    # Quota is enforced before the upload is validated or written to disk.
    try:
        assert_quota_available(actor)
    except PolicyError as e:
        return jsonify({"error": e.message}), e.status_code
    try:
        original_filename, ext = validate_actor_file(
            file, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
        )
    except FileValidationError as e:
        return jsonify({"error": e.message}), e.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    file.save(input_path)
    # Heavy work runs asynchronously in Celery; the client polls task status.
    task = rotate_flip_image_task.delay(
        input_path, task_id, original_filename,
        rotation, flip_horizontal, flip_vertical, quality,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "image-rotate-flip", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Transformation started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202

View File

@@ -0,0 +1,217 @@
"""Routes for new PDF conversions — PDF↔PPTX, Excel→PDF, Sign PDF."""
import os
import uuid
from flask import Blueprint, request, jsonify, current_app
from app.extensions import limiter
from app.services.policy_service import (
assert_quota_available,
build_task_tracking_kwargs,
PolicyError,
record_accepted_usage,
resolve_web_actor,
validate_actor_file,
)
from app.utils.file_validator import FileValidationError
from app.utils.sanitizer import generate_safe_path
from app.tasks.pdf_convert_tasks import (
pdf_to_pptx_task,
excel_to_pdf_task,
pptx_to_pdf_task,
sign_pdf_task,
)
pdf_convert_bp = Blueprint("pdf_convert", __name__)
ALLOWED_IMAGE_TYPES = ["png", "jpg", "jpeg", "webp"]
# ---------------------------------------------------------------------------
# PDF to PowerPoint — POST /api/convert/pdf-to-pptx
# ---------------------------------------------------------------------------
@pdf_convert_bp.route("/pdf-to-pptx", methods=["POST"])
@limiter.limit("10/minute")
def pdf_to_pptx_route():
    """Queue an asynchronous PDF → PowerPoint (PPTX) conversion.

    Accepts: multipart/form-data with 'file' (PDF).
    Returns 202 with a Celery task id to poll.
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]

    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as err:
        return jsonify({"error": err.message}), err.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    task = pdf_to_pptx_task.delay(
        input_path, task_id, original_filename,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "pdf-to-pptx", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Conversion started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
# ---------------------------------------------------------------------------
# Excel to PDF — POST /api/convert/excel-to-pdf
# ---------------------------------------------------------------------------
@pdf_convert_bp.route("/excel-to-pdf", methods=["POST"])
@limiter.limit("10/minute")
def excel_to_pdf_route():
    """Convert an Excel file (.xlsx/.xls) to PDF.

    Accepts: multipart/form-data with 'file'.
    Returns 202 with a Celery task id to poll, or 4xx/quota errors.
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    file = request.files["file"]
    actor = resolve_web_actor()
    # Quota is enforced before the upload is validated or written to disk.
    try:
        assert_quota_available(actor)
    except PolicyError as e:
        return jsonify({"error": e.message}), e.status_code
    try:
        original_filename, ext = validate_actor_file(
            file, allowed_types=["xlsx", "xls"], actor=actor
        )
    except FileValidationError as e:
        return jsonify({"error": e.message}), e.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    file.save(input_path)
    # Conversion runs asynchronously in Celery; the client polls task status.
    task = excel_to_pdf_task.delay(
        input_path, task_id, original_filename,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "excel-to-pdf", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Conversion started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
# ---------------------------------------------------------------------------
# PowerPoint to PDF — POST /api/convert/pptx-to-pdf
# ---------------------------------------------------------------------------
@pdf_convert_bp.route("/pptx-to-pdf", methods=["POST"])
@limiter.limit("10/minute")
def pptx_to_pdf_route():
    """Convert a PowerPoint file (.pptx/.ppt) to PDF.

    Accepts: multipart/form-data with 'file'.
    Returns 202 with a Celery task id to poll, or 4xx/quota errors.
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    file = request.files["file"]
    actor = resolve_web_actor()
    # Quota is enforced before the upload is validated or written to disk.
    try:
        assert_quota_available(actor)
    except PolicyError as e:
        return jsonify({"error": e.message}), e.status_code
    try:
        original_filename, ext = validate_actor_file(
            file, allowed_types=["pptx", "ppt"], actor=actor
        )
    except FileValidationError as e:
        return jsonify({"error": e.message}), e.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    file.save(input_path)
    # Conversion runs asynchronously in Celery; the client polls task status.
    task = pptx_to_pdf_task.delay(
        input_path, task_id, original_filename,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "pptx-to-pdf", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Conversion started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
# ---------------------------------------------------------------------------
# Sign PDF — POST /api/convert/sign (pdf_convert_bp is mounted at /api/convert)
# ---------------------------------------------------------------------------
@pdf_convert_bp.route("/sign", methods=["POST"])
@limiter.limit("10/minute")
def sign_pdf_route():
    """Sign a PDF by overlaying a signature image.

    Accepts: multipart/form-data with:
    - 'file': PDF file
    - 'signature': Signature image (PNG/JPG)
    - 'page' (optional): 1-based page number (default: 1)
    - 'x', 'y' (optional): Position in points (default: 100, 100)
    - 'width', 'height' (optional): Size in points (default: 200, 80)
    """
    if "file" not in request.files:
        return jsonify({"error": "No PDF file provided."}), 400
    if "signature" not in request.files:
        return jsonify({"error": "No signature image provided."}), 400
    pdf_file = request.files["file"]
    sig_file = request.files["signature"]
    actor = resolve_web_actor()
    # Quota is enforced before either upload is validated or saved.
    try:
        assert_quota_available(actor)
    except PolicyError as e:
        return jsonify({"error": e.message}), e.status_code
    # Both uploads are validated independently; the signature error message is
    # prefixed so the client can tell which file was rejected.
    try:
        original_filename, ext = validate_actor_file(pdf_file, allowed_types=["pdf"], actor=actor)
    except FileValidationError as e:
        return jsonify({"error": e.message}), e.code
    try:
        _, sig_ext = validate_actor_file(sig_file, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor)
    except FileValidationError as e:
        return jsonify({"error": f"Signature image: {e.message}"}), e.code
    # Parse position parameters
    try:
        page = max(1, int(request.form.get("page", 1))) - 1  # Convert to 0-based
        x = float(request.form.get("x", 100))
        y = float(request.form.get("y", 100))
        width = float(request.form.get("width", 200))
        height = float(request.form.get("height", 80))
    except (ValueError, TypeError):
        return jsonify({"error": "Invalid position parameters."}), 400
    if width <= 0 or height <= 0:
        return jsonify({"error": "Width and height must be positive."}), 400
    # Two uploads must share one task directory, so generate_safe_path (which
    # produces a single path) is not used here; filenames are random UUIDs.
    task_id = str(uuid.uuid4())
    upload_dir = os.path.join(current_app.config["UPLOAD_FOLDER"], task_id)
    os.makedirs(upload_dir, exist_ok=True)
    input_path = os.path.join(upload_dir, f"{uuid.uuid4()}.pdf")
    pdf_file.save(input_path)
    signature_path = os.path.join(upload_dir, f"{uuid.uuid4()}.{sig_ext}")
    sig_file.save(signature_path)
    # Signing runs asynchronously in Celery; the client polls task status.
    task = sign_pdf_task.delay(
        input_path, signature_path, task_id, original_filename,
        page, x, y, width, height,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "sign-pdf", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Signing started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202

View File

@@ -0,0 +1,209 @@
"""Routes for extended PDF tools — Crop, Flatten, Repair, Metadata Editor."""
from flask import Blueprint, request, jsonify
from app.extensions import limiter
from app.services.policy_service import (
assert_quota_available,
build_task_tracking_kwargs,
PolicyError,
record_accepted_usage,
resolve_web_actor,
validate_actor_file,
)
from app.utils.file_validator import FileValidationError
from app.utils.sanitizer import generate_safe_path
from app.tasks.pdf_extra_tasks import (
crop_pdf_task,
flatten_pdf_task,
repair_pdf_task,
edit_metadata_task,
)
pdf_extra_bp = Blueprint("pdf_extra", __name__)
# ---------------------------------------------------------------------------
# Crop PDF — POST /api/pdf-tools/crop
# ---------------------------------------------------------------------------
@pdf_extra_bp.route("/crop", methods=["POST"])
@limiter.limit("10/minute")
def crop_pdf_route():
    """Crop margins from a PDF.

    Accepts: multipart/form-data with:
    - 'file': PDF file
    - 'margin_left', 'margin_right', 'margin_top', 'margin_bottom': Points to crop
    - 'pages' (optional): "all" or comma-separated page numbers
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    file = request.files["file"]
    actor = resolve_web_actor()
    # Quota is enforced before the upload is validated or written to disk.
    try:
        assert_quota_available(actor)
    except PolicyError as e:
        return jsonify({"error": e.message}), e.status_code
    try:
        original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
    except FileValidationError as e:
        return jsonify({"error": e.message}), e.code
    # Margins default to 0 points; non-numeric input is rejected.
    # NOTE(review): negative margins are not rejected here — presumably the
    # task handles them; confirm against crop_pdf_task.
    try:
        margin_left = float(request.form.get("margin_left", 0))
        margin_right = float(request.form.get("margin_right", 0))
        margin_top = float(request.form.get("margin_top", 0))
        margin_bottom = float(request.form.get("margin_bottom", 0))
    except (ValueError, TypeError):
        return jsonify({"error": "Margin values must be numbers."}), 400
    pages = request.form.get("pages", "all")
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    file.save(input_path)
    # Cropping runs asynchronously in Celery; the client polls task status.
    task = crop_pdf_task.delay(
        input_path, task_id, original_filename,
        margin_left, margin_right, margin_top, margin_bottom, pages,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "crop-pdf", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Cropping started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
# ---------------------------------------------------------------------------
# Flatten PDF — POST /api/pdf-tools/flatten
# ---------------------------------------------------------------------------
@pdf_extra_bp.route("/flatten", methods=["POST"])
@limiter.limit("10/minute")
def flatten_pdf_route():
    """Queue asynchronous flattening of a PDF (removes interactive forms and annotations)."""
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]

    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as err:
        return jsonify({"error": err.message}), err.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    task = flatten_pdf_task.delay(
        input_path, task_id, original_filename,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "flatten-pdf", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Flattening started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
# ---------------------------------------------------------------------------
# Repair PDF — POST /api/pdf-tools/repair
# ---------------------------------------------------------------------------
@pdf_extra_bp.route("/repair", methods=["POST"])
@limiter.limit("10/minute")
def repair_pdf_route():
    """Attempt to repair a damaged PDF.

    Accepts: multipart/form-data with 'file' (PDF).
    Returns 202 with a Celery task id to poll.
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    file = request.files["file"]
    actor = resolve_web_actor()
    # Quota is enforced before the upload is validated or written to disk.
    try:
        assert_quota_available(actor)
    except PolicyError as e:
        return jsonify({"error": e.message}), e.status_code
    try:
        original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
    except FileValidationError as e:
        return jsonify({"error": e.message}), e.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    file.save(input_path)
    # Repair runs asynchronously in Celery; the client polls task status.
    task = repair_pdf_task.delay(
        input_path, task_id, original_filename,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "repair-pdf", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Repair started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
# ---------------------------------------------------------------------------
# Edit PDF Metadata — POST /api/pdf-tools/metadata
# ---------------------------------------------------------------------------
@pdf_extra_bp.route("/metadata", methods=["POST"])
@limiter.limit("10/minute")
def edit_metadata_route():
    """Edit PDF metadata fields.

    Accepts: multipart/form-data with:
    - 'file': PDF file
    - 'title', 'author', 'subject', 'keywords', 'creator' (optional, max 500 chars each)
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    file = request.files["file"]
    title = request.form.get("title")
    author = request.form.get("author")
    subject = request.form.get("subject")
    keywords = request.form.get("keywords")
    creator = request.form.get("creator")
    # NOTE(review): fields submitted as empty strings count as "not provided"
    # here — confirm that clearing a metadata field is intentionally unsupported.
    if not any([title, author, subject, keywords, creator]):
        return jsonify({"error": "At least one metadata field must be provided."}), 400
    # Validate string lengths
    for field_name, field_val in [("title", title), ("author", author),
                                  ("subject", subject), ("keywords", keywords),
                                  ("creator", creator)]:
        if field_val and len(field_val) > 500:
            return jsonify({"error": f"{field_name} must be 500 characters or less."}), 400
    actor = resolve_web_actor()
    # Quota is enforced before the upload is validated or written to disk.
    try:
        assert_quota_available(actor)
    except PolicyError as e:
        return jsonify({"error": e.message}), e.status_code
    try:
        original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
    except FileValidationError as e:
        return jsonify({"error": e.message}), e.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    file.save(input_path)
    # Editing runs asynchronously in Celery; the client polls task status.
    task = edit_metadata_task.delay(
        input_path, task_id, original_filename,
        title, author, subject, keywords, creator,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "edit-metadata", task.id)
    return jsonify({
        "task_id": task.id,
        "message": "Metadata editing started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202

View File

@@ -0,0 +1,17 @@
"""Public site-level statistics for social proof and developer pages."""
from flask import Blueprint, jsonify
from app.extensions import limiter
from app.services.account_service import get_public_history_summary
from app.services.rating_service import get_global_rating_summary
stats_bp = Blueprint("stats", __name__)
@stats_bp.route("/summary", methods=["GET"])
@limiter.limit("120/hour")
def get_stats_summary_route():
    """Expose aggregate processing and rating stats safe for public display."""
    # Merge both summaries; rating keys win on collision, as before.
    combined = dict(get_public_history_summary())
    combined.update(get_global_rating_summary())
    return jsonify(combined), 200

View File

@@ -0,0 +1,85 @@
"""Stripe payment routes — checkout, portal, and webhooks."""
import logging
from flask import Blueprint, current_app, jsonify, request, session
from app.extensions import limiter
from app.services.stripe_service import (
create_checkout_session,
create_portal_session,
handle_webhook_event,
)
logger = logging.getLogger(__name__)
stripe_bp = Blueprint("stripe", __name__)
def _get_authenticated_user_id() -> int | None:
    """Return the logged-in user's ID from the Flask session, or None when absent."""
    return session.get("user_id")
@stripe_bp.route("/create-checkout-session", methods=["POST"])
@limiter.limit("10/hour", override_defaults=True)
def checkout():
    """Create a Stripe Checkout Session for a Pro subscription.

    Accepts an optional JSON body with ``billing`` ("monthly" | "yearly").
    Returns {"url": ...} on success, 401 when unauthenticated, 503 when no
    price is configured, 500 when Stripe rejects the request.
    """
    user_id = _get_authenticated_user_id()
    if not user_id:
        return jsonify({"error": "Authentication required."}), 401
    data = request.get_json(silent=True) or {}
    billing = data.get("billing", "monthly")
    monthly_price = current_app.config.get("STRIPE_PRICE_ID_PRO_MONTHLY", "")
    yearly_price = current_app.config.get("STRIPE_PRICE_ID_PRO_YEARLY", "")
    # Fall back to the monthly price when yearly is requested but unconfigured.
    price_id = yearly_price if billing == "yearly" and yearly_price else monthly_price
    if not price_id:
        return jsonify({"error": "Payment is not configured yet."}), 503
    frontend_url = current_app.config.get("FRONTEND_URL", "http://localhost:5173")
    success_url = f"{frontend_url}/account?payment=success"
    cancel_url = f"{frontend_url}/pricing?payment=cancelled"
    try:
        url = create_checkout_session(user_id, price_id, success_url, cancel_url)
    except Exception:
        # Unused `as e` binding removed; logger.exception already records the
        # traceback, and the client gets a generic, non-leaking message.
        logger.exception("Stripe checkout session creation failed")
        return jsonify({"error": "Failed to create payment session."}), 500
    return jsonify({"url": url})
@stripe_bp.route("/create-portal-session", methods=["POST"])
@limiter.limit("10/hour", override_defaults=True)
def portal():
    """Create a Stripe Customer Portal session for the logged-in user.

    Returns {"url": ...} pointing at the hosted portal, 401 when not
    authenticated, or 500 when the portal session cannot be created.
    """
    user_id = _get_authenticated_user_id()
    if not user_id:
        return jsonify({"error": "Authentication required."}), 401
    frontend_url = current_app.config.get("FRONTEND_URL", "http://localhost:5173")
    return_url = f"{frontend_url}/account"
    try:
        url = create_portal_session(user_id, return_url)
    except Exception:
        # Unused `as e` binding removed; full traceback is logged server-side.
        logger.exception("Stripe portal session creation failed")
        return jsonify({"error": "Failed to create portal session."}), 500
    return jsonify({"url": url})
@stripe_bp.route("/webhook", methods=["POST"])
def webhook():
    """Handle Stripe webhook events. No rate limit — Stripe signs each call."""
    raw_payload = request.get_data()
    signature = request.headers.get("Stripe-Signature", "")
    result = handle_webhook_event(raw_payload, signature)
    # Signature/parse failures surface as 400 so Stripe retries; everything
    # else is acknowledged with 200.
    status_code = 400 if result["status"] == "error" else 200
    return jsonify(result), status_code

View File

@@ -34,6 +34,23 @@ from app.tasks.pdf_tools_tasks import (
unlock_pdf_task,
)
from app.tasks.flowchart_tasks import extract_flowchart_task
from app.tasks.ocr_tasks import ocr_image_task, ocr_pdf_task
from app.tasks.removebg_tasks import remove_bg_task
from app.tasks.pdf_ai_tasks import (
chat_with_pdf_task, summarize_pdf_task, translate_pdf_task, extract_tables_task,
)
from app.tasks.pdf_to_excel_tasks import pdf_to_excel_task
from app.tasks.html_to_pdf_tasks import html_to_pdf_task
from app.tasks.qrcode_tasks import generate_qr_task
from app.tasks.pdf_convert_tasks import (
pdf_to_pptx_task, excel_to_pdf_task, pptx_to_pdf_task, sign_pdf_task,
)
from app.tasks.pdf_extra_tasks import (
crop_pdf_task, flatten_pdf_task, repair_pdf_task, edit_metadata_task,
)
from app.tasks.image_extra_tasks import crop_image_task, rotate_flip_image_task
from app.tasks.barcode_tasks import generate_barcode_task
from app.services.barcode_service import SUPPORTED_BARCODE_TYPES
logger = logging.getLogger(__name__)
@@ -680,3 +697,760 @@ def extract_flowchart_route():
)
record_accepted_usage(actor, "pdf-flowchart", task.id)
return jsonify({"task_id": task.id, "message": "Flowchart extraction started."}), 202
# ===========================================================================
# Phase 2: Previously uncovered existing tools
# ===========================================================================
# ---------------------------------------------------------------------------
# OCR — POST /api/v1/ocr/image & /api/v1/ocr/pdf
# ---------------------------------------------------------------------------
@v1_bp.route("/ocr/image", methods=["POST"])
@limiter.limit("10/minute")
def ocr_image_route():
    """Queue OCR text extraction for an uploaded image."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    ocr_lang = request.form.get("lang", "eng")
    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
        )
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = ocr_image_task.delay(
        input_path, task_id, original_filename, ocr_lang, **tracking
    )
    record_accepted_usage(actor, "ocr-image", task.id)
    return jsonify({"task_id": task.id, "message": "OCR started."}), 202
@v1_bp.route("/ocr/pdf", methods=["POST"])
@limiter.limit("10/minute")
def ocr_pdf_route():
    """Queue OCR text extraction for an uploaded PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    ocr_lang = request.form.get("lang", "eng")
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = ocr_pdf_task.delay(
        input_path, task_id, original_filename, ocr_lang, **tracking
    )
    record_accepted_usage(actor, "ocr-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "OCR started."}), 202
# ---------------------------------------------------------------------------
# Remove Background — POST /api/v1/image/remove-bg
# ---------------------------------------------------------------------------
@v1_bp.route("/image/remove-bg", methods=["POST"])
@limiter.limit("5/minute")
def remove_bg_route():
    """Queue background removal for an uploaded image."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
        )
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = remove_bg_task.delay(
        input_path, task_id, original_filename, **tracking
    )
    record_accepted_usage(actor, "remove-bg", task.id)
    return jsonify({"task_id": task.id, "message": "Background removal started."}), 202
# ---------------------------------------------------------------------------
# PDF AI — POST /api/v1/pdf-ai/chat, summarize, translate, extract-tables
# ---------------------------------------------------------------------------
@v1_bp.route("/pdf-ai/chat", methods=["POST"])
@limiter.limit("5/minute")
def chat_pdf_route():
    """Queue an AI question-answering job against an uploaded PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    question = request.form.get("question", "").strip()
    if not question:
        return jsonify({"error": "Question is required."}), 400
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = chat_with_pdf_task.delay(
        input_path, task_id, original_filename, question, **tracking
    )
    record_accepted_usage(actor, "chat-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Chat started."}), 202
@v1_bp.route("/pdf-ai/summarize", methods=["POST"])
@limiter.limit("5/minute")
def summarize_pdf_route():
    """Queue AI summarization of an uploaded PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    # Unknown length values silently fall back to "medium".
    summary_length = request.form.get("length", "medium")
    if summary_length not in ("short", "medium", "long"):
        summary_length = "medium"
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = summarize_pdf_task.delay(
        input_path, task_id, original_filename, summary_length, **tracking
    )
    record_accepted_usage(actor, "summarize-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Summarization started."}), 202
@v1_bp.route("/pdf-ai/translate", methods=["POST"])
@limiter.limit("5/minute")
def translate_pdf_route():
    """Queue AI translation of an uploaded PDF to a target language."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    target_language = request.form.get("target_language", "").strip()
    if not target_language:
        return jsonify({"error": "Target language is required."}), 400
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = translate_pdf_task.delay(
        input_path, task_id, original_filename, target_language, **tracking
    )
    record_accepted_usage(actor, "translate-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Translation started."}), 202
@v1_bp.route("/pdf-ai/extract-tables", methods=["POST"])
@limiter.limit("10/minute")
def extract_tables_route():
    """Queue AI table extraction from an uploaded PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = extract_tables_task.delay(
        input_path, task_id, original_filename, **tracking
    )
    record_accepted_usage(actor, "extract-tables", task.id)
    return jsonify({"task_id": task.id, "message": "Table extraction started."}), 202
# ---------------------------------------------------------------------------
# PDF to Excel — POST /api/v1/convert/pdf-to-excel
# ---------------------------------------------------------------------------
@v1_bp.route("/convert/pdf-to-excel", methods=["POST"])
@limiter.limit("10/minute")
def pdf_to_excel_route():
    """Queue conversion of an uploaded PDF to an Excel workbook."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = pdf_to_excel_task.delay(
        input_path, task_id, original_filename, **tracking
    )
    record_accepted_usage(actor, "pdf-to-excel", task.id)
    return jsonify({"task_id": task.id, "message": "Conversion started."}), 202
# ---------------------------------------------------------------------------
# HTML to PDF — POST /api/v1/convert/html-to-pdf
# ---------------------------------------------------------------------------
@v1_bp.route("/convert/html-to-pdf", methods=["POST"])
@limiter.limit("10/minute")
def html_to_pdf_route():
    """Queue conversion of an uploaded HTML document to PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=["html", "htm"], actor=actor
        )
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = html_to_pdf_task.delay(
        input_path, task_id, original_filename, **tracking
    )
    record_accepted_usage(actor, "html-to-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Conversion started."}), 202
# ---------------------------------------------------------------------------
# QR Code — POST /api/v1/qrcode/generate
# ---------------------------------------------------------------------------
@v1_bp.route("/qrcode/generate", methods=["POST"])
@limiter.limit("20/minute")
def generate_qr_route():
    """Generate a QR code.

    Accepts either a JSON body or form fields: ``data`` (required) and
    ``size`` (pixels, clamped to 100-2000, default 300). Returns 202 with
    a task id for the queued generation.
    """
    actor, err = _resolve_and_check()
    if err:
        return err
    if request.is_json:
        # silent=True: a malformed JSON body must yield the 400 "data is
        # required" path below, not an unhandled parse error (get_json()
        # without silent raises / returns None and body.get would crash).
        body = request.get_json(silent=True) or {}
        data = body.get("data", "")
        size = body.get("size", 300)
    else:
        data = request.form.get("data", "")
        size = request.form.get("size", 300)
    if not str(data).strip():
        return jsonify({"error": "QR code data is required."}), 400
    try:
        # Clamp to a sane raster size; bad values fall back to the default.
        size = max(100, min(2000, int(size)))
    except (ValueError, TypeError):
        size = 300
    task_id = str(uuid.uuid4())
    task = generate_qr_task.delay(
        task_id, str(data).strip(), size, "png",
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "qr-code", task.id)
    return jsonify({"task_id": task.id, "message": "QR code generation started."}), 202
# ===========================================================================
# Phase 2: New tools
# ===========================================================================
# ---------------------------------------------------------------------------
# PDF to PowerPoint — POST /api/v1/convert/pdf-to-pptx
# ---------------------------------------------------------------------------
@v1_bp.route("/convert/pdf-to-pptx", methods=["POST"])
@limiter.limit("10/minute")
def v1_pdf_to_pptx_route():
    """Queue conversion of an uploaded PDF to a PowerPoint deck."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = pdf_to_pptx_task.delay(
        input_path, task_id, original_filename, **tracking
    )
    record_accepted_usage(actor, "pdf-to-pptx", task.id)
    return jsonify({"task_id": task.id, "message": "Conversion started."}), 202
# ---------------------------------------------------------------------------
# Excel to PDF — POST /api/v1/convert/excel-to-pdf
# ---------------------------------------------------------------------------
@v1_bp.route("/convert/excel-to-pdf", methods=["POST"])
@limiter.limit("10/minute")
def v1_excel_to_pdf_route():
    """Queue conversion of an uploaded Excel workbook to PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=["xlsx", "xls"], actor=actor
        )
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = excel_to_pdf_task.delay(
        input_path, task_id, original_filename, **tracking
    )
    record_accepted_usage(actor, "excel-to-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Conversion started."}), 202
# ---------------------------------------------------------------------------
# PowerPoint to PDF — POST /api/v1/convert/pptx-to-pdf
# ---------------------------------------------------------------------------
@v1_bp.route("/convert/pptx-to-pdf", methods=["POST"])
@limiter.limit("10/minute")
def v1_pptx_to_pdf_route():
    """Queue conversion of an uploaded PowerPoint file to PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=["pptx", "ppt"], actor=actor
        )
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = pptx_to_pdf_task.delay(
        input_path, task_id, original_filename, **tracking
    )
    record_accepted_usage(actor, "pptx-to-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Conversion started."}), 202
# ---------------------------------------------------------------------------
# Sign PDF — POST /api/v1/pdf-tools/sign
# ---------------------------------------------------------------------------
@v1_bp.route("/pdf-tools/sign", methods=["POST"])
@limiter.limit("10/minute")
def v1_sign_pdf_route():
    """Sign a PDF with a signature image.

    Multipart fields:
        file: the PDF to sign (required).
        signature: the signature image (required, any allowed image type).
        page: 1-based page number to stamp (default 1).
        x, y, width, height: placement of the signature (floats).

    Returns 202 with a task id; the stamping itself runs asynchronously.
    """
    actor, err = _resolve_and_check()
    if err:
        return err
    if "file" not in request.files:
        return jsonify({"error": "No PDF file provided."}), 400
    if "signature" not in request.files:
        return jsonify({"error": "No signature image provided."}), 400
    pdf_file = request.files["file"]
    sig_file = request.files["signature"]
    # Validate both uploads against the actor's limits before touching disk.
    try:
        original_filename, ext = validate_actor_file(pdf_file, allowed_types=["pdf"], actor=actor)
    except FileValidationError as e:
        return jsonify({"error": e.message}), e.code
    try:
        _, sig_ext = validate_actor_file(sig_file, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor)
    except FileValidationError as e:
        # Prefix distinguishes signature-upload failures from PDF failures.
        return jsonify({"error": f"Signature: {e.message}"}), e.code
    try:
        # Convert the user-facing 1-based page number to a 0-based index.
        page = max(1, int(request.form.get("page", 1))) - 1
        x = float(request.form.get("x", 100))
        y = float(request.form.get("y", 100))
        width = float(request.form.get("width", 200))
        height = float(request.form.get("height", 80))
    except (ValueError, TypeError):
        return jsonify({"error": "Invalid position parameters."}), 400
    # Both uploads share one task folder; random UUID filenames avoid
    # collisions and path traversal via user-supplied names.
    task_id = str(uuid.uuid4())
    upload_dir = os.path.join(current_app.config["UPLOAD_FOLDER"], task_id)
    os.makedirs(upload_dir, exist_ok=True)
    input_path = os.path.join(upload_dir, f"{uuid.uuid4()}.pdf")
    pdf_file.save(input_path)
    signature_path = os.path.join(upload_dir, f"{uuid.uuid4()}.{sig_ext}")
    sig_file.save(signature_path)
    task = sign_pdf_task.delay(
        input_path, signature_path, task_id, original_filename,
        page, x, y, width, height,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "sign-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Signing started."}), 202
# ---------------------------------------------------------------------------
# Crop PDF — POST /api/v1/pdf-tools/crop
# ---------------------------------------------------------------------------
@v1_bp.route("/pdf-tools/crop", methods=["POST"])
@limiter.limit("10/minute")
def v1_crop_pdf_route():
    """Queue margin cropping of an uploaded PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        margin_left = float(request.form.get("margin_left", 0))
        margin_right = float(request.form.get("margin_right", 0))
        margin_top = float(request.form.get("margin_top", 0))
        margin_bottom = float(request.form.get("margin_bottom", 0))
    except (ValueError, TypeError):
        return jsonify({"error": "Margin values must be numbers."}), 400
    pages = request.form.get("pages", "all")
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = crop_pdf_task.delay(
        input_path, task_id, original_filename,
        margin_left, margin_right, margin_top, margin_bottom, pages,
        **tracking,
    )
    record_accepted_usage(actor, "crop-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Cropping started."}), 202
# ---------------------------------------------------------------------------
# Flatten PDF — POST /api/v1/pdf-tools/flatten
# ---------------------------------------------------------------------------
@v1_bp.route("/pdf-tools/flatten", methods=["POST"])
@limiter.limit("10/minute")
def v1_flatten_pdf_route():
    """Queue flattening of an uploaded PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = flatten_pdf_task.delay(
        input_path, task_id, original_filename, **tracking
    )
    record_accepted_usage(actor, "flatten-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Flattening started."}), 202
# ---------------------------------------------------------------------------
# Repair PDF — POST /api/v1/pdf-tools/repair
# ---------------------------------------------------------------------------
@v1_bp.route("/pdf-tools/repair", methods=["POST"])
@limiter.limit("10/minute")
def v1_repair_pdf_route():
    """Queue repair of a damaged uploaded PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = repair_pdf_task.delay(
        input_path, task_id, original_filename, **tracking
    )
    record_accepted_usage(actor, "repair-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Repair started."}), 202
# ---------------------------------------------------------------------------
# Edit PDF Metadata — POST /api/v1/pdf-tools/metadata
# ---------------------------------------------------------------------------
@v1_bp.route("/pdf-tools/metadata", methods=["POST"])
@limiter.limit("10/minute")
def v1_edit_metadata_route():
    """Queue metadata editing for an uploaded PDF."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    title = request.form.get("title")
    author = request.form.get("author")
    subject = request.form.get("subject")
    keywords = request.form.get("keywords")
    creator = request.form.get("creator")
    # Reject requests that would be a no-op: every field absent.
    if not any([title, author, subject, keywords, creator]):
        return jsonify({"error": "At least one metadata field required."}), 400
    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = edit_metadata_task.delay(
        input_path, task_id, original_filename,
        title, author, subject, keywords, creator,
        **tracking,
    )
    record_accepted_usage(actor, "edit-metadata", task.id)
    return jsonify({"task_id": task.id, "message": "Metadata editing started."}), 202
# ---------------------------------------------------------------------------
# Image Crop — POST /api/v1/image/crop
# ---------------------------------------------------------------------------
@v1_bp.route("/image/crop", methods=["POST"])
@limiter.limit("10/minute")
def v1_crop_image_route():
    """Queue rectangular cropping of an uploaded image."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        left = int(request.form.get("left", 0))
        top = int(request.form.get("top", 0))
        right = int(request.form.get("right", 0))
        bottom = int(request.form.get("bottom", 0))
    except (ValueError, TypeError):
        return jsonify({"error": "Crop dimensions must be integers."}), 400
    # A crop box must have positive width and height.
    if right <= left or bottom <= top:
        return jsonify({"error": "Invalid crop area."}), 400
    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
        )
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = crop_image_task.delay(
        input_path, task_id, original_filename,
        left, top, right, bottom,
        **tracking,
    )
    record_accepted_usage(actor, "image-crop", task.id)
    return jsonify({"task_id": task.id, "message": "Cropping started."}), 202
# ---------------------------------------------------------------------------
# Image Rotate/Flip — POST /api/v1/image/rotate-flip
# ---------------------------------------------------------------------------
@v1_bp.route("/image/rotate-flip", methods=["POST"])
@limiter.limit("10/minute")
def v1_rotate_flip_image_route():
    """Queue rotation and/or flipping of an uploaded image."""
    actor, failure = _resolve_and_check()
    if failure:
        return failure
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    upload = request.files["file"]
    try:
        rotation = int(request.form.get("rotation", 0))
    except ValueError:
        # Non-numeric input degrades to "no rotation" rather than erroring.
        rotation = 0
    if rotation not in (0, 90, 180, 270):
        return jsonify({"error": "Rotation must be 0, 90, 180, or 270."}), 400

    def _flag(field):
        # Form booleans arrive as strings; only the literal "true" enables.
        return request.form.get(field, "false").lower() == "true"

    flip_horizontal = _flag("flip_horizontal")
    flip_vertical = _flag("flip_vertical")
    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
        )
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code
    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    tracking = build_task_tracking_kwargs(actor)
    task = rotate_flip_image_task.delay(
        input_path, task_id, original_filename,
        rotation, flip_horizontal, flip_vertical,
        **tracking,
    )
    record_accepted_usage(actor, "image-rotate-flip", task.id)
    return jsonify({"task_id": task.id, "message": "Transformation started."}), 202
# ---------------------------------------------------------------------------
# Barcode — POST /api/v1/barcode/generate
# ---------------------------------------------------------------------------
@v1_bp.route("/barcode/generate", methods=["POST"])
@limiter.limit("20/minute")
def v1_generate_barcode_route():
    """Generate a barcode.

    Accepts JSON or form fields: ``data`` (required), ``type`` (default
    "code128", must be in SUPPORTED_BARCODE_TYPES), ``format`` ("png" or
    "svg", default "png"). Returns 202 with a task id.
    """
    actor, err = _resolve_and_check()
    if err:
        return err
    if request.is_json:
        # silent=True: a malformed JSON body becomes an empty dict and is
        # rejected by the "data is required" check below instead of crashing
        # (get_json() without silent raises / returns None). str() coercion
        # guards against non-string JSON values (e.g. numbers), which would
        # otherwise make .strip()/.lower() raise AttributeError -> 500.
        body = request.get_json(silent=True) or {}
        data = str(body.get("data", "")).strip()
        barcode_type = str(body.get("type", "code128")).lower()
        output_format = str(body.get("format", "png")).lower()
    else:
        data = request.form.get("data", "").strip()
        barcode_type = request.form.get("type", "code128").lower()
        output_format = request.form.get("format", "png").lower()
    if not data:
        return jsonify({"error": "Barcode data is required."}), 400
    if barcode_type not in SUPPORTED_BARCODE_TYPES:
        return jsonify({"error": f"Unsupported type. Supported: {', '.join(SUPPORTED_BARCODE_TYPES)}"}), 400
    if output_format not in ("png", "svg"):
        output_format = "png"
    task_id = str(uuid.uuid4())
    task = generate_barcode_task.delay(
        data, barcode_type, task_id, output_format,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "barcode", task.id)
    return jsonify({"task_id": task.id, "message": "Barcode generation started."}), 202

View File

@@ -13,6 +13,7 @@ from werkzeug.security import check_password_hash, generate_password_hash
logger = logging.getLogger(__name__)
VALID_PLANS = {"free", "pro"}
VALID_ROLES = {"user", "admin"}
def _utc_now() -> str:
@@ -30,6 +31,38 @@ def normalize_plan(plan: str | None) -> str:
return "pro" if plan == "pro" else "free"
def normalize_role(role: str | None) -> str:
"""Normalize role values to the supported set."""
return "admin" if role == "admin" else "user"
def _get_allowlisted_admin_emails() -> set[str]:
    """Return the configured admin-email allowlist, normalized to lowercase.

    Blank entries in INTERNAL_ADMIN_EMAILS are dropped.
    """
    allowlist = set()
    for entry in current_app.config.get("INTERNAL_ADMIN_EMAILS", ()):
        cleaned = str(entry).strip()
        if cleaned:
            allowlist.add(cleaned.lower())
    return allowlist
def is_allowlisted_admin_email(email: str | None) -> bool:
    """Return whether an email is bootstrapped as admin from configuration."""
    candidate = _normalize_email(email or "")
    allowlist = _get_allowlisted_admin_emails()
    return candidate in allowlist
def _resolve_row_role(row: sqlite3.Row | None) -> str:
    """Derive the effective role for a users row.

    A user is admin if either the stored role says so or the row's email
    appears on the configured admin allowlist; missing rows are plain users.
    """
    if row is None:
        return "user"
    available = row.keys()
    stored_role = normalize_role(row["role"]) if "role" in available else "user"
    if stored_role == "admin":
        return "admin"
    email = str(row["email"]).strip().lower() if "email" in available else ""
    if email in _get_allowlisted_admin_emails():
        return "admin"
    return "user"
def _connect() -> sqlite3.Connection:
"""Create a SQLite connection with row access by column name."""
db_path = current_app.config["DATABASE_PATH"]
@@ -58,6 +91,8 @@ def _serialize_user(row: sqlite3.Row | None) -> dict | None:
"id": row["id"],
"email": row["email"],
"plan": normalize_plan(row["plan"]),
"role": _resolve_row_role(row),
"is_allowlisted_admin": is_allowlisted_admin_email(row["email"]),
"created_at": row["created_at"],
}
@@ -94,6 +129,7 @@ def init_account_db():
email TEXT NOT NULL UNIQUE,
password_hash TEXT NOT NULL,
plan TEXT NOT NULL DEFAULT 'free',
role TEXT NOT NULL DEFAULT 'user',
created_at TEXT NOT NULL,
updated_at TEXT NOT NULL
);
@@ -159,6 +195,10 @@ def init_account_db():
conn.execute(
"ALTER TABLE users ADD COLUMN updated_at TEXT NOT NULL DEFAULT ''"
)
if not _column_exists(conn, "users", "role"):
conn.execute(
"ALTER TABLE users ADD COLUMN role TEXT NOT NULL DEFAULT 'user'"
)
# Password reset tokens
conn.executescript(
@@ -194,19 +234,20 @@ def create_user(email: str, password: str) -> dict:
"""Create a new user and return the public record."""
email = _normalize_email(email)
now = _utc_now()
role = "admin" if email in _get_allowlisted_admin_emails() else "user"
try:
with _connect() as conn:
cursor = conn.execute(
"""
INSERT INTO users (email, password_hash, plan, created_at, updated_at)
VALUES (?, ?, 'free', ?, ?)
INSERT INTO users (email, password_hash, plan, role, created_at, updated_at)
VALUES (?, ?, 'free', ?, ?, ?)
""",
(email, generate_password_hash(password), now, now),
(email, generate_password_hash(password), role, now, now),
)
user_id = cursor.lastrowid
row = conn.execute(
"SELECT id, email, plan, created_at FROM users WHERE id = ?",
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
(user_id,),
).fetchone()
except sqlite3.IntegrityError as exc:
@@ -235,7 +276,44 @@ def get_user_by_id(user_id: int) -> dict | None:
"""Fetch a public user record by id."""
with _connect() as conn:
row = conn.execute(
"SELECT id, email, plan, created_at FROM users WHERE id = ?",
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
(user_id,),
).fetchone()
return _serialize_user(row)
def is_user_admin(user_id: int | None) -> bool:
    """Return whether one user has internal admin access."""
    if user_id is None:
        return False
    with _connect() as conn:
        row = conn.execute(
            "SELECT id, email, role FROM users WHERE id = ?",
            (user_id,),
        ).fetchone()
    # Allowlist membership counts as admin even if the stored role is "user".
    return _resolve_row_role(row) == "admin"
def set_user_role(user_id: int, role: str) -> dict | None:
"""Update one user role and return the public user record."""
normalized_role = normalize_role(role)
if normalized_role not in VALID_ROLES:
raise ValueError("Invalid role.")
with _connect() as conn:
conn.execute(
"""
UPDATE users
SET role = ?, updated_at = ?
WHERE id = ?
""",
(normalized_role, _utc_now(), user_id),
)
row = conn.execute(
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
(user_id,),
).fetchone()
@@ -258,7 +336,7 @@ def update_user_plan(user_id: int, plan: str) -> dict | None:
(normalized_plan, _utc_now(), user_id),
)
row = conn.execute(
"SELECT id, email, plan, created_at FROM users WHERE id = ?",
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
(user_id,),
).fetchone()
@@ -476,6 +554,60 @@ def list_file_history(user_id: int, limit: int = 50) -> list[dict]:
]
def get_public_history_summary(limit_tools: int = 5) -> dict:
    """Return aggregate public-friendly processing stats derived from history.

    Includes overall totals, a rolling 24-hour activity counter, a success
    rate percentage, and the most-used tools (completed runs only).
    """
    day_ago = (datetime.now(timezone.utc) - timedelta(days=1)).isoformat()
    with _connect() as conn:
        totals = conn.execute(
            """
            SELECT
                COUNT(*) AS total,
                COALESCE(SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END), 0) AS completed,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed
            FROM file_history
            """
        ).fetchone()
        recent = conn.execute(
            """
            SELECT COUNT(*) AS total
            FROM file_history
            WHERE created_at >= ?
            """,
            (day_ago,),
        ).fetchone()
        tool_rows = conn.execute(
            """
            SELECT tool, COUNT(*) AS count
            FROM file_history
            WHERE status = 'completed'
            GROUP BY tool
            ORDER BY count DESC, tool ASC
            LIMIT ?
            """,
            (limit_tools,),
        ).fetchall()

    total_count = int(totals["total"]) if totals else 0
    completed_count = int(totals["completed"]) if totals else 0
    failed_count = int(totals["failed"]) if totals else 0
    rate = round((completed_count / total_count) * 100, 1) if total_count else 0.0
    return {
        "total_files_processed": total_count,
        "completed_files": completed_count,
        "failed_files": failed_count,
        "success_rate": rate,
        "files_last_24h": int(recent["total"]) if recent else 0,
        "top_tools": [
            {"tool": row["tool"], "count": int(row["count"])} for row in tool_rows
        ],
    }
def record_usage_event(
user_id: int | None,
source: str,
@@ -555,7 +687,7 @@ def get_user_by_email(email: str) -> dict | None:
email = _normalize_email(email)
with _connect() as conn:
row = conn.execute(
"SELECT id, email, plan, created_at FROM users WHERE email = ?",
"SELECT id, email, plan, role, created_at FROM users WHERE email = ?",
(email,),
).fetchone()
return _serialize_user(row)

View File

@@ -0,0 +1,288 @@
"""Internal admin aggregation helpers for operational dashboards."""
import json
import os
import sqlite3
from datetime import datetime, timedelta, timezone
from flask import current_app
from app.services.account_service import is_allowlisted_admin_email, normalize_role
from app.services.ai_cost_service import get_monthly_spend
from app.services.contact_service import mark_read
from app.services.rating_service import get_global_rating_summary
def _connect() -> sqlite3.Connection:
    """Open a sqlite3 connection with Row rows, creating the DB folder first."""
    db_path = current_app.config["DATABASE_PATH"]
    parent_dir = os.path.dirname(db_path)
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    return conn
def _parse_metadata(raw_value: str | None) -> dict:
if not raw_value:
return {}
try:
parsed = json.loads(raw_value)
except json.JSONDecodeError:
return {}
return parsed if isinstance(parsed, dict) else {}
def get_admin_overview(limit_recent: int = 8, top_tools_limit: int = 6) -> dict:
    """Build the aggregate payload for the internal admin dashboard.

    Combines user/plan counts, file-processing totals, a top-tools
    breakdown, recent failures, newest users, contact-message stats, the
    global rating summary, and the current month's AI spend into one dict.

    Args:
        limit_recent: Max rows for each "recent" list (failures, users,
            contact messages).
        top_tools_limit: Max rows in the top-tools breakdown.

    Returns:
        dict with keys: users, processing, ratings, ai_cost, contacts,
        top_tools, recent_failures, recent_users.
    """
    # Window boundary for the 24-hour activity counter, ISO-8601 UTC to
    # match the text timestamps stored in file_history.created_at.
    cutoff_24h = (datetime.now(timezone.utc) - timedelta(days=1)).isoformat()
    ai_cost_summary = get_monthly_spend()
    with _connect() as conn:
        # Plan distribution across all registered users.
        users_row = conn.execute(
            """
            SELECT
                COUNT(*) AS total_users,
                COALESCE(SUM(CASE WHEN plan = 'pro' THEN 1 ELSE 0 END), 0) AS pro_users,
                COALESCE(SUM(CASE WHEN plan = 'free' THEN 1 ELSE 0 END), 0) AS free_users
            FROM users
            """
        ).fetchone()
        # Lifetime processing totals plus the 24h activity count in one scan.
        history_row = conn.execute(
            """
            SELECT
                COUNT(*) AS total_files_processed,
                COALESCE(SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END), 0) AS completed_files,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed_files,
                COALESCE(SUM(CASE WHEN created_at >= ? THEN 1 ELSE 0 END), 0) AS files_last_24h
            FROM file_history
            """,
            (cutoff_24h,),
        ).fetchone()
        # Most-used tools with their failure counts (all statuses counted).
        top_tools_rows = conn.execute(
            """
            SELECT
                tool,
                COUNT(*) AS total_runs,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed_runs
            FROM file_history
            GROUP BY tool
            ORDER BY total_runs DESC, tool ASC
            LIMIT ?
            """,
            (top_tools_limit,),
        ).fetchall()
        # Latest failed runs; LEFT JOIN keeps anonymous runs (NULL user_id).
        failure_rows = conn.execute(
            """
            SELECT
                file_history.id,
                file_history.user_id,
                file_history.tool,
                file_history.original_filename,
                file_history.metadata_json,
                file_history.created_at,
                users.email
            FROM file_history
            LEFT JOIN users ON users.id = file_history.user_id
            WHERE file_history.status = 'failed'
            ORDER BY file_history.created_at DESC
            LIMIT ?
            """,
            (limit_recent,),
        ).fetchall()
        # Newest signups with per-user task and active API key counts.
        recent_user_rows = conn.execute(
            """
            SELECT
                users.id,
                users.email,
                users.plan,
                users.created_at,
                COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id), 0) AS total_tasks,
                COALESCE((SELECT COUNT(*) FROM api_keys WHERE api_keys.user_id = users.id AND api_keys.revoked_at IS NULL), 0) AS active_api_keys
            FROM users
            ORDER BY users.created_at DESC
            LIMIT ?
            """,
            (limit_recent,),
        ).fetchall()
        # Contact inbox totals (unread = is_read flag still 0).
        contact_row = conn.execute(
            """
            SELECT
                COUNT(*) AS total_messages,
                COALESCE(SUM(CASE WHEN is_read = 0 THEN 1 ELSE 0 END), 0) AS unread_messages
            FROM contact_messages
            """
        ).fetchone()
        recent_contact_rows = conn.execute(
            """
            SELECT id, name, email, category, subject, message, created_at, is_read
            FROM contact_messages
            ORDER BY created_at DESC
            LIMIT ?
            """,
            (limit_recent,),
        ).fetchall()
    total_processed = int(history_row["total_files_processed"]) if history_row else 0
    completed_files = int(history_row["completed_files"]) if history_row else 0
    # Guard against division by zero on an empty history table.
    success_rate = round((completed_files / total_processed) * 100, 1) if total_processed else 0.0
    return {
        "users": {
            "total": int(users_row["total_users"]) if users_row else 0,
            "pro": int(users_row["pro_users"]) if users_row else 0,
            "free": int(users_row["free_users"]) if users_row else 0,
        },
        "processing": {
            "total_files_processed": total_processed,
            "completed_files": completed_files,
            "failed_files": int(history_row["failed_files"]) if history_row else 0,
            "files_last_24h": int(history_row["files_last_24h"]) if history_row else 0,
            "success_rate": success_rate,
        },
        "ratings": get_global_rating_summary(),
        "ai_cost": {
            "month": ai_cost_summary["period"],
            "total_usd": ai_cost_summary["total_cost_usd"],
            "budget_usd": ai_cost_summary["budget_usd"],
            "percent_used": ai_cost_summary["budget_used_percent"],
        },
        "contacts": {
            "total_messages": int(contact_row["total_messages"]) if contact_row else 0,
            "unread_messages": int(contact_row["unread_messages"]) if contact_row else 0,
            "recent": [
                {
                    "id": row["id"],
                    "name": row["name"],
                    "email": row["email"],
                    "category": row["category"],
                    "subject": row["subject"],
                    "message": row["message"],
                    "created_at": row["created_at"],
                    "is_read": bool(row["is_read"]),
                }
                for row in recent_contact_rows
            ],
        },
        "top_tools": [
            {
                "tool": row["tool"],
                "total_runs": int(row["total_runs"]),
                "failed_runs": int(row["failed_runs"]),
            }
            for row in top_tools_rows
        ],
        "recent_failures": [
            {
                "id": row["id"],
                "user_id": row["user_id"],
                "email": row["email"],
                "tool": row["tool"],
                "original_filename": row["original_filename"],
                "created_at": row["created_at"],
                # Stored JSON blob; invalid/non-dict payloads decode to {}.
                "metadata": _parse_metadata(row["metadata_json"]),
            }
            for row in failure_rows
        ],
        "recent_users": [
            {
                "id": row["id"],
                "email": row["email"],
                "plan": row["plan"],
                "created_at": row["created_at"],
                "total_tasks": int(row["total_tasks"]),
                "active_api_keys": int(row["active_api_keys"]),
            }
            for row in recent_user_rows
        ],
    }
def list_admin_users(limit: int = 25, query: str = "") -> list[dict]:
    """List users for the admin panel, optionally filtered by email substring.

    Allowlisted admin emails are always reported with role "admin",
    regardless of the role stored in the database.
    """
    needle = query.strip().lower()
    sql = """
        SELECT
            users.id,
            users.email,
            users.plan,
            users.role,
            users.created_at,
            COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id), 0) AS total_tasks,
            COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id AND file_history.status = 'completed'), 0) AS completed_tasks,
            COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id AND file_history.status = 'failed'), 0) AS failed_tasks,
            COALESCE((SELECT COUNT(*) FROM api_keys WHERE api_keys.user_id = users.id AND api_keys.revoked_at IS NULL), 0) AS active_api_keys
        FROM users
    """
    params: list[object] = []
    if needle:
        sql += " WHERE LOWER(users.email) LIKE ?"
        params.append(f"%{needle}%")
    sql += " ORDER BY users.created_at DESC LIMIT ?"
    params.append(limit)

    with _connect() as conn:
        rows = conn.execute(sql, tuple(params)).fetchall()

    users: list[dict] = []
    for row in rows:
        allowlisted = is_allowlisted_admin_email(row["email"])
        users.append(
            {
                "id": row["id"],
                "email": row["email"],
                "plan": row["plan"],
                "role": "admin" if allowlisted else normalize_role(row["role"]),
                "is_allowlisted_admin": allowlisted,
                "created_at": row["created_at"],
                "total_tasks": int(row["total_tasks"]),
                "completed_tasks": int(row["completed_tasks"]),
                "failed_tasks": int(row["failed_tasks"]),
                "active_api_keys": int(row["active_api_keys"]),
            }
        )
    return users
def list_admin_contacts(page: int = 1, per_page: int = 20) -> dict:
    """Return one page of contact messages plus total/unread counters.

    Page numbers below 1 clamp to 1; page size clamps to the 1..100 range.
    """
    current_page = page if page > 1 else 1
    page_size = min(max(per_page, 1), 100)
    offset = (current_page - 1) * page_size

    with _connect() as conn:
        counters = conn.execute(
            "SELECT COUNT(*) AS total, COALESCE(SUM(CASE WHEN is_read = 0 THEN 1 ELSE 0 END), 0) AS unread FROM contact_messages"
        ).fetchone()
        message_rows = conn.execute(
            """
            SELECT id, name, email, category, subject, message, created_at, is_read
            FROM contact_messages
            ORDER BY created_at DESC
            LIMIT ? OFFSET ?
            """,
            (page_size, offset),
        ).fetchall()

    items = [
        {
            "id": row["id"],
            "name": row["name"],
            "email": row["email"],
            "category": row["category"],
            "subject": row["subject"],
            "message": row["message"],
            "created_at": row["created_at"],
            "is_read": bool(row["is_read"]),
        }
        for row in message_rows
    ]
    return {
        "items": items,
        "page": current_page,
        "per_page": page_size,
        "total": int(counters["total"]) if counters else 0,
        "unread": int(counters["unread"]) if counters else 0,
    }
def mark_admin_contact_read(message_id: int) -> bool:
    """Mark one contact message as read; True when a row was updated.

    Thin admin-facing wrapper over ``contact_service.mark_read``.
    """
    return mark_read(message_id)

View File

@@ -0,0 +1,106 @@
"""Barcode generation service."""
import os
import io
import logging
logger = logging.getLogger(__name__)
class BarcodeGenerationError(Exception):
    """Custom exception for barcode generation failures."""
    pass


# Symbologies accepted by generate_barcode(); these names map 1:1 onto the
# identifiers understood by python-barcode's ``barcode.get``.
SUPPORTED_BARCODE_TYPES = [
    "code128",
    "code39",
    "ean13",
    "ean8",
    "upca",
    "isbn13",
    "isbn10",
    "issn",
    "pzn",
]


def generate_barcode(
    data: str,
    barcode_type: str = "code128",
    output_path: str = "",
    output_format: str = "png",
) -> dict:
    """Generate a barcode image.

    Args:
        data: The data to encode in the barcode (max 200 characters).
        barcode_type: Type of barcode (code128, code39, ean13, etc.).
        output_path: Path for the output image; python-barcode appends the
            correct extension itself, so any supplied extension is stripped.
        output_format: "png" or "svg".

    Returns:
        dict with barcode_type, data, output_size, and output_path.

    Raises:
        BarcodeGenerationError: If validation or generation fails.
    """
    barcode_type = barcode_type.lower()
    if barcode_type not in SUPPORTED_BARCODE_TYPES:
        raise BarcodeGenerationError(
            f"Unsupported barcode type: {barcode_type}. "
            f"Supported: {', '.join(SUPPORTED_BARCODE_TYPES)}"
        )
    if not data or not data.strip():
        raise BarcodeGenerationError("Barcode data cannot be empty.")
    if len(data) > 200:
        raise BarcodeGenerationError("Barcode data is too long (max 200 characters).")

    try:
        import barcode
        from barcode.writer import ImageWriter

        # Only create the directory when the path actually has one:
        # os.makedirs("") raises FileNotFoundError for bare filenames.
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)

        # barcode.save() appends the extension automatically, so strip any
        # extension the caller supplied. (The former type_map was an identity
        # mapping and has been removed; the validated name is used directly.)
        output_base = output_path.rsplit(".", 1)[0] if "." in output_path else output_path
        if output_format == "svg":
            bc = barcode.get(barcode_type, data)
        else:
            bc = barcode.get(barcode_type, data, writer=ImageWriter())
        final_path = bc.save(output_base)

        if not os.path.exists(final_path):
            raise BarcodeGenerationError("Barcode file was not created.")

        output_size = os.path.getsize(final_path)
        # Lazy %-formatting avoids building the message when INFO is disabled.
        logger.info(
            "Barcode generated: type=%s, data=%s... (%d bytes)",
            barcode_type,
            data[:20],
            output_size,
        )
        return {
            "barcode_type": barcode_type,
            "data": data,
            "output_size": output_size,
            "output_path": final_path,
        }
    except BarcodeGenerationError:
        raise
    except Exception as e:
        # Chain the cause so the original traceback stays visible in logs.
        raise BarcodeGenerationError(f"Barcode generation failed: {str(e)}") from e

View File

@@ -0,0 +1,119 @@
"""Contact form service — stores messages and sends notification emails."""
import logging
import os
import sqlite3
from datetime import datetime, timezone
from flask import current_app
from app.services.email_service import send_email
logger = logging.getLogger(__name__)
VALID_CATEGORIES = {"general", "bug", "feature"}
def _connect() -> sqlite3.Connection:
    """Open the app database with sqlite3.Row rows, creating its folder."""
    path = current_app.config["DATABASE_PATH"]
    directory = os.path.dirname(path)
    if directory:
        os.makedirs(directory, exist_ok=True)
    connection = sqlite3.connect(path)
    connection.row_factory = sqlite3.Row
    return connection
def init_contact_db() -> None:
    """Create the contact_messages table if it doesn't exist."""
    ddl = """
        CREATE TABLE IF NOT EXISTS contact_messages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            email TEXT NOT NULL,
            category TEXT NOT NULL DEFAULT 'general',
            subject TEXT NOT NULL,
            message TEXT NOT NULL,
            created_at TEXT NOT NULL,
            is_read INTEGER NOT NULL DEFAULT 0
        )
    """
    conn = _connect()
    try:
        conn.execute(ddl)
        conn.commit()
    finally:
        conn.close()
def save_message(name: str, email: str, category: str, subject: str, message: str) -> dict:
    """Persist a contact message and send a notification email.

    Unknown categories fall back to "general". The notification email is
    best-effort: failures are logged and never surfaced to the caller.

    Args:
        name/email/category/subject/message: Raw form fields from the user.

    Returns:
        dict with the new row id and its created_at timestamp.
    """
    import html  # stdlib; only needed for the notification body

    if category not in VALID_CATEGORIES:
        category = "general"
    now = datetime.now(timezone.utc).isoformat()
    conn = _connect()
    try:
        cursor = conn.execute(
            """INSERT INTO contact_messages (name, email, category, subject, message, created_at)
            VALUES (?, ?, ?, ?, ?, ?)""",
            (name, email, category, subject, message, now),
        )
        conn.commit()
        msg_id = cursor.lastrowid
    finally:
        conn.close()

    # Send notification email to admin. Escape every user-supplied field so
    # attacker-controlled input cannot inject HTML into the admin's mailbox.
    # (category is safe: it was normalized to a member of VALID_CATEGORIES.)
    admin_email = current_app.config.get("SMTP_FROM", "noreply@saas-pdf.com")
    safe_name = html.escape(name)
    safe_email = html.escape(email)
    safe_subject = html.escape(subject)
    safe_message = html.escape(message)
    try:
        send_email(
            to=admin_email,
            subject=f"[SaaS-PDF Contact] [{category}] {subject}",
            html_body=f"""
            <h2>New Contact Message</h2>
            <p><strong>From:</strong> {safe_name} &lt;{safe_email}&gt;</p>
            <p><strong>Category:</strong> {category}</p>
            <p><strong>Subject:</strong> {safe_subject}</p>
            <hr />
            <p>{safe_message}</p>
            """,
        )
    except Exception:
        logger.exception("Failed to send contact notification email")
    return {"id": msg_id, "created_at": now}
def get_messages(page: int = 1, per_page: int = 20) -> dict:
    """Retrieve paginated contact messages (admin use)."""
    offset = (page - 1) * per_page
    conn = _connect()
    try:
        total_count = conn.execute("SELECT COUNT(*) FROM contact_messages").fetchone()[0]
        records = [
            dict(row)
            for row in conn.execute(
                "SELECT * FROM contact_messages ORDER BY created_at DESC LIMIT ? OFFSET ?",
                (per_page, offset),
            ).fetchall()
        ]
    finally:
        conn.close()
    return {
        "messages": records,
        "total": total_count,
        "page": page,
        "per_page": per_page,
    }
def mark_read(message_id: int) -> bool:
    """Mark a contact message as read; True when a matching row existed."""
    conn = _connect()
    try:
        update_cursor = conn.execute(
            "UPDATE contact_messages SET is_read = 1 WHERE id = ?",
            (message_id,),
        )
        conn.commit()
        was_updated = update_cursor.rowcount > 0
    finally:
        conn.close()
    return was_updated

View File

@@ -0,0 +1,176 @@
"""Image extra tools — Crop, Rotate/Flip."""
import os
import logging
from PIL import Image
logger = logging.getLogger(__name__)
class ImageExtraError(Exception):
    """Raised when an image crop or rotate/flip operation fails."""
# Maps lowercase output-file extensions to Pillow format identifiers;
# callers fall back to PNG for extensions not listed here.
FORMAT_MAP = {
    "jpg": "JPEG",
    "jpeg": "JPEG",
    "png": "PNG",
    "webp": "WEBP",
}
# ---------------------------------------------------------------------------
# Image Crop
# ---------------------------------------------------------------------------
def crop_image(
    input_path: str,
    output_path: str,
    left: int,
    top: int,
    right: int,
    bottom: int,
    quality: int = 85,
) -> dict:
    """Crop an image to a specified rectangle.

    Args:
        input_path: Path to the input image
        output_path: Path for the cropped output; its extension selects the
            output format (unknown extensions fall back to PNG)
        left: Left edge in pixels
        top: Top edge in pixels
        right: Right edge in pixels
        bottom: Bottom edge in pixels
        quality: Output quality for lossy formats (JPEG/WEBP)

    Returns:
        dict with original and cropped dimensions

    Raises:
        ImageExtraError: If the crop area is invalid or the operation fails
    """
    try:
        # Only create the directory when the path has one — os.makedirs("")
        # raises FileNotFoundError for bare filenames.
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        with Image.open(input_path) as img:
            orig_w, orig_h = img.size
            if left < 0 or top < 0 or right > orig_w or bottom > orig_h:
                raise ImageExtraError(
                    f"Crop area ({left},{top},{right},{bottom}) outside image bounds ({orig_w}x{orig_h})."
                )
            if left >= right or top >= bottom:
                raise ImageExtraError("Invalid crop area: left must be < right, top must be < bottom.")
            cropped = img.crop((left, top, right, bottom))
            ext = os.path.splitext(output_path)[1].lower().strip(".")
            pil_format = FORMAT_MAP.get(ext, "PNG")
            save_kwargs = {"optimize": True}
            if pil_format in ("JPEG", "WEBP"):
                save_kwargs["quality"] = quality
            # JPEG cannot store an alpha channel, so composite transparent
            # images onto a white background for that format only. PNG and
            # WEBP support alpha, so their transparency is preserved (the
            # previous code flattened unconditionally, destroying it).
            if pil_format == "JPEG" and cropped.mode in ("RGBA", "P", "LA"):
                if cropped.mode == "P":
                    cropped = cropped.convert("RGBA")
                bg = Image.new("RGB", cropped.size, (255, 255, 255))
                bg.paste(cropped, mask=cropped.split()[-1] if "A" in cropped.mode else None)
                cropped = bg
            cropped.save(output_path, format=pil_format, **save_kwargs)
        new_w = right - left
        new_h = bottom - top
        logger.info(f"Image crop: {orig_w}x{orig_h}{new_w}x{new_h}")
        return {
            "original_width": orig_w,
            "original_height": orig_h,
            "cropped_width": new_w,
            "cropped_height": new_h,
        }
    except ImageExtraError:
        raise
    except (IOError, OSError, Image.DecompressionBombError) as e:
        # Chain the cause so the Pillow traceback stays visible.
        raise ImageExtraError(f"Image crop failed: {str(e)}") from e
# ---------------------------------------------------------------------------
# Image Rotate / Flip
# ---------------------------------------------------------------------------
def rotate_flip_image(
    input_path: str,
    output_path: str,
    rotation: int = 0,
    flip_horizontal: bool = False,
    flip_vertical: bool = False,
    quality: int = 85,
) -> dict:
    """Rotate and/or flip an image.

    Args:
        input_path: Path to the input image
        output_path: Path for the output image; its extension selects the
            output format (unknown extensions fall back to PNG)
        rotation: Clockwise rotation angle (0, 90, 180, 270)
        flip_horizontal: Mirror horizontally
        flip_vertical: Mirror vertically
        quality: Output quality for lossy formats (JPEG/WEBP)

    Returns:
        dict with original and new dimensions

    Raises:
        ImageExtraError: If the rotation angle is invalid or the operation fails
    """
    if rotation not in (0, 90, 180, 270):
        raise ImageExtraError("Rotation must be 0, 90, 180, or 270 degrees.")
    try:
        # Only create the directory when the path has one — os.makedirs("")
        # raises FileNotFoundError for bare filenames.
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        with Image.open(input_path) as img:
            orig_w, orig_h = img.size
            result = img
            if rotation:
                # PIL rotates counter-clockwise, so negate for clockwise
                result = result.rotate(-rotation, expand=True)
            if flip_horizontal:
                result = result.transpose(Image.Transpose.FLIP_LEFT_RIGHT)
            if flip_vertical:
                result = result.transpose(Image.Transpose.FLIP_TOP_BOTTOM)
            new_w, new_h = result.size
            ext = os.path.splitext(output_path)[1].lower().strip(".")
            pil_format = FORMAT_MAP.get(ext, "PNG")
            save_kwargs = {"optimize": True}
            if pil_format in ("JPEG", "WEBP"):
                save_kwargs["quality"] = quality
            # JPEG cannot store an alpha channel, so composite transparent
            # images onto a white background for that format only. PNG and
            # WEBP support alpha, so their transparency is preserved (the
            # previous code flattened unconditionally, destroying it).
            if pil_format == "JPEG" and result.mode in ("RGBA", "P", "LA"):
                if result.mode == "P":
                    result = result.convert("RGBA")
                bg = Image.new("RGB", result.size, (255, 255, 255))
                bg.paste(result, mask=result.split()[-1] if "A" in result.mode else None)
                result = bg
            result.save(output_path, format=pil_format, **save_kwargs)
        logger.info(f"Image rotate/flip: {orig_w}x{orig_h}{new_w}x{new_h}, rot={rotation}")
        return {
            "original_width": orig_w,
            "original_height": orig_h,
            "new_width": new_w,
            "new_height": new_h,
            "rotation": rotation,
            "flipped_horizontal": flip_horizontal,
            "flipped_vertical": flip_vertical,
        }
    except ImageExtraError:
        raise
    except (IOError, OSError, Image.DecompressionBombError) as e:
        # Chain the cause so the Pillow traceback stays visible.
        raise ImageExtraError(f"Image rotate/flip failed: {str(e)}") from e

View File

@@ -0,0 +1,278 @@
"""PDF conversion service — PDF↔PowerPoint, Excel→PDF, PowerPoint→PDF, Sign PDF."""
import os
import io
import logging
import subprocess
import tempfile
logger = logging.getLogger(__name__)
class PDFConvertError(Exception):
    """Raised when a PDF conversion or signing operation fails."""
# ---------------------------------------------------------------------------
# PDF to PowerPoint (PPTX)
# ---------------------------------------------------------------------------
def pdf_to_pptx(input_path: str, output_path: str) -> dict:
    """Convert a PDF to PowerPoint by rendering each page as a slide image.

    Each page is rasterized at 200 DPI and centered on a blank 16:9 slide,
    scaled to fit while preserving aspect ratio.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the output PPTX

    Returns:
        dict with total_slides and output_size

    Raises:
        PDFConvertError: If conversion fails
    """
    try:
        from pdf2image import convert_from_path
        from pptx import Presentation
        from pptx.util import Inches, Emu

        images = convert_from_path(input_path, dpi=200)
        if not images:
            raise PDFConvertError("PDF has no pages or could not be rendered.")
        prs = Presentation()
        # Use widescreen 16:9 layout
        prs.slide_width = Inches(13.333)
        prs.slide_height = Inches(7.5)
        for img in images:
            slide = prs.slides.add_slide(prs.slide_layouts[6])  # blank layout
            img_stream = io.BytesIO()
            img.save(img_stream, format="PNG")
            img_stream.seek(0)
            # Scale image to fill slide. There are 914400 EMU per inch and
            # the page was rendered at 200 DPI, so pixels * 914400 / 200 is
            # the image's natural size in EMU; ratio <= slide/image keeps
            # the aspect ratio while fitting inside the slide.
            img_w, img_h = img.size
            slide_w = prs.slide_width
            slide_h = prs.slide_height
            ratio = min(slide_w / Emu(int(img_w * 914400 / 200)),
                        slide_h / Emu(int(img_h * 914400 / 200)))
            pic_w = int(img_w * 914400 / 200 * ratio)
            pic_h = int(img_h * 914400 / 200 * ratio)
            # Center the picture on the slide.
            left = (slide_w - pic_w) // 2
            top = (slide_h - pic_h) // 2
            slide.shapes.add_picture(img_stream, left, top, pic_w, pic_h)
        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        prs.save(output_path)
        output_size = os.path.getsize(output_path)
        logger.info(f"PDF→PPTX: {len(images)} slides ({output_size} bytes)")
        return {"total_slides": len(images), "output_size": output_size}
    except PDFConvertError:
        raise
    except Exception as e:
        raise PDFConvertError(f"PDF to PowerPoint conversion failed: {str(e)}")
# ---------------------------------------------------------------------------
# Excel (XLSX) to PDF
# ---------------------------------------------------------------------------
def excel_to_pdf(input_path: str, output_dir: str) -> str:
    """Convert an Excel file to PDF using LibreOffice headless.

    Args:
        input_path: Path to the input XLSX/XLS file
        output_dir: Directory for the output file

    Returns:
        Path to the converted PDF

    Raises:
        PDFConvertError: If conversion fails
    """
    import shutil

    os.makedirs(output_dir, exist_ok=True)
    # Throwaway profile dir so concurrent soffice runs don't fight over one.
    user_install_dir = tempfile.mkdtemp(prefix="lo_excel2pdf_")
    command = [
        "soffice",
        "--headless",
        "--norestore",
        f"-env:UserInstallation=file://{user_install_dir}",
        "--convert-to", "pdf",
        "--outdir", output_dir,
        input_path,
    ]
    try:
        proc = subprocess.run(
            command, capture_output=True, text=True, timeout=120,
            env={**os.environ, "HOME": user_install_dir},
        )
        stem = os.path.splitext(os.path.basename(input_path))[0]
        expected_pdf = os.path.join(output_dir, f"{stem}.pdf")
        if os.path.exists(expected_pdf) and os.path.getsize(expected_pdf) > 0:
            logger.info(f"Excel→PDF conversion successful: {expected_pdf}")
            return expected_pdf
        if proc.returncode != 0:
            stderr = proc.stderr or ""
            # Drop the harmless javaldx warning so real failures surface.
            meaningful_lines = [
                line for line in stderr.strip().splitlines()
                if not line.startswith("Warning: failed to launch javaldx")
            ]
            detail = "\n".join(meaningful_lines) if meaningful_lines else stderr
            raise PDFConvertError(f"Conversion failed: {detail or 'Unknown error'}")
        raise PDFConvertError("Output file was not created.")
    except subprocess.TimeoutExpired:
        raise PDFConvertError("Conversion timed out. File may be too large.")
    except FileNotFoundError:
        raise PDFConvertError("LibreOffice is not installed on the server.")
    finally:
        shutil.rmtree(user_install_dir, ignore_errors=True)
# ---------------------------------------------------------------------------
# PowerPoint (PPTX) to PDF
# ---------------------------------------------------------------------------
def pptx_to_pdf(input_path: str, output_dir: str) -> str:
    """Convert a PowerPoint file to PDF using LibreOffice headless.

    Args:
        input_path: Path to the input PPTX/PPT file
        output_dir: Directory for the output file

    Returns:
        Path to the converted PDF

    Raises:
        PDFConvertError: If conversion fails
    """
    import shutil

    os.makedirs(output_dir, exist_ok=True)
    # Throwaway profile dir so concurrent soffice runs don't fight over one.
    user_install_dir = tempfile.mkdtemp(prefix="lo_pptx2pdf_")
    command = [
        "soffice",
        "--headless",
        "--norestore",
        f"-env:UserInstallation=file://{user_install_dir}",
        "--convert-to", "pdf",
        "--outdir", output_dir,
        input_path,
    ]
    try:
        proc = subprocess.run(
            command, capture_output=True, text=True, timeout=120,
            env={**os.environ, "HOME": user_install_dir},
        )
        stem = os.path.splitext(os.path.basename(input_path))[0]
        expected_pdf = os.path.join(output_dir, f"{stem}.pdf")
        if os.path.exists(expected_pdf) and os.path.getsize(expected_pdf) > 0:
            logger.info(f"PPTX→PDF conversion successful: {expected_pdf}")
            return expected_pdf
        if proc.returncode != 0:
            stderr = proc.stderr or ""
            # Drop the harmless javaldx warning so real failures surface.
            meaningful_lines = [
                line for line in stderr.strip().splitlines()
                if not line.startswith("Warning: failed to launch javaldx")
            ]
            detail = "\n".join(meaningful_lines) if meaningful_lines else stderr
            raise PDFConvertError(f"Conversion failed: {detail or 'Unknown error'}")
        raise PDFConvertError("Output file was not created.")
    except subprocess.TimeoutExpired:
        raise PDFConvertError("Conversion timed out. File may be too large.")
    except FileNotFoundError:
        raise PDFConvertError("LibreOffice is not installed on the server.")
    finally:
        shutil.rmtree(user_install_dir, ignore_errors=True)
# ---------------------------------------------------------------------------
# Sign PDF (overlay signature image on a page)
# ---------------------------------------------------------------------------
def sign_pdf(
    input_path: str,
    signature_path: str,
    output_path: str,
    page: int = 0,
    x: float = 100,
    y: float = 100,
    width: float = 200,
    height: float = 80,
) -> dict:
    """Overlay a signature image onto a PDF page.

    Builds a one-page overlay PDF (via reportlab) matching the target
    page's mediabox size, draws the signature image on it, then merges
    that overlay onto the chosen page while copying all pages through.

    Args:
        input_path: Path to the input PDF
        signature_path: Path to the signature image (PNG with transparency)
        output_path: Path for the signed output PDF
        page: 0-based page index to place signature
        x: X coordinate (points from left)
        y: Y coordinate (points from bottom)
        width: Signature width in points
        height: Signature height in points

    Returns:
        dict with total_pages, output_size, and signed_page (1-based)

    Raises:
        PDFConvertError: If signing fails
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter
        from reportlab.pdfgen import canvas as rl_canvas
        from reportlab.lib.utils import ImageReader

        reader = PdfReader(input_path)
        total_pages = len(reader.pages)
        if total_pages == 0:
            raise PDFConvertError("PDF has no pages.")
        if page < 0 or page >= total_pages:
            raise PDFConvertError(f"Page {page + 1} does not exist (PDF has {total_pages} pages).")
        # Size the overlay to the target page so merged coordinates line up.
        target_page = reader.pages[page]
        page_box = target_page.mediabox
        page_width = float(page_box.width)
        page_height = float(page_box.height)
        # Create overlay PDF with the signature image
        overlay_stream = io.BytesIO()
        c = rl_canvas.Canvas(overlay_stream, pagesize=(page_width, page_height))
        sig_img = ImageReader(signature_path)
        # mask="auto" preserves the PNG's transparency when drawing.
        c.drawImage(sig_img, x, y, width=width, height=height, mask="auto")
        c.save()
        overlay_stream.seek(0)
        overlay_reader = PdfReader(overlay_stream)
        overlay_page = overlay_reader.pages[0]
        writer = PdfWriter()
        for i, pg in enumerate(reader.pages):
            if i == page:
                # Stamp the signature overlay onto the target page only.
                pg.merge_page(overlay_page)
            writer.add_page(pg)
        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)
        output_size = os.path.getsize(output_path)
        logger.info(f"Sign PDF: signature on page {page + 1} ({output_size} bytes)")
        return {"total_pages": total_pages, "output_size": output_size, "signed_page": page + 1}
    except PDFConvertError:
        raise
    except Exception as e:
        raise PDFConvertError(f"Failed to sign PDF: {str(e)}")

View File

@@ -0,0 +1,316 @@
"""Extended PDF tools — Crop, Flatten, Repair, Metadata Editor."""
import os
import io
import logging
logger = logging.getLogger(__name__)
class PDFExtraError(Exception):
    """Raised when an extended PDF tool (crop/flatten/repair/metadata) fails."""
# ---------------------------------------------------------------------------
# Crop PDF
# ---------------------------------------------------------------------------
def crop_pdf(
    input_path: str,
    output_path: str,
    margin_left: float = 0,
    margin_right: float = 0,
    margin_top: float = 0,
    margin_bottom: float = 0,
    pages: str = "all",
) -> dict:
    """Crop margins from PDF pages.

    Pages selected by *pages* get their mediabox shrunk inward by the given
    margins; unselected pages are copied through untouched.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the cropped output
        margin_left/right/top/bottom: Points to crop from each side
        pages: "all" or comma-separated page numbers (1-based)

    Returns:
        dict with total_pages, cropped_pages, and output_size

    Raises:
        PDFExtraError: If cropping fails

    NOTE(review): margins larger than the page would produce an inverted
    box; no explicit validation here — presumably callers send sane values.
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter

        reader = PdfReader(input_path)
        writer = PdfWriter()
        total_pages = len(reader.pages)
        if total_pages == 0:
            raise PDFExtraError("PDF has no pages.")
        # _parse_pages is a module-level helper (defined below this block);
        # it turns "all" / "1,3,5" into a set of 0-based indices.
        target_indices = _parse_pages(pages, total_pages)
        for i, page in enumerate(reader.pages):
            if i in target_indices:
                # Shrink the box in place: move the lower-left corner inward
                # by (left, bottom) and the upper-right by (right, top).
                box = page.mediabox
                box.lower_left = (
                    float(box.lower_left[0]) + margin_left,
                    float(box.lower_left[1]) + margin_bottom,
                )
                box.upper_right = (
                    float(box.upper_right[0]) - margin_right,
                    float(box.upper_right[1]) - margin_top,
                )
                # Assign the same box to both mediabox and cropbox so viewers
                # that honor either box show the cropped area.
                page.mediabox = box
                page.cropbox = box
            writer.add_page(page)
        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)
        output_size = os.path.getsize(output_path)
        logger.info(f"Crop PDF: {len(target_indices)} pages cropped ({output_size} bytes)")
        return {
            "total_pages": total_pages,
            "cropped_pages": len(target_indices),
            "output_size": output_size,
        }
    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to crop PDF: {str(e)}")
# ---------------------------------------------------------------------------
# Flatten PDF (remove interactive form fields, annotations)
# ---------------------------------------------------------------------------
def flatten_pdf(input_path: str, output_path: str) -> dict:
    """Flatten a PDF — burn form fields and annotations into static content.

    Strips the /Annots array from every page and drops the document-level
    /AcroForm entry so viewers no longer present interactive widgets.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the flattened output

    Returns:
        dict with total_pages and output_size

    Raises:
        PDFExtraError: If flatten fails

    NOTE(review): deleting /Annots removes widget annotations rather than
    rendering their current values into page content; already-painted
    appearances remain, but unpainted field values may be lost — confirm
    this matches the intended "flatten" semantics.
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter

        reader = PdfReader(input_path)
        writer = PdfWriter()
        total_pages = len(reader.pages)
        if total_pages == 0:
            raise PDFExtraError("PDF has no pages.")
        for page in reader.pages:
            # Remove annotations to flatten
            if "/Annots" in page:
                del page["/Annots"]
            writer.add_page(page)
        # Remove AcroForm (interactive forms) at document level.
        # NOTE: _root_object is a private PyPDF2 attribute — may break on
        # library upgrades.
        if "/AcroForm" in writer._root_object:
            del writer._root_object["/AcroForm"]
        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)
        output_size = os.path.getsize(output_path)
        logger.info(f"Flatten PDF: {total_pages} pages ({output_size} bytes)")
        return {"total_pages": total_pages, "output_size": output_size}
    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to flatten PDF: {str(e)}")
# ---------------------------------------------------------------------------
# Repair PDF
# ---------------------------------------------------------------------------
def repair_pdf(input_path: str, output_path: str) -> dict:
    """Attempt to repair a damaged PDF by re-writing it.

    Opens the file in non-strict mode, copies every page that can still be
    parsed into a fresh document, and carries metadata over best-effort.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the repaired output

    Returns:
        dict with total_pages, recovered_pages, output_size, and repaired flag

    Raises:
        PDFExtraError: If the file is unreadable or no pages can be saved
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter
        from PyPDF2.errors import PdfReadError

        try:
            reader = PdfReader(input_path, strict=False)
        except PdfReadError as e:
            raise PDFExtraError(f"Cannot read PDF — file may be severely corrupted: {str(e)}")

        writer = PdfWriter()
        total_pages = len(reader.pages)
        if total_pages == 0:
            raise PDFExtraError("PDF has no recoverable pages.")

        recovered = 0
        for index, page in enumerate(reader.pages):
            try:
                writer.add_page(page)
            except Exception:
                # Keep going — salvage whatever pages still parse.
                logger.warning(f"Repair: skipped unrecoverable page {index + 1}")
            else:
                recovered += 1
        if recovered == 0:
            raise PDFExtraError("No pages could be recovered from the PDF.")

        # Copy metadata if available (best effort; never fail the repair).
        try:
            if reader.metadata:
                writer.add_metadata(reader.metadata)
        except Exception:
            pass

        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)
        output_size = os.path.getsize(output_path)
        logger.info(f"Repair PDF: {recovered}/{total_pages} pages recovered ({output_size} bytes)")
        return {
            "total_pages": total_pages,
            "recovered_pages": recovered,
            "output_size": output_size,
            "repaired": True,
        }
    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to repair PDF: {str(e)}")
# ---------------------------------------------------------------------------
# PDF Metadata Editor
# ---------------------------------------------------------------------------
def edit_pdf_metadata(
    input_path: str,
    output_path: str,
    title: str | None = None,
    author: str | None = None,
    subject: str | None = None,
    keywords: str | None = None,
    creator: str | None = None,
) -> dict:
    """Edit PDF metadata fields.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the output PDF
        title/author/subject/keywords/creator: New metadata values (None = keep existing)

    Returns:
        dict with updated metadata and output_size

    Raises:
        PDFExtraError: If metadata edit fails
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter
        reader = PdfReader(input_path)
        writer = PdfWriter()
        for page in reader.pages:
            writer.add_page(page)
        # Only the explicitly-provided fields are "updates".
        updates = {}
        if title is not None:
            updates["/Title"] = title
        if author is not None:
            updates["/Author"] = author
        if subject is not None:
            updates["/Subject"] = subject
        if keywords is not None:
            updates["/Keywords"] = keywords
        if creator is not None:
            updates["/Creator"] = creator
        if not updates:
            raise PDFExtraError("At least one metadata field must be provided.")
        # Bug fix: start from the existing metadata so fields passed as None
        # are actually kept (previously they were silently dropped, despite
        # the documented "None = keep existing" contract).
        metadata = {}
        try:
            if reader.metadata:
                metadata = {str(k): str(v) for k, v in reader.metadata.items()}
        except Exception:
            pass
        metadata.update(updates)
        writer.add_metadata(metadata)
        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)
        output_size = os.path.getsize(output_path)
        # Read back to confirm what was actually written.
        current_meta = {}
        try:
            r2 = PdfReader(output_path)
            if r2.metadata:
                current_meta = {
                    "title": r2.metadata.get("/Title", ""),
                    "author": r2.metadata.get("/Author", ""),
                    "subject": r2.metadata.get("/Subject", ""),
                    "keywords": r2.metadata.get("/Keywords", ""),
                    "creator": r2.metadata.get("/Creator", ""),
                }
        except Exception:
            pass
        logger.info(f"Edit metadata: updated {len(updates)} fields ({output_size} bytes)")
        return {
            "total_pages": len(reader.pages),
            "output_size": output_size,
            "metadata": current_meta,
        }
    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to edit PDF metadata: {str(e)}")
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _parse_pages(pages_spec: str, total_pages: int) -> set[int]:
"""Parse page specification to set of 0-based indices."""
if pages_spec.strip().lower() == "all":
return set(range(total_pages))
indices = set()
for part in pages_spec.split(","):
part = part.strip()
if "-" in part:
try:
start, end = part.split("-", 1)
start = max(1, int(start))
end = min(total_pages, int(end))
for p in range(start, end + 1):
indices.add(p - 1)
except ValueError:
continue
else:
try:
p = int(part)
if 1 <= p <= total_pages:
indices.add(p - 1)
except ValueError:
continue
return indices

View File

@@ -135,3 +135,21 @@ def get_all_ratings_summary() -> list[dict]:
}
for row in rows
]
def get_global_rating_summary() -> dict:
    """Return aggregate rating stats across all rated tools.

    Returns:
        dict with ``rating_count`` (total number of ratings) and
        ``average_rating`` (mean rating rounded to 1 decimal; 0 when empty
        thanks to the COALESCE in the query).
    """
    with _connect() as conn:
        row = conn.execute(
            """
            SELECT
                COUNT(*) AS count,
                COALESCE(AVG(rating), 0) AS average
            FROM tool_ratings
            """
        ).fetchone()
    # An aggregate query always yields one row; the `if row` guards are defensive.
    return {
        "rating_count": int(row["count"]) if row else 0,
        "average_rating": round(row["average"], 1) if row else 0.0,
    }

View File

@@ -0,0 +1,220 @@
"""Stripe payment service — checkout sessions, webhooks, and subscription management."""
import logging
import os
import stripe
from flask import current_app
from app.services.account_service import update_user_plan, get_user_by_id, _connect, _utc_now
logger = logging.getLogger(__name__)
def _init_stripe():
    """Configure stripe with the app's secret key.

    Reads STRIPE_SECRET_KEY from the Flask app config; an empty string is
    used when unset (Stripe calls will then fail with an auth error).
    """
    stripe.api_key = current_app.config.get("STRIPE_SECRET_KEY", "")
def _ensure_stripe_columns():
    """Add stripe_customer_id and stripe_subscription_id columns if missing."""
    conn = _connect()
    try:
        # Inspect the current schema so the migration stays idempotent.
        existing = {row["name"] for row in conn.execute("PRAGMA table_info(users)").fetchall()}
        if "stripe_customer_id" not in existing:
            conn.execute("ALTER TABLE users ADD COLUMN stripe_customer_id TEXT")
        if "stripe_subscription_id" not in existing:
            conn.execute("ALTER TABLE users ADD COLUMN stripe_subscription_id TEXT")
        conn.commit()
    finally:
        conn.close()
def init_stripe_db():
    """Initialize stripe-related DB columns (idempotent; safe to call at startup)."""
    _ensure_stripe_columns()
def _get_or_create_customer(user_id: int) -> str:
    """Get existing Stripe customer or create one.

    Returns the Stripe customer id, creating the customer on first use and
    persisting its id on the users row so later calls reuse it.

    Raises:
        ValueError: If no user exists with ``user_id``.
    """
    _init_stripe()
    conn = _connect()
    try:
        row = conn.execute(
            "SELECT email, stripe_customer_id FROM users WHERE id = ?",
            (user_id,),
        ).fetchone()
    finally:
        conn.close()
    if row is None:
        raise ValueError("User not found.")
    if row["stripe_customer_id"]:
        return row["stripe_customer_id"]
    # Create new Stripe customer
    customer = stripe.Customer.create(
        email=row["email"],
        metadata={"user_id": str(user_id)},
    )
    # Persist the new customer id; a second connection is opened because the
    # first was closed before the (slow) Stripe network call.
    conn = _connect()
    try:
        conn.execute(
            "UPDATE users SET stripe_customer_id = ?, updated_at = ? WHERE id = ?",
            (customer.id, _utc_now(), user_id),
        )
        conn.commit()
    finally:
        conn.close()
    return customer.id
def create_checkout_session(user_id: int, price_id: str, success_url: str, cancel_url: str) -> str:
    """Create a Stripe Checkout Session and return the URL."""
    _init_stripe()
    # The user_id in metadata lets the webhook handler resolve the buyer
    # even if the customer lookup fails.
    checkout = stripe.checkout.Session.create(
        customer=_get_or_create_customer(user_id),
        payment_method_types=["card"],
        line_items=[{"price": price_id, "quantity": 1}],
        mode="subscription",
        success_url=success_url,
        cancel_url=cancel_url,
        metadata={"user_id": str(user_id)},
    )
    return checkout.url
def create_portal_session(user_id: int, return_url: str) -> str:
    """Create a Stripe Customer Portal session for managing subscriptions.

    Returns:
        The portal session URL the user should be redirected to.
    """
    _init_stripe()
    customer_id = _get_or_create_customer(user_id)
    session = stripe.billing_portal.Session.create(
        customer=customer_id,
        return_url=return_url,
    )
    return session.url
def handle_webhook_event(payload: bytes, sig_header: str) -> dict:
    """Process a Stripe webhook event. Returns a status dict."""
    webhook_secret = current_app.config.get("STRIPE_WEBHOOK_SECRET", "")
    if not webhook_secret:
        logger.warning("STRIPE_WEBHOOK_SECRET not configured — ignoring webhook.")
        return {"status": "ignored", "reason": "no webhook secret"}
    # Verify the signature before trusting anything in the payload.
    try:
        event = stripe.Webhook.construct_event(payload, sig_header, webhook_secret)
    except stripe.SignatureVerificationError:
        logger.warning("Stripe webhook signature verification failed.")
        return {"status": "error", "reason": "signature_failed"}
    except ValueError:
        logger.warning("Invalid Stripe webhook payload.")
        return {"status": "error", "reason": "invalid_payload"}
    event_type = event["type"]
    data_object = event["data"]["object"]
    # Dispatch table keeps the supported event types in one place;
    # unrecognized events fall through and are acknowledged as "ok".
    handlers = {
        "checkout.session.completed": _handle_checkout_completed,
        "customer.subscription.updated": _handle_subscription_updated,
        "customer.subscription.deleted": _handle_subscription_deleted,
        "invoice.payment_failed": _handle_payment_failed,
    }
    handler = handlers.get(event_type)
    if handler is not None:
        handler(data_object)
    return {"status": "ok", "event_type": event_type}
def _find_user_by_customer_id(customer_id: str) -> dict | None:
    """Find user by Stripe customer ID."""
    conn = _connect()
    try:
        row = conn.execute(
            "SELECT id, email, plan, created_at FROM users WHERE stripe_customer_id = ?",
            (customer_id,),
        ).fetchone()
    finally:
        conn.close()
    if row is None:
        return None
    return dict(row)
def _handle_checkout_completed(session: dict):
    """Handle successful checkout — activate Pro plan.

    Resolves the user from the checkout session metadata (preferred) or by
    matching the Stripe customer ID, then stores the subscription id and
    upgrades the plan to 'pro'.
    """
    customer_id = session.get("customer")
    subscription_id = session.get("subscription")
    user_id = session.get("metadata", {}).get("user_id")

    def _activate(uid: int) -> None:
        # Single place for the plan/subscription update so the metadata and
        # customer-match paths cannot drift apart (previously duplicated).
        conn = _connect()
        try:
            conn.execute(
                "UPDATE users SET plan = 'pro', stripe_subscription_id = ?, updated_at = ? WHERE id = ?",
                (subscription_id, _utc_now(), uid),
            )
            conn.commit()
        finally:
            conn.close()

    if user_id:
        _activate(int(user_id))
        logger.info("User %s upgraded to Pro via checkout.", user_id)
    elif customer_id:
        user = _find_user_by_customer_id(customer_id)
        if user:
            _activate(user["id"])
            logger.info("User %s upgraded to Pro via checkout (customer match).", user["id"])
def _handle_subscription_updated(subscription: dict):
    """Handle subscription changes (upgrade/downgrade).

    Maps Stripe subscription status to a plan: active/trialing -> pro,
    canceled/incomplete_expired -> free. past_due/unpaid is only logged;
    the plan is left unchanged.
    """
    customer_id = subscription.get("customer")
    status = subscription.get("status")
    user = _find_user_by_customer_id(customer_id)
    if not user:
        # Unknown customer — nothing to update.
        return
    if status in ("active", "trialing"):
        update_user_plan(user["id"], "pro")
        logger.info("User %s subscription active — Pro plan.", user["id"])
    elif status in ("past_due", "unpaid"):
        logger.warning("User %s subscription %s.", user["id"], status)
    elif status in ("canceled", "incomplete_expired"):
        update_user_plan(user["id"], "free")
        logger.info("User %s subscription ended — Free plan.", user["id"])
def _handle_subscription_deleted(subscription: dict):
    """Handle subscription cancellation.

    Downgrades the matched user to the free plan and clears the stored
    subscription id. Unknown customers are ignored silently.
    """
    customer_id = subscription.get("customer")
    user = _find_user_by_customer_id(customer_id)
    if user:
        update_user_plan(user["id"], "free")
        conn = _connect()
        try:
            conn.execute(
                "UPDATE users SET stripe_subscription_id = NULL, updated_at = ? WHERE id = ?",
                (_utc_now(), user["id"]),
            )
            conn.commit()
        finally:
            conn.close()
        logger.info("User %s subscription deleted — downgraded to Free.", user["id"])
def _handle_payment_failed(invoice: dict):
    """Log payment failures.

    Log-only: no plan change happens here — downgrades are driven by the
    subscription-status events handled elsewhere.
    """
    customer_id = invoice.get("customer")
    user = _find_user_by_customer_id(customer_id)
    if user:
        logger.warning("Payment failed for user %s (customer %s).", user["id"], customer_id)

View File

@@ -0,0 +1,65 @@
"""Celery tasks for barcode generation."""
import os
import logging
from flask import current_app
from app.extensions import celery
from app.services.barcode_service import generate_barcode, BarcodeGenerationError
from app.services.storage_service import storage
from app.services.task_tracking_service import finalize_task_tracking
from app.utils.sanitizer import cleanup_task_files
logger = logging.getLogger(__name__)
def _cleanup(task_id: str):
    # Remove this task's temp files; keep local outputs only when S3 is off
    # (when S3 is on, the uploaded copy is the canonical result).
    cleanup_task_files(task_id, keep_outputs=not storage.use_s3)
def _get_output_dir(task_id: str) -> str:
    # Per-task output directory under the configured OUTPUT_FOLDER.
    output_dir = os.path.join(current_app.config["OUTPUT_FOLDER"], task_id)
    os.makedirs(output_dir, exist_ok=True)
    return output_dir
def _finalize_task(
    task_id, user_id, tool, original_filename, result,
    usage_source, api_key_id, celery_task_id,
):
    # Record usage/tracking for the finished task (success or failure), then
    # clean up temp files. Returns `result` unchanged so Celery tasks can
    # `return _finalize_task(...)` directly.
    finalize_task_tracking(
        user_id=user_id, tool=tool, original_filename=original_filename,
        result=result, usage_source=usage_source, api_key_id=api_key_id,
        celery_task_id=celery_task_id,
    )
    _cleanup(task_id)
    return result
@celery.task(bind=True, name="app.tasks.barcode_tasks.generate_barcode_task")
def generate_barcode_task(
    self, data, barcode_type, task_id, output_format="png",
    user_id=None, usage_source="web", api_key_id=None,
):
    """Generate a barcode image and upload it to storage.

    Returns a result dict (completed with download_url/filename/stats, or
    failed with an error message). Only BarcodeGenerationError is turned into
    a failed result; any other exception propagates to Celery untracked.
    """
    output_dir = _get_output_dir(task_id)
    ext = "svg" if output_format == "svg" else "png"
    output_path = os.path.join(output_dir, f"{task_id}_barcode.{ext}")
    try:
        self.update_state(state="PROCESSING", meta={"step": "Generating barcode..."})
        stats = generate_barcode(data, barcode_type, output_path, output_format)
        # The service reports the real path it wrote (may differ from output_path).
        final_path = stats.pop("output_path")
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(final_path, task_id, folder="outputs")
        download_name = f"barcode_{barcode_type}.{ext}"
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        result = {"status": "completed", "download_url": download_url,
                  "filename": download_name, **stats}
        # Barcodes have no source filename — log a truncated payload instead.
        return _finalize_task(task_id, user_id, "barcode", data[:50],
                              result, usage_source, api_key_id, self.request.id)
    except BarcodeGenerationError as e:
        return _finalize_task(task_id, user_id, "barcode", data[:50],
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)

View File

@@ -0,0 +1,111 @@
"""Celery tasks for image extra tools — Crop, Rotate/Flip."""
import os
import logging
from flask import current_app
from app.extensions import celery
from app.services.image_extra_service import (
crop_image,
rotate_flip_image,
ImageExtraError,
)
from app.services.storage_service import storage
from app.services.task_tracking_service import finalize_task_tracking
from app.utils.sanitizer import cleanup_task_files
logger = logging.getLogger(__name__)
def _cleanup(task_id: str):
    # Remove this task's temp files; keep local outputs only when S3 is off
    # (when S3 is on, the uploaded copy is the canonical result).
    cleanup_task_files(task_id, keep_outputs=not storage.use_s3)
def _get_output_dir(task_id: str) -> str:
    # Per-task output directory under the configured OUTPUT_FOLDER.
    output_dir = os.path.join(current_app.config["OUTPUT_FOLDER"], task_id)
    os.makedirs(output_dir, exist_ok=True)
    return output_dir
def _finalize_task(
    task_id, user_id, tool, original_filename, result,
    usage_source, api_key_id, celery_task_id,
):
    # Record usage/tracking for the finished task (success or failure), then
    # clean up temp files. Returns `result` unchanged so Celery tasks can
    # `return _finalize_task(...)` directly.
    finalize_task_tracking(
        user_id=user_id, tool=tool, original_filename=original_filename,
        result=result, usage_source=usage_source, api_key_id=api_key_id,
        celery_task_id=celery_task_id,
    )
    _cleanup(task_id)
    return result
# ---------------------------------------------------------------------------
# Image Crop
# ---------------------------------------------------------------------------
@celery.task(bind=True, name="app.tasks.image_extra_tasks.crop_image_task")
def crop_image_task(
    self, input_path, task_id, original_filename,
    left, top, right, bottom, quality=85,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Crop an image to the given box and upload the result.

    Returns a result dict; ImageExtraError becomes a failed result instead
    of raising.
    """
    output_dir = _get_output_dir(task_id)
    # Preserve the source format when supported, otherwise fall back to PNG.
    ext = os.path.splitext(original_filename)[1].lower().strip(".")
    if ext not in ("png", "jpg", "jpeg", "webp"):
        ext = "png"
    output_path = os.path.join(output_dir, f"{task_id}_cropped.{ext}")
    try:
        self.update_state(state="PROCESSING", meta={"step": "Cropping image..."})
        stats = crop_image(input_path, output_path, left, top, right, bottom, quality)
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        base = os.path.splitext(original_filename)[0]
        download_name = f"{base}_cropped.{ext}"
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        result = {"status": "completed", "download_url": download_url,
                  "filename": download_name, **stats}
        return _finalize_task(task_id, user_id, "image-crop", original_filename,
                              result, usage_source, api_key_id, self.request.id)
    except ImageExtraError as e:
        return _finalize_task(task_id, user_id, "image-crop", original_filename,
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)
# ---------------------------------------------------------------------------
# Image Rotate/Flip
# ---------------------------------------------------------------------------
@celery.task(bind=True, name="app.tasks.image_extra_tasks.rotate_flip_image_task")
def rotate_flip_image_task(
    self, input_path, task_id, original_filename,
    rotation=0, flip_horizontal=False, flip_vertical=False, quality=85,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Rotate and/or flip an image and upload the result.

    Returns a result dict; ImageExtraError becomes a failed result instead
    of raising.
    """
    output_dir = _get_output_dir(task_id)
    # Preserve the source format when supported, otherwise fall back to PNG.
    ext = os.path.splitext(original_filename)[1].lower().strip(".")
    if ext not in ("png", "jpg", "jpeg", "webp"):
        ext = "png"
    output_path = os.path.join(output_dir, f"{task_id}_transformed.{ext}")
    try:
        self.update_state(state="PROCESSING", meta={"step": "Transforming image..."})
        stats = rotate_flip_image(input_path, output_path, rotation,
                                  flip_horizontal, flip_vertical, quality)
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        base = os.path.splitext(original_filename)[0]
        download_name = f"{base}_transformed.{ext}"
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        result = {"status": "completed", "download_url": download_url,
                  "filename": download_name, **stats}
        return _finalize_task(task_id, user_id, "image-rotate-flip", original_filename,
                              result, usage_source, api_key_id, self.request.id)
    except ImageExtraError as e:
        return _finalize_task(task_id, user_id, "image-rotate-flip", original_filename,
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)

View File

@@ -0,0 +1,171 @@
"""Celery tasks for new PDF conversions — PDF↔PPTX, Excel→PDF, Sign PDF."""
import os
import logging
from flask import current_app
from app.extensions import celery
from app.services.pdf_convert_service import (
pdf_to_pptx,
excel_to_pdf,
pptx_to_pdf,
sign_pdf,
PDFConvertError,
)
from app.services.storage_service import storage
from app.services.task_tracking_service import finalize_task_tracking
from app.utils.sanitizer import cleanup_task_files
logger = logging.getLogger(__name__)
def _cleanup(task_id: str):
    # Remove this task's temp files; keep local outputs only when S3 is off
    # (when S3 is on, the uploaded copy is the canonical result).
    cleanup_task_files(task_id, keep_outputs=not storage.use_s3)
def _get_output_dir(task_id: str) -> str:
    # Per-task output directory under the configured OUTPUT_FOLDER.
    output_dir = os.path.join(current_app.config["OUTPUT_FOLDER"], task_id)
    os.makedirs(output_dir, exist_ok=True)
    return output_dir
def _finalize_task(
    task_id, user_id, tool, original_filename, result,
    usage_source, api_key_id, celery_task_id,
):
    # Record usage/tracking for the finished task (success or failure), then
    # clean up temp files. Returns `result` unchanged so Celery tasks can
    # `return _finalize_task(...)` directly.
    finalize_task_tracking(
        user_id=user_id, tool=tool, original_filename=original_filename,
        result=result, usage_source=usage_source, api_key_id=api_key_id,
        celery_task_id=celery_task_id,
    )
    _cleanup(task_id)
    return result
# ---------------------------------------------------------------------------
# PDF to PowerPoint
# ---------------------------------------------------------------------------
@celery.task(bind=True, name="app.tasks.pdf_convert_tasks.pdf_to_pptx_task")
def pdf_to_pptx_task(
    self, input_path, task_id, original_filename,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Convert a PDF to PowerPoint and upload the result.

    Returns a result dict; PDFConvertError becomes a failed result instead
    of raising.
    """
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}.pptx")
    try:
        self.update_state(state="PROCESSING", meta={"step": "Converting PDF to PowerPoint..."})
        stats = pdf_to_pptx(input_path, output_path)
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        base = os.path.splitext(original_filename)[0]
        download_name = f"{base}.pptx"
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        result = {
            "status": "completed", "download_url": download_url,
            "filename": download_name, **stats,
        }
        return _finalize_task(task_id, user_id, "pdf-to-pptx", original_filename,
                              result, usage_source, api_key_id, self.request.id)
    except PDFConvertError as e:
        return _finalize_task(task_id, user_id, "pdf-to-pptx", original_filename,
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)
# ---------------------------------------------------------------------------
# Excel to PDF
# ---------------------------------------------------------------------------
@celery.task(bind=True, name="app.tasks.pdf_convert_tasks.excel_to_pdf_task")
def excel_to_pdf_task(
    self, input_path, task_id, original_filename,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Convert an Excel workbook to PDF and upload the result.

    Returns a result dict; PDFConvertError becomes a failed result instead
    of raising.
    """
    output_dir = _get_output_dir(task_id)
    try:
        self.update_state(state="PROCESSING", meta={"step": "Converting Excel to PDF..."})
        # The service decides the output filename and returns its path.
        output_path = excel_to_pdf(input_path, output_dir)
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        base = os.path.splitext(original_filename)[0]
        download_name = f"{base}.pdf"
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        output_size = os.path.getsize(output_path)
        result = {
            "status": "completed", "download_url": download_url,
            "filename": download_name, "output_size": output_size,
        }
        return _finalize_task(task_id, user_id, "excel-to-pdf", original_filename,
                              result, usage_source, api_key_id, self.request.id)
    except PDFConvertError as e:
        return _finalize_task(task_id, user_id, "excel-to-pdf", original_filename,
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)
# ---------------------------------------------------------------------------
# PowerPoint to PDF
# ---------------------------------------------------------------------------
@celery.task(bind=True, name="app.tasks.pdf_convert_tasks.pptx_to_pdf_task")
def pptx_to_pdf_task(
    self, input_path, task_id, original_filename,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Convert a PowerPoint presentation to PDF and upload the result.

    Returns a result dict; PDFConvertError becomes a failed result instead
    of raising.
    """
    output_dir = _get_output_dir(task_id)
    try:
        self.update_state(state="PROCESSING", meta={"step": "Converting PowerPoint to PDF..."})
        # The service decides the output filename and returns its path.
        output_path = pptx_to_pdf(input_path, output_dir)
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        base = os.path.splitext(original_filename)[0]
        download_name = f"{base}.pdf"
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        output_size = os.path.getsize(output_path)
        result = {
            "status": "completed", "download_url": download_url,
            "filename": download_name, "output_size": output_size,
        }
        return _finalize_task(task_id, user_id, "pptx-to-pdf", original_filename,
                              result, usage_source, api_key_id, self.request.id)
    except PDFConvertError as e:
        return _finalize_task(task_id, user_id, "pptx-to-pdf", original_filename,
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)
# ---------------------------------------------------------------------------
# Sign PDF
# ---------------------------------------------------------------------------
@celery.task(bind=True, name="app.tasks.pdf_convert_tasks.sign_pdf_task")
def sign_pdf_task(
    self, input_path, signature_path, task_id, original_filename,
    page=0, x=100, y=100, width=200, height=80,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Place a signature image on a PDF page and upload the result.

    Args mirror the service: page index plus the signature box (x, y, width,
    height). Returns a result dict; PDFConvertError becomes a failed result
    instead of raising.
    """
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}_signed.pdf")
    try:
        self.update_state(state="PROCESSING", meta={"step": "Signing PDF..."})
        stats = sign_pdf(input_path, signature_path, output_path, page, x, y, width, height)
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        base = os.path.splitext(original_filename)[0]
        download_name = f"{base}_signed.pdf"
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        result = {
            "status": "completed", "download_url": download_url,
            "filename": download_name, **stats,
        }
        return _finalize_task(task_id, user_id, "sign-pdf", original_filename,
                              result, usage_source, api_key_id, self.request.id)
    except PDFConvertError as e:
        return _finalize_task(task_id, user_id, "sign-pdf", original_filename,
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)

View File

@@ -0,0 +1,164 @@
"""Celery tasks for extended PDF tools — Crop, Flatten, Repair, Metadata."""
import os
import logging
from flask import current_app
from app.extensions import celery
from app.services.pdf_extra_service import (
crop_pdf,
flatten_pdf,
repair_pdf,
edit_pdf_metadata,
PDFExtraError,
)
from app.services.storage_service import storage
from app.services.task_tracking_service import finalize_task_tracking
from app.utils.sanitizer import cleanup_task_files
logger = logging.getLogger(__name__)
def _cleanup(task_id: str):
    # Remove this task's temp files; keep local outputs only when S3 is off
    # (when S3 is on, the uploaded copy is the canonical result).
    cleanup_task_files(task_id, keep_outputs=not storage.use_s3)
def _get_output_dir(task_id: str) -> str:
    # Per-task output directory under the configured OUTPUT_FOLDER.
    output_dir = os.path.join(current_app.config["OUTPUT_FOLDER"], task_id)
    os.makedirs(output_dir, exist_ok=True)
    return output_dir
def _finalize_task(
    task_id, user_id, tool, original_filename, result,
    usage_source, api_key_id, celery_task_id,
):
    # Record usage/tracking for the finished task (success or failure), then
    # clean up temp files. Returns `result` unchanged so Celery tasks can
    # `return _finalize_task(...)` directly.
    finalize_task_tracking(
        user_id=user_id, tool=tool, original_filename=original_filename,
        result=result, usage_source=usage_source, api_key_id=api_key_id,
        celery_task_id=celery_task_id,
    )
    _cleanup(task_id)
    return result
# ---------------------------------------------------------------------------
# Crop PDF
# ---------------------------------------------------------------------------
@celery.task(bind=True, name="app.tasks.pdf_extra_tasks.crop_pdf_task")
def crop_pdf_task(
    self, input_path, task_id, original_filename,
    margin_left=0, margin_right=0, margin_top=0, margin_bottom=0, pages="all",
    user_id=None, usage_source="web", api_key_id=None,
):
    """Crop PDF page margins and upload the result.

    `pages` is a page spec string ("all", "1,3", "2-5"). Returns a result
    dict; PDFExtraError becomes a failed result instead of raising.
    """
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}_cropped.pdf")
    try:
        self.update_state(state="PROCESSING", meta={"step": "Cropping PDF..."})
        stats = crop_pdf(input_path, output_path, margin_left, margin_right,
                         margin_top, margin_bottom, pages)
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        base = os.path.splitext(original_filename)[0]
        download_name = f"{base}_cropped.pdf"
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        result = {"status": "completed", "download_url": download_url,
                  "filename": download_name, **stats}
        return _finalize_task(task_id, user_id, "crop-pdf", original_filename,
                              result, usage_source, api_key_id, self.request.id)
    except PDFExtraError as e:
        return _finalize_task(task_id, user_id, "crop-pdf", original_filename,
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)
# ---------------------------------------------------------------------------
# Flatten PDF
# ---------------------------------------------------------------------------
@celery.task(bind=True, name="app.tasks.pdf_extra_tasks.flatten_pdf_task")
def flatten_pdf_task(
    self, input_path, task_id, original_filename,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Flatten a PDF and upload the result.

    Returns a result dict; PDFExtraError becomes a failed result instead
    of raising.
    """
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}_flattened.pdf")
    try:
        self.update_state(state="PROCESSING", meta={"step": "Flattening PDF..."})
        stats = flatten_pdf(input_path, output_path)
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        base = os.path.splitext(original_filename)[0]
        download_name = f"{base}_flattened.pdf"
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        result = {"status": "completed", "download_url": download_url,
                  "filename": download_name, **stats}
        return _finalize_task(task_id, user_id, "flatten-pdf", original_filename,
                              result, usage_source, api_key_id, self.request.id)
    except PDFExtraError as e:
        return _finalize_task(task_id, user_id, "flatten-pdf", original_filename,
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)
# ---------------------------------------------------------------------------
# Repair PDF
# ---------------------------------------------------------------------------
@celery.task(bind=True, name="app.tasks.pdf_extra_tasks.repair_pdf_task")
def repair_pdf_task(
    self, input_path, task_id, original_filename,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Attempt to repair a damaged PDF and upload the result.

    Returns a result dict; PDFExtraError becomes a failed result instead
    of raising.
    """
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}_repaired.pdf")
    try:
        self.update_state(state="PROCESSING", meta={"step": "Repairing PDF..."})
        stats = repair_pdf(input_path, output_path)
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        base = os.path.splitext(original_filename)[0]
        download_name = f"{base}_repaired.pdf"
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        result = {"status": "completed", "download_url": download_url,
                  "filename": download_name, **stats}
        return _finalize_task(task_id, user_id, "repair-pdf", original_filename,
                              result, usage_source, api_key_id, self.request.id)
    except PDFExtraError as e:
        return _finalize_task(task_id, user_id, "repair-pdf", original_filename,
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)
# ---------------------------------------------------------------------------
# Edit PDF Metadata
# ---------------------------------------------------------------------------
@celery.task(bind=True, name="app.tasks.pdf_extra_tasks.edit_metadata_task")
def edit_metadata_task(
    self, input_path, task_id, original_filename,
    title=None, author=None, subject=None, keywords=None, creator=None,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Edit PDF metadata fields and upload the result.

    None for a field means "do not change it" (passed through to the
    service). Returns a result dict; PDFExtraError becomes a failed result
    instead of raising.
    """
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}_metadata.pdf")
    try:
        self.update_state(state="PROCESSING", meta={"step": "Editing PDF metadata..."})
        stats = edit_pdf_metadata(input_path, output_path, title, author, subject, keywords, creator)
        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        # Metadata edits keep the original filename for the download.
        download_name = original_filename
        download_url = storage.generate_presigned_url(s3_key, original_filename=download_name)
        result = {"status": "completed", "download_url": download_url,
                  "filename": download_name, **stats}
        return _finalize_task(task_id, user_id, "edit-metadata", original_filename,
                              result, usage_source, api_key_id, self.request.id)
    except PDFExtraError as e:
        return _finalize_task(task_id, user_id, "edit-metadata", original_filename,
                              {"status": "failed", "error": str(e)},
                              usage_source, api_key_id, self.request.id)

View File

@@ -0,0 +1,91 @@
"""Database abstraction — supports SQLite (dev) and PostgreSQL (production).
Usage:
from app.utils.database import get_connection
The returned connection behaves like a sqlite3.Connection with row_factory set.
For PostgreSQL it wraps psycopg2 with RealDictCursor for dict-like rows.
Selection logic:
- If DATABASE_URL env var is set (starts with ``postgres``), use PostgreSQL.
- Otherwise fall back to SQLite via DATABASE_PATH config.
"""
import logging
import os
import sqlite3
from contextlib import contextmanager
from flask import current_app
logger = logging.getLogger(__name__)
_pg_available = False
try:
import psycopg2
import psycopg2.extras
_pg_available = True
except ImportError:
pass
def is_postgres() -> bool:
    """Return True when the app is configured to use PostgreSQL.

    Detection is purely by DATABASE_URL prefix, matching both
    ``postgres://`` and ``postgresql://`` schemes.
    """
    return os.getenv("DATABASE_URL", "").startswith("postgres")
def _sqlite_connect() -> sqlite3.Connection:
    """Open a SQLite connection with Row factory and foreign keys enabled."""
    db_path = current_app.config["DATABASE_PATH"]
    db_dir = os.path.dirname(db_path)
    if db_dir:
        # Ensure the parent directory exists before sqlite creates the file.
        os.makedirs(db_dir, exist_ok=True)
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    # Foreign keys are off by default in SQLite; enable them per-connection.
    conn.execute("PRAGMA foreign_keys = ON")
    return conn
def _pg_connect():
    """Return a psycopg2 connection with RealDictCursor.

    Raises:
        RuntimeError: If psycopg2 is not importable in this environment.
    """
    if not _pg_available:
        raise RuntimeError("psycopg2 is not installed — cannot use PostgreSQL.")
    db_url = os.getenv("DATABASE_URL", "")
    # RealDictCursor yields dict-like rows, mirroring sqlite3.Row access.
    conn = psycopg2.connect(db_url, cursor_factory=psycopg2.extras.RealDictCursor)
    # Explicit transactions: callers commit/rollback themselves.
    conn.autocommit = False
    return conn
def get_connection():
    """Get a database connection (SQLite or PostgreSQL based on config)."""
    return _pg_connect() if is_postgres() else _sqlite_connect()
@contextmanager
def db_connection():
    """Context manager that yields a connection and handles commit/rollback.

    Commits on clean exit, rolls back on any exception (which is re-raised),
    and always closes the connection.
    """
    conn = get_connection()
    try:
        yield conn
        conn.commit()
    except Exception:
        conn.rollback()
        raise
    finally:
        conn.close()
def adapt_sql(sql: str) -> str:
    """Adapt SQLite SQL to PostgreSQL if needed.

    Converts:
        - INTEGER PRIMARY KEY AUTOINCREMENT -> SERIAL PRIMARY KEY
        - ? placeholders -> %s placeholders

    NOTE(review): plain text replacement also rewrites '?' characters that
    appear inside string literals in the SQL — verify callers never pass
    queries containing a literal '?'.
    """
    if not is_postgres():
        return sql
    sql = sql.replace("INTEGER PRIMARY KEY AUTOINCREMENT", "SERIAL PRIMARY KEY")
    sql = sql.replace("?", "%s")
    return sql

Binary file not shown.

View File

@@ -10,10 +10,16 @@ load_dotenv(os.path.join(REPO_ROOT, ".env"))
load_dotenv(os.path.join(BASE_DIR, ".env"), override=False)
def _parse_csv_env(name: str) -> tuple[str, ...]:
raw_value = os.getenv(name, "")
return tuple(item.strip().lower() for item in raw_value.split(",") if item.strip())
class BaseConfig:
"""Base configuration."""
SECRET_KEY = os.getenv("SECRET_KEY", "change-me-in-production")
INTERNAL_ADMIN_SECRET = os.getenv("INTERNAL_ADMIN_SECRET", "")
INTERNAL_ADMIN_EMAILS = _parse_csv_env("INTERNAL_ADMIN_EMAILS")
# File upload settings
MAX_CONTENT_LENGTH = int(
@@ -47,6 +53,14 @@ class BaseConfig:
"bmp": ["image/bmp"],
"mp4": ["video/mp4"],
"webm": ["video/webm"],
"pptx": [
"application/vnd.openxmlformats-officedocument.presentationml.presentation"
],
"ppt": ["application/vnd.ms-powerpoint"],
"xlsx": [
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
],
"xls": ["application/vnd.ms-excel"],
}
# File size limits per type (bytes)
@@ -64,6 +78,10 @@ class BaseConfig:
"bmp": 15 * 1024 * 1024, # 15MB
"mp4": 50 * 1024 * 1024, # 50MB
"webm": 50 * 1024 * 1024, # 50MB
"pptx": 20 * 1024 * 1024, # 20MB
"ppt": 20 * 1024 * 1024, # 20MB
"xlsx": 15 * 1024 * 1024, # 15MB
"xls": 15 * 1024 * 1024, # 15MB
}
# Redis
@@ -102,6 +120,22 @@ class BaseConfig:
SMTP_USE_TLS = os.getenv("SMTP_USE_TLS", "true").lower() == "true"
FRONTEND_URL = os.getenv("FRONTEND_URL", "http://localhost:5173")
# Stripe
STRIPE_SECRET_KEY = os.getenv("STRIPE_SECRET_KEY", "")
STRIPE_WEBHOOK_SECRET = os.getenv("STRIPE_WEBHOOK_SECRET", "")
STRIPE_PRICE_ID_PRO_MONTHLY = os.getenv("STRIPE_PRICE_ID_PRO_MONTHLY", "")
STRIPE_PRICE_ID_PRO_YEARLY = os.getenv("STRIPE_PRICE_ID_PRO_YEARLY", "")
# Sentry
SENTRY_DSN = os.getenv("SENTRY_DSN", "")
SENTRY_ENVIRONMENT = os.getenv("SENTRY_ENVIRONMENT", "development")
# Site domain
SITE_DOMAIN = os.getenv("SITE_DOMAIN", "https://saas-pdf.com")
# PostgreSQL (production) — set DATABASE_URL to use PG instead of SQLite
DATABASE_URL = os.getenv("DATABASE_URL", "")
# Feature flags (default: enabled — set to "false" to disable a feature)
FEATURE_EDITOR = os.getenv("FEATURE_EDITOR", "true").lower() == "true"
FEATURE_OCR = os.getenv("FEATURE_OCR", "true").lower() == "true"
@@ -130,6 +164,9 @@ class TestingConfig(BaseConfig):
UPLOAD_FOLDER = "/tmp/test_uploads"
OUTPUT_FOLDER = "/tmp/test_outputs"
DATABASE_PATH = "/tmp/test_saas_pdf.db"
FEATURE_EDITOR = False
FEATURE_OCR = False
FEATURE_REMOVEBG = False
# Disable Redis-backed rate limiting; use in-memory instead
RATELIMIT_STORAGE_URI = "memory://"

62
backend/pytest_status.txt Normal file
View File

@@ -0,0 +1,62 @@
============================= test session starts =============================
platform win32 -- Python 3.13.12, pytest-8.4.2, pluggy-1.6.0
rootdir: C:\xampp\htdocs\SaaS-PDF
plugins: anyio-4.12.1, cov-7.0.0, flask-1.3.0, mock-3.15.1, requests-mock-1.12.1
collected 286 items
backend\tests\test_admin.py .... [ 1%]
backend\tests\test_assistant.py ... [ 2%]
backend\tests\test_auth.py ..... [ 4%]
backend\tests\test_compress.py .. [ 4%]
backend\tests\test_compress_image.py ... [ 5%]
backend\tests\test_compress_service.py ... [ 6%]
backend\tests\test_compress_tasks.py ... [ 8%]
backend\tests\test_config.py ... [ 9%]
backend\tests\test_contact.py ....... [ 11%]
backend\tests\test_convert.py .... [ 12%]
backend\tests\test_convert_tasks.py .... [ 14%]
backend\tests\test_download.py ..... [ 16%]
backend\tests\test_file_validator.py ......... [ 19%]
backend\tests\test_flowchart_tasks.py .. [ 19%]
backend\tests\test_health.py .. [ 20%]
backend\tests\test_history.py ... [ 21%]
backend\tests\test_html_to_pdf.py .... [ 23%]
backend\tests\test_image.py ... [ 24%]
backend\tests\test_image_service.py ... [ 25%]
backend\tests\test_image_tasks.py ..... [ 26%]
backend\tests\test_load.py ....... [ 29%]
backend\tests\test_maintenance_tasks.py ........ [ 32%]
backend\tests\test_ocr.py ........ [ 34%]
backend\tests\test_ocr_service.py .... [ 36%]
backend\tests\test_openrouter_config_service.py ..... [ 38%]
backend\tests\test_password_reset.py ........ [ 40%]
backend\tests\test_pdf_ai.py ......... [ 44%]
backend\tests\test_pdf_editor.py ....... [ 46%]
backend\tests\test_pdf_service.py ... [ 47%]
backend\tests\test_pdf_to_excel.py .. [ 48%]
backend\tests\test_pdf_tools.py ....................................... [ 61%]
backend\tests\test_pdf_tools_service.py ........... [ 65%]
backend\tests\test_pdf_tools_tasks.py ......... [ 68%]
backend\tests\test_phase2_tools.py ........................s........ [ 80%]
backend\tests\test_qrcode.py .... [ 81%]
backend\tests\test_rate_limiter.py ..... [ 83%]
backend\tests\test_rating.py ........ [ 86%]
backend\tests\test_removebg.py ... [ 87%]
backend\tests\test_sanitizer.py ........ [ 90%]
backend\tests\test_site_assistant_service.py .. [ 90%]
backend\tests\test_stats.py . [ 91%]
backend\tests\test_storage_service.py .... [ 92%]
backend\tests\test_stripe.py .... [ 94%]
backend\tests\test_tasks_route.py .... [ 95%]
backend\tests\test_utils.py .. [ 96%]
backend\tests\test_video.py ....... [ 98%]
backend\tests\test_video_service.py .. [ 99%]
backend\tests\test_video_tasks.py .. [100%]
============================== warnings summary ===============================
backend/tests/test_pdf_tools_service.py::TestMergePdfsService::test_merge_file_not_found_raises
C:\xampp\htdocs\SaaS-PDF\.venv\Lib\site-packages\PyPDF2\__init__.py:21: DeprecationWarning: PyPDF2 is deprecated. Please move to the pypdf library instead.
warnings.warn(
-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
============ 285 passed, 1 skipped, 1 warning in 220.52s (0:03:40) ============

View File

@@ -35,6 +35,12 @@ weasyprint>=60.0,<62.0
# OCR
pytesseract>=0.3.10,<1.0
# PowerPoint Processing
python-pptx>=0.6.21,<2.0
# Barcode Generation
python-barcode>=0.15,<1.0
# Background Removal
rembg>=2.0,<3.0
onnxruntime>=1.16,<2.0
@@ -48,6 +54,15 @@ requests>=2.31,<3.0
# Security
werkzeug>=3.0,<4.0
# Payments
stripe>=8.0,<10.0
# Monitoring
sentry-sdk[flask]>=2.0,<3.0
# PostgreSQL (production)
psycopg2-binary>=2.9,<3.0
# Testing
pytest>=8.0,<9.0
pytest-flask>=1.3,<2.0

180
backend/test_all.txt Normal file
View File

@@ -0,0 +1,180 @@
........................................................................ [ 30%]
.........F
================================== FAILURES ===================================
____________ TestOcrFeatureFlag.test_ocr_image_disabled_by_default ____________
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:378: in connect
sock = self.retry.call_with_retry(
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\retry.py:62: in call_with_retry
return do()
^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:379: in <lambda>
lambda: self._connect(), lambda error: self.disconnect(error)
^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:732: in _connect
for res in socket.getaddrinfo(
C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.13_3.13.3312.0_x64__qbz5n2kfra8p0\Lib\socket.py:977: in getaddrinfo
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
E socket.gaierror: [Errno 11001] getaddrinfo failed
During handling of the above exception, another exception occurred:
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:130: in reconnect_on_error
yield
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:180: in _consume_from
self._pubsub.subscribe(key)
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\client.py:1018: in subscribe
ret_val = self.execute_command("SUBSCRIBE", *new_channels.keys())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\client.py:843: in execute_command
self.connection = self.connection_pool.get_connection()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\utils.py:183: in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:1483: in get_connection
connection.connect()
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:384: in connect
raise ConnectionError(self._error_message(e))
E redis.exceptions.ConnectionError: Error 11001 connecting to redis:6379. getaddrinfo failed.
During handling of the above exception, another exception occurred:
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:378: in connect
sock = self.retry.call_with_retry(
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\retry.py:62: in call_with_retry
return do()
^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:379: in <lambda>
lambda: self._connect(), lambda error: self.disconnect(error)
^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:732: in _connect
for res in socket.getaddrinfo(
C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.13_3.13.3312.0_x64__qbz5n2kfra8p0\Lib\socket.py:977: in getaddrinfo
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
E socket.gaierror: [Errno 11001] getaddrinfo failed
During handling of the above exception, another exception occurred:
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:133: in reconnect_on_error
self._ensure(self._reconnect_pubsub, ())
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:428: in ensure
return retry_over_time(
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\kombu\utils\functional.py:318: in retry_over_time
return fun(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:109: in _reconnect_pubsub
metas = self.backend.client.mget(self.subscribed_to)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\commands\core.py:2009: in mget
return self.execute_command("MGET", *args, **options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\client.py:605: in execute_command
return self._execute_command(*args, **options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\client.py:611: in _execute_command
conn = self.connection or pool.get_connection()
^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\utils.py:183: in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:1483: in get_connection
connection.connect()
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:384: in connect
raise ConnectionError(self._error_message(e))
E redis.exceptions.ConnectionError: Error 11001 connecting to redis:6379. getaddrinfo failed.
The above exception was the direct cause of the following exception:
tests\test_ocr.py:18: in test_ocr_image_disabled_by_default
response = client.post(
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\werkzeug\test.py:1167: in post
return self.open(*args, **kw)
^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\testing.py:235: in open
response = super().open(
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\werkzeug\test.py:1116: in open
response_parts = self.run_wsgi_app(request.environ, buffered=buffered)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\werkzeug\test.py:988: in run_wsgi_app
rv = run_wsgi_app(self.application, environ, buffered=buffered)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\werkzeug\test.py:1264: in run_wsgi_app
app_rv = app(environ, start_response)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:1536: in __call__
return self.wsgi_app(environ, start_response)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:1514: in wsgi_app
response = self.handle_exception(e)
^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask_cors\extension.py:194: in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:1511: in wsgi_app
response = self.full_dispatch_request()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:919: in full_dispatch_request
rv = self.handle_user_exception(e)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask_cors\extension.py:194: in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:917: in full_dispatch_request
rv = self.dispatch_request()
^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:902: in dispatch_request
return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) # type: ignore[no-any-return]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask_limiter\extension.py:1314: in __inner
return cast(R, flask.current_app.ensure_sync(obj)(*a, **k))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
app\routes\ocr.py:69: in ocr_image_route
task = ocr_image_task.delay(
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\app\task.py:444: in delay
return self.apply_async(args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\app\task.py:608: in apply_async
return app.send_task(
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\app\base.py:946: in send_task
self.backend.on_task_call(P, task_id)
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:417: in on_task_call
self.result_consumer.consume_from(task_id)
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:172: in consume_from
return self.start(task_id)
^^^^^^^^^^^^^^^^^^^
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:150: in start
self._consume_from(initial_task_id)
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:179: in _consume_from
with self.reconnect_on_error():
^^^^^^^^^^^^^^^^^^^^^^^^^
C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.13_3.13.3312.0_x64__qbz5n2kfra8p0\Lib\contextlib.py:162: in __exit__
self.gen.throw(value)
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:136: in reconnect_on_error
raise RuntimeError(E_RETRY_LIMIT_EXCEEDED) from e
E RuntimeError:
E Retry limit exceeded while trying to reconnect to the Celery redis result store backend. The Celery application must be restarted.
------------------------------ Captured log call ------------------------------
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (0/20) now.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (1/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (2/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (3/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (4/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (5/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (6/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (7/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (8/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (9/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (10/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (11/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (12/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (13/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (14/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (15/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (16/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (17/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (18/20) in 1.00 second.
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (19/20) in 1.00 second.
CRITICAL celery.backends.redis:redis.py:135
Retry limit exceeded while trying to reconnect to the Celery redis result store backend. The Celery application must be restarted.
=========================== short test summary info ===========================
FAILED tests/test_ocr.py::TestOcrFeatureFlag::test_ocr_image_disabled_by_default
!!!!!!!!!!!!!!!!!!!!!!!!!! stopping after 1 failures !!!!!!!!!!!!!!!!!!!!!!!!!!
1 failed, 81 passed in 116.84s (0:01:56)

View File

@@ -0,0 +1,8 @@
ImportError while loading conftest 'C:\xampp\htdocs\SaaS-PDF\backend\tests\conftest.py'.
tests\conftest.py:7: in <module>
from app import create_app
app\__init__.py:13: in <module>
from app.services.stripe_service import init_stripe_db
app\services\stripe_service.py:5: in <module>
import stripe
E ModuleNotFoundError: No module named 'stripe'

19
backend/test_run.txt Normal file
View File

@@ -0,0 +1,19 @@
============================= test session starts =============================
platform win32 -- Python 3.13.12, pytest-8.4.2, pluggy-1.6.0 -- C:\Users\ahmed\AppData\Local\Microsoft\WindowsApps\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\python.exe
cachedir: .pytest_cache
rootdir: C:\xampp\htdocs\SaaS-PDF\backend
plugins: anyio-4.12.1, cov-7.0.0, flask-1.3.0, mock-3.15.1, requests-mock-1.12.1
collecting ... collected 10 items
tests/test_health.py::test_health_endpoint PASSED [ 10%]
tests/test_health.py::test_app_creates PASSED [ 20%]
tests/test_config.py::TestConfigEndpoint::test_anonymous_gets_free_limits PASSED [ 30%]
tests/test_config.py::TestConfigEndpoint::test_authenticated_free_user_gets_usage PASSED [ 40%]
tests/test_config.py::TestConfigEndpoint::test_max_upload_mb_is_correct PASSED [ 50%]
tests/test_auth.py::TestAuthRoutes::test_register_success PASSED [ 60%]
tests/test_auth.py::TestAuthRoutes::test_register_duplicate_email PASSED [ 70%]
tests/test_auth.py::TestAuthRoutes::test_login_and_me PASSED [ 80%]
tests/test_auth.py::TestAuthRoutes::test_login_invalid_password PASSED [ 90%]
tests/test_auth.py::TestAuthRoutes::test_me_without_session PASSED [100%]
============================= 10 passed in 9.59s ==============================

0
backend/test_run2.txt Normal file
View File

View File

@@ -9,6 +9,8 @@ from app.services.account_service import init_account_db
from app.services.rating_service import init_ratings_db
from app.services.ai_cost_service import init_ai_cost_db
from app.services.site_assistant_service import init_site_assistant_db
from app.services.contact_service import init_contact_db
from app.services.stripe_service import init_stripe_db
@pytest.fixture
@@ -35,6 +37,8 @@ def app():
init_ratings_db()
init_ai_cost_db()
init_site_assistant_db()
init_contact_db()
init_stripe_db()
# Create temp directories
os.makedirs(app.config['UPLOAD_FOLDER'], exist_ok=True)

175
backend/tests/test_admin.py Normal file
View File

@@ -0,0 +1,175 @@
"""Tests for internal admin dashboard endpoints."""
from app.services.account_service import create_user, record_file_history, set_user_role, update_user_plan
from app.services.contact_service import save_message
from app.services.rating_service import submit_rating
class TestInternalAdminRoutes:
    """End-to-end tests for the session-authenticated internal admin API.

    All service-layer seeding (users, history, ratings, messages) happens
    inside ``app.app_context()``; HTTP traffic goes through the ``client``
    fixture so the real session/auth path is exercised.
    """

    def test_overview_requires_authenticated_admin(self, client):
        # No session at all -> 401 Unauthorized.
        response = client.get("/api/internal/admin/overview")
        assert response.status_code == 401

    def test_overview_rejects_non_admin_user(self, app, client):
        # A valid session whose user lacks the admin role -> 403 Forbidden.
        with app.app_context():
            create_user("member@example.com", "testpass123")
        login_response = client.post(
            "/api/auth/login",
            json={"email": "member@example.com", "password": "testpass123"},
        )
        assert login_response.status_code == 200
        response = client.get("/api/internal/admin/overview")
        assert response.status_code == 403

    def test_overview_returns_operational_summary(self, app, client):
        # Seed two users (one pro), one completed + one failed file run,
        # a rating, and a contact message, then verify the overview
        # endpoint aggregates all of them.
        with app.app_context():
            first_user = create_user("admin-a@example.com", "testpass123")
            second_user = create_user("admin-b@example.com", "testpass123")
            set_user_role(first_user["id"], "admin")
            update_user_plan(second_user["id"], "pro")
            record_file_history(
                user_id=first_user["id"],
                tool="compress-pdf",
                original_filename="one.pdf",
                output_filename="one-small.pdf",
                status="completed",
                download_url="https://example.com/one-small.pdf",
            )
            record_file_history(
                user_id=second_user["id"],
                tool="repair-pdf",
                original_filename="broken.pdf",
                output_filename=None,
                status="failed",
                download_url=None,
                metadata={"error": "Repair failed."},
            )
            submit_rating("compress-pdf", 5, fingerprint="admin-rating")
            message = save_message("Admin User", "ops@example.com", "bug", "Need help", "Broken upload")
        login_response = client.post(
            "/api/auth/login",
            json={"email": "admin-a@example.com", "password": "testpass123"},
        )
        assert login_response.status_code == 200
        response = client.get("/api/internal/admin/overview")
        assert response.status_code == 200
        data = response.get_json()
        assert data["users"]["total"] == 2
        assert data["users"]["pro"] == 1
        assert data["processing"]["total_files_processed"] == 2
        assert data["processing"]["failed_files"] == 1
        assert data["ratings"]["rating_count"] == 1
        assert data["contacts"]["unread_messages"] == 1
        assert data["contacts"]["recent"][0]["id"] == message["id"]
        assert data["recent_failures"][0]["tool"] == "repair-pdf"

    def test_contacts_can_be_marked_read(self, app, client):
        # Marking a message read zeroes the unread counter and flips is_read.
        with app.app_context():
            admin_user = create_user("admin-reader@example.com", "testpass123")
            set_user_role(admin_user["id"], "admin")
            message = save_message("Reader", "reader@example.com", "general", "Hello", "Please review")
        login_response = client.post(
            "/api/auth/login",
            json={"email": "admin-reader@example.com", "password": "testpass123"},
        )
        assert login_response.status_code == 200
        mark_response = client.post(f"/api/internal/admin/contacts/{message['id']}/read")
        assert mark_response.status_code == 200
        contacts_response = client.get("/api/internal/admin/contacts")
        assert contacts_response.status_code == 200
        contacts_data = contacts_response.get_json()
        assert contacts_data["unread"] == 0
        assert contacts_data["items"][0]["is_read"] is True

    def test_user_plan_can_be_updated(self, app, client):
        # An admin can switch another user's plan via the plan endpoint.
        with app.app_context():
            admin_user = create_user("admin-plan@example.com", "testpass123")
            user = create_user("plan-change@example.com", "testpass123")
            set_user_role(admin_user["id"], "admin")
        login_response = client.post(
            "/api/auth/login",
            json={"email": "admin-plan@example.com", "password": "testpass123"},
        )
        assert login_response.status_code == 200
        response = client.post(
            f"/api/internal/admin/users/{user['id']}/plan",
            json={"plan": "pro"},
        )
        assert response.status_code == 200
        data = response.get_json()
        assert data["user"]["plan"] == "pro"

    def test_user_role_can_be_updated(self, app, client):
        # An admin can promote another user to admin via the role endpoint.
        with app.app_context():
            admin_user = create_user("admin-role@example.com", "testpass123")
            user = create_user("member-role@example.com", "testpass123")
            set_user_role(admin_user["id"], "admin")
        login_response = client.post(
            "/api/auth/login",
            json={"email": "admin-role@example.com", "password": "testpass123"},
        )
        assert login_response.status_code == 200
        response = client.post(
            f"/api/internal/admin/users/{user['id']}/role",
            json={"role": "admin"},
        )
        assert response.status_code == 200
        data = response.get_json()
        assert data["user"]["role"] == "admin"

    def test_allowlisted_admin_role_cannot_be_changed(self, app, client):
        # Accounts listed in INTERNAL_ADMIN_EMAILS are protected from
        # demotion, even by another admin.
        app.config["INTERNAL_ADMIN_EMAILS"] = ("bootstrap-admin@example.com",)
        with app.app_context():
            actor = create_user("actor-admin@example.com", "testpass123")
            bootstrap = create_user("bootstrap-admin@example.com", "testpass123")
            set_user_role(actor["id"], "admin")
        login_response = client.post(
            "/api/auth/login",
            json={"email": "actor-admin@example.com", "password": "testpass123"},
        )
        assert login_response.status_code == 200
        response = client.post(
            f"/api/internal/admin/users/{bootstrap['id']}/role",
            json={"role": "user"},
        )
        assert response.status_code == 400
        assert "INTERNAL_ADMIN_EMAILS" in response.get_json()["error"]

    def test_admin_cannot_remove_own_role(self, app, client):
        # Self-demotion is rejected so an admin cannot lock themselves out.
        with app.app_context():
            admin_user = create_user("self-admin@example.com", "testpass123")
            set_user_role(admin_user["id"], "admin")
        login_response = client.post(
            "/api/auth/login",
            json={"email": "self-admin@example.com", "password": "testpass123"},
        )
        assert login_response.status_code == 200
        response = client.post(
            f"/api/internal/admin/users/{admin_user['id']}/role",
            json={"role": "user"},
        )
        assert response.status_code == 400
        assert "cannot remove your own admin role" in response.get_json()["error"].lower()

View File

@@ -12,6 +12,18 @@ class TestAuthRoutes:
data = response.get_json()
assert data['user']['email'] == 'user@example.com'
assert data['user']['plan'] == 'free'
assert data['user']['role'] == 'user'
    def test_register_assigns_admin_role_for_allowlisted_email(self, app, client):
        # Emails listed in INTERNAL_ADMIN_EMAILS are auto-promoted to the
        # admin role at registration time.
        app.config['INTERNAL_ADMIN_EMAILS'] = ('admin@example.com',)
        response = client.post(
            '/api/auth/register',
            json={'email': 'admin@example.com', 'password': 'secretpass123'},
        )
        assert response.status_code == 201
        assert response.get_json()['user']['role'] == 'admin'
def test_register_duplicate_email(self, client):
client.post(

View File

@@ -0,0 +1,79 @@
"""Tests for the contact form endpoint."""
import pytest
class TestContactSubmission:
    """Tests for POST /api/contact/submit."""

    def test_submit_success(self, client):
        # A fully valid payload is persisted and acknowledged with 201.
        response = client.post("/api/contact/submit", json={
            "name": "Test User",
            "email": "test@example.com",
            "category": "general",
            "subject": "Test Subject",
            "message": "This is a test message body.",
        })
        assert response.status_code == 201
        data = response.get_json()
        assert data["message"] == "Message sent successfully."
        assert "id" in data
        assert "created_at" in data

    def test_submit_missing_name(self, client):
        # Missing name -> 400 with an error mentioning "Name".
        response = client.post("/api/contact/submit", json={
            "email": "test@example.com",
            "subject": "Test",
            "message": "Body",
        })
        assert response.status_code == 400
        assert "Name" in response.get_json()["error"]

    def test_submit_invalid_email(self, client):
        # Malformed email address -> 400 with an email-related error.
        response = client.post("/api/contact/submit", json={
            "name": "User",
            "email": "not-an-email",
            "subject": "Test",
            "message": "Body",
        })
        assert response.status_code == 400
        assert "email" in response.get_json()["error"].lower()

    def test_submit_missing_subject(self, client):
        # Empty subject -> 400 with an error mentioning "Subject".
        response = client.post("/api/contact/submit", json={
            "name": "User",
            "email": "test@example.com",
            "subject": "",
            "message": "Body",
        })
        assert response.status_code == 400
        assert "Subject" in response.get_json()["error"]

    def test_submit_missing_message(self, client):
        # Empty message body -> 400 with an error mentioning "Message".
        response = client.post("/api/contact/submit", json={
            "name": "User",
            "email": "test@example.com",
            "subject": "Test",
            "message": "",
        })
        assert response.status_code == 400
        assert "Message" in response.get_json()["error"]

    def test_submit_bug_category(self, client):
        # The "bug" category is accepted like any valid category.
        response = client.post("/api/contact/submit", json={
            "name": "Bug Reporter",
            "email": "bug@example.com",
            "category": "bug",
            "subject": "Found a bug",
            "message": "The merge tool crashes on large files.",
        })
        assert response.status_code == 201

    def test_submit_invalid_category_defaults_to_general(self, client):
        # Unknown categories are not rejected; the submission still succeeds
        # (presumably coerced to "general" server-side — name of the test).
        response = client.post("/api/contact/submit", json={
            "name": "User",
            "email": "test@example.com",
            "category": "hacking",
            "subject": "Test",
            "message": "Body text here.",
        })
        assert response.status_code == 201

View File

@@ -0,0 +1,460 @@
"""Tests for Phase 2 routes — PDF Conversion, PDF Extra, Image Extra, Barcode."""
import io
import json
import os
import tempfile
from unittest.mock import patch, MagicMock
import pytest
def _barcode_available():
    """Return True when the optional python-barcode package is importable."""
    try:
        import barcode  # noqa: F401
    except ImportError:
        return False
    return True
# =========================================================================
# Helpers
# =========================================================================
def _make_pdf():
"""Minimal valid PDF bytes."""
return (
b"%PDF-1.4\n1 0 obj<</Type/Catalog/Pages 2 0 R>>endobj\n"
b"2 0 obj<</Type/Pages/Count 1/Kids[3 0 R]>>endobj\n"
b"3 0 obj<</Type/Page/MediaBox[0 0 612 792]/Parent 2 0 R>>endobj\n"
b"xref\n0 4\n0000000000 65535 f \n0000000009 00000 n \n"
b"0000000058 00000 n \n0000000115 00000 n \n"
b"trailer<</Root 1 0 R/Size 4>>\nstartxref\n190\n%%EOF"
)
def _make_png():
"""Minimal valid PNG bytes."""
return (
b"\x89PNG\r\n\x1a\n"
b"\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01"
b"\x08\x02\x00\x00\x00\x90wS\xde"
b"\x00\x00\x00\x0cIDATx\x9cc\xf8\x0f\x00\x00\x01\x01\x00\x05"
b"\x18\xd8N\x00\x00\x00\x00IEND\xaeB`\x82"
)
def _mock_route(monkeypatch, route_module, task_name, validator_name='validate_actor_file'):
"""Mock validate + generate_safe_path + celery task for a route module."""
mock_task = MagicMock()
mock_task.id = 'mock-task-id'
tmp_dir = tempfile.mkdtemp()
save_path = os.path.join(tmp_dir, 'mock_file')
monkeypatch.setattr(
f'app.routes.{route_module}.validate_actor_file',
lambda f, allowed_types, actor: ('test_file', 'pdf'),
)
monkeypatch.setattr(
f'app.routes.{route_module}.generate_safe_path',
lambda ext, folder_type: ('mock-task-id', save_path),
)
mock_delay = MagicMock(return_value=mock_task)
monkeypatch.setattr(f'app.routes.{route_module}.{task_name}.delay', mock_delay)
return mock_task, mock_delay
# =========================================================================
# PDF Convert Routes — /api/convert
# =========================================================================
class TestPdfToPptx:
    """Route tests for POST /api/convert/pdf-to-pptx."""

    def test_no_file(self, client):
        # A request without a file part is rejected up front.
        assert client.post('/api/convert/pdf-to-pptx').status_code == 400

    def test_success(self, client, monkeypatch):
        # With validation and the Celery task mocked, the route accepts the
        # upload and returns the queued task id.
        _, mock_delay = _mock_route(monkeypatch, 'pdf_convert', 'pdf_to_pptx_task')
        resp = client.post(
            '/api/convert/pdf-to-pptx',
            data={'file': (io.BytesIO(_make_pdf()), 'test.pdf')},
            content_type='multipart/form-data',
        )
        assert resp.status_code == 202
        assert resp.get_json()['task_id'] == 'mock-task-id'
        mock_delay.assert_called_once()
class TestExcelToPdf:
    """Route tests for POST /api/convert/excel-to-pdf."""

    def test_no_file(self, client):
        assert client.post('/api/convert/excel-to-pdf').status_code == 400

    def test_success(self, client, monkeypatch):
        queued = MagicMock()
        queued.id = 'mock-task-id'
        target = os.path.join(tempfile.mkdtemp(), 'mock.xlsx')
        monkeypatch.setattr(
            'app.routes.pdf_convert.validate_actor_file',
            lambda f, allowed_types, actor: ('test.xlsx', 'xlsx'),
        )
        monkeypatch.setattr(
            'app.routes.pdf_convert.generate_safe_path',
            lambda ext, folder_type: ('mock-task-id', target),
        )
        monkeypatch.setattr(
            'app.routes.pdf_convert.excel_to_pdf_task.delay',
            MagicMock(return_value=queued),
        )
        # Fake xlsx upload: the ZIP magic number plus zero padding.
        resp = client.post('/api/convert/excel-to-pdf', data={
            'file': (io.BytesIO(b'PK\x03\x04' + b'\x00' * 100), 'test.xlsx'),
        }, content_type='multipart/form-data')
        assert resp.status_code == 202
        assert resp.get_json()['task_id'] == 'mock-task-id'
class TestPptxToPdf:
    """Route tests for POST /api/convert/pptx-to-pdf."""

    def test_no_file(self, client):
        assert client.post('/api/convert/pptx-to-pdf').status_code == 400

    def test_success(self, client, monkeypatch):
        queued = MagicMock()
        queued.id = 'mock-task-id'
        target = os.path.join(tempfile.mkdtemp(), 'mock.pptx')
        monkeypatch.setattr(
            'app.routes.pdf_convert.validate_actor_file',
            lambda f, allowed_types, actor: ('test.pptx', 'pptx'),
        )
        monkeypatch.setattr(
            'app.routes.pdf_convert.generate_safe_path',
            lambda ext, folder_type: ('mock-task-id', target),
        )
        monkeypatch.setattr(
            'app.routes.pdf_convert.pptx_to_pdf_task.delay',
            MagicMock(return_value=queued),
        )
        # Fake pptx upload: the ZIP magic number plus zero padding.
        resp = client.post('/api/convert/pptx-to-pdf', data={
            'file': (io.BytesIO(b'PK\x03\x04' + b'\x00' * 100), 'test.pptx'),
        }, content_type='multipart/form-data')
        assert resp.status_code == 202
        assert resp.get_json()['task_id'] == 'mock-task-id'
class TestSignPdf:
    """Route tests for POST /api/convert/sign (PDF plus signature image)."""

    def test_no_files(self, client):
        assert client.post('/api/convert/sign').status_code == 400

    def test_missing_signature(self, client):
        # Uploading only the PDF without the signature image is invalid.
        resp = client.post('/api/convert/sign', data={
            'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
        }, content_type='multipart/form-data')
        assert resp.status_code == 400

    def test_success(self, client, monkeypatch):
        queued = MagicMock()
        queued.id = 'mock-task-id'
        tmp_dir = tempfile.mkdtemp()
        monkeypatch.setattr(
            'app.routes.pdf_convert.validate_actor_file',
            lambda f, allowed_types, actor: ('test.pdf', 'pdf'),
        )
        monkeypatch.setattr(
            'app.routes.pdf_convert.generate_safe_path',
            lambda ext, folder_type: ('mock-task-id', os.path.join(tmp_dir, f'mock.{ext}')),
        )
        monkeypatch.setattr(
            'app.routes.pdf_convert.sign_pdf_task.delay',
            MagicMock(return_value=queued),
        )
        resp = client.post('/api/convert/sign', data={
            'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
            'signature': (io.BytesIO(_make_png()), 'sig.png'),
            'page': '1',
        }, content_type='multipart/form-data')
        assert resp.status_code == 202
        assert resp.get_json()['task_id'] == 'mock-task-id'
# =========================================================================
# PDF Extra Routes — /api/pdf-tools
# =========================================================================
class TestCropPdf:
    """Route tests for POST /api/pdf-tools/crop."""

    def test_no_file(self, client):
        assert client.post('/api/pdf-tools/crop').status_code == 400

    def test_success(self, client, monkeypatch):
        _, mock_delay = _mock_route(monkeypatch, 'pdf_extra', 'crop_pdf_task')
        form = {
            'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
            'left': '10', 'right': '10', 'top': '20', 'bottom': '20',
        }
        resp = client.post('/api/pdf-tools/crop', data=form,
                           content_type='multipart/form-data')
        assert resp.status_code == 202
        assert resp.get_json()['task_id'] == 'mock-task-id'
        mock_delay.assert_called_once()
class TestFlattenPdf:
    """Route tests for POST /api/pdf-tools/flatten."""

    def test_no_file(self, client):
        assert client.post('/api/pdf-tools/flatten').status_code == 400

    def test_success(self, client, monkeypatch):
        _, mock_delay = _mock_route(monkeypatch, 'pdf_extra', 'flatten_pdf_task')
        resp = client.post(
            '/api/pdf-tools/flatten',
            data={'file': (io.BytesIO(_make_pdf()), 'test.pdf')},
            content_type='multipart/form-data',
        )
        assert resp.status_code == 202
        mock_delay.assert_called_once()
class TestRepairPdf:
    """Route tests for POST /api/pdf-tools/repair."""

    def test_no_file(self, client):
        assert client.post('/api/pdf-tools/repair').status_code == 400

    def test_success(self, client, monkeypatch):
        _, mock_delay = _mock_route(monkeypatch, 'pdf_extra', 'repair_pdf_task')
        resp = client.post(
            '/api/pdf-tools/repair',
            data={'file': (io.BytesIO(_make_pdf()), 'test.pdf')},
            content_type='multipart/form-data',
        )
        assert resp.status_code == 202
        mock_delay.assert_called_once()
class TestEditMetadata:
    """Route tests for POST /api/pdf-tools/metadata."""

    def test_no_file(self, client):
        assert client.post('/api/pdf-tools/metadata').status_code == 400

    def test_success(self, client, monkeypatch):
        _, mock_delay = _mock_route(monkeypatch, 'pdf_extra', 'edit_metadata_task')
        form = {
            'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
            'title': 'Test Title',
            'author': 'Test Author',
        }
        resp = client.post('/api/pdf-tools/metadata', data=form,
                           content_type='multipart/form-data')
        assert resp.status_code == 202
        mock_delay.assert_called_once()
# =========================================================================
# Image Extra Routes — /api/image
# =========================================================================
class TestImageCrop:
    """Route tests for POST /api/image/crop."""

    def test_no_file(self, client):
        assert client.post('/api/image/crop').status_code == 400

    def test_success(self, client, monkeypatch):
        queued = MagicMock()
        queued.id = 'mock-task-id'
        target = os.path.join(tempfile.mkdtemp(), 'mock.png')
        monkeypatch.setattr(
            'app.routes.image_extra.validate_actor_file',
            lambda f, allowed_types, actor: ('test.png', 'png'),
        )
        monkeypatch.setattr(
            'app.routes.image_extra.generate_safe_path',
            lambda ext, folder_type: ('mock-task-id', target),
        )
        monkeypatch.setattr(
            'app.routes.image_extra.crop_image_task.delay',
            MagicMock(return_value=queued),
        )
        form = {
            'file': (io.BytesIO(_make_png()), 'test.png'),
            'left': '0', 'top': '0', 'right': '100', 'bottom': '100',
        }
        resp = client.post('/api/image/crop', data=form,
                           content_type='multipart/form-data')
        assert resp.status_code == 202
        assert resp.get_json()['task_id'] == 'mock-task-id'
class TestImageRotateFlip:
    """Route tests for POST /api/image/rotate-flip."""

    def test_no_file(self, client):
        assert client.post('/api/image/rotate-flip').status_code == 400

    def test_success(self, client, monkeypatch):
        queued = MagicMock()
        queued.id = 'mock-task-id'
        target = os.path.join(tempfile.mkdtemp(), 'mock.png')
        monkeypatch.setattr(
            'app.routes.image_extra.validate_actor_file',
            lambda f, allowed_types, actor: ('test.png', 'png'),
        )
        monkeypatch.setattr(
            'app.routes.image_extra.generate_safe_path',
            lambda ext, folder_type: ('mock-task-id', target),
        )
        monkeypatch.setattr(
            'app.routes.image_extra.rotate_flip_image_task.delay',
            MagicMock(return_value=queued),
        )
        form = {
            'file': (io.BytesIO(_make_png()), 'test.png'),
            'rotation': '90',
            'flip_horizontal': 'true',
        }
        resp = client.post('/api/image/rotate-flip', data=form,
                           content_type='multipart/form-data')
        assert resp.status_code == 202
        assert resp.get_json()['task_id'] == 'mock-task-id'
# =========================================================================
# Barcode Routes — /api/barcode
# =========================================================================
class TestBarcodeGenerate:
    """Route tests for POST /api/barcode/generate (JSON body)."""

    def test_no_data(self, client):
        # An empty JSON object carries no barcode payload -> 400.
        resp = client.post('/api/barcode/generate',
                           data=json.dumps({}),
                           content_type='application/json')
        assert resp.status_code == 400

    def test_success_json(self, client, monkeypatch):
        queued = MagicMock()
        queued.id = 'mock-task-id'
        out_path = os.path.join(tempfile.mkdtemp(), 'mock.png')
        monkeypatch.setattr(
            'app.routes.barcode.generate_safe_path',
            lambda ext, folder_type: ('mock-task-id', out_path),
        )
        monkeypatch.setattr(
            'app.routes.barcode.generate_barcode_task.delay',
            MagicMock(return_value=queued),
        )
        body = {'data': '12345', 'barcode_type': 'code128'}
        resp = client.post('/api/barcode/generate',
                           data=json.dumps(body),
                           content_type='application/json')
        assert resp.status_code == 202
        assert resp.get_json()['task_id'] == 'mock-task-id'

    def test_invalid_barcode_type(self, client):
        # NOTE(review): this payload uses the key 'type' while the success
        # test uses 'barcode_type' — confirm which key the route reads.
        resp = client.post('/api/barcode/generate',
                           data=json.dumps({'data': '12345', 'type': 'invalid_type'}),
                           content_type='application/json')
        assert resp.status_code == 400
# =========================================================================
# Service unit tests
# =========================================================================
class TestBarcodeService:
    """Unit tests for app.services.barcode_service.generate_barcode."""

    @pytest.mark.skipif(
        not _barcode_available(),
        reason='python-barcode not installed'
    )
    def test_generate_barcode(self, app):
        from app.services.barcode_service import generate_barcode
        with app.app_context():
            out = os.path.join(tempfile.mkdtemp(), 'test_barcode')
            result = generate_barcode('12345678', 'code128', out, 'png')
            # The service reports where it wrote the rendered barcode.
            assert 'output_path' in result
            assert os.path.exists(result['output_path'])

    def test_invalid_barcode_type(self, app):
        from app.services.barcode_service import generate_barcode, BarcodeGenerationError
        with app.app_context():
            out = os.path.join(tempfile.mkdtemp(), 'test_barcode')
            with pytest.raises(BarcodeGenerationError):
                generate_barcode('12345', 'nonexistent_type', out, 'png')
class TestPdfExtraService:
    """Unit tests for app.services.pdf_extra_service.

    All four tests need the same fixture: a minimal PDF on disk plus an
    output path in a fresh temp dir — previously duplicated verbatim in
    each test, now factored into ``_pdf_paths``.
    """

    @staticmethod
    def _pdf_paths():
        """Write a minimal PDF to a fresh temp dir; return (input_path, output_path)."""
        tmp_dir = tempfile.mkdtemp()
        input_path = os.path.join(tmp_dir, 'input.pdf')
        output_path = os.path.join(tmp_dir, 'output.pdf')
        with open(input_path, 'wb') as f:
            f.write(_make_pdf())
        return input_path, output_path

    def test_edit_metadata(self, app):
        from app.services.pdf_extra_service import edit_pdf_metadata
        with app.app_context():
            input_path, output_path = self._pdf_paths()
            edit_pdf_metadata(input_path, output_path, title='Test Title', author='Test Author')
            assert os.path.exists(output_path)
            assert os.path.getsize(output_path) > 0

    def test_flatten_pdf(self, app):
        from app.services.pdf_extra_service import flatten_pdf
        with app.app_context():
            input_path, output_path = self._pdf_paths()
            flatten_pdf(input_path, output_path)
            assert os.path.exists(output_path)

    def test_repair_pdf(self, app):
        from app.services.pdf_extra_service import repair_pdf
        with app.app_context():
            input_path, output_path = self._pdf_paths()
            repair_pdf(input_path, output_path)
            assert os.path.exists(output_path)

    def test_crop_pdf(self, app):
        from app.services.pdf_extra_service import crop_pdf
        with app.app_context():
            input_path, output_path = self._pdf_paths()
            crop_pdf(input_path, output_path, margin_left=10, margin_right=10, margin_top=10, margin_bottom=10)
            assert os.path.exists(output_path)
class TestImageExtraService:
    """Unit tests for app.services.image_extra_service."""

    def test_rotate_flip(self, app):
        from app.services.image_extra_service import rotate_flip_image
        from PIL import Image
        with app.app_context():
            tmp_dir = tempfile.mkdtemp()
            input_path = os.path.join(tmp_dir, 'input.png')
            output_path = os.path.join(tmp_dir, 'output.png')
            Image.new('RGB', (100, 100), color='red').save(input_path)
            rotate_flip_image(input_path, output_path, rotation=90)
            assert os.path.exists(output_path)
            # A 90-degree rotation of a square image keeps its dimensions.
            assert Image.open(output_path).size == (100, 100)

    def test_crop_image(self, app):
        from app.services.image_extra_service import crop_image
        from PIL import Image
        with app.app_context():
            tmp_dir = tempfile.mkdtemp()
            input_path = os.path.join(tmp_dir, 'input.png')
            output_path = os.path.join(tmp_dir, 'output.png')
            Image.new('RGB', (200, 200), color='blue').save(input_path)
            crop_image(input_path, output_path, left=10, top=10, right=100, bottom=100)
            assert os.path.exists(output_path)
            # (10,10)-(100,100) crop of a 200x200 image yields a 90x90 result.
            assert Image.open(output_path).size == (90, 90)

    def test_crop_invalid_coords(self, app):
        from app.services.image_extra_service import crop_image, ImageExtraError
        from PIL import Image
        with app.app_context():
            tmp_dir = tempfile.mkdtemp()
            input_path = os.path.join(tmp_dir, 'input.png')
            output_path = os.path.join(tmp_dir, 'output.png')
            Image.new('RGB', (100, 100), color='blue').save(input_path)
            # left >= right is geometrically invalid and must raise.
            # Fix: use the module-level pytest import instead of the
            # obfuscated __import__('pytest') call.
            with pytest.raises(ImageExtraError):
                crop_image(input_path, output_path, left=100, top=0, right=50, bottom=100)

View File

@@ -0,0 +1,52 @@
"""Tests for public stats summary endpoint."""
from app.services.account_service import create_user, record_file_history
from app.services.rating_service import submit_rating
class TestStatsSummary:
    """Tests for the public /api/stats/summary aggregation endpoint."""

    def test_summary_returns_processing_and_rating_totals(self, app, client):
        with app.app_context():
            user = create_user("stats@example.com", "testpass123")
            # Two completed compress-pdf runs...
            completed = [
                ("input.pdf", "output.pdf", "https://example.com/file.pdf"),
                ("input-2.pdf", "output-2.pdf", "https://example.com/file-2.pdf"),
            ]
            for original, output, url in completed:
                record_file_history(
                    user_id=user["id"],
                    tool="compress-pdf",
                    original_filename=original,
                    output_filename=output,
                    status="completed",
                    download_url=url,
                )
            # ...and one failed repair-pdf run.
            record_file_history(
                user_id=user["id"],
                tool="repair-pdf",
                original_filename="broken.pdf",
                output_filename=None,
                status="failed",
                download_url=None,
                metadata={"error": "Repair failed."},
            )
            submit_rating("compress-pdf", 5, fingerprint="stats-a")
            submit_rating("repair-pdf", 4, fingerprint="stats-b")
            response = client.get("/api/stats/summary")
            assert response.status_code == 200
            data = response.get_json()
            # 3 records total, 2 completed + 1 failed -> 66.7% success rate.
            assert data["total_files_processed"] == 3
            assert data["completed_files"] == 2
            assert data["failed_files"] == 1
            assert data["success_rate"] == 66.7
            assert data["files_last_24h"] == 3
            # Two ratings (5 and 4) average to 4.5.
            assert data["rating_count"] == 2
            assert data["average_rating"] == 4.5
            assert data["top_tools"][0] == {"tool": "compress-pdf", "count": 2}

View File

@@ -0,0 +1,48 @@
"""Tests for Stripe payment routes."""
import pytest
from unittest.mock import patch, MagicMock
class TestStripeRoutes:
    """Tests for /api/stripe/ endpoints."""

    def _login(self, client, email="stripe@test.com", password="testpass123"):
        """Register then log in a user; return the login response payload."""
        credentials = {"email": email, "password": password}
        client.post("/api/auth/register", json=credentials)
        resp = client.post("/api/auth/login", json=credentials)
        return resp.get_json()
def test_checkout_requires_auth(self, client):
response = client.post("/api/stripe/create-checkout-session", json={
"billing": "monthly",
})
assert response.status_code == 401
def test_checkout_no_stripe_key(self, client, app):
"""When STRIPE_PRICE_ID_PRO_MONTHLY is not set, return 503."""
self._login(client)
app.config["STRIPE_PRICE_ID_PRO_MONTHLY"] = ""
app.config["STRIPE_PRICE_ID_PRO_YEARLY"] = ""
response = client.post("/api/stripe/create-checkout-session", json={
"billing": "monthly",
})
assert response.status_code == 503
def test_portal_requires_auth(self, client):
response = client.post("/api/stripe/create-portal-session")
assert response.status_code == 401
def test_webhook_missing_signature(self, client):
"""Webhook without config returns ignored status."""
response = client.post(
"/api/stripe/webhook",
data=b'{}',
headers={"Stripe-Signature": "test_sig"},
)
data = response.get_json()
# Without webhook secret, it should be ignored
assert data["status"] in ("ignored", "error")