تم الانتهاء من آخر دفعة تحسينات على المشروع، وتشمل:
تحويل لوحة الإدارة الداخلية من secret header إلى session auth حقيقي مع صلاحيات admin. إضافة دعم إدارة الأدوار من داخل لوحة الإدارة نفسها، مع حماية الحسابات المعتمدة عبر INTERNAL_ADMIN_EMAILS. تحسين بيانات المستخدم في الواجهة والباكند لتشمل role وis_allowlisted_admin. إضافة اختبار frontend مخصص لصفحة /internal/admin بدل الاعتماد فقط على build واختبار routes. تحسين إضافي في الأداء عبر إزالة الاعتماد على pdfjs-dist/pdf.worker في عدّ صفحات PDF واستبداله بمسار أخف باستخدام pdf-lib. تحسين تقسيم الـ chunks في build لتقليل أثر الحزم الكبيرة وفصل أجزاء مثل network, icons, pdf-core, وeditor. التحقق الذي تم: نجاح build للواجهة. نجاح اختبار صفحة الإدارة الداخلية في frontend. نجاح اختبارات auth/admin في backend. نجاح full backend suite مسبقًا مع EXIT:0. ولو تريد نسخة أقصر جدًا، استخدم هذه: آخر التحديثات: تم تحسين نظام الإدارة الداخلية ليعتمد على صلاحيات وجلسات حقيقية بدل secret header، مع إضافة إدارة أدوار من لوحة admin نفسها، وإضافة اختبارات frontend مخصصة للوحة، وتحسين أداء الواجهة عبر إزالة pdf.worker وتحسين تقسيم الـ chunks في build. جميع الاختبارات والتحققات الأساسية المطلوبة نجحت.
This commit is contained in:
@@ -2,6 +2,7 @@
|
||||
FLASK_ENV=development
|
||||
FLASK_DEBUG=1
|
||||
SECRET_KEY=change-me-in-production
|
||||
INTERNAL_ADMIN_EMAILS=admin@example.com
|
||||
|
||||
# Site Domain (used in sitemap, robots.txt, emails)
|
||||
SITE_DOMAIN=https://saas-pdf.com
|
||||
|
||||
@@ -9,6 +9,29 @@ from app.services.account_service import init_account_db
|
||||
from app.services.rating_service import init_ratings_db
|
||||
from app.services.ai_cost_service import init_ai_cost_db
|
||||
from app.services.site_assistant_service import init_site_assistant_db
|
||||
from app.services.contact_service import init_contact_db
|
||||
from app.services.stripe_service import init_stripe_db
|
||||
|
||||
|
||||
def _init_sentry(app):
|
||||
"""Initialize Sentry error monitoring if DSN is configured."""
|
||||
dsn = app.config.get("SENTRY_DSN", "")
|
||||
if not dsn:
|
||||
return
|
||||
try:
|
||||
import sentry_sdk
|
||||
from sentry_sdk.integrations.flask import FlaskIntegration
|
||||
from sentry_sdk.integrations.celery import CeleryIntegration
|
||||
|
||||
sentry_sdk.init(
|
||||
dsn=dsn,
|
||||
environment=app.config.get("SENTRY_ENVIRONMENT", "development"),
|
||||
integrations=[FlaskIntegration(), CeleryIntegration()],
|
||||
traces_sample_rate=0.1,
|
||||
send_default_pii=False,
|
||||
)
|
||||
except ImportError:
|
||||
app.logger.warning("sentry-sdk not installed — monitoring disabled.")
|
||||
|
||||
|
||||
def create_app(config_name=None):
|
||||
@@ -19,6 +42,9 @@ def create_app(config_name=None):
|
||||
app = Flask(__name__)
|
||||
app.config.from_object(config[config_name])
|
||||
|
||||
# Initialize Sentry early
|
||||
_init_sentry(app)
|
||||
|
||||
# Create upload/output/database directories
|
||||
os.makedirs(app.config["UPLOAD_FOLDER"], exist_ok=True)
|
||||
os.makedirs(app.config["OUTPUT_FOLDER"], exist_ok=True)
|
||||
@@ -79,6 +105,8 @@ def create_app(config_name=None):
|
||||
init_ratings_db()
|
||||
init_ai_cost_db()
|
||||
init_site_assistant_db()
|
||||
init_contact_db()
|
||||
init_stripe_db()
|
||||
|
||||
# Register blueprints
|
||||
from app.routes.health import health_bp
|
||||
@@ -106,6 +134,13 @@ def create_app(config_name=None):
|
||||
from app.routes.pdf_ai import pdf_ai_bp
|
||||
from app.routes.rating import rating_bp
|
||||
from app.routes.assistant import assistant_bp
|
||||
from app.routes.contact import contact_bp
|
||||
from app.routes.stripe import stripe_bp
|
||||
from app.routes.stats import stats_bp
|
||||
from app.routes.pdf_convert import pdf_convert_bp
|
||||
from app.routes.pdf_extra import pdf_extra_bp
|
||||
from app.routes.image_extra import image_extra_bp
|
||||
from app.routes.barcode import barcode_bp
|
||||
|
||||
app.register_blueprint(health_bp, url_prefix="/api")
|
||||
app.register_blueprint(auth_bp, url_prefix="/api/auth")
|
||||
@@ -132,5 +167,12 @@ def create_app(config_name=None):
|
||||
app.register_blueprint(pdf_ai_bp, url_prefix="/api/pdf-ai")
|
||||
app.register_blueprint(rating_bp, url_prefix="/api/ratings")
|
||||
app.register_blueprint(assistant_bp, url_prefix="/api/assistant")
|
||||
app.register_blueprint(contact_bp, url_prefix="/api/contact")
|
||||
app.register_blueprint(stripe_bp, url_prefix="/api/stripe")
|
||||
app.register_blueprint(stats_bp, url_prefix="/api/stats")
|
||||
app.register_blueprint(pdf_convert_bp, url_prefix="/api/convert")
|
||||
app.register_blueprint(pdf_extra_bp, url_prefix="/api/pdf-tools")
|
||||
app.register_blueprint(image_extra_bp, url_prefix="/api/image")
|
||||
app.register_blueprint(barcode_bp, url_prefix="/api/barcode")
|
||||
|
||||
return app
|
||||
|
||||
@@ -40,6 +40,10 @@ def init_celery(app):
|
||||
"app.tasks.qrcode_tasks.*": {"queue": "default"},
|
||||
"app.tasks.html_to_pdf_tasks.*": {"queue": "convert"},
|
||||
"app.tasks.pdf_ai_tasks.*": {"queue": "default"},
|
||||
"app.tasks.pdf_convert_tasks.*": {"queue": "convert"},
|
||||
"app.tasks.pdf_extra_tasks.*": {"queue": "pdf_tools"},
|
||||
"app.tasks.image_extra_tasks.*": {"queue": "image"},
|
||||
"app.tasks.barcode_tasks.*": {"queue": "default"},
|
||||
}
|
||||
|
||||
# Celery Beat — periodic tasks
|
||||
|
||||
@@ -1,27 +1,100 @@
|
||||
"""Internal admin endpoints secured by INTERNAL_ADMIN_SECRET."""
|
||||
from flask import Blueprint, current_app, jsonify, request
|
||||
"""Internal admin endpoints secured by authenticated admin sessions."""
|
||||
from flask import Blueprint, jsonify, request
|
||||
|
||||
from app.extensions import limiter
|
||||
from app.services.account_service import get_user_by_id, update_user_plan
|
||||
from app.services.account_service import get_user_by_id, is_user_admin, set_user_role, update_user_plan
|
||||
from app.services.admin_service import (
|
||||
get_admin_overview,
|
||||
list_admin_contacts,
|
||||
list_admin_users,
|
||||
mark_admin_contact_read,
|
||||
)
|
||||
from app.services.ai_cost_service import get_monthly_spend
|
||||
from app.utils.auth import get_current_user_id
|
||||
|
||||
admin_bp = Blueprint("admin", __name__)
|
||||
|
||||
|
||||
def _check_admin_secret() -> bool:
    """Return whether the request carries the correct admin secret.

    Fails closed: returns False when INTERNAL_ADMIN_SECRET is not
    configured, so an empty header can never match an empty config value.

    Fix: compare the secret with hmac.compare_digest instead of ``==`` —
    plain string equality short-circuits on the first mismatching byte
    and leaks timing information about the secret.
    """
    from hmac import compare_digest  # stdlib; local import keeps file header untouched

    secret = current_app.config.get("INTERNAL_ADMIN_SECRET", "")
    if not secret:
        return False
    return compare_digest(request.headers.get("X-Admin-Secret", ""), secret)
|
||||
def _require_admin_session():
    """Return an error response unless the request belongs to an authenticated admin.

    Returns None when access is allowed; otherwise a (response, status)
    pair the caller should return as-is.
    """
    current_id = get_current_user_id()
    if current_id is None:
        # No session at all — 401 so clients know to authenticate first.
        return jsonify({"error": "Authentication required."}), 401
    if is_user_admin(current_id):
        return None
    # Authenticated but not an admin — 403, not 401.
    return jsonify({"error": "Admin access required."}), 403
|
||||
|
||||
|
||||
@admin_bp.route("/overview", methods=["GET"])
@limiter.limit("60/hour")
def admin_overview_route():
    """Return the internal admin dashboard overview."""
    denied = _require_admin_session()
    if denied is not None:
        return denied
    return jsonify(get_admin_overview()), 200
|
||||
|
||||
|
||||
@admin_bp.route("/users", methods=["GET"])
@limiter.limit("60/hour")
def admin_users_route():
    """Return recent users plus usage summaries for the admin dashboard."""
    denied = _require_admin_session()
    if denied is not None:
        return denied

    query = request.args.get("query", "")
    try:
        limit = int(request.args.get("limit", 25))
    except ValueError:
        limit = 25  # non-numeric input degrades to the default
    else:
        limit = max(1, min(limit, 100))  # clamp to a sane page size

    return jsonify({"items": list_admin_users(limit=limit, query=query)}), 200
|
||||
|
||||
|
||||
@admin_bp.route("/contacts", methods=["GET"])
@limiter.limit("60/hour")
def admin_contacts_route():
    """Return paginated contact messages for the admin dashboard."""
    denied = _require_admin_session()
    if denied is not None:
        return denied

    def _int_arg(name, default, upper=None):
        # Fall back to the default on non-numeric input; clamp to >= 1
        # and optionally to an upper bound.
        try:
            value = int(request.args.get(name, default))
        except ValueError:
            return default
        value = max(1, value)
        return value if upper is None else min(value, upper)

    page = _int_arg("page", 1)
    per_page = _int_arg("per_page", 20, upper=100)

    return jsonify(list_admin_contacts(page=page, per_page=per_page)), 200
|
||||
|
||||
|
||||
@admin_bp.route("/contacts/<int:message_id>/read", methods=["POST"])
@limiter.limit("120/hour")
def admin_contacts_mark_read_route(message_id: int):
    """Mark one contact message as read."""
    denied = _require_admin_session()
    if denied is not None:
        return denied

    if mark_admin_contact_read(message_id):
        return jsonify({"message": "Message marked as read."}), 200
    return jsonify({"error": "Message not found."}), 404
|
||||
|
||||
|
||||
@admin_bp.route("/users/<int:user_id>/plan", methods=["POST"])
|
||||
@limiter.limit("30/hour")
|
||||
def update_plan_route(user_id: int):
|
||||
"""Change the plan for one user — secured by X-Admin-Secret header."""
|
||||
if not _check_admin_secret():
|
||||
return jsonify({"error": "Unauthorized."}), 401
|
||||
"""Change the plan for one user — admin session required."""
|
||||
auth_error = _require_admin_session()
|
||||
if auth_error:
|
||||
return auth_error
|
||||
|
||||
data = request.get_json(silent=True) or {}
|
||||
plan = str(data.get("plan", "")).strip().lower()
|
||||
@@ -40,12 +113,45 @@ def update_plan_route(user_id: int):
|
||||
return jsonify({"message": "Plan updated.", "user": updated}), 200
|
||||
|
||||
|
||||
@admin_bp.route("/users/<int:user_id>/role", methods=["POST"])
@limiter.limit("30/hour")
def update_role_route(user_id: int):
    """Change the role for one user — admin session required."""
    denied = _require_admin_session()
    if denied is not None:
        return denied

    actor_user_id = get_current_user_id()
    payload = request.get_json(silent=True) or {}
    role = str(payload.get("role", "")).strip().lower()
    if role not in {"user", "admin"}:
        return jsonify({"error": "Role must be 'user' or 'admin'."}), 400

    target = get_user_by_id(user_id)
    if target is None:
        return jsonify({"error": "User not found."}), 404

    # Allowlisted admins are managed via configuration, never this endpoint.
    if bool(target.get("is_allowlisted_admin")):
        return jsonify({"error": "Allowlisted admin access is managed by INTERNAL_ADMIN_EMAILS."}), 400

    # An admin must not be able to lock themselves out by self-demotion.
    if actor_user_id == user_id and role != "admin":
        return jsonify({"error": "You cannot remove your own admin role."}), 400

    try:
        updated = set_user_role(user_id, role)
    except ValueError as exc:
        return jsonify({"error": str(exc)}), 400

    return jsonify({"message": "Role updated.", "user": updated}), 200
|
||||
|
||||
|
||||
@admin_bp.route("/ai-cost", methods=["GET"])
@limiter.limit("60/hour")
def ai_cost_dashboard():
    """Return the current month's AI spending summary.

    Access control: authenticated admin session only.

    Fix: the handler carried BOTH the legacy ``_check_admin_secret``
    gate and the new session gate.  After the migration to session
    auth, clients no longer send X-Admin-Secret, so the stale secret
    check rejected every legitimate admin request with 401.  Only the
    session check remains, matching the other admin endpoints.
    """
    auth_error = _require_admin_session()
    if auth_error:
        return auth_error

    spend = get_monthly_spend()
    return jsonify(spend), 200
|
||||
|
||||
70
backend/app/routes/barcode.py
Normal file
70
backend/app/routes/barcode.py
Normal file
@@ -0,0 +1,70 @@
|
||||
"""Routes for barcode generation."""
|
||||
from flask import Blueprint, request, jsonify
|
||||
|
||||
from app.extensions import limiter
|
||||
from app.services.policy_service import (
|
||||
assert_quota_available,
|
||||
build_task_tracking_kwargs,
|
||||
PolicyError,
|
||||
record_accepted_usage,
|
||||
resolve_web_actor,
|
||||
)
|
||||
from app.services.barcode_service import SUPPORTED_BARCODE_TYPES
|
||||
from app.tasks.barcode_tasks import generate_barcode_task
|
||||
from app.utils.sanitizer import generate_safe_path
|
||||
|
||||
barcode_bp = Blueprint("barcode", __name__)
|
||||
|
||||
|
||||
@barcode_bp.route("/generate", methods=["POST"])
@limiter.limit("20/minute")
def generate_barcode_route():
    """Generate a barcode image asynchronously.

    Accepts: JSON or form data with:
      - 'data': String to encode (required, max 200 characters)
      - 'type' (optional): Barcode type (default: code128)
      - 'format' (optional): "png" or "svg" (default: png)

    Returns 202 with a Celery task id on acceptance.

    Fixes:
      - ``request.get_json()`` was used unchecked; a valid-JSON but
        non-object body (e.g. a list) crashed on ``body.get`` with 500.
      - Non-string JSON values (e.g. {"data": 123}) crashed on
        ``.strip()`` / ``.lower()`` with 500.  Both now yield a 400.
    """
    if request.is_json:
        body = request.get_json(silent=True)
        if not isinstance(body, dict):
            # Malformed JSON or a non-object payload — report 400, not 500.
            return jsonify({"error": "Barcode data is required."}), 400
        source = body
    else:
        source = request.form

    # str(... or "") coerces non-string JSON values defensively.
    data = str(source.get("data", "") or "").strip()
    barcode_type = str(source.get("type", "code128") or "code128").lower()
    output_format = str(source.get("format", "png") or "png").lower()

    if not data:
        return jsonify({"error": "Barcode data is required."}), 400

    if len(data) > 200:
        return jsonify({"error": "Barcode data is too long (max 200 characters)."}), 400

    if barcode_type not in SUPPORTED_BARCODE_TYPES:
        return jsonify({
            "error": f"Unsupported barcode type. Supported: {', '.join(SUPPORTED_BARCODE_TYPES)}"
        }), 400

    if output_format not in ("png", "svg"):
        output_format = "png"  # unknown formats silently fall back to PNG

    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as e:
        return jsonify({"error": e.message}), e.status_code

    # Only the generated task id is needed; no upload file is written here.
    task_id, _ = generate_safe_path("tmp", folder_type="upload")

    task = generate_barcode_task.delay(
        data, barcode_type, task_id, output_format,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "barcode", task.id)

    return jsonify({
        "task_id": task.id,
        "message": "Barcode generation started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
|
||||
43
backend/app/routes/contact.py
Normal file
43
backend/app/routes/contact.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""Contact form routes."""
|
||||
import logging
|
||||
import re
|
||||
|
||||
from flask import Blueprint, jsonify, request
|
||||
|
||||
from app.extensions import limiter
|
||||
from app.services.contact_service import save_message
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
contact_bp = Blueprint("contact", __name__)
|
||||
|
||||
EMAIL_RE = re.compile(r"^[a-zA-Z0-9._%+\-]+@[a-zA-Z0-9.\-]+\.[a-zA-Z]{2,}$")
|
||||
|
||||
|
||||
@contact_bp.route("/submit", methods=["POST"])
@limiter.limit("5/hour", override_defaults=True)
def submit_contact():
    """Accept a contact form submission.

    Validates all fields, returns 400 with the full error list on any
    failure, otherwise persists the message and returns 201.
    """
    data = request.get_json(silent=True) or {}

    def _clean(field, default=""):
        # Treat missing / null values as the default, then trim whitespace.
        return (data.get(field) or default).strip()

    name = _clean("name")
    email = _clean("email")
    category = _clean("category", "general")
    subject = _clean("subject")
    message = _clean("message")

    errors = []
    if not (name and len(name) <= 200):
        errors.append("Name is required (max 200 characters).")
    if not (email and EMAIL_RE.match(email)):
        errors.append("A valid email address is required.")
    if not (subject and len(subject) <= 500):
        errors.append("Subject is required (max 500 characters).")
    if not (message and len(message) <= 5000):
        errors.append("Message is required (max 5000 characters).")

    if errors:
        # First error doubles as the headline message for simple clients.
        return jsonify({"error": errors[0], "errors": errors}), 400

    result = save_message(name, email, category, subject, message)
    return jsonify({"message": "Message sent successfully.", **result}), 201
|
||||
147
backend/app/routes/image_extra.py
Normal file
147
backend/app/routes/image_extra.py
Normal file
@@ -0,0 +1,147 @@
|
||||
"""Routes for image extra tools — Crop, Rotate/Flip."""
|
||||
from flask import Blueprint, request, jsonify
|
||||
|
||||
from app.extensions import limiter
|
||||
from app.services.policy_service import (
|
||||
assert_quota_available,
|
||||
build_task_tracking_kwargs,
|
||||
PolicyError,
|
||||
record_accepted_usage,
|
||||
resolve_web_actor,
|
||||
validate_actor_file,
|
||||
)
|
||||
from app.utils.file_validator import FileValidationError
|
||||
from app.utils.sanitizer import generate_safe_path
|
||||
from app.tasks.image_extra_tasks import crop_image_task, rotate_flip_image_task
|
||||
|
||||
image_extra_bp = Blueprint("image_extra", __name__)
|
||||
|
||||
ALLOWED_IMAGE_TYPES = ["png", "jpg", "jpeg", "webp"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Image Crop — POST /api/image/crop
|
||||
# ---------------------------------------------------------------------------
|
||||
@image_extra_bp.route("/crop", methods=["POST"])
@limiter.limit("10/minute")
def crop_image_route():
    """Crop an image to a pixel rectangle.

    Accepts: multipart/form-data with:
      - 'file': Image file
      - 'left', 'top', 'right', 'bottom': Crop rectangle in pixels
      - 'quality' (optional): output quality 1-100 (default: 85)
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400

    upload = request.files["file"]

    try:
        box = {
            side: int(request.form.get(side, 0))
            for side in ("left", "top", "right", "bottom")
        }
    except (ValueError, TypeError):
        return jsonify({"error": "Crop dimensions must be integers."}), 400

    # The rectangle must have positive width and height.
    if box["right"] <= box["left"] or box["bottom"] <= box["top"]:
        return jsonify({"error": "Invalid crop area: right > left and bottom > top required."}), 400

    try:
        quality = max(1, min(100, int(request.form.get("quality", 85))))
    except ValueError:
        quality = 85  # non-numeric quality degrades to the default

    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code

    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
        )
    except FileValidationError as err:
        return jsonify({"error": err.message}), err.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)

    task = crop_image_task.delay(
        input_path, task_id, original_filename,
        box["left"], box["top"], box["right"], box["bottom"], quality,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "image-crop", task.id)

    return jsonify({
        "task_id": task.id,
        "message": "Cropping started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Image Rotate/Flip — POST /api/image/rotate-flip
|
||||
# ---------------------------------------------------------------------------
|
||||
@image_extra_bp.route("/rotate-flip", methods=["POST"])
@limiter.limit("10/minute")
def rotate_flip_image_route():
    """Rotate and/or flip an image.

    Accepts: multipart/form-data with:
      - 'file': Image file
      - 'rotation' (optional): 0, 90, 180, or 270 (default: 0)
      - 'flip_horizontal' (optional): "true"/"false" (default: false)
      - 'flip_vertical' (optional): "true"/"false" (default: false)
      - 'quality' (optional): output quality 1-100 (default: 85)
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400

    upload = request.files["file"]

    try:
        rotation = int(request.form.get("rotation", 0))
    except ValueError:
        rotation = 0  # non-numeric rotation degrades to "none"
    if rotation not in (0, 90, 180, 270):
        return jsonify({"error": "Rotation must be 0, 90, 180, or 270 degrees."}), 400

    def _flag(name):
        # Form booleans arrive as strings; only the literal "true" enables.
        return request.form.get(name, "false").lower() == "true"

    flip_horizontal = _flag("flip_horizontal")
    flip_vertical = _flag("flip_vertical")

    # Reject a no-op request early instead of queueing useless work.
    if rotation == 0 and not (flip_horizontal or flip_vertical):
        return jsonify({"error": "At least one transformation is required."}), 400

    try:
        quality = max(1, min(100, int(request.form.get("quality", 85))))
    except ValueError:
        quality = 85

    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code

    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
        )
    except FileValidationError as err:
        return jsonify({"error": err.message}), err.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)

    task = rotate_flip_image_task.delay(
        input_path, task_id, original_filename,
        rotation, flip_horizontal, flip_vertical, quality,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "image-rotate-flip", task.id)

    return jsonify({
        "task_id": task.id,
        "message": "Transformation started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
|
||||
217
backend/app/routes/pdf_convert.py
Normal file
217
backend/app/routes/pdf_convert.py
Normal file
@@ -0,0 +1,217 @@
|
||||
"""Routes for new PDF conversions — PDF↔PPTX, Excel→PDF, Sign PDF."""
|
||||
import os
|
||||
import uuid
|
||||
|
||||
from flask import Blueprint, request, jsonify, current_app
|
||||
|
||||
from app.extensions import limiter
|
||||
from app.services.policy_service import (
|
||||
assert_quota_available,
|
||||
build_task_tracking_kwargs,
|
||||
PolicyError,
|
||||
record_accepted_usage,
|
||||
resolve_web_actor,
|
||||
validate_actor_file,
|
||||
)
|
||||
from app.utils.file_validator import FileValidationError
|
||||
from app.utils.sanitizer import generate_safe_path
|
||||
from app.tasks.pdf_convert_tasks import (
|
||||
pdf_to_pptx_task,
|
||||
excel_to_pdf_task,
|
||||
pptx_to_pdf_task,
|
||||
sign_pdf_task,
|
||||
)
|
||||
|
||||
pdf_convert_bp = Blueprint("pdf_convert", __name__)
|
||||
|
||||
ALLOWED_IMAGE_TYPES = ["png", "jpg", "jpeg", "webp"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PDF to PowerPoint — POST /api/convert/pdf-to-pptx
|
||||
# ---------------------------------------------------------------------------
|
||||
@pdf_convert_bp.route("/pdf-to-pptx", methods=["POST"])
@limiter.limit("10/minute")
def pdf_to_pptx_route():
    """Convert a PDF to PowerPoint (PPTX); returns 202 with a task id."""
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400

    upload = request.files["file"]
    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code

    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as err:
        return jsonify({"error": err.message}), err.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)

    tracking = build_task_tracking_kwargs(actor)
    task = pdf_to_pptx_task.delay(input_path, task_id, original_filename, **tracking)
    record_accepted_usage(actor, "pdf-to-pptx", task.id)

    return jsonify({
        "task_id": task.id,
        "message": "Conversion started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Excel to PDF — POST /api/convert/excel-to-pdf
|
||||
# ---------------------------------------------------------------------------
|
||||
@pdf_convert_bp.route("/excel-to-pdf", methods=["POST"])
@limiter.limit("10/minute")
def excel_to_pdf_route():
    """Convert an Excel workbook (.xlsx/.xls) to PDF; returns 202 with a task id."""
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400

    upload = request.files["file"]
    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code

    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=["xlsx", "xls"], actor=actor
        )
    except FileValidationError as err:
        return jsonify({"error": err.message}), err.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)

    tracking = build_task_tracking_kwargs(actor)
    task = excel_to_pdf_task.delay(input_path, task_id, original_filename, **tracking)
    record_accepted_usage(actor, "excel-to-pdf", task.id)

    return jsonify({
        "task_id": task.id,
        "message": "Conversion started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PowerPoint to PDF — POST /api/convert/pptx-to-pdf
|
||||
# ---------------------------------------------------------------------------
|
||||
@pdf_convert_bp.route("/pptx-to-pdf", methods=["POST"])
@limiter.limit("10/minute")
def pptx_to_pdf_route():
    """Convert a PowerPoint file (.pptx/.ppt) to PDF; returns 202 with a task id."""
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400

    upload = request.files["file"]
    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code

    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=["pptx", "ppt"], actor=actor
        )
    except FileValidationError as err:
        return jsonify({"error": err.message}), err.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)

    tracking = build_task_tracking_kwargs(actor)
    task = pptx_to_pdf_task.delay(input_path, task_id, original_filename, **tracking)
    record_accepted_usage(actor, "pptx-to-pdf", task.id)

    return jsonify({
        "task_id": task.id,
        "message": "Conversion started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Sign PDF — POST /api/pdf-tools/sign
|
||||
# ---------------------------------------------------------------------------
|
||||
@pdf_convert_bp.route("/sign", methods=["POST"])
@limiter.limit("10/minute")
def sign_pdf_route():
    """Sign a PDF by overlaying a signature image.

    Accepts: multipart/form-data with:
      - 'file': PDF file
      - 'signature': Signature image (PNG/JPG)
      - 'page' (optional): 1-based page number (default: 1)
      - 'x', 'y' (optional): Position in points (default: 100, 100)
      - 'width', 'height' (optional): Size in points (default: 200, 80)
    """
    if "file" not in request.files:
        return jsonify({"error": "No PDF file provided."}), 400
    if "signature" not in request.files:
        return jsonify({"error": "No signature image provided."}), 400

    pdf_file = request.files["file"]
    sig_file = request.files["signature"]

    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code

    try:
        original_filename, ext = validate_actor_file(pdf_file, allowed_types=["pdf"], actor=actor)
    except FileValidationError as err:
        return jsonify({"error": err.message}), err.code

    try:
        _, sig_ext = validate_actor_file(sig_file, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor)
    except FileValidationError as err:
        return jsonify({"error": f"Signature image: {err.message}"}), err.code

    try:
        # Clients send a 1-based page number; the task expects 0-based.
        page = max(1, int(request.form.get("page", 1))) - 1
        x = float(request.form.get("x", 100))
        y = float(request.form.get("y", 100))
        width = float(request.form.get("width", 200))
        height = float(request.form.get("height", 80))
    except (ValueError, TypeError):
        return jsonify({"error": "Invalid position parameters."}), 400

    if width <= 0 or height <= 0:
        return jsonify({"error": "Width and height must be positive."}), 400

    # Both uploads live in one per-task directory so the worker finds them together.
    task_id = str(uuid.uuid4())
    upload_dir = os.path.join(current_app.config["UPLOAD_FOLDER"], task_id)
    os.makedirs(upload_dir, exist_ok=True)

    input_path = os.path.join(upload_dir, f"{uuid.uuid4()}.pdf")
    pdf_file.save(input_path)
    signature_path = os.path.join(upload_dir, f"{uuid.uuid4()}.{sig_ext}")
    sig_file.save(signature_path)

    task = sign_pdf_task.delay(
        input_path, signature_path, task_id, original_filename,
        page, x, y, width, height,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "sign-pdf", task.id)

    return jsonify({
        "task_id": task.id,
        "message": "Signing started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
|
||||
209
backend/app/routes/pdf_extra.py
Normal file
209
backend/app/routes/pdf_extra.py
Normal file
@@ -0,0 +1,209 @@
|
||||
"""Routes for extended PDF tools — Crop, Flatten, Repair, Metadata Editor."""
|
||||
from flask import Blueprint, request, jsonify
|
||||
|
||||
from app.extensions import limiter
|
||||
from app.services.policy_service import (
|
||||
assert_quota_available,
|
||||
build_task_tracking_kwargs,
|
||||
PolicyError,
|
||||
record_accepted_usage,
|
||||
resolve_web_actor,
|
||||
validate_actor_file,
|
||||
)
|
||||
from app.utils.file_validator import FileValidationError
|
||||
from app.utils.sanitizer import generate_safe_path
|
||||
from app.tasks.pdf_extra_tasks import (
|
||||
crop_pdf_task,
|
||||
flatten_pdf_task,
|
||||
repair_pdf_task,
|
||||
edit_metadata_task,
|
||||
)
|
||||
|
||||
pdf_extra_bp = Blueprint("pdf_extra", __name__)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Crop PDF — POST /api/pdf-tools/crop
|
||||
# ---------------------------------------------------------------------------
|
||||
@pdf_extra_bp.route("/crop", methods=["POST"])
@limiter.limit("10/minute")
def crop_pdf_route():
    """Crop margins from a PDF.

    Accepts: multipart/form-data with:
      - 'file': PDF file
      - 'margin_left', 'margin_right', 'margin_top', 'margin_bottom': Points to crop
      - 'pages' (optional): "all" or comma-separated page numbers
    """
    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400

    upload = request.files["file"]
    actor = resolve_web_actor()
    try:
        assert_quota_available(actor)
    except PolicyError as err:
        return jsonify({"error": err.message}), err.status_code

    try:
        original_filename, ext = validate_actor_file(upload, allowed_types=["pdf"], actor=actor)
    except FileValidationError as err:
        return jsonify({"error": err.message}), err.code

    try:
        # Order matters: the task takes left, right, top, bottom.
        margins = [
            float(request.form.get(f"margin_{side}", 0))
            for side in ("left", "right", "top", "bottom")
        ]
    except (ValueError, TypeError):
        return jsonify({"error": "Margin values must be numbers."}), 400

    pages = request.form.get("pages", "all")

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)

    task = crop_pdf_task.delay(
        input_path, task_id, original_filename,
        *margins, pages,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "crop-pdf", task.id)

    return jsonify({
        "task_id": task.id,
        "message": "Cropping started. Poll /api/tasks/{task_id}/status for progress.",
    }), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Flatten PDF — POST /api/pdf-tools/flatten
|
||||
# ---------------------------------------------------------------------------
|
||||
@pdf_extra_bp.route("/flatten", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def flatten_pdf_route():
|
||||
"""Flatten a PDF — remove interactive forms and annotations."""
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
actor = resolve_web_actor()
|
||||
try:
|
||||
assert_quota_available(actor)
|
||||
except PolicyError as e:
|
||||
return jsonify({"error": e.message}), e.status_code
|
||||
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
|
||||
task = flatten_pdf_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "flatten-pdf", task.id)
|
||||
|
||||
return jsonify({
|
||||
"task_id": task.id,
|
||||
"message": "Flattening started. Poll /api/tasks/{task_id}/status for progress.",
|
||||
}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Repair PDF — POST /api/pdf-tools/repair
|
||||
# ---------------------------------------------------------------------------
|
||||
@pdf_extra_bp.route("/repair", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def repair_pdf_route():
|
||||
"""Attempt to repair a damaged PDF."""
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
actor = resolve_web_actor()
|
||||
try:
|
||||
assert_quota_available(actor)
|
||||
except PolicyError as e:
|
||||
return jsonify({"error": e.message}), e.status_code
|
||||
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
|
||||
task = repair_pdf_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "repair-pdf", task.id)
|
||||
|
||||
return jsonify({
|
||||
"task_id": task.id,
|
||||
"message": "Repair started. Poll /api/tasks/{task_id}/status for progress.",
|
||||
}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Edit PDF Metadata — POST /api/pdf-tools/metadata
|
||||
# ---------------------------------------------------------------------------
|
||||
@pdf_extra_bp.route("/metadata", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def edit_metadata_route():
|
||||
"""Edit PDF metadata fields.
|
||||
|
||||
Accepts: multipart/form-data with:
|
||||
- 'file': PDF file
|
||||
- 'title', 'author', 'subject', 'keywords', 'creator' (optional)
|
||||
"""
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
title = request.form.get("title")
|
||||
author = request.form.get("author")
|
||||
subject = request.form.get("subject")
|
||||
keywords = request.form.get("keywords")
|
||||
creator = request.form.get("creator")
|
||||
|
||||
if not any([title, author, subject, keywords, creator]):
|
||||
return jsonify({"error": "At least one metadata field must be provided."}), 400
|
||||
|
||||
# Validate string lengths
|
||||
for field_name, field_val in [("title", title), ("author", author),
|
||||
("subject", subject), ("keywords", keywords),
|
||||
("creator", creator)]:
|
||||
if field_val and len(field_val) > 500:
|
||||
return jsonify({"error": f"{field_name} must be 500 characters or less."}), 400
|
||||
|
||||
actor = resolve_web_actor()
|
||||
try:
|
||||
assert_quota_available(actor)
|
||||
except PolicyError as e:
|
||||
return jsonify({"error": e.message}), e.status_code
|
||||
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
|
||||
task = edit_metadata_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
title, author, subject, keywords, creator,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "edit-metadata", task.id)
|
||||
|
||||
return jsonify({
|
||||
"task_id": task.id,
|
||||
"message": "Metadata editing started. Poll /api/tasks/{task_id}/status for progress.",
|
||||
}), 202
|
||||
17
backend/app/routes/stats.py
Normal file
17
backend/app/routes/stats.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Public site-level statistics for social proof and developer pages."""
|
||||
from flask import Blueprint, jsonify
|
||||
|
||||
from app.extensions import limiter
|
||||
from app.services.account_service import get_public_history_summary
|
||||
from app.services.rating_service import get_global_rating_summary
|
||||
|
||||
stats_bp = Blueprint("stats", __name__)
|
||||
|
||||
|
||||
@stats_bp.route("/summary", methods=["GET"])
|
||||
@limiter.limit("120/hour")
|
||||
def get_stats_summary_route():
|
||||
"""Return aggregate processing and rating stats safe for public display."""
|
||||
history_summary = get_public_history_summary()
|
||||
rating_summary = get_global_rating_summary()
|
||||
return jsonify({**history_summary, **rating_summary}), 200
|
||||
85
backend/app/routes/stripe.py
Normal file
85
backend/app/routes/stripe.py
Normal file
@@ -0,0 +1,85 @@
|
||||
"""Stripe payment routes — checkout, portal, and webhooks."""
|
||||
import logging
|
||||
|
||||
from flask import Blueprint, current_app, jsonify, request, session
|
||||
|
||||
from app.extensions import limiter
|
||||
from app.services.stripe_service import (
|
||||
create_checkout_session,
|
||||
create_portal_session,
|
||||
handle_webhook_event,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
stripe_bp = Blueprint("stripe", __name__)
|
||||
|
||||
|
||||
def _get_authenticated_user_id() -> int | None:
    """Return the logged-in user's ID from the Flask session, or None if anonymous."""
    user_id = session.get("user_id")
    return user_id
|
||||
|
||||
|
||||
@stripe_bp.route("/create-checkout-session", methods=["POST"])
|
||||
@limiter.limit("10/hour", override_defaults=True)
|
||||
def checkout():
|
||||
"""Create a Stripe Checkout Session for Pro subscription."""
|
||||
user_id = _get_authenticated_user_id()
|
||||
if not user_id:
|
||||
return jsonify({"error": "Authentication required."}), 401
|
||||
|
||||
data = request.get_json(silent=True) or {}
|
||||
billing = data.get("billing", "monthly")
|
||||
|
||||
monthly_price = current_app.config.get("STRIPE_PRICE_ID_PRO_MONTHLY", "")
|
||||
yearly_price = current_app.config.get("STRIPE_PRICE_ID_PRO_YEARLY", "")
|
||||
price_id = yearly_price if billing == "yearly" and yearly_price else monthly_price
|
||||
|
||||
if not price_id:
|
||||
return jsonify({"error": "Payment is not configured yet."}), 503
|
||||
|
||||
frontend_url = current_app.config.get("FRONTEND_URL", "http://localhost:5173")
|
||||
success_url = f"{frontend_url}/account?payment=success"
|
||||
cancel_url = f"{frontend_url}/pricing?payment=cancelled"
|
||||
|
||||
try:
|
||||
url = create_checkout_session(user_id, price_id, success_url, cancel_url)
|
||||
except Exception as e:
|
||||
logger.exception("Stripe checkout session creation failed")
|
||||
return jsonify({"error": "Failed to create payment session."}), 500
|
||||
|
||||
return jsonify({"url": url})
|
||||
|
||||
|
||||
@stripe_bp.route("/create-portal-session", methods=["POST"])
|
||||
@limiter.limit("10/hour", override_defaults=True)
|
||||
def portal():
|
||||
"""Create a Stripe Customer Portal session."""
|
||||
user_id = _get_authenticated_user_id()
|
||||
if not user_id:
|
||||
return jsonify({"error": "Authentication required."}), 401
|
||||
|
||||
frontend_url = current_app.config.get("FRONTEND_URL", "http://localhost:5173")
|
||||
return_url = f"{frontend_url}/account"
|
||||
|
||||
try:
|
||||
url = create_portal_session(user_id, return_url)
|
||||
except Exception as e:
|
||||
logger.exception("Stripe portal session creation failed")
|
||||
return jsonify({"error": "Failed to create portal session."}), 500
|
||||
|
||||
return jsonify({"url": url})
|
||||
|
||||
|
||||
@stripe_bp.route("/webhook", methods=["POST"])
|
||||
def webhook():
|
||||
"""Handle Stripe webhook events. No rate limit — Stripe signs each call."""
|
||||
payload = request.get_data()
|
||||
sig_header = request.headers.get("Stripe-Signature", "")
|
||||
|
||||
result = handle_webhook_event(payload, sig_header)
|
||||
|
||||
if result["status"] == "error":
|
||||
return jsonify(result), 400
|
||||
|
||||
return jsonify(result), 200
|
||||
@@ -34,6 +34,23 @@ from app.tasks.pdf_tools_tasks import (
|
||||
unlock_pdf_task,
|
||||
)
|
||||
from app.tasks.flowchart_tasks import extract_flowchart_task
|
||||
from app.tasks.ocr_tasks import ocr_image_task, ocr_pdf_task
|
||||
from app.tasks.removebg_tasks import remove_bg_task
|
||||
from app.tasks.pdf_ai_tasks import (
|
||||
chat_with_pdf_task, summarize_pdf_task, translate_pdf_task, extract_tables_task,
|
||||
)
|
||||
from app.tasks.pdf_to_excel_tasks import pdf_to_excel_task
|
||||
from app.tasks.html_to_pdf_tasks import html_to_pdf_task
|
||||
from app.tasks.qrcode_tasks import generate_qr_task
|
||||
from app.tasks.pdf_convert_tasks import (
|
||||
pdf_to_pptx_task, excel_to_pdf_task, pptx_to_pdf_task, sign_pdf_task,
|
||||
)
|
||||
from app.tasks.pdf_extra_tasks import (
|
||||
crop_pdf_task, flatten_pdf_task, repair_pdf_task, edit_metadata_task,
|
||||
)
|
||||
from app.tasks.image_extra_tasks import crop_image_task, rotate_flip_image_task
|
||||
from app.tasks.barcode_tasks import generate_barcode_task
|
||||
from app.services.barcode_service import SUPPORTED_BARCODE_TYPES
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -680,3 +697,760 @@ def extract_flowchart_route():
|
||||
)
|
||||
record_accepted_usage(actor, "pdf-flowchart", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Flowchart extraction started."}), 202
|
||||
|
||||
|
||||
# ===========================================================================
|
||||
# Phase 2: Previously uncovered existing tools
|
||||
# ===========================================================================
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# OCR — POST /api/v1/ocr/image & /api/v1/ocr/pdf
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/ocr/image", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def ocr_image_route():
|
||||
"""Extract text from an image using OCR."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
lang = request.form.get("lang", "eng")
|
||||
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(
|
||||
file, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
|
||||
)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = ocr_image_task.delay(
|
||||
input_path, task_id, original_filename, lang,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "ocr-image", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "OCR started."}), 202
|
||||
|
||||
|
||||
@v1_bp.route("/ocr/pdf", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def ocr_pdf_route():
|
||||
"""Extract text from a PDF using OCR."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
lang = request.form.get("lang", "eng")
|
||||
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = ocr_pdf_task.delay(
|
||||
input_path, task_id, original_filename, lang,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "ocr-pdf", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "OCR started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Remove Background — POST /api/v1/image/remove-bg
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/image/remove-bg", methods=["POST"])
|
||||
@limiter.limit("5/minute")
|
||||
def remove_bg_route():
|
||||
"""Remove background from an image."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(
|
||||
file, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
|
||||
)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = remove_bg_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "remove-bg", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Background removal started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PDF AI — POST /api/v1/pdf-ai/chat, summarize, translate, extract-tables
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/pdf-ai/chat", methods=["POST"])
|
||||
@limiter.limit("5/minute")
|
||||
def chat_pdf_route():
|
||||
"""Chat with a PDF using AI."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
question = request.form.get("question", "").strip()
|
||||
if not question:
|
||||
return jsonify({"error": "Question is required."}), 400
|
||||
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = chat_with_pdf_task.delay(
|
||||
input_path, task_id, original_filename, question,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "chat-pdf", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Chat started."}), 202
|
||||
|
||||
|
||||
@v1_bp.route("/pdf-ai/summarize", methods=["POST"])
|
||||
@limiter.limit("5/minute")
|
||||
def summarize_pdf_route():
|
||||
"""Summarize a PDF using AI."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
length = request.form.get("length", "medium")
|
||||
if length not in ("short", "medium", "long"):
|
||||
length = "medium"
|
||||
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = summarize_pdf_task.delay(
|
||||
input_path, task_id, original_filename, length,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "summarize-pdf", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Summarization started."}), 202
|
||||
|
||||
|
||||
@v1_bp.route("/pdf-ai/translate", methods=["POST"])
|
||||
@limiter.limit("5/minute")
|
||||
def translate_pdf_route():
|
||||
"""Translate a PDF using AI."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
target_language = request.form.get("target_language", "").strip()
|
||||
if not target_language:
|
||||
return jsonify({"error": "Target language is required."}), 400
|
||||
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = translate_pdf_task.delay(
|
||||
input_path, task_id, original_filename, target_language,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "translate-pdf", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Translation started."}), 202
|
||||
|
||||
|
||||
@v1_bp.route("/pdf-ai/extract-tables", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def extract_tables_route():
|
||||
"""Extract tables from a PDF using AI."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = extract_tables_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "extract-tables", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Table extraction started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PDF to Excel — POST /api/v1/convert/pdf-to-excel
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/convert/pdf-to-excel", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def pdf_to_excel_route():
|
||||
"""Convert a PDF to Excel."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = pdf_to_excel_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "pdf-to-excel", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Conversion started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# HTML to PDF — POST /api/v1/convert/html-to-pdf
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/convert/html-to-pdf", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def html_to_pdf_route():
|
||||
"""Convert HTML to PDF."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(
|
||||
file, allowed_types=["html", "htm"], actor=actor
|
||||
)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = html_to_pdf_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "html-to-pdf", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Conversion started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# QR Code — POST /api/v1/qrcode/generate
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/qrcode/generate", methods=["POST"])
|
||||
@limiter.limit("20/minute")
|
||||
def generate_qr_route():
|
||||
"""Generate a QR code."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if request.is_json:
|
||||
body = request.get_json()
|
||||
data = body.get("data", "")
|
||||
size = body.get("size", 300)
|
||||
else:
|
||||
data = request.form.get("data", "")
|
||||
size = request.form.get("size", 300)
|
||||
|
||||
if not str(data).strip():
|
||||
return jsonify({"error": "QR code data is required."}), 400
|
||||
|
||||
try:
|
||||
size = max(100, min(2000, int(size)))
|
||||
except (ValueError, TypeError):
|
||||
size = 300
|
||||
|
||||
task_id = str(uuid.uuid4())
|
||||
task = generate_qr_task.delay(
|
||||
task_id, str(data).strip(), size, "png",
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "qr-code", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "QR code generation started."}), 202
|
||||
|
||||
|
||||
# ===========================================================================
|
||||
# Phase 2: New tools
|
||||
# ===========================================================================
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PDF to PowerPoint — POST /api/v1/convert/pdf-to-pptx
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/convert/pdf-to-pptx", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def v1_pdf_to_pptx_route():
|
||||
"""Convert a PDF to PowerPoint."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = pdf_to_pptx_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "pdf-to-pptx", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Conversion started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Excel to PDF — POST /api/v1/convert/excel-to-pdf
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/convert/excel-to-pdf", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def v1_excel_to_pdf_route():
|
||||
"""Convert an Excel file to PDF."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(
|
||||
file, allowed_types=["xlsx", "xls"], actor=actor
|
||||
)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = excel_to_pdf_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "excel-to-pdf", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Conversion started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PowerPoint to PDF — POST /api/v1/convert/pptx-to-pdf
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/convert/pptx-to-pdf", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def v1_pptx_to_pdf_route():
|
||||
"""Convert a PowerPoint file to PDF."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(
|
||||
file, allowed_types=["pptx", "ppt"], actor=actor
|
||||
)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = pptx_to_pdf_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "pptx-to-pdf", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Conversion started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Sign PDF — POST /api/v1/pdf-tools/sign
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/pdf-tools/sign", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def v1_sign_pdf_route():
|
||||
"""Sign a PDF with a signature image."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No PDF file provided."}), 400
|
||||
if "signature" not in request.files:
|
||||
return jsonify({"error": "No signature image provided."}), 400
|
||||
|
||||
pdf_file = request.files["file"]
|
||||
sig_file = request.files["signature"]
|
||||
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(pdf_file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
try:
|
||||
_, sig_ext = validate_actor_file(sig_file, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": f"Signature: {e.message}"}), e.code
|
||||
|
||||
try:
|
||||
page = max(1, int(request.form.get("page", 1))) - 1
|
||||
x = float(request.form.get("x", 100))
|
||||
y = float(request.form.get("y", 100))
|
||||
width = float(request.form.get("width", 200))
|
||||
height = float(request.form.get("height", 80))
|
||||
except (ValueError, TypeError):
|
||||
return jsonify({"error": "Invalid position parameters."}), 400
|
||||
|
||||
task_id = str(uuid.uuid4())
|
||||
upload_dir = os.path.join(current_app.config["UPLOAD_FOLDER"], task_id)
|
||||
os.makedirs(upload_dir, exist_ok=True)
|
||||
input_path = os.path.join(upload_dir, f"{uuid.uuid4()}.pdf")
|
||||
pdf_file.save(input_path)
|
||||
signature_path = os.path.join(upload_dir, f"{uuid.uuid4()}.{sig_ext}")
|
||||
sig_file.save(signature_path)
|
||||
|
||||
task = sign_pdf_task.delay(
|
||||
input_path, signature_path, task_id, original_filename,
|
||||
page, x, y, width, height,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "sign-pdf", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Signing started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Crop PDF — POST /api/v1/pdf-tools/crop
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/pdf-tools/crop", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def v1_crop_pdf_route():
|
||||
"""Crop margins from a PDF."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
try:
|
||||
margin_left = float(request.form.get("margin_left", 0))
|
||||
margin_right = float(request.form.get("margin_right", 0))
|
||||
margin_top = float(request.form.get("margin_top", 0))
|
||||
margin_bottom = float(request.form.get("margin_bottom", 0))
|
||||
except (ValueError, TypeError):
|
||||
return jsonify({"error": "Margin values must be numbers."}), 400
|
||||
|
||||
pages = request.form.get("pages", "all")
|
||||
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = crop_pdf_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
margin_left, margin_right, margin_top, margin_bottom, pages,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "crop-pdf", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Cropping started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Flatten PDF — POST /api/v1/pdf-tools/flatten
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/pdf-tools/flatten", methods=["POST"])
|
||||
@limiter.limit("10/minute")
|
||||
def v1_flatten_pdf_route():
|
||||
"""Flatten a PDF."""
|
||||
actor, err = _resolve_and_check()
|
||||
if err:
|
||||
return err
|
||||
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file provided."}), 400
|
||||
|
||||
file = request.files["file"]
|
||||
try:
|
||||
original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
|
||||
except FileValidationError as e:
|
||||
return jsonify({"error": e.message}), e.code
|
||||
|
||||
task_id, input_path = generate_safe_path(ext, folder_type="upload")
|
||||
file.save(input_path)
|
||||
task = flatten_pdf_task.delay(
|
||||
input_path, task_id, original_filename,
|
||||
**build_task_tracking_kwargs(actor),
|
||||
)
|
||||
record_accepted_usage(actor, "flatten-pdf", task.id)
|
||||
return jsonify({"task_id": task.id, "message": "Flattening started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Repair PDF — POST /api/v1/pdf-tools/repair
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/pdf-tools/repair", methods=["POST"])
@limiter.limit("10/minute")
def v1_repair_pdf_route():
    """Repair a damaged PDF.

    Accepts a single "file" upload, persists it, and enqueues the repair
    task; responds 202 with the task id.
    """
    actor, failure = _resolve_and_check()
    if failure:
        return failure

    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    pdf_file = request.files["file"]

    try:
        original_filename, ext = validate_actor_file(
            pdf_file, allowed_types=["pdf"], actor=actor
        )
    except FileValidationError as validation_error:
        return jsonify({"error": validation_error.message}), validation_error.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    pdf_file.save(input_path)

    task = repair_pdf_task.delay(
        input_path,
        task_id,
        original_filename,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "repair-pdf", task.id)
    return jsonify({"task_id": task.id, "message": "Repair started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Edit PDF Metadata — POST /api/v1/pdf-tools/metadata
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/pdf-tools/metadata", methods=["POST"])
@limiter.limit("10/minute")
def v1_edit_metadata_route():
    """Edit PDF metadata.

    Reads up to five optional metadata form fields (title, author, subject,
    keywords, creator); at least one must be provided. Responds 202 with
    the queued task id.
    """
    actor, err = _resolve_and_check()
    if err:
        return err

    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    file = request.files["file"]

    # Collect the optional metadata fields in a fixed order.
    field_names = ("title", "author", "subject", "keywords", "creator")
    fields = {name: request.form.get(name) for name in field_names}
    if not any(fields.values()):
        return jsonify({"error": "At least one metadata field required."}), 400

    try:
        original_filename, ext = validate_actor_file(file, allowed_types=["pdf"], actor=actor)
    except FileValidationError as e:
        return jsonify({"error": e.message}), e.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    file.save(input_path)
    task = edit_metadata_task.delay(
        input_path, task_id, original_filename,
        fields["title"], fields["author"], fields["subject"],
        fields["keywords"], fields["creator"],
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "edit-metadata", task.id)
    return jsonify({"task_id": task.id, "message": "Metadata editing started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Image Crop — POST /api/v1/image/crop
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/image/crop", methods=["POST"])
@limiter.limit("10/minute")
def v1_crop_image_route():
    """Crop an image.

    Expects integer form fields left/top/right/bottom describing the crop
    rectangle; responds 202 with the queued task id.
    """
    actor, error_response = _resolve_and_check()
    if error_response:
        return error_response

    upload = request.files.get("file")
    if upload is None:
        return jsonify({"error": "No file provided."}), 400

    try:
        left, top, right, bottom = (
            int(request.form.get(edge, 0))
            for edge in ("left", "top", "right", "bottom")
        )
    except (ValueError, TypeError):
        return jsonify({"error": "Crop dimensions must be integers."}), 400

    # A valid rectangle must have positive width and height.
    if right <= left or bottom <= top:
        return jsonify({"error": "Invalid crop area."}), 400

    try:
        original_filename, ext = validate_actor_file(
            upload, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
        )
    except FileValidationError as exc:
        return jsonify({"error": exc.message}), exc.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    upload.save(input_path)
    task = crop_image_task.delay(
        input_path, task_id, original_filename,
        left, top, right, bottom,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "image-crop", task.id)
    return jsonify({"task_id": task.id, "message": "Cropping started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Image Rotate/Flip — POST /api/v1/image/rotate-flip
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/image/rotate-flip", methods=["POST"])
@limiter.limit("10/minute")
def v1_rotate_flip_image_route():
    """Rotate and/or flip an image.

    Form fields:
        rotation: one of 0, 90, 180, 270 (degrees).
        flip_horizontal / flip_vertical: the string "true" to enable.

    Returns 202 with the Celery task id on acceptance.
    """
    actor, err = _resolve_and_check()
    if err:
        return err

    if "file" not in request.files:
        return jsonify({"error": "No file provided."}), 400
    file = request.files["file"]

    try:
        rotation = int(request.form.get("rotation", 0))
    except (ValueError, TypeError):
        # Previously a malformed rotation value was silently coerced to 0
        # (no rotation). Reject it instead, matching the crop endpoint's
        # handling of non-integer form values.
        return jsonify({"error": "Rotation must be 0, 90, 180, or 270."}), 400
    if rotation not in (0, 90, 180, 270):
        return jsonify({"error": "Rotation must be 0, 90, 180, or 270."}), 400

    # Checkbox-style booleans arrive as strings.
    flip_horizontal = request.form.get("flip_horizontal", "false").lower() == "true"
    flip_vertical = request.form.get("flip_vertical", "false").lower() == "true"

    try:
        original_filename, ext = validate_actor_file(
            file, allowed_types=ALLOWED_IMAGE_TYPES, actor=actor
        )
    except FileValidationError as e:
        return jsonify({"error": e.message}), e.code

    task_id, input_path = generate_safe_path(ext, folder_type="upload")
    file.save(input_path)
    task = rotate_flip_image_task.delay(
        input_path, task_id, original_filename,
        rotation, flip_horizontal, flip_vertical,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "image-rotate-flip", task.id)
    return jsonify({"task_id": task.id, "message": "Transformation started."}), 202
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Barcode — POST /api/v1/barcode/generate
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@v1_bp.route("/barcode/generate", methods=["POST"])
@limiter.limit("20/minute")
def v1_generate_barcode_route():
    """Generate a barcode.

    Accepts either a JSON body or form fields: "data" (required),
    "type" (default "code128"), "format" ("png"/"svg", default "png").
    Responds 202 with the queued task id.
    """
    actor, err = _resolve_and_check()
    if err:
        return err

    if request.is_json:
        # get_json() can yield None (empty/invalid body) or a non-dict
        # payload (e.g. a JSON array); either previously crashed on .get()
        # and surfaced as a 500. Treat both as an empty payload.
        body = request.get_json(silent=True)
        if not isinstance(body, dict):
            body = {}
        # Coerce to str so JSON numbers/booleans don't break .strip()/.lower().
        data = str(body.get("data", "")).strip()
        barcode_type = str(body.get("type", "code128")).lower()
        output_format = str(body.get("format", "png")).lower()
    else:
        data = request.form.get("data", "").strip()
        barcode_type = request.form.get("type", "code128").lower()
        output_format = request.form.get("format", "png").lower()

    if not data:
        return jsonify({"error": "Barcode data is required."}), 400

    if barcode_type not in SUPPORTED_BARCODE_TYPES:
        return jsonify({"error": f"Unsupported type. Supported: {', '.join(SUPPORTED_BARCODE_TYPES)}"}), 400

    # Unknown output formats fall back to PNG rather than erroring.
    if output_format not in ("png", "svg"):
        output_format = "png"

    task_id = str(uuid.uuid4())
    task = generate_barcode_task.delay(
        data, barcode_type, task_id, output_format,
        **build_task_tracking_kwargs(actor),
    )
    record_accepted_usage(actor, "barcode", task.id)
    return jsonify({"task_id": task.id, "message": "Barcode generation started."}), 202
|
||||
|
||||
@@ -13,6 +13,7 @@ from werkzeug.security import check_password_hash, generate_password_hash
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Closed vocabularies accepted by the plan/role normalizers in this module.
VALID_PLANS = {"free", "pro"}
VALID_ROLES = {"user", "admin"}
|
||||
|
||||
|
||||
def _utc_now() -> str:
|
||||
@@ -30,6 +31,38 @@ def normalize_plan(plan: str | None) -> str:
|
||||
return "pro" if plan == "pro" else "free"
|
||||
|
||||
|
||||
def normalize_role(role: str | None) -> str:
|
||||
"""Normalize role values to the supported set."""
|
||||
return "admin" if role == "admin" else "user"
|
||||
|
||||
|
||||
def _get_allowlisted_admin_emails() -> set[str]:
    """Return the configured INTERNAL_ADMIN_EMAILS as a normalized set.

    Entries are stripped and lowercased; blank entries are dropped.
    """
    emails: set[str] = set()
    for entry in current_app.config.get("INTERNAL_ADMIN_EMAILS", ()):
        cleaned = str(entry).strip()
        if cleaned:
            emails.add(cleaned.lower())
    return emails
|
||||
|
||||
|
||||
def is_allowlisted_admin_email(email: str | None) -> bool:
    """Return whether an email is bootstrapped as admin from configuration."""
    # Normalize the same way stored emails are normalized before comparing.
    return _normalize_email(email or "") in _get_allowlisted_admin_emails()
|
||||
|
||||
|
||||
def _resolve_row_role(row: sqlite3.Row | None) -> str:
    """Derive the effective role for a user row.

    A user is admin when either the stored role column says so or the row's
    email appears in the INTERNAL_ADMIN_EMAILS allowlist. Missing rows and
    missing columns resolve to "user".
    """
    if row is None:
        return "user"

    columns = row.keys()
    if "role" in columns and normalize_role(row["role"]) == "admin":
        return "admin"
    if "email" in columns:
        # Allowlist entries are stored lowercased/stripped; match likewise.
        if str(row["email"]).strip().lower() in _get_allowlisted_admin_emails():
            return "admin"
    return "user"
|
||||
|
||||
|
||||
def _connect() -> sqlite3.Connection:
|
||||
"""Create a SQLite connection with row access by column name."""
|
||||
db_path = current_app.config["DATABASE_PATH"]
|
||||
@@ -58,6 +91,8 @@ def _serialize_user(row: sqlite3.Row | None) -> dict | None:
|
||||
"id": row["id"],
|
||||
"email": row["email"],
|
||||
"plan": normalize_plan(row["plan"]),
|
||||
"role": _resolve_row_role(row),
|
||||
"is_allowlisted_admin": is_allowlisted_admin_email(row["email"]),
|
||||
"created_at": row["created_at"],
|
||||
}
|
||||
|
||||
@@ -94,6 +129,7 @@ def init_account_db():
|
||||
email TEXT NOT NULL UNIQUE,
|
||||
password_hash TEXT NOT NULL,
|
||||
plan TEXT NOT NULL DEFAULT 'free',
|
||||
role TEXT NOT NULL DEFAULT 'user',
|
||||
created_at TEXT NOT NULL,
|
||||
updated_at TEXT NOT NULL
|
||||
);
|
||||
@@ -159,6 +195,10 @@ def init_account_db():
|
||||
conn.execute(
|
||||
"ALTER TABLE users ADD COLUMN updated_at TEXT NOT NULL DEFAULT ''"
|
||||
)
|
||||
if not _column_exists(conn, "users", "role"):
|
||||
conn.execute(
|
||||
"ALTER TABLE users ADD COLUMN role TEXT NOT NULL DEFAULT 'user'"
|
||||
)
|
||||
|
||||
# Password reset tokens
|
||||
conn.executescript(
|
||||
@@ -194,19 +234,20 @@ def create_user(email: str, password: str) -> dict:
|
||||
"""Create a new user and return the public record."""
|
||||
email = _normalize_email(email)
|
||||
now = _utc_now()
|
||||
role = "admin" if email in _get_allowlisted_admin_emails() else "user"
|
||||
|
||||
try:
|
||||
with _connect() as conn:
|
||||
cursor = conn.execute(
|
||||
"""
|
||||
INSERT INTO users (email, password_hash, plan, created_at, updated_at)
|
||||
VALUES (?, ?, 'free', ?, ?)
|
||||
INSERT INTO users (email, password_hash, plan, role, created_at, updated_at)
|
||||
VALUES (?, ?, 'free', ?, ?, ?)
|
||||
""",
|
||||
(email, generate_password_hash(password), now, now),
|
||||
(email, generate_password_hash(password), role, now, now),
|
||||
)
|
||||
user_id = cursor.lastrowid
|
||||
row = conn.execute(
|
||||
"SELECT id, email, plan, created_at FROM users WHERE id = ?",
|
||||
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
|
||||
(user_id,),
|
||||
).fetchone()
|
||||
except sqlite3.IntegrityError as exc:
|
||||
@@ -235,7 +276,44 @@ def get_user_by_id(user_id: int) -> dict | None:
|
||||
"""Fetch a public user record by id."""
|
||||
with _connect() as conn:
|
||||
row = conn.execute(
|
||||
"SELECT id, email, plan, created_at FROM users WHERE id = ?",
|
||||
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
|
||||
(user_id,),
|
||||
).fetchone()
|
||||
|
||||
return _serialize_user(row)
|
||||
|
||||
|
||||
def is_user_admin(user_id: int | None) -> bool:
    """Return whether the given user has internal admin access."""
    if user_id is None:
        return False

    with _connect() as connection:
        user_row = connection.execute(
            "SELECT id, email, role FROM users WHERE id = ?",
            (user_id,),
        ).fetchone()

    # Effective role combines the stored column with the email allowlist.
    return _resolve_row_role(user_row) == "admin"
|
||||
|
||||
|
||||
def set_user_role(user_id: int, role: str) -> dict | None:
|
||||
"""Update one user role and return the public user record."""
|
||||
normalized_role = normalize_role(role)
|
||||
if normalized_role not in VALID_ROLES:
|
||||
raise ValueError("Invalid role.")
|
||||
|
||||
with _connect() as conn:
|
||||
conn.execute(
|
||||
"""
|
||||
UPDATE users
|
||||
SET role = ?, updated_at = ?
|
||||
WHERE id = ?
|
||||
""",
|
||||
(normalized_role, _utc_now(), user_id),
|
||||
)
|
||||
row = conn.execute(
|
||||
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
|
||||
(user_id,),
|
||||
).fetchone()
|
||||
|
||||
@@ -258,7 +336,7 @@ def update_user_plan(user_id: int, plan: str) -> dict | None:
|
||||
(normalized_plan, _utc_now(), user_id),
|
||||
)
|
||||
row = conn.execute(
|
||||
"SELECT id, email, plan, created_at FROM users WHERE id = ?",
|
||||
"SELECT id, email, plan, role, created_at FROM users WHERE id = ?",
|
||||
(user_id,),
|
||||
).fetchone()
|
||||
|
||||
@@ -476,6 +554,60 @@ def list_file_history(user_id: int, limit: int = 50) -> list[dict]:
|
||||
]
|
||||
|
||||
|
||||
def get_public_history_summary(limit_tools: int = 5) -> dict:
    """Return aggregate public-friendly processing stats derived from history.

    Args:
        limit_tools: Maximum number of entries in the returned "top_tools" list.

    Returns:
        dict with total/completed/failed counts, success_rate (percent, one
        decimal place), files_last_24h, and the most-used completed tools.
    """
    # ISO-8601 timestamp for 24 hours ago; created_at is stored as an ISO
    # string, so a lexicographic >= comparison works for the recency filter.
    cutoff_24h = (datetime.now(timezone.utc) - timedelta(days=1)).isoformat()

    with _connect() as conn:
        # Overall totals plus per-status breakdown in a single table scan.
        totals_row = conn.execute(
            """
            SELECT
                COUNT(*) AS total,
                COALESCE(SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END), 0) AS completed,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed
            FROM file_history
            """
        ).fetchone()

        # Activity within the trailing 24 hours.
        recent_row = conn.execute(
            """
            SELECT COUNT(*) AS total
            FROM file_history
            WHERE created_at >= ?
            """,
            (cutoff_24h,),
        ).fetchone()

        # Most-used tools, counting completed runs only; ties broken by name.
        top_rows = conn.execute(
            """
            SELECT tool, COUNT(*) AS count
            FROM file_history
            WHERE status = 'completed'
            GROUP BY tool
            ORDER BY count DESC, tool ASC
            LIMIT ?
            """,
            (limit_tools,),
        ).fetchall()

    total = int(totals_row["total"]) if totals_row else 0
    completed = int(totals_row["completed"]) if totals_row else 0
    failed = int(totals_row["failed"]) if totals_row else 0
    # Guard against division by zero on an empty history table.
    success_rate = round((completed / total) * 100, 1) if total else 0.0

    return {
        "total_files_processed": total,
        "completed_files": completed,
        "failed_files": failed,
        "success_rate": success_rate,
        "files_last_24h": int(recent_row["total"]) if recent_row else 0,
        "top_tools": [
            {"tool": row["tool"], "count": int(row["count"])}
            for row in top_rows
        ],
    }
||||
|
||||
|
||||
def record_usage_event(
|
||||
user_id: int | None,
|
||||
source: str,
|
||||
@@ -555,7 +687,7 @@ def get_user_by_email(email: str) -> dict | None:
|
||||
email = _normalize_email(email)
|
||||
with _connect() as conn:
|
||||
row = conn.execute(
|
||||
"SELECT id, email, plan, created_at FROM users WHERE email = ?",
|
||||
"SELECT id, email, plan, role, created_at FROM users WHERE email = ?",
|
||||
(email,),
|
||||
).fetchone()
|
||||
return _serialize_user(row)
|
||||
|
||||
288
backend/app/services/admin_service.py
Normal file
288
backend/app/services/admin_service.py
Normal file
@@ -0,0 +1,288 @@
|
||||
"""Internal admin aggregation helpers for operational dashboards."""
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from app.services.account_service import is_allowlisted_admin_email, normalize_role
|
||||
from app.services.ai_cost_service import get_monthly_spend
|
||||
from app.services.contact_service import mark_read
|
||||
from app.services.rating_service import get_global_rating_summary
|
||||
|
||||
|
||||
def _connect() -> sqlite3.Connection:
    """Open a SQLite connection to the app database with name-addressable rows."""
    db_path = current_app.config["DATABASE_PATH"]
    parent_dir = os.path.dirname(db_path)
    # Ensure the containing directory exists before SQLite creates the file.
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    return conn
|
||||
|
||||
|
||||
def _parse_metadata(raw_value: str | None) -> dict:
|
||||
if not raw_value:
|
||||
return {}
|
||||
try:
|
||||
parsed = json.loads(raw_value)
|
||||
except json.JSONDecodeError:
|
||||
return {}
|
||||
return parsed if isinstance(parsed, dict) else {}
|
||||
|
||||
|
||||
def get_admin_overview(limit_recent: int = 8, top_tools_limit: int = 6) -> dict:
    """Aggregate the key operational metrics for the internal admin dashboard.

    Args:
        limit_recent: Row cap for each of the "recent" lists (failures,
            users, contact messages).
        top_tools_limit: Row cap for the per-tool usage ranking.

    Returns:
        dict with "users", "processing", "ratings", "ai_cost", "contacts",
        "top_tools", "recent_failures", and "recent_users" sections.
    """
    # ISO timestamp 24h ago; created_at is stored as an ISO string, so a
    # lexicographic >= comparison works for the recency filter.
    cutoff_24h = (datetime.now(timezone.utc) - timedelta(days=1)).isoformat()
    ai_cost_summary = get_monthly_spend()

    with _connect() as conn:
        # Plan breakdown of the user base.
        users_row = conn.execute(
            """
            SELECT
                COUNT(*) AS total_users,
                COALESCE(SUM(CASE WHEN plan = 'pro' THEN 1 ELSE 0 END), 0) AS pro_users,
                COALESCE(SUM(CASE WHEN plan = 'free' THEN 1 ELSE 0 END), 0) AS free_users
            FROM users
            """
        ).fetchone()

        # Processing totals, per-status breakdown, and 24h activity in one scan.
        history_row = conn.execute(
            """
            SELECT
                COUNT(*) AS total_files_processed,
                COALESCE(SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END), 0) AS completed_files,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed_files,
                COALESCE(SUM(CASE WHEN created_at >= ? THEN 1 ELSE 0 END), 0) AS files_last_24h
            FROM file_history
            """,
            (cutoff_24h,),
        ).fetchone()

        # Tools ranked by total runs (all statuses), with failure counts.
        top_tools_rows = conn.execute(
            """
            SELECT
                tool,
                COUNT(*) AS total_runs,
                COALESCE(SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END), 0) AS failed_runs
            FROM file_history
            GROUP BY tool
            ORDER BY total_runs DESC, tool ASC
            LIMIT ?
            """,
            (top_tools_limit,),
        ).fetchall()

        # Latest failed tasks; LEFT JOIN keeps anonymous tasks (email NULL).
        failure_rows = conn.execute(
            """
            SELECT
                file_history.id,
                file_history.user_id,
                file_history.tool,
                file_history.original_filename,
                file_history.metadata_json,
                file_history.created_at,
                users.email
            FROM file_history
            LEFT JOIN users ON users.id = file_history.user_id
            WHERE file_history.status = 'failed'
            ORDER BY file_history.created_at DESC
            LIMIT ?
            """,
            (limit_recent,),
        ).fetchall()

        # Newest signups with per-user activity counters via correlated
        # subqueries; api_keys counts non-revoked keys only.
        recent_user_rows = conn.execute(
            """
            SELECT
                users.id,
                users.email,
                users.plan,
                users.created_at,
                COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id), 0) AS total_tasks,
                COALESCE((SELECT COUNT(*) FROM api_keys WHERE api_keys.user_id = users.id AND api_keys.revoked_at IS NULL), 0) AS active_api_keys
            FROM users
            ORDER BY users.created_at DESC
            LIMIT ?
            """,
            (limit_recent,),
        ).fetchall()

        # Contact inbox totals; is_read is a 0/1 integer column.
        contact_row = conn.execute(
            """
            SELECT
                COUNT(*) AS total_messages,
                COALESCE(SUM(CASE WHEN is_read = 0 THEN 1 ELSE 0 END), 0) AS unread_messages
            FROM contact_messages
            """
        ).fetchone()

        recent_contact_rows = conn.execute(
            """
            SELECT id, name, email, category, subject, message, created_at, is_read
            FROM contact_messages
            ORDER BY created_at DESC
            LIMIT ?
            """,
            (limit_recent,),
        ).fetchall()

    total_processed = int(history_row["total_files_processed"]) if history_row else 0
    completed_files = int(history_row["completed_files"]) if history_row else 0
    # Guard against division by zero when the history table is empty.
    success_rate = round((completed_files / total_processed) * 100, 1) if total_processed else 0.0

    return {
        "users": {
            "total": int(users_row["total_users"]) if users_row else 0,
            "pro": int(users_row["pro_users"]) if users_row else 0,
            "free": int(users_row["free_users"]) if users_row else 0,
        },
        "processing": {
            "total_files_processed": total_processed,
            "completed_files": completed_files,
            "failed_files": int(history_row["failed_files"]) if history_row else 0,
            "files_last_24h": int(history_row["files_last_24h"]) if history_row else 0,
            "success_rate": success_rate,
        },
        "ratings": get_global_rating_summary(),
        "ai_cost": {
            "month": ai_cost_summary["period"],
            "total_usd": ai_cost_summary["total_cost_usd"],
            "budget_usd": ai_cost_summary["budget_usd"],
            "percent_used": ai_cost_summary["budget_used_percent"],
        },
        "contacts": {
            "total_messages": int(contact_row["total_messages"]) if contact_row else 0,
            "unread_messages": int(contact_row["unread_messages"]) if contact_row else 0,
            "recent": [
                {
                    "id": row["id"],
                    "name": row["name"],
                    "email": row["email"],
                    "category": row["category"],
                    "subject": row["subject"],
                    "message": row["message"],
                    "created_at": row["created_at"],
                    "is_read": bool(row["is_read"]),
                }
                for row in recent_contact_rows
            ],
        },
        "top_tools": [
            {
                "tool": row["tool"],
                "total_runs": int(row["total_runs"]),
                "failed_runs": int(row["failed_runs"]),
            }
            for row in top_tools_rows
        ],
        "recent_failures": [
            {
                "id": row["id"],
                "user_id": row["user_id"],
                "email": row["email"],
                "tool": row["tool"],
                "original_filename": row["original_filename"],
                "created_at": row["created_at"],
                "metadata": _parse_metadata(row["metadata_json"]),
            }
            for row in failure_rows
        ],
        "recent_users": [
            {
                "id": row["id"],
                "email": row["email"],
                "plan": row["plan"],
                "created_at": row["created_at"],
                "total_tasks": int(row["total_tasks"]),
                "active_api_keys": int(row["active_api_keys"]),
            }
            for row in recent_user_rows
        ],
    }
|
||||
|
||||
|
||||
def list_admin_users(limit: int = 25, query: str = "") -> list[dict]:
    """List users with per-user activity stats for the admin dashboard.

    Args:
        limit: Maximum number of users returned (newest first).
        query: Optional case-insensitive substring filter on email.

    Returns:
        List of user dicts including task counters, active API key count,
        the effective role, and whether the email is allowlisted as admin.
    """
    normalized_query = query.strip().lower()
    # Base query; correlated subqueries gather per-user activity counters.
    sql = """
        SELECT
            users.id,
            users.email,
            users.plan,
            users.role,
            users.created_at,
            COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id), 0) AS total_tasks,
            COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id AND file_history.status = 'completed'), 0) AS completed_tasks,
            COALESCE((SELECT COUNT(*) FROM file_history WHERE file_history.user_id = users.id AND file_history.status = 'failed'), 0) AS failed_tasks,
            COALESCE((SELECT COUNT(*) FROM api_keys WHERE api_keys.user_id = users.id AND api_keys.revoked_at IS NULL), 0) AS active_api_keys
        FROM users
        """
    params: list[object] = []
    if normalized_query:
        # Only static SQL text is appended; the search term is parameterized.
        sql += " WHERE LOWER(users.email) LIKE ?"
        params.append(f"%{normalized_query}%")
    sql += " ORDER BY users.created_at DESC LIMIT ?"
    params.append(limit)

    with _connect() as conn:
        rows = conn.execute(sql, tuple(params)).fetchall()

    return [
        {
            "id": row["id"],
            "email": row["email"],
            "plan": row["plan"],
            # Allowlisted emails are always reported as admin, regardless of
            # the stored role column.
            "role": "admin" if is_allowlisted_admin_email(row["email"]) else normalize_role(row["role"]),
            "is_allowlisted_admin": is_allowlisted_admin_email(row["email"]),
            "created_at": row["created_at"],
            "total_tasks": int(row["total_tasks"]),
            "completed_tasks": int(row["completed_tasks"]),
            "failed_tasks": int(row["failed_tasks"]),
            "active_api_keys": int(row["active_api_keys"]),
        }
        for row in rows
    ]
|
||||
|
||||
|
||||
def list_admin_contacts(page: int = 1, per_page: int = 20) -> dict:
    """Return one page of contact messages plus total/unread counts.

    Args:
        page: 1-based page number; values below 1 are clamped to 1.
        per_page: Page size, clamped to the 1..100 range.
    """
    safe_page = max(1, page)
    safe_per_page = max(1, min(per_page, 100))
    offset = (safe_page - 1) * safe_per_page

    with _connect() as conn:
        # A single scan yields both the grand total and the unread count.
        total_row = conn.execute(
            "SELECT COUNT(*) AS total, COALESCE(SUM(CASE WHEN is_read = 0 THEN 1 ELSE 0 END), 0) AS unread FROM contact_messages"
        ).fetchone()
        rows = conn.execute(
            """
            SELECT id, name, email, category, subject, message, created_at, is_read
            FROM contact_messages
            ORDER BY created_at DESC
            LIMIT ? OFFSET ?
            """,
            (safe_per_page, offset),
        ).fetchall()

    return {
        "items": [
            {
                "id": row["id"],
                "name": row["name"],
                "email": row["email"],
                "category": row["category"],
                "subject": row["subject"],
                "message": row["message"],
                "created_at": row["created_at"],
                # is_read is stored as a 0/1 integer column.
                "is_read": bool(row["is_read"]),
            }
            for row in rows
        ],
        "page": safe_page,
        "per_page": safe_per_page,
        "total": int(total_row["total"]) if total_row else 0,
        "unread": int(total_row["unread"]) if total_row else 0,
    }
|
||||
|
||||
|
||||
def mark_admin_contact_read(message_id: int) -> bool:
    """Mark a contact message as read (thin wrapper over contact_service).

    Returns:
        True when a matching message row was updated, False otherwise.
    """
    return mark_read(message_id)
|
||||
106
backend/app/services/barcode_service.py
Normal file
106
backend/app/services/barcode_service.py
Normal file
@@ -0,0 +1,106 @@
|
||||
"""Barcode generation service."""
|
||||
import os
|
||||
import io
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BarcodeGenerationError(Exception):
    """Raised when barcode validation or image generation fails."""
|
||||
|
||||
|
||||
# Barcode symbologies accepted by generate_barcode(). Each entry is passed
# through that function's type map 1:1 as a python-barcode lookup name.
SUPPORTED_BARCODE_TYPES = [
    "code128",
    "code39",
    "ean13",
    "ean8",
    "upca",
    "isbn13",
    "isbn10",
    "issn",
    "pzn",
]
|
||||
|
||||
|
||||
def generate_barcode(
    data: str,
    barcode_type: str = "code128",
    output_path: str = "",
    output_format: str = "png",
) -> dict:
    """Generate a barcode image.

    Args:
        data: The data to encode (non-empty, max 200 characters).
        barcode_type: Type of barcode (code128, code39, ean13, etc.).
        output_path: Path for the output image. python-barcode appends the
            extension itself, so any extension supplied here is stripped.
        output_format: "png" or "svg".

    Returns:
        dict with barcode_type, data, output_size, and output_path.

    Raises:
        BarcodeGenerationError: If validation or generation fails.
    """
    barcode_type = barcode_type.lower()
    if barcode_type not in SUPPORTED_BARCODE_TYPES:
        raise BarcodeGenerationError(
            f"Unsupported barcode type: {barcode_type}. "
            f"Supported: {', '.join(SUPPORTED_BARCODE_TYPES)}"
        )

    if not data or not data.strip():
        raise BarcodeGenerationError("Barcode data cannot be empty.")

    if len(data) > 200:
        raise BarcodeGenerationError("Barcode data is too long (max 200 characters).")

    try:
        import barcode
        from barcode.writer import ImageWriter

        # Guard the makedirs call: os.makedirs("") raises FileNotFoundError
        # when output_path is a bare filename with no directory component.
        out_dir = os.path.dirname(output_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)

        # splitext strips only a real extension on the final path component;
        # the previous rsplit(".") truncated at a dot anywhere in the path
        # (e.g. inside a directory name). barcode.save() appends the proper
        # extension automatically.
        output_base, _ = os.path.splitext(output_path)

        # The supported names match python-barcode's registry keys directly.
        if output_format == "svg":
            # The default writer emits SVG.
            bc = barcode.get(barcode_type, data)
        else:
            bc = barcode.get(barcode_type, data, writer=ImageWriter())
        final_path = bc.save(output_base)

        if not os.path.exists(final_path):
            raise BarcodeGenerationError("Barcode file was not created.")

        output_size = os.path.getsize(final_path)
        logger.info(
            "Barcode generated: type=%s, data=%s... (%d bytes)",
            barcode_type, data[:20], output_size,
        )

        return {
            "barcode_type": barcode_type,
            "data": data,
            "output_size": output_size,
            "output_path": final_path,
        }

    except BarcodeGenerationError:
        raise
    except Exception as e:
        # Chain the original exception so the root cause isn't lost.
        raise BarcodeGenerationError(f"Barcode generation failed: {str(e)}") from e
|
||||
119
backend/app/services/contact_service.py
Normal file
119
backend/app/services/contact_service.py
Normal file
@@ -0,0 +1,119 @@
|
||||
"""Contact form service — stores messages and sends notification emails."""
|
||||
import logging
|
||||
import os
|
||||
import sqlite3
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from app.services.email_service import send_email
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Contact-form categories accepted by save_message(); anything else
# falls back to "general".
VALID_CATEGORIES = {"general", "bug", "feature"}
|
||||
|
||||
|
||||
def _connect() -> sqlite3.Connection:
    """Open a SQLite connection with rows addressable by column name."""
    db_path = current_app.config["DATABASE_PATH"]
    parent = os.path.dirname(db_path)
    # Make sure the containing directory exists before SQLite touches the file.
    if parent:
        os.makedirs(parent, exist_ok=True)
    connection = sqlite3.connect(db_path)
    connection.row_factory = sqlite3.Row
    return connection
|
||||
|
||||
|
||||
def init_contact_db() -> None:
    """Create the contact_messages table if it doesn't exist."""
    connection = _connect()
    try:
        connection.execute("""
            CREATE TABLE IF NOT EXISTS contact_messages (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT NOT NULL,
                email TEXT NOT NULL,
                category TEXT NOT NULL DEFAULT 'general',
                subject TEXT NOT NULL,
                message TEXT NOT NULL,
                created_at TEXT NOT NULL,
                is_read INTEGER NOT NULL DEFAULT 0
            )
        """)
        connection.commit()
    finally:
        # Always release the connection, even if the DDL fails.
        connection.close()
|
||||
|
||||
|
||||
def save_message(name: str, email: str, category: str, subject: str, message: str) -> dict:
    """Persist a contact message and send a notification email.

    The raw fields are stored as-is; user-supplied values are HTML-escaped
    before being embedded in the notification email body, since the contact
    form is public-facing and its input is untrusted.

    Returns:
        dict with the new message id and its created_at timestamp.
    """
    import html  # local: only needed for the notification body below

    if category not in VALID_CATEGORIES:
        category = "general"

    now = datetime.now(timezone.utc).isoformat()
    conn = _connect()
    try:
        cursor = conn.execute(
            """INSERT INTO contact_messages (name, email, category, subject, message, created_at)
               VALUES (?, ?, ?, ?, ?, ?)""",
            (name, email, category, subject, message, now),
        )
        conn.commit()
        msg_id = cursor.lastrowid
    finally:
        conn.close()

    # Send notification email to admin. Escape untrusted form input so it
    # cannot inject markup into the HTML body; previously raw values (and a
    # literal <{email}> pseudo-tag) were interpolated directly.
    # NOTE(review): subject also flows into the email Subject header —
    # confirm send_email strips newlines to prevent header injection.
    safe_name = html.escape(name)
    safe_email = html.escape(email)
    safe_subject = html.escape(subject)
    safe_message = html.escape(message)
    admin_email = current_app.config.get("SMTP_FROM", "noreply@saas-pdf.com")
    try:
        send_email(
            to=admin_email,
            subject=f"[SaaS-PDF Contact] [{category}] {subject}",
            html_body=f"""
            <h2>New Contact Message</h2>
            <p><strong>From:</strong> {safe_name} &lt;{safe_email}&gt;</p>
            <p><strong>Category:</strong> {category}</p>
            <p><strong>Subject:</strong> {safe_subject}</p>
            <hr />
            <p>{safe_message}</p>
            """,
        )
    except Exception:
        # Best-effort: a failed notification must not fail the submission.
        logger.exception("Failed to send contact notification email")

    return {"id": msg_id, "created_at": now}
|
||||
|
||||
|
||||
def get_messages(page: int = 1, per_page: int = 20) -> dict:
    """Return one page of contact messages, newest first (admin use)."""
    offset = (page - 1) * per_page
    conn = _connect()
    try:
        total = conn.execute("SELECT COUNT(*) FROM contact_messages").fetchone()[0]
        cursor = conn.execute(
            "SELECT * FROM contact_messages ORDER BY created_at DESC LIMIT ? OFFSET ?",
            (per_page, offset),
        )
        messages = [dict(row) for row in cursor.fetchall()]
    finally:
        conn.close()

    return {
        "messages": messages,
        "total": total,
        "page": page,
        "per_page": per_page,
    }
|
||||
|
||||
|
||||
def mark_read(message_id: int) -> bool:
    """Mark a contact message as read; return True if a row was updated."""
    conn = _connect()
    try:
        updated = conn.execute(
            "UPDATE contact_messages SET is_read = 1 WHERE id = ?",
            (message_id,),
        ).rowcount
        conn.commit()
        return updated > 0
    finally:
        conn.close()
|
||||
176
backend/app/services/image_extra_service.py
Normal file
176
backend/app/services/image_extra_service.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""Image extra tools — Crop, Rotate/Flip."""
|
||||
import os
|
||||
import logging
|
||||
|
||||
from PIL import Image
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ImageExtraError(Exception):
    """Custom exception for image extra tool failures (crop, rotate/flip)."""
    pass
|
||||
|
||||
|
||||
# Maps lowercase file extensions to Pillow save() format names.
# Unknown extensions fall back to "PNG" at the call sites.
FORMAT_MAP = {
    "jpg": "JPEG",
    "jpeg": "JPEG",
    "png": "PNG",
    "webp": "WEBP",
}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Image Crop
|
||||
# ---------------------------------------------------------------------------
|
||||
def crop_image(
    input_path: str,
    output_path: str,
    left: int,
    top: int,
    right: int,
    bottom: int,
    quality: int = 85,
) -> dict:
    """Crop an image to a specified rectangle.

    Args:
        input_path: Path to the input image
        output_path: Path for the cropped output
        left: Left edge in pixels
        top: Top edge in pixels
        right: Right edge in pixels
        bottom: Bottom edge in pixels
        quality: Output quality for lossy formats (JPEG/WEBP)

    Returns:
        dict with original and cropped dimensions

    Raises:
        ImageExtraError: If the crop area is invalid or the operation fails
    """
    try:
        os.makedirs(os.path.dirname(output_path), exist_ok=True)

        with Image.open(input_path) as img:
            orig_w, orig_h = img.size

            # Validate the crop rectangle before touching pixel data.
            if left < 0 or top < 0 or right > orig_w or bottom > orig_h:
                raise ImageExtraError(
                    f"Crop area ({left},{top},{right},{bottom}) outside image bounds ({orig_w}x{orig_h})."
                )
            if left >= right or top >= bottom:
                raise ImageExtraError("Invalid crop area: left must be < right, top must be < bottom.")

            cropped = img.crop((left, top, right, bottom))

            # Pick the output format from the extension; default to PNG.
            ext = os.path.splitext(output_path)[1].lower().strip(".")
            pil_format = FORMAT_MAP.get(ext, "PNG")

            save_kwargs = {"optimize": True}
            if pil_format in ("JPEG", "WEBP"):
                save_kwargs["quality"] = quality
                # Lossy targets can't keep alpha — composite onto white first.
                if cropped.mode in ("RGBA", "P", "LA"):
                    bg = Image.new("RGB", cropped.size, (255, 255, 255))
                    if cropped.mode == "P":
                        cropped = cropped.convert("RGBA")
                    bg.paste(cropped, mask=cropped.split()[-1] if "A" in cropped.mode else None)
                    cropped = bg

            cropped.save(output_path, format=pil_format, **save_kwargs)

        new_w = right - left
        new_h = bottom - top
        logger.info(f"Image crop: {orig_w}x{orig_h} → {new_w}x{new_h}")
        return {
            "original_width": orig_w,
            "original_height": orig_h,
            "cropped_width": new_w,
            "cropped_height": new_h,
        }

    except ImageExtraError:
        raise
    except (IOError, OSError, Image.DecompressionBombError) as e:
        # Chain the PIL error so the root cause isn't lost from tracebacks.
        raise ImageExtraError(f"Image crop failed: {str(e)}") from e
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Image Rotate / Flip
|
||||
# ---------------------------------------------------------------------------
|
||||
def rotate_flip_image(
    input_path: str,
    output_path: str,
    rotation: int = 0,
    flip_horizontal: bool = False,
    flip_vertical: bool = False,
    quality: int = 85,
) -> dict:
    """Rotate and/or flip an image.

    Args:
        input_path: Path to the input image
        output_path: Path for the output image
        rotation: Clockwise rotation angle (0, 90, 180, 270)
        flip_horizontal: Mirror horizontally
        flip_vertical: Mirror vertically
        quality: Output quality for lossy formats

    Returns:
        dict with original and new dimensions plus the applied transforms

    Raises:
        ImageExtraError: If the rotation angle is invalid or the operation fails
    """
    if rotation not in (0, 90, 180, 270):
        raise ImageExtraError("Rotation must be 0, 90, 180, or 270 degrees.")

    try:
        os.makedirs(os.path.dirname(output_path), exist_ok=True)

        with Image.open(input_path) as img:
            orig_w, orig_h = img.size
            transformed = img

            if rotation:
                # PIL rotates counter-clockwise, so negate for clockwise
                transformed = transformed.rotate(-rotation, expand=True)
            if flip_horizontal:
                transformed = transformed.transpose(Image.Transpose.FLIP_LEFT_RIGHT)
            if flip_vertical:
                transformed = transformed.transpose(Image.Transpose.FLIP_TOP_BOTTOM)

            new_w, new_h = transformed.size

            # Output format follows the extension; PNG is the fallback.
            ext = os.path.splitext(output_path)[1].lower().strip(".")
            pil_format = FORMAT_MAP.get(ext, "PNG")

            save_kwargs = {"optimize": True}
            if pil_format in ("JPEG", "WEBP"):
                save_kwargs["quality"] = quality
                # Flatten alpha onto a white background for lossy formats.
                if transformed.mode in ("RGBA", "P", "LA"):
                    bg = Image.new("RGB", transformed.size, (255, 255, 255))
                    if transformed.mode == "P":
                        transformed = transformed.convert("RGBA")
                    mask = transformed.split()[-1] if "A" in transformed.mode else None
                    bg.paste(transformed, mask=mask)
                    transformed = bg

            transformed.save(output_path, format=pil_format, **save_kwargs)

        logger.info(f"Image rotate/flip: {orig_w}x{orig_h} → {new_w}x{new_h}, rot={rotation}")
        return {
            "original_width": orig_w,
            "original_height": orig_h,
            "new_width": new_w,
            "new_height": new_h,
            "rotation": rotation,
            "flipped_horizontal": flip_horizontal,
            "flipped_vertical": flip_vertical,
        }

    except ImageExtraError:
        raise
    except (IOError, OSError, Image.DecompressionBombError) as e:
        raise ImageExtraError(f"Image rotate/flip failed: {str(e)}")
|
||||
278
backend/app/services/pdf_convert_service.py
Normal file
278
backend/app/services/pdf_convert_service.py
Normal file
@@ -0,0 +1,278 @@
|
||||
"""PDF conversion service — PDF↔PowerPoint, Excel→PDF, PowerPoint→PDF, Sign PDF."""
|
||||
import os
|
||||
import io
|
||||
import logging
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PDFConvertError(Exception):
    """Custom exception for PDF conversion failures (PPTX/Excel/sign tools)."""
    pass
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PDF to PowerPoint (PPTX)
|
||||
# ---------------------------------------------------------------------------
|
||||
def pdf_to_pptx(input_path: str, output_path: str) -> dict:
    """Convert a PDF to PowerPoint by rendering each page as a slide image.

    Each PDF page is rasterized at 200 DPI via pdf2image (poppler) and
    centered on a blank widescreen 16:9 slide, scaled to fit while
    preserving aspect ratio. The resulting deck is image-only (no text).

    Args:
        input_path: Path to the input PDF
        output_path: Path for the output PPTX

    Returns:
        dict with total_slides and output_size

    Raises:
        PDFConvertError: If conversion fails
    """
    try:
        from pdf2image import convert_from_path
        from pptx import Presentation
        from pptx.util import Inches, Emu

        images = convert_from_path(input_path, dpi=200)
        if not images:
            raise PDFConvertError("PDF has no pages or could not be rendered.")

        prs = Presentation()
        # Use widescreen 16:9 layout
        prs.slide_width = Inches(13.333)
        prs.slide_height = Inches(7.5)

        for img in images:
            slide = prs.slides.add_slide(prs.slide_layouts[6])  # blank layout
            img_stream = io.BytesIO()
            img.save(img_stream, format="PNG")
            img_stream.seek(0)

            # Scale image to fill slide.
            # Pixel sizes are mapped to EMU assuming the 200 DPI render above:
            # 914400 EMU per inch, so one pixel = 914400 / 200 EMU.
            img_w, img_h = img.size
            slide_w = prs.slide_width
            slide_h = prs.slide_height
            # min() keeps the whole page visible (letterbox rather than crop).
            ratio = min(slide_w / Emu(int(img_w * 914400 / 200)),
                        slide_h / Emu(int(img_h * 914400 / 200)))
            pic_w = int(img_w * 914400 / 200 * ratio)
            pic_h = int(img_h * 914400 / 200 * ratio)
            # Center the picture on the slide.
            left = (slide_w - pic_w) // 2
            top = (slide_h - pic_h) // 2

            slide.shapes.add_picture(img_stream, left, top, pic_w, pic_h)

        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        prs.save(output_path)

        output_size = os.path.getsize(output_path)
        logger.info(f"PDF→PPTX: {len(images)} slides ({output_size} bytes)")
        return {"total_slides": len(images), "output_size": output_size}

    except PDFConvertError:
        raise
    except Exception as e:
        raise PDFConvertError(f"PDF to PowerPoint conversion failed: {str(e)}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Excel (XLSX) to PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
def excel_to_pdf(input_path: str, output_dir: str) -> str:
    """Convert an Excel file to PDF using LibreOffice headless.

    Args:
        input_path: Path to the input XLSX/XLS file
        output_dir: Directory for the output file

    Returns:
        Path to the converted PDF

    Raises:
        PDFConvertError: If conversion fails
    """
    # Hoisted out of the finally block: an import failure during cleanup
    # would otherwise mask the real conversion error.
    import shutil

    os.makedirs(output_dir, exist_ok=True)
    # Isolated LibreOffice profile so concurrent conversions don't clash.
    user_install_dir = tempfile.mkdtemp(prefix="lo_excel2pdf_")

    cmd = [
        "soffice",
        "--headless",
        "--norestore",
        f"-env:UserInstallation=file://{user_install_dir}",
        "--convert-to", "pdf",
        "--outdir", output_dir,
        input_path,
    ]

    try:
        result = subprocess.run(
            cmd, capture_output=True, text=True, timeout=120,
            env={**os.environ, "HOME": user_install_dir},
        )

        input_basename = os.path.splitext(os.path.basename(input_path))[0]
        output_path = os.path.join(output_dir, f"{input_basename}.pdf")

        # LibreOffice sometimes exits non-zero while still producing output,
        # so the output file's existence is the primary success signal.
        if os.path.exists(output_path) and os.path.getsize(output_path) > 0:
            logger.info(f"Excel→PDF conversion successful: {output_path}")
            return output_path

        if result.returncode != 0:
            stderr = result.stderr or ""
            # javaldx warnings are benign noise; surface only real errors.
            real_errors = [
                line for line in stderr.strip().splitlines()
                if not line.startswith("Warning: failed to launch javaldx")
            ]
            error_msg = "\n".join(real_errors) if real_errors else stderr
            raise PDFConvertError(f"Conversion failed: {error_msg or 'Unknown error'}")

        raise PDFConvertError("Output file was not created.")

    except subprocess.TimeoutExpired as e:
        raise PDFConvertError("Conversion timed out. File may be too large.") from e
    except FileNotFoundError as e:
        raise PDFConvertError("LibreOffice is not installed on the server.") from e
    finally:
        shutil.rmtree(user_install_dir, ignore_errors=True)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PowerPoint (PPTX) to PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
def pptx_to_pdf(input_path: str, output_dir: str) -> str:
    """Convert a PowerPoint file to PDF via headless LibreOffice.

    Args:
        input_path: Path to the input PPTX/PPT file
        output_dir: Directory for the output file

    Returns:
        Path to the converted PDF

    Raises:
        PDFConvertError: If conversion fails
    """
    os.makedirs(output_dir, exist_ok=True)
    # Throwaway profile keeps concurrent LibreOffice runs independent.
    profile_dir = tempfile.mkdtemp(prefix="lo_pptx2pdf_")

    command = [
        "soffice",
        "--headless",
        "--norestore",
        f"-env:UserInstallation=file://{profile_dir}",
        "--convert-to", "pdf",
        "--outdir", output_dir,
        input_path,
    ]

    try:
        proc = subprocess.run(
            command, capture_output=True, text=True, timeout=120,
            env={**os.environ, "HOME": profile_dir},
        )

        stem = os.path.splitext(os.path.basename(input_path))[0]
        expected_pdf = os.path.join(output_dir, f"{stem}.pdf")

        # Success is determined by the output file, not the exit code.
        if os.path.exists(expected_pdf) and os.path.getsize(expected_pdf) > 0:
            logger.info(f"PPTX→PDF conversion successful: {expected_pdf}")
            return expected_pdf

        if proc.returncode != 0:
            stderr = proc.stderr or ""
            # Drop the harmless javaldx warning noise before reporting.
            meaningful = [
                line for line in stderr.strip().splitlines()
                if not line.startswith("Warning: failed to launch javaldx")
            ]
            error_msg = "\n".join(meaningful) if meaningful else stderr
            raise PDFConvertError(f"Conversion failed: {error_msg or 'Unknown error'}")

        raise PDFConvertError("Output file was not created.")

    except subprocess.TimeoutExpired:
        raise PDFConvertError("Conversion timed out. File may be too large.")
    except FileNotFoundError:
        raise PDFConvertError("LibreOffice is not installed on the server.")
    finally:
        import shutil
        shutil.rmtree(profile_dir, ignore_errors=True)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Sign PDF (overlay signature image on a page)
|
||||
# ---------------------------------------------------------------------------
|
||||
def sign_pdf(
    input_path: str,
    signature_path: str,
    output_path: str,
    page: int = 0,
    x: float = 100,
    y: float = 100,
    width: float = 200,
    height: float = 80,
) -> dict:
    """Overlay a signature image onto a PDF page.

    A one-page overlay PDF is drawn with reportlab (the image placed at the
    given position/size) and merged on top of the target page, so the
    signature becomes part of the page content rather than an annotation.

    Args:
        input_path: Path to the input PDF
        signature_path: Path to the signature image (PNG with transparency)
        output_path: Path for the signed output PDF
        page: 0-based page index to place signature
        x: X coordinate (points from left)
        y: Y coordinate (points from bottom — PDF origin is bottom-left)
        width: Signature width in points
        height: Signature height in points

    Returns:
        dict with total_pages, output_size, and signed_page (1-based)

    Raises:
        PDFConvertError: If signing fails
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter
        from reportlab.pdfgen import canvas as rl_canvas
        from reportlab.lib.utils import ImageReader

        reader = PdfReader(input_path)
        total_pages = len(reader.pages)
        if total_pages == 0:
            raise PDFConvertError("PDF has no pages.")
        if page < 0 or page >= total_pages:
            raise PDFConvertError(f"Page {page + 1} does not exist (PDF has {total_pages} pages).")

        # Overlay must match the target page's size so coordinates line up.
        target_page = reader.pages[page]
        page_box = target_page.mediabox
        page_width = float(page_box.width)
        page_height = float(page_box.height)

        # Create overlay PDF with the signature image
        overlay_stream = io.BytesIO()
        c = rl_canvas.Canvas(overlay_stream, pagesize=(page_width, page_height))
        sig_img = ImageReader(signature_path)
        # mask="auto" preserves PNG alpha transparency in the drawn image.
        c.drawImage(sig_img, x, y, width=width, height=height, mask="auto")
        c.save()
        overlay_stream.seek(0)

        overlay_reader = PdfReader(overlay_stream)
        overlay_page = overlay_reader.pages[0]

        # Copy every page through, merging the overlay onto the chosen one.
        writer = PdfWriter()
        for i, pg in enumerate(reader.pages):
            if i == page:
                pg.merge_page(overlay_page)
            writer.add_page(pg)

        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)

        output_size = os.path.getsize(output_path)
        logger.info(f"Sign PDF: signature on page {page + 1} ({output_size} bytes)")
        return {"total_pages": total_pages, "output_size": output_size, "signed_page": page + 1}

    except PDFConvertError:
        raise
    except Exception as e:
        raise PDFConvertError(f"Failed to sign PDF: {str(e)}")
|
||||
316
backend/app/services/pdf_extra_service.py
Normal file
316
backend/app/services/pdf_extra_service.py
Normal file
@@ -0,0 +1,316 @@
|
||||
"""Extended PDF tools — Crop, Flatten, Repair, Metadata Editor."""
|
||||
import os
|
||||
import io
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PDFExtraError(Exception):
    """Custom exception for extended PDF tool failures (crop/flatten/repair/metadata)."""
    pass
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Crop PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
def crop_pdf(
    input_path: str,
    output_path: str,
    margin_left: float = 0,
    margin_right: float = 0,
    margin_top: float = 0,
    margin_bottom: float = 0,
    pages: str = "all",
) -> dict:
    """Crop margins from PDF pages.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the cropped output
        margin_left/right/top/bottom: Points to crop from each side
        pages: "all" or comma-separated page numbers/ranges (1-based)

    Returns:
        dict with total_pages, cropped_pages, and output_size

    Raises:
        PDFExtraError: If the PDF is empty, margins collapse a page, or
            cropping fails for any other reason
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter

        reader = PdfReader(input_path)
        writer = PdfWriter()
        total_pages = len(reader.pages)

        if total_pages == 0:
            raise PDFExtraError("PDF has no pages.")

        target_indices = _parse_pages(pages, total_pages)

        for i, page in enumerate(reader.pages):
            if i in target_indices:
                box = page.mediabox
                new_llx = float(box.lower_left[0]) + margin_left
                new_lly = float(box.lower_left[1]) + margin_bottom
                new_urx = float(box.upper_right[0]) - margin_right
                new_ury = float(box.upper_right[1]) - margin_top
                # Reject margins that collapse the page to zero or negative
                # size — previously this silently wrote a degenerate box.
                if new_llx >= new_urx or new_lly >= new_ury:
                    raise PDFExtraError(
                        f"Margins are too large for page {i + 1}: resulting crop box is empty."
                    )
                box.lower_left = (new_llx, new_lly)
                box.upper_right = (new_urx, new_ury)
                page.mediabox = box
                page.cropbox = box
            writer.add_page(page)

        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)

        output_size = os.path.getsize(output_path)
        logger.info(f"Crop PDF: {len(target_indices)} pages cropped ({output_size} bytes)")
        return {
            "total_pages": total_pages,
            "cropped_pages": len(target_indices),
            "output_size": output_size,
        }

    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to crop PDF: {str(e)}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Flatten PDF (remove interactive form fields, annotations)
|
||||
# ---------------------------------------------------------------------------
|
||||
def flatten_pdf(input_path: str, output_path: str) -> dict:
    """Flatten a PDF — remove interactive form fields and annotations.

    Implementation note: this strips the /Annots array from each page and
    the document-level /AcroForm, which removes interactivity. Annotation
    appearance streams are dropped rather than rendered into the page, so
    visible widget content may disappear — TODO confirm this is acceptable
    for the product's "flatten" semantics.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the flattened output

    Returns:
        dict with total_pages and output_size

    Raises:
        PDFExtraError: If flatten fails
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter

        reader = PdfReader(input_path)
        writer = PdfWriter()
        total_pages = len(reader.pages)

        if total_pages == 0:
            raise PDFExtraError("PDF has no pages.")

        for page in reader.pages:
            # Remove annotations to flatten
            if "/Annots" in page:
                del page["/Annots"]
            writer.add_page(page)

        # Remove AcroForm (interactive forms) at document level.
        # NOTE(review): _root_object is a private PyPDF2 attribute — may break
        # on a PyPDF2/pypdf upgrade; verify against the pinned version.
        if "/AcroForm" in writer._root_object:
            del writer._root_object["/AcroForm"]

        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)

        output_size = os.path.getsize(output_path)
        logger.info(f"Flatten PDF: {total_pages} pages ({output_size} bytes)")
        return {"total_pages": total_pages, "output_size": output_size}

    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to flatten PDF: {str(e)}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Repair PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
def repair_pdf(input_path: str, output_path: str) -> dict:
    """Attempt to repair a damaged PDF by re-writing it page by page.

    Pages that cannot be copied are skipped; metadata is copied best-effort.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the repaired output

    Returns:
        dict with total_pages, recovered_pages, output_size, and repaired flag

    Raises:
        PDFExtraError: If the file is unreadable or no pages can be recovered
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter
        from PyPDF2.errors import PdfReadError

        # strict=False lets the parser tolerate many structural defects.
        try:
            reader = PdfReader(input_path, strict=False)
        except PdfReadError as e:
            raise PDFExtraError(f"Cannot read PDF — file may be severely corrupted: {str(e)}")

        writer = PdfWriter()
        total_pages = len(reader.pages)
        if total_pages == 0:
            raise PDFExtraError("PDF has no recoverable pages.")

        recovered = 0
        for index, page in enumerate(reader.pages):
            try:
                writer.add_page(page)
            except Exception:
                logger.warning(f"Repair: skipped unrecoverable page {index + 1}")
            else:
                recovered += 1

        if not recovered:
            raise PDFExtraError("No pages could be recovered from the PDF.")

        # Copy metadata if available (best effort — never fail the repair).
        try:
            if reader.metadata:
                writer.add_metadata(reader.metadata)
        except Exception:
            pass

        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)

        output_size = os.path.getsize(output_path)
        logger.info(f"Repair PDF: {recovered}/{total_pages} pages recovered ({output_size} bytes)")
        return {
            "total_pages": total_pages,
            "recovered_pages": recovered,
            "output_size": output_size,
            "repaired": True,
        }

    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to repair PDF: {str(e)}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PDF Metadata Editor
|
||||
# ---------------------------------------------------------------------------
|
||||
def edit_pdf_metadata(
    input_path: str,
    output_path: str,
    title: str | None = None,
    author: str | None = None,
    subject: str | None = None,
    keywords: str | None = None,
    creator: str | None = None,
) -> dict:
    """Edit PDF metadata fields.

    Args:
        input_path: Path to the input PDF
        output_path: Path for the output PDF
        title/author/subject/keywords/creator: New values (None = keep existing)

    Returns:
        dict with total_pages, output_size, and the metadata read back
        from the written file

    Raises:
        PDFExtraError: If no field is provided or the edit fails
    """
    try:
        from PyPDF2 import PdfReader, PdfWriter

        reader = PdfReader(input_path)
        writer = PdfWriter()
        for page in reader.pages:
            writer.add_page(page)

        # Keep only the fields the caller actually supplied.
        field_values = {
            "/Title": title,
            "/Author": author,
            "/Subject": subject,
            "/Keywords": keywords,
            "/Creator": creator,
        }
        metadata = {key: value for key, value in field_values.items() if value is not None}

        if not metadata:
            raise PDFExtraError("At least one metadata field must be provided.")

        writer.add_metadata(metadata)

        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        with open(output_path, "wb") as f:
            writer.write(f)

        output_size = os.path.getsize(output_path)

        # Read the file back to report what was actually stored (best effort).
        current_meta = {}
        try:
            verify_reader = PdfReader(output_path)
            if verify_reader.metadata:
                current_meta = {
                    "title": verify_reader.metadata.get("/Title", ""),
                    "author": verify_reader.metadata.get("/Author", ""),
                    "subject": verify_reader.metadata.get("/Subject", ""),
                    "keywords": verify_reader.metadata.get("/Keywords", ""),
                    "creator": verify_reader.metadata.get("/Creator", ""),
                }
        except Exception:
            pass

        logger.info(f"Edit metadata: updated {len(metadata)} fields ({output_size} bytes)")
        return {
            "total_pages": len(reader.pages),
            "output_size": output_size,
            "metadata": current_meta,
        }

    except PDFExtraError:
        raise
    except Exception as e:
        raise PDFExtraError(f"Failed to edit PDF metadata: {str(e)}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
def _parse_pages(pages_spec: str, total_pages: int) -> set[int]:
|
||||
"""Parse page specification to set of 0-based indices."""
|
||||
if pages_spec.strip().lower() == "all":
|
||||
return set(range(total_pages))
|
||||
|
||||
indices = set()
|
||||
for part in pages_spec.split(","):
|
||||
part = part.strip()
|
||||
if "-" in part:
|
||||
try:
|
||||
start, end = part.split("-", 1)
|
||||
start = max(1, int(start))
|
||||
end = min(total_pages, int(end))
|
||||
for p in range(start, end + 1):
|
||||
indices.add(p - 1)
|
||||
except ValueError:
|
||||
continue
|
||||
else:
|
||||
try:
|
||||
p = int(part)
|
||||
if 1 <= p <= total_pages:
|
||||
indices.add(p - 1)
|
||||
except ValueError:
|
||||
continue
|
||||
return indices
|
||||
@@ -135,3 +135,21 @@ def get_all_ratings_summary() -> list[dict]:
|
||||
}
|
||||
for row in rows
|
||||
]
|
||||
|
||||
|
||||
def get_global_rating_summary() -> dict:
    """Return aggregate rating stats across all rated tools."""
    query = """
            SELECT
                COUNT(*) AS count,
                COALESCE(AVG(rating), 0) AS average
            FROM tool_ratings
            """
    with _connect() as conn:
        row = conn.execute(query).fetchone()

    if row is None:
        return {"rating_count": 0, "average_rating": 0.0}
    return {
        "rating_count": int(row["count"]),
        "average_rating": round(row["average"], 1),
    }
|
||||
|
||||
220
backend/app/services/stripe_service.py
Normal file
220
backend/app/services/stripe_service.py
Normal file
@@ -0,0 +1,220 @@
|
||||
"""Stripe payment service — checkout sessions, webhooks, and subscription management."""
|
||||
import logging
|
||||
import os
|
||||
|
||||
import stripe
|
||||
from flask import current_app
|
||||
|
||||
from app.services.account_service import update_user_plan, get_user_by_id, _connect, _utc_now
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _init_stripe():
    """Configure the stripe SDK with the app's secret key.

    Must run inside a Flask application context, before any Stripe API call.
    An empty key (unconfigured) is passed through; Stripe calls will then fail.
    """
    stripe.api_key = current_app.config.get("STRIPE_SECRET_KEY", "")
|
||||
|
||||
|
||||
def _ensure_stripe_columns():
    """Add stripe_customer_id and stripe_subscription_id columns if missing."""
    conn = _connect()
    try:
        existing = {row["name"] for row in conn.execute("PRAGMA table_info(users)").fetchall()}
        # ALTER TABLE ADD COLUMN is the only migration needed; it is a no-op
        # rerun because we check the current schema first.
        if "stripe_customer_id" not in existing:
            conn.execute("ALTER TABLE users ADD COLUMN stripe_customer_id TEXT")
        if "stripe_subscription_id" not in existing:
            conn.execute("ALTER TABLE users ADD COLUMN stripe_subscription_id TEXT")
        conn.commit()
    finally:
        conn.close()
|
||||
|
||||
|
||||
def init_stripe_db():
    """Initialize stripe-related DB columns (idempotent; safe to call at startup)."""
    _ensure_stripe_columns()
|
||||
|
||||
|
||||
def _get_or_create_customer(user_id: int) -> str:
    """Return the user's Stripe customer ID, creating the customer if needed.

    Raises:
        ValueError: If no user with ``user_id`` exists.
    """
    _init_stripe()

    conn = _connect()
    try:
        user_row = conn.execute(
            "SELECT email, stripe_customer_id FROM users WHERE id = ?",
            (user_id,),
        ).fetchone()
    finally:
        conn.close()

    if user_row is None:
        raise ValueError("User not found.")

    existing_id = user_row["stripe_customer_id"]
    if existing_id:
        return existing_id

    # No customer yet — create one on Stripe, then persist the ID locally
    # so the user_id ↔ customer mapping survives for webhook lookups.
    customer = stripe.Customer.create(
        email=user_row["email"],
        metadata={"user_id": str(user_id)},
    )

    conn = _connect()
    try:
        conn.execute(
            "UPDATE users SET stripe_customer_id = ?, updated_at = ? WHERE id = ?",
            (customer.id, _utc_now(), user_id),
        )
        conn.commit()
    finally:
        conn.close()

    return customer.id
|
||||
|
||||
|
||||
def create_checkout_session(user_id: int, price_id: str, success_url: str, cancel_url: str) -> str:
    """Create a Stripe Checkout Session and return its hosted payment URL.

    Args:
        user_id: Local user ID; a Stripe customer is created on first use.
        price_id: Stripe Price ID of the subscription plan.
        success_url: Redirect target after successful payment.
        cancel_url: Redirect target if the user cancels.

    Returns:
        The Checkout Session URL to redirect the user to.
    """
    _init_stripe()
    customer_id = _get_or_create_customer(user_id)

    session = stripe.checkout.Session.create(
        customer=customer_id,
        payment_method_types=["card"],
        line_items=[{"price": price_id, "quantity": 1}],
        mode="subscription",
        success_url=success_url,
        cancel_url=cancel_url,
        # user_id in metadata lets the webhook map the session back to us.
        metadata={"user_id": str(user_id)},
    )
    return session.url
|
||||
|
||||
|
||||
def create_portal_session(user_id: int, return_url: str) -> str:
    """Create a Stripe Customer Portal session for managing subscriptions."""
    _init_stripe()
    portal = stripe.billing_portal.Session.create(
        customer=_get_or_create_customer(user_id),
        return_url=return_url,
    )
    return portal.url
|
||||
|
||||
|
||||
def handle_webhook_event(payload: bytes, sig_header: str) -> dict:
    """Verify and process a Stripe webhook event; return a status dict."""
    webhook_secret = current_app.config.get("STRIPE_WEBHOOK_SECRET", "")
    if not webhook_secret:
        logger.warning("STRIPE_WEBHOOK_SECRET not configured — ignoring webhook.")
        return {"status": "ignored", "reason": "no webhook secret"}

    # construct_event verifies the signature against the raw payload.
    try:
        event = stripe.Webhook.construct_event(payload, sig_header, webhook_secret)
    except stripe.SignatureVerificationError:
        logger.warning("Stripe webhook signature verification failed.")
        return {"status": "error", "reason": "signature_failed"}
    except ValueError:
        logger.warning("Invalid Stripe webhook payload.")
        return {"status": "error", "reason": "invalid_payload"}

    event_type = event["type"]
    data_object = event["data"]["object"]

    # Dispatch table keeps the supported event types in one place;
    # unknown event types are acknowledged without action.
    handlers = {
        "checkout.session.completed": _handle_checkout_completed,
        "customer.subscription.updated": _handle_subscription_updated,
        "customer.subscription.deleted": _handle_subscription_deleted,
        "invoice.payment_failed": _handle_payment_failed,
    }
    handler = handlers.get(event_type)
    if handler is not None:
        handler(data_object)

    return {"status": "ok", "event_type": event_type}
|
||||
|
||||
|
||||
def _find_user_by_customer_id(customer_id: str) -> dict | None:
    """Look up a user row by its Stripe customer ID.

    Returns:
        A plain dict of the matching row (id, email, plan, created_at),
        or None when no user carries this customer ID.
    """
    conn = _connect()
    try:
        cursor = conn.execute(
            "SELECT id, email, plan, created_at FROM users WHERE stripe_customer_id = ?",
            (customer_id,),
        )
        row = cursor.fetchone()
    finally:
        conn.close()
    return dict(row) if row else None
|
||||
|
||||
|
||||
def _handle_checkout_completed(session: dict):
    """Handle successful checkout — activate Pro plan.

    Prefers the local user_id stored in the checkout session's metadata;
    falls back to matching the Stripe customer ID when metadata is absent.
    """
    customer_id = session.get("customer")
    subscription_id = session.get("subscription")
    # Stripe can send metadata as null (not just omit it), so guard with
    # `or {}` before calling .get() to avoid an AttributeError.
    user_id = (session.get("metadata") or {}).get("user_id")

    if user_id:
        _activate_pro_plan(int(user_id), subscription_id)
        logger.info("User %s upgraded to Pro via checkout.", user_id)
    elif customer_id:
        user = _find_user_by_customer_id(customer_id)
        if user:
            _activate_pro_plan(user["id"], subscription_id)
            logger.info("User %s upgraded to Pro via checkout (customer match).", user["id"])


def _activate_pro_plan(user_id: int, subscription_id):
    """Set a user's plan to 'pro' and record the Stripe subscription ID.

    Extracted so the metadata path and the customer-match path share one
    copy of the UPDATE statement instead of two duplicates.
    """
    conn = _connect()
    try:
        conn.execute(
            "UPDATE users SET plan = 'pro', stripe_subscription_id = ?, updated_at = ? WHERE id = ?",
            (subscription_id, _utc_now(), user_id),
        )
        conn.commit()
    finally:
        conn.close()
|
||||
|
||||
|
||||
def _handle_subscription_updated(subscription: dict):
    """Handle subscription changes (upgrade/downgrade).

    Maps Stripe subscription statuses onto the local plan:
    active/trialing -> pro, canceled/incomplete_expired -> free,
    past_due/unpaid -> logged only (no plan change yet).
    """
    user = _find_user_by_customer_id(subscription.get("customer"))
    if not user:
        return

    status = subscription.get("status")
    uid = user["id"]
    if status in ("active", "trialing"):
        update_user_plan(uid, "pro")
        logger.info("User %s subscription active — Pro plan.", uid)
    elif status in ("past_due", "unpaid"):
        logger.warning("User %s subscription %s.", uid, status)
    elif status in ("canceled", "incomplete_expired"):
        update_user_plan(uid, "free")
        logger.info("User %s subscription ended — Free plan.", uid)
|
||||
|
||||
|
||||
def _handle_subscription_deleted(subscription: dict):
    """Handle subscription cancellation.

    Downgrades the user to the free plan and clears the stored Stripe
    subscription ID. Unknown customers are ignored silently.
    """
    user = _find_user_by_customer_id(subscription.get("customer"))
    if not user:
        return

    update_user_plan(user["id"], "free")

    conn = _connect()
    try:
        conn.execute(
            "UPDATE users SET stripe_subscription_id = NULL, updated_at = ? WHERE id = ?",
            (_utc_now(), user["id"]),
        )
        conn.commit()
    finally:
        conn.close()
    logger.info("User %s subscription deleted — downgraded to Free.", user["id"])
|
||||
|
||||
|
||||
def _handle_payment_failed(invoice: dict):
    """Log payment failures (no plan change is made here)."""
    customer_id = invoice.get("customer")
    user = _find_user_by_customer_id(customer_id)
    if user is None:
        return
    logger.warning("Payment failed for user %s (customer %s).", user["id"], customer_id)
|
||||
65
backend/app/tasks/barcode_tasks.py
Normal file
65
backend/app/tasks/barcode_tasks.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""Celery tasks for barcode generation."""
|
||||
import os
|
||||
import logging
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from app.extensions import celery
|
||||
from app.services.barcode_service import generate_barcode, BarcodeGenerationError
|
||||
from app.services.storage_service import storage
|
||||
from app.services.task_tracking_service import finalize_task_tracking
|
||||
from app.utils.sanitizer import cleanup_task_files
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _cleanup(task_id: str):
    """Remove temp files for a task; keep local outputs when not using S3."""
    cleanup_task_files(task_id, keep_outputs=not storage.use_s3)


def _get_output_dir(task_id: str) -> str:
    """Return (creating if needed) the per-task output directory."""
    path = os.path.join(current_app.config["OUTPUT_FOLDER"], task_id)
    os.makedirs(path, exist_ok=True)
    return path


def _finalize_task(
    task_id, user_id, tool, original_filename, result,
    usage_source, api_key_id, celery_task_id,
):
    """Record task tracking, clean up temp files, and pass the result through."""
    finalize_task_tracking(
        user_id=user_id, tool=tool, original_filename=original_filename,
        result=result, usage_source=usage_source, api_key_id=api_key_id,
        celery_task_id=celery_task_id,
    )
    _cleanup(task_id)
    return result
|
||||
|
||||
|
||||
@celery.task(bind=True, name="app.tasks.barcode_tasks.generate_barcode_task")
def generate_barcode_task(
    self, data, barcode_type, task_id, output_format="png",
    user_id=None, usage_source="web", api_key_id=None,
):
    """Generate a barcode image (PNG or SVG), upload it, and finalize tracking.

    On generation failure the task still finalizes with a failed result
    instead of raising, so tracking/cleanup always runs.
    """
    output_dir = _get_output_dir(task_id)
    ext = "svg" if output_format == "svg" else "png"
    output_path = os.path.join(output_dir, f"{task_id}_barcode.{ext}")

    try:
        self.update_state(state="PROCESSING", meta={"step": "Generating barcode..."})
        stats = generate_barcode(data, barcode_type, output_path, output_format)
        # The service reports the actual file it wrote; pop it out of stats.
        final_path = stats.pop("output_path")

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(final_path, task_id, folder="outputs")
        download_name = f"barcode_{barcode_type}.{ext}"
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            **stats,
        }
    except BarcodeGenerationError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "barcode", data[:50],
                          result, usage_source, api_key_id, self.request.id)
|
||||
111
backend/app/tasks/image_extra_tasks.py
Normal file
111
backend/app/tasks/image_extra_tasks.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""Celery tasks for image extra tools — Crop, Rotate/Flip."""
|
||||
import os
|
||||
import logging
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from app.extensions import celery
|
||||
from app.services.image_extra_service import (
|
||||
crop_image,
|
||||
rotate_flip_image,
|
||||
ImageExtraError,
|
||||
)
|
||||
from app.services.storage_service import storage
|
||||
from app.services.task_tracking_service import finalize_task_tracking
|
||||
from app.utils.sanitizer import cleanup_task_files
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _cleanup(task_id: str):
    """Remove temp files for a task; keep local outputs when not using S3."""
    cleanup_task_files(task_id, keep_outputs=not storage.use_s3)


def _get_output_dir(task_id: str) -> str:
    """Return (creating if needed) the per-task output directory."""
    path = os.path.join(current_app.config["OUTPUT_FOLDER"], task_id)
    os.makedirs(path, exist_ok=True)
    return path


def _finalize_task(
    task_id, user_id, tool, original_filename, result,
    usage_source, api_key_id, celery_task_id,
):
    """Record task tracking, clean up temp files, and pass the result through."""
    finalize_task_tracking(
        user_id=user_id, tool=tool, original_filename=original_filename,
        result=result, usage_source=usage_source, api_key_id=api_key_id,
        celery_task_id=celery_task_id,
    )
    _cleanup(task_id)
    return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Image Crop
|
||||
# ---------------------------------------------------------------------------
|
||||
@celery.task(bind=True, name="app.tasks.image_extra_tasks.crop_image_task")
def crop_image_task(
    self, input_path, task_id, original_filename,
    left, top, right, bottom, quality=85,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Crop an image to the given box, upload the result, and finalize tracking."""
    output_dir = _get_output_dir(task_id)
    # Keep the caller's extension only for formats we support; else PNG.
    ext = os.path.splitext(original_filename)[1].lower().strip(".")
    if ext not in ("png", "jpg", "jpeg", "webp"):
        ext = "png"
    output_path = os.path.join(output_dir, f"{task_id}_cropped.{ext}")

    try:
        self.update_state(state="PROCESSING", meta={"step": "Cropping image..."})
        stats = crop_image(input_path, output_path, left, top, right, bottom, quality)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        download_name = f"{os.path.splitext(original_filename)[0]}_cropped.{ext}"
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            **stats,
        }
    except ImageExtraError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "image-crop", original_filename,
                          result, usage_source, api_key_id, self.request.id)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Image Rotate/Flip
|
||||
# ---------------------------------------------------------------------------
|
||||
@celery.task(bind=True, name="app.tasks.image_extra_tasks.rotate_flip_image_task")
def rotate_flip_image_task(
    self, input_path, task_id, original_filename,
    rotation=0, flip_horizontal=False, flip_vertical=False, quality=85,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Rotate and/or flip an image, upload the result, and finalize tracking."""
    output_dir = _get_output_dir(task_id)
    # Keep the caller's extension only for formats we support; else PNG.
    ext = os.path.splitext(original_filename)[1].lower().strip(".")
    if ext not in ("png", "jpg", "jpeg", "webp"):
        ext = "png"
    output_path = os.path.join(output_dir, f"{task_id}_transformed.{ext}")

    try:
        self.update_state(state="PROCESSING", meta={"step": "Transforming image..."})
        stats = rotate_flip_image(input_path, output_path, rotation,
                                  flip_horizontal, flip_vertical, quality)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        download_name = f"{os.path.splitext(original_filename)[0]}_transformed.{ext}"
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            **stats,
        }
    except ImageExtraError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "image-rotate-flip", original_filename,
                          result, usage_source, api_key_id, self.request.id)
|
||||
171
backend/app/tasks/pdf_convert_tasks.py
Normal file
171
backend/app/tasks/pdf_convert_tasks.py
Normal file
@@ -0,0 +1,171 @@
|
||||
"""Celery tasks for new PDF conversions — PDF↔PPTX, Excel→PDF, Sign PDF."""
|
||||
import os
|
||||
import logging
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from app.extensions import celery
|
||||
from app.services.pdf_convert_service import (
|
||||
pdf_to_pptx,
|
||||
excel_to_pdf,
|
||||
pptx_to_pdf,
|
||||
sign_pdf,
|
||||
PDFConvertError,
|
||||
)
|
||||
from app.services.storage_service import storage
|
||||
from app.services.task_tracking_service import finalize_task_tracking
|
||||
from app.utils.sanitizer import cleanup_task_files
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _cleanup(task_id: str):
    """Remove temp files for a task; keep local outputs when not using S3."""
    cleanup_task_files(task_id, keep_outputs=not storage.use_s3)


def _get_output_dir(task_id: str) -> str:
    """Return (creating if needed) the per-task output directory."""
    path = os.path.join(current_app.config["OUTPUT_FOLDER"], task_id)
    os.makedirs(path, exist_ok=True)
    return path


def _finalize_task(
    task_id, user_id, tool, original_filename, result,
    usage_source, api_key_id, celery_task_id,
):
    """Record task tracking, clean up temp files, and pass the result through."""
    finalize_task_tracking(
        user_id=user_id, tool=tool, original_filename=original_filename,
        result=result, usage_source=usage_source, api_key_id=api_key_id,
        celery_task_id=celery_task_id,
    )
    _cleanup(task_id)
    return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PDF to PowerPoint
|
||||
# ---------------------------------------------------------------------------
|
||||
@celery.task(bind=True, name="app.tasks.pdf_convert_tasks.pdf_to_pptx_task")
def pdf_to_pptx_task(
    self, input_path, task_id, original_filename,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Convert a PDF to PowerPoint, upload the result, and finalize tracking."""
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}.pptx")

    try:
        self.update_state(state="PROCESSING", meta={"step": "Converting PDF to PowerPoint..."})
        stats = pdf_to_pptx(input_path, output_path)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        download_name = f"{os.path.splitext(original_filename)[0]}.pptx"
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            **stats,
        }
    except PDFConvertError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "pdf-to-pptx", original_filename,
                          result, usage_source, api_key_id, self.request.id)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Excel to PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
@celery.task(bind=True, name="app.tasks.pdf_convert_tasks.excel_to_pdf_task")
def excel_to_pdf_task(
    self, input_path, task_id, original_filename,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Convert an Excel workbook to PDF, upload the result, and finalize tracking."""
    output_dir = _get_output_dir(task_id)

    try:
        self.update_state(state="PROCESSING", meta={"step": "Converting Excel to PDF..."})
        # The converter chooses the output filename within output_dir.
        output_path = excel_to_pdf(input_path, output_dir)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        download_name = f"{os.path.splitext(original_filename)[0]}.pdf"
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            "output_size": os.path.getsize(output_path),
        }
    except PDFConvertError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "excel-to-pdf", original_filename,
                          result, usage_source, api_key_id, self.request.id)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PowerPoint to PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
@celery.task(bind=True, name="app.tasks.pdf_convert_tasks.pptx_to_pdf_task")
def pptx_to_pdf_task(
    self, input_path, task_id, original_filename,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Convert a PowerPoint file to PDF, upload the result, and finalize tracking."""
    output_dir = _get_output_dir(task_id)

    try:
        self.update_state(state="PROCESSING", meta={"step": "Converting PowerPoint to PDF..."})
        # The converter chooses the output filename within output_dir.
        output_path = pptx_to_pdf(input_path, output_dir)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        download_name = f"{os.path.splitext(original_filename)[0]}.pdf"
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            "output_size": os.path.getsize(output_path),
        }
    except PDFConvertError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "pptx-to-pdf", original_filename,
                          result, usage_source, api_key_id, self.request.id)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Sign PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
@celery.task(bind=True, name="app.tasks.pdf_convert_tasks.sign_pdf_task")
def sign_pdf_task(
    self, input_path, signature_path, task_id, original_filename,
    page=0, x=100, y=100, width=200, height=80,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Stamp a signature image onto a PDF page, upload, and finalize tracking.

    Placement is controlled by (page, x, y, width, height); the signing
    geometry semantics live in the sign_pdf service.
    """
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}_signed.pdf")

    try:
        self.update_state(state="PROCESSING", meta={"step": "Signing PDF..."})
        stats = sign_pdf(input_path, signature_path, output_path, page, x, y, width, height)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        download_name = f"{os.path.splitext(original_filename)[0]}_signed.pdf"
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            **stats,
        }
    except PDFConvertError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "sign-pdf", original_filename,
                          result, usage_source, api_key_id, self.request.id)
|
||||
164
backend/app/tasks/pdf_extra_tasks.py
Normal file
164
backend/app/tasks/pdf_extra_tasks.py
Normal file
@@ -0,0 +1,164 @@
|
||||
"""Celery tasks for extended PDF tools — Crop, Flatten, Repair, Metadata."""
|
||||
import os
|
||||
import logging
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from app.extensions import celery
|
||||
from app.services.pdf_extra_service import (
|
||||
crop_pdf,
|
||||
flatten_pdf,
|
||||
repair_pdf,
|
||||
edit_pdf_metadata,
|
||||
PDFExtraError,
|
||||
)
|
||||
from app.services.storage_service import storage
|
||||
from app.services.task_tracking_service import finalize_task_tracking
|
||||
from app.utils.sanitizer import cleanup_task_files
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _cleanup(task_id: str):
    """Remove temp files for a task; keep local outputs when not using S3."""
    cleanup_task_files(task_id, keep_outputs=not storage.use_s3)


def _get_output_dir(task_id: str) -> str:
    """Return (creating if needed) the per-task output directory."""
    path = os.path.join(current_app.config["OUTPUT_FOLDER"], task_id)
    os.makedirs(path, exist_ok=True)
    return path


def _finalize_task(
    task_id, user_id, tool, original_filename, result,
    usage_source, api_key_id, celery_task_id,
):
    """Record task tracking, clean up temp files, and pass the result through."""
    finalize_task_tracking(
        user_id=user_id, tool=tool, original_filename=original_filename,
        result=result, usage_source=usage_source, api_key_id=api_key_id,
        celery_task_id=celery_task_id,
    )
    _cleanup(task_id)
    return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Crop PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
@celery.task(bind=True, name="app.tasks.pdf_extra_tasks.crop_pdf_task")
def crop_pdf_task(
    self, input_path, task_id, original_filename,
    margin_left=0, margin_right=0, margin_top=0, margin_bottom=0, pages="all",
    user_id=None, usage_source="web", api_key_id=None,
):
    """Crop PDF page margins, upload the result, and finalize tracking."""
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}_cropped.pdf")

    try:
        self.update_state(state="PROCESSING", meta={"step": "Cropping PDF..."})
        stats = crop_pdf(input_path, output_path, margin_left, margin_right,
                         margin_top, margin_bottom, pages)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        download_name = f"{os.path.splitext(original_filename)[0]}_cropped.pdf"
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            **stats,
        }
    except PDFExtraError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "crop-pdf", original_filename,
                          result, usage_source, api_key_id, self.request.id)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Flatten PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
@celery.task(bind=True, name="app.tasks.pdf_extra_tasks.flatten_pdf_task")
def flatten_pdf_task(
    self, input_path, task_id, original_filename,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Flatten a PDF, upload the result, and finalize tracking."""
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}_flattened.pdf")

    try:
        self.update_state(state="PROCESSING", meta={"step": "Flattening PDF..."})
        stats = flatten_pdf(input_path, output_path)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        download_name = f"{os.path.splitext(original_filename)[0]}_flattened.pdf"
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            **stats,
        }
    except PDFExtraError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "flatten-pdf", original_filename,
                          result, usage_source, api_key_id, self.request.id)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Repair PDF
|
||||
# ---------------------------------------------------------------------------
|
||||
@celery.task(bind=True, name="app.tasks.pdf_extra_tasks.repair_pdf_task")
def repair_pdf_task(
    self, input_path, task_id, original_filename,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Attempt to repair a damaged PDF, upload the result, and finalize tracking."""
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}_repaired.pdf")

    try:
        self.update_state(state="PROCESSING", meta={"step": "Repairing PDF..."})
        stats = repair_pdf(input_path, output_path)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        download_name = f"{os.path.splitext(original_filename)[0]}_repaired.pdf"
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            **stats,
        }
    except PDFExtraError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "repair-pdf", original_filename,
                          result, usage_source, api_key_id, self.request.id)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Edit PDF Metadata
|
||||
# ---------------------------------------------------------------------------
|
||||
@celery.task(bind=True, name="app.tasks.pdf_extra_tasks.edit_metadata_task")
def edit_metadata_task(
    self, input_path, task_id, original_filename,
    title=None, author=None, subject=None, keywords=None, creator=None,
    user_id=None, usage_source="web", api_key_id=None,
):
    """Rewrite PDF document-info metadata, upload, and finalize tracking.

    The download keeps the original filename since only metadata changes.
    """
    output_dir = _get_output_dir(task_id)
    output_path = os.path.join(output_dir, f"{task_id}_metadata.pdf")

    try:
        self.update_state(state="PROCESSING", meta={"step": "Editing PDF metadata..."})
        stats = edit_pdf_metadata(input_path, output_path, title, author, subject, keywords, creator)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")
        download_name = original_filename
        result = {
            "status": "completed",
            "download_url": storage.generate_presigned_url(s3_key, original_filename=download_name),
            "filename": download_name,
            **stats,
        }
    except PDFExtraError as e:
        result = {"status": "failed", "error": str(e)}

    return _finalize_task(task_id, user_id, "edit-metadata", original_filename,
                          result, usage_source, api_key_id, self.request.id)
|
||||
91
backend/app/utils/database.py
Normal file
91
backend/app/utils/database.py
Normal file
@@ -0,0 +1,91 @@
|
||||
"""Database abstraction — supports SQLite (dev) and PostgreSQL (production).
|
||||
|
||||
Usage:
|
||||
from app.utils.database import get_connection
|
||||
|
||||
The returned connection behaves like a sqlite3.Connection with row_factory set.
|
||||
For PostgreSQL it wraps psycopg2 with RealDictCursor for dict-like rows.
|
||||
|
||||
Selection logic:
|
||||
- If DATABASE_URL env var is set (starts with ``postgres``), use PostgreSQL.
|
||||
- Otherwise fall back to SQLite via DATABASE_PATH config.
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
|
||||
from flask import current_app
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_pg_available = False
|
||||
try:
|
||||
import psycopg2
|
||||
import psycopg2.extras
|
||||
_pg_available = True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
def is_postgres() -> bool:
    """Return True when the app is configured to use PostgreSQL.

    Detection is purely by DATABASE_URL scheme prefix ("postgres..."),
    which covers both postgres:// and postgresql:// URLs.
    """
    return os.getenv("DATABASE_URL", "").startswith("postgres")
|
||||
|
||||
|
||||
def _sqlite_connect() -> sqlite3.Connection:
    """Open the configured SQLite database.

    Ensures the parent directory exists, enables dict-like row access via
    sqlite3.Row, and turns on foreign-key enforcement (off by default in
    SQLite).
    """
    db_path = current_app.config["DATABASE_PATH"]
    parent = os.path.dirname(db_path)
    if parent:
        os.makedirs(parent, exist_ok=True)

    connection = sqlite3.connect(db_path)
    connection.row_factory = sqlite3.Row
    connection.execute("PRAGMA foreign_keys = ON")
    return connection
|
||||
|
||||
|
||||
def _pg_connect():
    """Return a psycopg2 connection with RealDictCursor.

    Raises:
        RuntimeError: when psycopg2 is not importable in this environment.
    """
    if not _pg_available:
        raise RuntimeError("psycopg2 is not installed — cannot use PostgreSQL.")

    connection = psycopg2.connect(
        os.getenv("DATABASE_URL", ""),
        cursor_factory=psycopg2.extras.RealDictCursor,
    )
    # Explicit transactions: callers (or db_connection) commit/rollback.
    connection.autocommit = False
    return connection
|
||||
|
||||
|
||||
def get_connection():
    """Get a database connection (SQLite or PostgreSQL based on config)."""
    return _pg_connect() if is_postgres() else _sqlite_connect()
|
||||
|
||||
|
||||
@contextmanager
def db_connection():
    """Context manager that yields a connection and handles commit/rollback.

    Commits when the `with` body completes normally, rolls back and
    re-raises on any exception, and always closes the connection.
    """
    conn = get_connection()
    try:
        yield conn
        # Commit stays inside the try so a failing commit also rolls back.
        conn.commit()
    except Exception:
        conn.rollback()
        raise
    finally:
        conn.close()
|
||||
|
||||
|
||||
def adapt_sql(sql: str) -> str:
    """Adapt SQLite SQL to PostgreSQL if needed.

    Converts:
      - INTEGER PRIMARY KEY AUTOINCREMENT -> SERIAL PRIMARY KEY
      - ? placeholders -> %s placeholders

    NOTE(review): the placeholder swap is a blind string replace, so a
    literal '?' inside a quoted SQL string would also be rewritten —
    assumes no call site embeds literal question marks; confirm before
    relying on this with arbitrary SQL.
    """
    if not is_postgres():
        return sql
    return (
        sql.replace("INTEGER PRIMARY KEY AUTOINCREMENT", "SERIAL PRIMARY KEY")
           .replace("?", "%s")
    )
|
||||
Binary file not shown.
@@ -10,10 +10,16 @@ load_dotenv(os.path.join(REPO_ROOT, ".env"))
|
||||
load_dotenv(os.path.join(BASE_DIR, ".env"), override=False)
|
||||
|
||||
|
||||
def _parse_csv_env(name: str) -> tuple[str, ...]:
|
||||
raw_value = os.getenv(name, "")
|
||||
return tuple(item.strip().lower() for item in raw_value.split(",") if item.strip())
|
||||
|
||||
|
||||
class BaseConfig:
|
||||
"""Base configuration."""
|
||||
SECRET_KEY = os.getenv("SECRET_KEY", "change-me-in-production")
|
||||
INTERNAL_ADMIN_SECRET = os.getenv("INTERNAL_ADMIN_SECRET", "")
|
||||
INTERNAL_ADMIN_EMAILS = _parse_csv_env("INTERNAL_ADMIN_EMAILS")
|
||||
|
||||
# File upload settings
|
||||
MAX_CONTENT_LENGTH = int(
|
||||
@@ -47,6 +53,14 @@ class BaseConfig:
|
||||
"bmp": ["image/bmp"],
|
||||
"mp4": ["video/mp4"],
|
||||
"webm": ["video/webm"],
|
||||
"pptx": [
|
||||
"application/vnd.openxmlformats-officedocument.presentationml.presentation"
|
||||
],
|
||||
"ppt": ["application/vnd.ms-powerpoint"],
|
||||
"xlsx": [
|
||||
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
||||
],
|
||||
"xls": ["application/vnd.ms-excel"],
|
||||
}
|
||||
|
||||
# File size limits per type (bytes)
|
||||
@@ -64,6 +78,10 @@ class BaseConfig:
|
||||
"bmp": 15 * 1024 * 1024, # 15MB
|
||||
"mp4": 50 * 1024 * 1024, # 50MB
|
||||
"webm": 50 * 1024 * 1024, # 50MB
|
||||
"pptx": 20 * 1024 * 1024, # 20MB
|
||||
"ppt": 20 * 1024 * 1024, # 20MB
|
||||
"xlsx": 15 * 1024 * 1024, # 15MB
|
||||
"xls": 15 * 1024 * 1024, # 15MB
|
||||
}
|
||||
|
||||
# Redis
|
||||
@@ -102,6 +120,22 @@ class BaseConfig:
|
||||
SMTP_USE_TLS = os.getenv("SMTP_USE_TLS", "true").lower() == "true"
|
||||
FRONTEND_URL = os.getenv("FRONTEND_URL", "http://localhost:5173")
|
||||
|
||||
# Stripe
|
||||
STRIPE_SECRET_KEY = os.getenv("STRIPE_SECRET_KEY", "")
|
||||
STRIPE_WEBHOOK_SECRET = os.getenv("STRIPE_WEBHOOK_SECRET", "")
|
||||
STRIPE_PRICE_ID_PRO_MONTHLY = os.getenv("STRIPE_PRICE_ID_PRO_MONTHLY", "")
|
||||
STRIPE_PRICE_ID_PRO_YEARLY = os.getenv("STRIPE_PRICE_ID_PRO_YEARLY", "")
|
||||
|
||||
# Sentry
|
||||
SENTRY_DSN = os.getenv("SENTRY_DSN", "")
|
||||
SENTRY_ENVIRONMENT = os.getenv("SENTRY_ENVIRONMENT", "development")
|
||||
|
||||
# Site domain
|
||||
SITE_DOMAIN = os.getenv("SITE_DOMAIN", "https://saas-pdf.com")
|
||||
|
||||
# PostgreSQL (production) — set DATABASE_URL to use PG instead of SQLite
|
||||
DATABASE_URL = os.getenv("DATABASE_URL", "")
|
||||
|
||||
# Feature flags (default: enabled — set to "false" to disable a feature)
|
||||
FEATURE_EDITOR = os.getenv("FEATURE_EDITOR", "true").lower() == "true"
|
||||
FEATURE_OCR = os.getenv("FEATURE_OCR", "true").lower() == "true"
|
||||
@@ -130,6 +164,9 @@ class TestingConfig(BaseConfig):
|
||||
UPLOAD_FOLDER = "/tmp/test_uploads"
|
||||
OUTPUT_FOLDER = "/tmp/test_outputs"
|
||||
DATABASE_PATH = "/tmp/test_saas_pdf.db"
|
||||
FEATURE_EDITOR = False
|
||||
FEATURE_OCR = False
|
||||
FEATURE_REMOVEBG = False
|
||||
|
||||
# Disable Redis-backed rate limiting; use in-memory instead
|
||||
RATELIMIT_STORAGE_URI = "memory://"
|
||||
|
||||
62
backend/pytest_status.txt
Normal file
62
backend/pytest_status.txt
Normal file
@@ -0,0 +1,62 @@
|
||||
============================= test session starts =============================
|
||||
platform win32 -- Python 3.13.12, pytest-8.4.2, pluggy-1.6.0
|
||||
rootdir: C:\xampp\htdocs\SaaS-PDF
|
||||
plugins: anyio-4.12.1, cov-7.0.0, flask-1.3.0, mock-3.15.1, requests-mock-1.12.1
|
||||
collected 286 items
|
||||
|
||||
backend\tests\test_admin.py .... [ 1%]
|
||||
backend\tests\test_assistant.py ... [ 2%]
|
||||
backend\tests\test_auth.py ..... [ 4%]
|
||||
backend\tests\test_compress.py .. [ 4%]
|
||||
backend\tests\test_compress_image.py ... [ 5%]
|
||||
backend\tests\test_compress_service.py ... [ 6%]
|
||||
backend\tests\test_compress_tasks.py ... [ 8%]
|
||||
backend\tests\test_config.py ... [ 9%]
|
||||
backend\tests\test_contact.py ....... [ 11%]
|
||||
backend\tests\test_convert.py .... [ 12%]
|
||||
backend\tests\test_convert_tasks.py .... [ 14%]
|
||||
backend\tests\test_download.py ..... [ 16%]
|
||||
backend\tests\test_file_validator.py ......... [ 19%]
|
||||
backend\tests\test_flowchart_tasks.py .. [ 19%]
|
||||
backend\tests\test_health.py .. [ 20%]
|
||||
backend\tests\test_history.py ... [ 21%]
|
||||
backend\tests\test_html_to_pdf.py .... [ 23%]
|
||||
backend\tests\test_image.py ... [ 24%]
|
||||
backend\tests\test_image_service.py ... [ 25%]
|
||||
backend\tests\test_image_tasks.py ..... [ 26%]
|
||||
backend\tests\test_load.py ....... [ 29%]
|
||||
backend\tests\test_maintenance_tasks.py ........ [ 32%]
|
||||
backend\tests\test_ocr.py ........ [ 34%]
|
||||
backend\tests\test_ocr_service.py .... [ 36%]
|
||||
backend\tests\test_openrouter_config_service.py ..... [ 38%]
|
||||
backend\tests\test_password_reset.py ........ [ 40%]
|
||||
backend\tests\test_pdf_ai.py ......... [ 44%]
|
||||
backend\tests\test_pdf_editor.py ....... [ 46%]
|
||||
backend\tests\test_pdf_service.py ... [ 47%]
|
||||
backend\tests\test_pdf_to_excel.py .. [ 48%]
|
||||
backend\tests\test_pdf_tools.py ....................................... [ 61%]
|
||||
backend\tests\test_pdf_tools_service.py ........... [ 65%]
|
||||
backend\tests\test_pdf_tools_tasks.py ......... [ 68%]
|
||||
backend\tests\test_phase2_tools.py ........................s........ [ 80%]
|
||||
backend\tests\test_qrcode.py .... [ 81%]
|
||||
backend\tests\test_rate_limiter.py ..... [ 83%]
|
||||
backend\tests\test_rating.py ........ [ 86%]
|
||||
backend\tests\test_removebg.py ... [ 87%]
|
||||
backend\tests\test_sanitizer.py ........ [ 90%]
|
||||
backend\tests\test_site_assistant_service.py .. [ 90%]
|
||||
backend\tests\test_stats.py . [ 91%]
|
||||
backend\tests\test_storage_service.py .... [ 92%]
|
||||
backend\tests\test_stripe.py .... [ 94%]
|
||||
backend\tests\test_tasks_route.py .... [ 95%]
|
||||
backend\tests\test_utils.py .. [ 96%]
|
||||
backend\tests\test_video.py ....... [ 98%]
|
||||
backend\tests\test_video_service.py .. [ 99%]
|
||||
backend\tests\test_video_tasks.py .. [100%]
|
||||
|
||||
============================== warnings summary ===============================
|
||||
backend/tests/test_pdf_tools_service.py::TestMergePdfsService::test_merge_file_not_found_raises
|
||||
C:\xampp\htdocs\SaaS-PDF\.venv\Lib\site-packages\PyPDF2\__init__.py:21: DeprecationWarning: PyPDF2 is deprecated. Please move to the pypdf library instead.
|
||||
warnings.warn(
|
||||
|
||||
-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
|
||||
============ 285 passed, 1 skipped, 1 warning in 220.52s (0:03:40) ============
|
||||
@@ -35,6 +35,12 @@ weasyprint>=60.0,<62.0
|
||||
# OCR
|
||||
pytesseract>=0.3.10,<1.0
|
||||
|
||||
# PowerPoint Processing
|
||||
python-pptx>=0.6.21,<2.0
|
||||
|
||||
# Barcode Generation
|
||||
python-barcode>=0.15,<1.0
|
||||
|
||||
# Background Removal
|
||||
rembg>=2.0,<3.0
|
||||
onnxruntime>=1.16,<2.0
|
||||
@@ -48,6 +54,15 @@ requests>=2.31,<3.0
|
||||
# Security
|
||||
werkzeug>=3.0,<4.0
|
||||
|
||||
# Payments
|
||||
stripe>=8.0,<10.0
|
||||
|
||||
# Monitoring
|
||||
sentry-sdk[flask]>=2.0,<3.0
|
||||
|
||||
# PostgreSQL (production)
|
||||
psycopg2-binary>=2.9,<3.0
|
||||
|
||||
# Testing
|
||||
pytest>=8.0,<9.0
|
||||
pytest-flask>=1.3,<2.0
|
||||
|
||||
180
backend/test_all.txt
Normal file
180
backend/test_all.txt
Normal file
@@ -0,0 +1,180 @@
|
||||
........................................................................ [ 30%]
|
||||
.........F
|
||||
================================== FAILURES ===================================
|
||||
____________ TestOcrFeatureFlag.test_ocr_image_disabled_by_default ____________
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:378: in connect
|
||||
sock = self.retry.call_with_retry(
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\retry.py:62: in call_with_retry
|
||||
return do()
|
||||
^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:379: in <lambda>
|
||||
lambda: self._connect(), lambda error: self.disconnect(error)
|
||||
^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:732: in _connect
|
||||
for res in socket.getaddrinfo(
|
||||
C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.13_3.13.3312.0_x64__qbz5n2kfra8p0\Lib\socket.py:977: in getaddrinfo
|
||||
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
E socket.gaierror: [Errno 11001] getaddrinfo failed
|
||||
|
||||
During handling of the above exception, another exception occurred:
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:130: in reconnect_on_error
|
||||
yield
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:180: in _consume_from
|
||||
self._pubsub.subscribe(key)
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\client.py:1018: in subscribe
|
||||
ret_val = self.execute_command("SUBSCRIBE", *new_channels.keys())
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\client.py:843: in execute_command
|
||||
self.connection = self.connection_pool.get_connection()
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\utils.py:183: in wrapper
|
||||
return func(*args, **kwargs)
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:1483: in get_connection
|
||||
connection.connect()
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:384: in connect
|
||||
raise ConnectionError(self._error_message(e))
|
||||
E redis.exceptions.ConnectionError: Error 11001 connecting to redis:6379. getaddrinfo failed.
|
||||
|
||||
During handling of the above exception, another exception occurred:
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:378: in connect
|
||||
sock = self.retry.call_with_retry(
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\retry.py:62: in call_with_retry
|
||||
return do()
|
||||
^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:379: in <lambda>
|
||||
lambda: self._connect(), lambda error: self.disconnect(error)
|
||||
^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:732: in _connect
|
||||
for res in socket.getaddrinfo(
|
||||
C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.13_3.13.3312.0_x64__qbz5n2kfra8p0\Lib\socket.py:977: in getaddrinfo
|
||||
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
E socket.gaierror: [Errno 11001] getaddrinfo failed
|
||||
|
||||
During handling of the above exception, another exception occurred:
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:133: in reconnect_on_error
|
||||
self._ensure(self._reconnect_pubsub, ())
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:428: in ensure
|
||||
return retry_over_time(
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\kombu\utils\functional.py:318: in retry_over_time
|
||||
return fun(*args, **kwargs)
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:109: in _reconnect_pubsub
|
||||
metas = self.backend.client.mget(self.subscribed_to)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\commands\core.py:2009: in mget
|
||||
return self.execute_command("MGET", *args, **options)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\client.py:605: in execute_command
|
||||
return self._execute_command(*args, **options)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\client.py:611: in _execute_command
|
||||
conn = self.connection or pool.get_connection()
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\utils.py:183: in wrapper
|
||||
return func(*args, **kwargs)
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:1483: in get_connection
|
||||
connection.connect()
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\redis\connection.py:384: in connect
|
||||
raise ConnectionError(self._error_message(e))
|
||||
E redis.exceptions.ConnectionError: Error 11001 connecting to redis:6379. getaddrinfo failed.
|
||||
|
||||
The above exception was the direct cause of the following exception:
|
||||
tests\test_ocr.py:18: in test_ocr_image_disabled_by_default
|
||||
response = client.post(
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\werkzeug\test.py:1167: in post
|
||||
return self.open(*args, **kw)
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\testing.py:235: in open
|
||||
response = super().open(
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\werkzeug\test.py:1116: in open
|
||||
response_parts = self.run_wsgi_app(request.environ, buffered=buffered)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\werkzeug\test.py:988: in run_wsgi_app
|
||||
rv = run_wsgi_app(self.application, environ, buffered=buffered)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\werkzeug\test.py:1264: in run_wsgi_app
|
||||
app_rv = app(environ, start_response)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:1536: in __call__
|
||||
return self.wsgi_app(environ, start_response)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:1514: in wsgi_app
|
||||
response = self.handle_exception(e)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask_cors\extension.py:194: in wrapped_function
|
||||
return cors_after_request(app.make_response(f(*args, **kwargs)))
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:1511: in wsgi_app
|
||||
response = self.full_dispatch_request()
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:919: in full_dispatch_request
|
||||
rv = self.handle_user_exception(e)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask_cors\extension.py:194: in wrapped_function
|
||||
return cors_after_request(app.make_response(f(*args, **kwargs)))
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:917: in full_dispatch_request
|
||||
rv = self.dispatch_request()
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask\app.py:902: in dispatch_request
|
||||
return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) # type: ignore[no-any-return]
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\flask_limiter\extension.py:1314: in __inner
|
||||
return cast(R, flask.current_app.ensure_sync(obj)(*a, **k))
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
app\routes\ocr.py:69: in ocr_image_route
|
||||
task = ocr_image_task.delay(
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\app\task.py:444: in delay
|
||||
return self.apply_async(args, kwargs)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\app\task.py:608: in apply_async
|
||||
return app.send_task(
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\app\base.py:946: in send_task
|
||||
self.backend.on_task_call(P, task_id)
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:417: in on_task_call
|
||||
self.result_consumer.consume_from(task_id)
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:172: in consume_from
|
||||
return self.start(task_id)
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:150: in start
|
||||
self._consume_from(initial_task_id)
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:179: in _consume_from
|
||||
with self.reconnect_on_error():
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.13_3.13.3312.0_x64__qbz5n2kfra8p0\Lib\contextlib.py:162: in __exit__
|
||||
self.gen.throw(value)
|
||||
C:\Users\ahmed\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\LocalCache\local-packages\Python313\site-packages\celery\backends\redis.py:136: in reconnect_on_error
|
||||
raise RuntimeError(E_RETRY_LIMIT_EXCEEDED) from e
|
||||
E RuntimeError:
|
||||
E Retry limit exceeded while trying to reconnect to the Celery redis result store backend. The Celery application must be restarted.
|
||||
------------------------------ Captured log call ------------------------------
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (0/20) now.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (1/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (2/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (3/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (4/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (5/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (6/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (7/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (8/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (9/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (10/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (11/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (12/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (13/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (14/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (15/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (16/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (17/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (18/20) in 1.00 second.
|
||||
ERROR celery.backends.redis:redis.py:435 Connection to Redis lost: Retry (19/20) in 1.00 second.
|
||||
CRITICAL celery.backends.redis:redis.py:135
|
||||
Retry limit exceeded while trying to reconnect to the Celery redis result store backend. The Celery application must be restarted.
|
||||
=========================== short test summary info ===========================
|
||||
FAILED tests/test_ocr.py::TestOcrFeatureFlag::test_ocr_image_disabled_by_default
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!! stopping after 1 failures !!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
1 failed, 81 passed in 116.84s (0:01:56)
|
||||
8
backend/test_output_phase1.txt
Normal file
8
backend/test_output_phase1.txt
Normal file
@@ -0,0 +1,8 @@
|
||||
ImportError while loading conftest 'C:\xampp\htdocs\SaaS-PDF\backend\tests\conftest.py'.
|
||||
tests\conftest.py:7: in <module>
|
||||
from app import create_app
|
||||
app\__init__.py:13: in <module>
|
||||
from app.services.stripe_service import init_stripe_db
|
||||
app\services\stripe_service.py:5: in <module>
|
||||
import stripe
|
||||
E ModuleNotFoundError: No module named 'stripe'
|
||||
19
backend/test_run.txt
Normal file
19
backend/test_run.txt
Normal file
@@ -0,0 +1,19 @@
|
||||
============================= test session starts =============================
|
||||
platform win32 -- Python 3.13.12, pytest-8.4.2, pluggy-1.6.0 -- C:\Users\ahmed\AppData\Local\Microsoft\WindowsApps\PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0\python.exe
|
||||
cachedir: .pytest_cache
|
||||
rootdir: C:\xampp\htdocs\SaaS-PDF\backend
|
||||
plugins: anyio-4.12.1, cov-7.0.0, flask-1.3.0, mock-3.15.1, requests-mock-1.12.1
|
||||
collecting ... collected 10 items
|
||||
|
||||
tests/test_health.py::test_health_endpoint PASSED [ 10%]
|
||||
tests/test_health.py::test_app_creates PASSED [ 20%]
|
||||
tests/test_config.py::TestConfigEndpoint::test_anonymous_gets_free_limits PASSED [ 30%]
|
||||
tests/test_config.py::TestConfigEndpoint::test_authenticated_free_user_gets_usage PASSED [ 40%]
|
||||
tests/test_config.py::TestConfigEndpoint::test_max_upload_mb_is_correct PASSED [ 50%]
|
||||
tests/test_auth.py::TestAuthRoutes::test_register_success PASSED [ 60%]
|
||||
tests/test_auth.py::TestAuthRoutes::test_register_duplicate_email PASSED [ 70%]
|
||||
tests/test_auth.py::TestAuthRoutes::test_login_and_me PASSED [ 80%]
|
||||
tests/test_auth.py::TestAuthRoutes::test_login_invalid_password PASSED [ 90%]
|
||||
tests/test_auth.py::TestAuthRoutes::test_me_without_session PASSED [100%]
|
||||
|
||||
============================= 10 passed in 9.59s ==============================
|
||||
0
backend/test_run2.txt
Normal file
0
backend/test_run2.txt
Normal file
@@ -9,6 +9,8 @@ from app.services.account_service import init_account_db
|
||||
from app.services.rating_service import init_ratings_db
|
||||
from app.services.ai_cost_service import init_ai_cost_db
|
||||
from app.services.site_assistant_service import init_site_assistant_db
|
||||
from app.services.contact_service import init_contact_db
|
||||
from app.services.stripe_service import init_stripe_db
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -35,6 +37,8 @@ def app():
|
||||
init_ratings_db()
|
||||
init_ai_cost_db()
|
||||
init_site_assistant_db()
|
||||
init_contact_db()
|
||||
init_stripe_db()
|
||||
|
||||
# Create temp directories
|
||||
os.makedirs(app.config['UPLOAD_FOLDER'], exist_ok=True)
|
||||
|
||||
175
backend/tests/test_admin.py
Normal file
175
backend/tests/test_admin.py
Normal file
@@ -0,0 +1,175 @@
|
||||
"""Tests for internal admin dashboard endpoints."""
|
||||
|
||||
from app.services.account_service import create_user, record_file_history, set_user_role, update_user_plan
|
||||
from app.services.contact_service import save_message
|
||||
from app.services.rating_service import submit_rating
|
||||
|
||||
|
||||
class TestInternalAdminRoutes:
|
||||
def test_overview_requires_authenticated_admin(self, client):
|
||||
response = client.get("/api/internal/admin/overview")
|
||||
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_overview_rejects_non_admin_user(self, app, client):
|
||||
with app.app_context():
|
||||
create_user("member@example.com", "testpass123")
|
||||
|
||||
login_response = client.post(
|
||||
"/api/auth/login",
|
||||
json={"email": "member@example.com", "password": "testpass123"},
|
||||
)
|
||||
assert login_response.status_code == 200
|
||||
|
||||
response = client.get("/api/internal/admin/overview")
|
||||
assert response.status_code == 403
|
||||
|
||||
def test_overview_returns_operational_summary(self, app, client):
|
||||
with app.app_context():
|
||||
first_user = create_user("admin-a@example.com", "testpass123")
|
||||
second_user = create_user("admin-b@example.com", "testpass123")
|
||||
set_user_role(first_user["id"], "admin")
|
||||
update_user_plan(second_user["id"], "pro")
|
||||
|
||||
record_file_history(
|
||||
user_id=first_user["id"],
|
||||
tool="compress-pdf",
|
||||
original_filename="one.pdf",
|
||||
output_filename="one-small.pdf",
|
||||
status="completed",
|
||||
download_url="https://example.com/one-small.pdf",
|
||||
)
|
||||
record_file_history(
|
||||
user_id=second_user["id"],
|
||||
tool="repair-pdf",
|
||||
original_filename="broken.pdf",
|
||||
output_filename=None,
|
||||
status="failed",
|
||||
download_url=None,
|
||||
metadata={"error": "Repair failed."},
|
||||
)
|
||||
|
||||
submit_rating("compress-pdf", 5, fingerprint="admin-rating")
|
||||
message = save_message("Admin User", "ops@example.com", "bug", "Need help", "Broken upload")
|
||||
|
||||
login_response = client.post(
|
||||
"/api/auth/login",
|
||||
json={"email": "admin-a@example.com", "password": "testpass123"},
|
||||
)
|
||||
assert login_response.status_code == 200
|
||||
|
||||
response = client.get("/api/internal/admin/overview")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.get_json()
|
||||
assert data["users"]["total"] == 2
|
||||
assert data["users"]["pro"] == 1
|
||||
assert data["processing"]["total_files_processed"] == 2
|
||||
assert data["processing"]["failed_files"] == 1
|
||||
assert data["ratings"]["rating_count"] == 1
|
||||
assert data["contacts"]["unread_messages"] == 1
|
||||
assert data["contacts"]["recent"][0]["id"] == message["id"]
|
||||
assert data["recent_failures"][0]["tool"] == "repair-pdf"
|
||||
|
||||
def test_contacts_can_be_marked_read(self, app, client):
|
||||
with app.app_context():
|
||||
admin_user = create_user("admin-reader@example.com", "testpass123")
|
||||
set_user_role(admin_user["id"], "admin")
|
||||
message = save_message("Reader", "reader@example.com", "general", "Hello", "Please review")
|
||||
|
||||
login_response = client.post(
|
||||
"/api/auth/login",
|
||||
json={"email": "admin-reader@example.com", "password": "testpass123"},
|
||||
)
|
||||
assert login_response.status_code == 200
|
||||
|
||||
mark_response = client.post(f"/api/internal/admin/contacts/{message['id']}/read")
|
||||
assert mark_response.status_code == 200
|
||||
|
||||
contacts_response = client.get("/api/internal/admin/contacts")
|
||||
assert contacts_response.status_code == 200
|
||||
contacts_data = contacts_response.get_json()
|
||||
assert contacts_data["unread"] == 0
|
||||
assert contacts_data["items"][0]["is_read"] is True
|
||||
|
||||
def test_user_plan_can_be_updated(self, app, client):
|
||||
with app.app_context():
|
||||
admin_user = create_user("admin-plan@example.com", "testpass123")
|
||||
user = create_user("plan-change@example.com", "testpass123")
|
||||
set_user_role(admin_user["id"], "admin")
|
||||
|
||||
login_response = client.post(
|
||||
"/api/auth/login",
|
||||
json={"email": "admin-plan@example.com", "password": "testpass123"},
|
||||
)
|
||||
assert login_response.status_code == 200
|
||||
|
||||
response = client.post(
|
||||
f"/api/internal/admin/users/{user['id']}/plan",
|
||||
json={"plan": "pro"},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.get_json()
|
||||
assert data["user"]["plan"] == "pro"
|
||||
|
||||
def test_user_role_can_be_updated(self, app, client):
|
||||
with app.app_context():
|
||||
admin_user = create_user("admin-role@example.com", "testpass123")
|
||||
user = create_user("member-role@example.com", "testpass123")
|
||||
set_user_role(admin_user["id"], "admin")
|
||||
|
||||
login_response = client.post(
|
||||
"/api/auth/login",
|
||||
json={"email": "admin-role@example.com", "password": "testpass123"},
|
||||
)
|
||||
assert login_response.status_code == 200
|
||||
|
||||
response = client.post(
|
||||
f"/api/internal/admin/users/{user['id']}/role",
|
||||
json={"role": "admin"},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.get_json()
|
||||
assert data["user"]["role"] == "admin"
|
||||
|
||||
def test_allowlisted_admin_role_cannot_be_changed(self, app, client):
|
||||
app.config["INTERNAL_ADMIN_EMAILS"] = ("bootstrap-admin@example.com",)
|
||||
with app.app_context():
|
||||
actor = create_user("actor-admin@example.com", "testpass123")
|
||||
bootstrap = create_user("bootstrap-admin@example.com", "testpass123")
|
||||
set_user_role(actor["id"], "admin")
|
||||
|
||||
login_response = client.post(
|
||||
"/api/auth/login",
|
||||
json={"email": "actor-admin@example.com", "password": "testpass123"},
|
||||
)
|
||||
assert login_response.status_code == 200
|
||||
|
||||
response = client.post(
|
||||
f"/api/internal/admin/users/{bootstrap['id']}/role",
|
||||
json={"role": "user"},
|
||||
)
|
||||
|
||||
assert response.status_code == 400
|
||||
assert "INTERNAL_ADMIN_EMAILS" in response.get_json()["error"]
|
||||
|
||||
def test_admin_cannot_remove_own_role(self, app, client):
|
||||
with app.app_context():
|
||||
admin_user = create_user("self-admin@example.com", "testpass123")
|
||||
set_user_role(admin_user["id"], "admin")
|
||||
|
||||
login_response = client.post(
|
||||
"/api/auth/login",
|
||||
json={"email": "self-admin@example.com", "password": "testpass123"},
|
||||
)
|
||||
assert login_response.status_code == 200
|
||||
|
||||
response = client.post(
|
||||
f"/api/internal/admin/users/{admin_user['id']}/role",
|
||||
json={"role": "user"},
|
||||
)
|
||||
|
||||
assert response.status_code == 400
|
||||
assert "cannot remove your own admin role" in response.get_json()["error"].lower()
|
||||
@@ -12,6 +12,18 @@ class TestAuthRoutes:
|
||||
data = response.get_json()
|
||||
assert data['user']['email'] == 'user@example.com'
|
||||
assert data['user']['plan'] == 'free'
|
||||
assert data['user']['role'] == 'user'
|
||||
|
||||
def test_register_assigns_admin_role_for_allowlisted_email(self, app, client):
|
||||
app.config['INTERNAL_ADMIN_EMAILS'] = ('admin@example.com',)
|
||||
|
||||
response = client.post(
|
||||
'/api/auth/register',
|
||||
json={'email': 'admin@example.com', 'password': 'secretpass123'},
|
||||
)
|
||||
|
||||
assert response.status_code == 201
|
||||
assert response.get_json()['user']['role'] == 'admin'
|
||||
|
||||
def test_register_duplicate_email(self, client):
|
||||
client.post(
|
||||
|
||||
79
backend/tests/test_contact.py
Normal file
79
backend/tests/test_contact.py
Normal file
@@ -0,0 +1,79 @@
|
||||
"""Tests for the contact form endpoint."""
|
||||
import pytest
|
||||
|
||||
|
||||
class TestContactSubmission:
|
||||
"""Tests for POST /api/contact/submit."""
|
||||
|
||||
def test_submit_success(self, client):
|
||||
response = client.post("/api/contact/submit", json={
|
||||
"name": "Test User",
|
||||
"email": "test@example.com",
|
||||
"category": "general",
|
||||
"subject": "Test Subject",
|
||||
"message": "This is a test message body.",
|
||||
})
|
||||
assert response.status_code == 201
|
||||
data = response.get_json()
|
||||
assert data["message"] == "Message sent successfully."
|
||||
assert "id" in data
|
||||
assert "created_at" in data
|
||||
|
||||
def test_submit_missing_name(self, client):
|
||||
response = client.post("/api/contact/submit", json={
|
||||
"email": "test@example.com",
|
||||
"subject": "Test",
|
||||
"message": "Body",
|
||||
})
|
||||
assert response.status_code == 400
|
||||
assert "Name" in response.get_json()["error"]
|
||||
|
||||
def test_submit_invalid_email(self, client):
|
||||
response = client.post("/api/contact/submit", json={
|
||||
"name": "User",
|
||||
"email": "not-an-email",
|
||||
"subject": "Test",
|
||||
"message": "Body",
|
||||
})
|
||||
assert response.status_code == 400
|
||||
assert "email" in response.get_json()["error"].lower()
|
||||
|
||||
def test_submit_missing_subject(self, client):
|
||||
response = client.post("/api/contact/submit", json={
|
||||
"name": "User",
|
||||
"email": "test@example.com",
|
||||
"subject": "",
|
||||
"message": "Body",
|
||||
})
|
||||
assert response.status_code == 400
|
||||
assert "Subject" in response.get_json()["error"]
|
||||
|
||||
def test_submit_missing_message(self, client):
|
||||
response = client.post("/api/contact/submit", json={
|
||||
"name": "User",
|
||||
"email": "test@example.com",
|
||||
"subject": "Test",
|
||||
"message": "",
|
||||
})
|
||||
assert response.status_code == 400
|
||||
assert "Message" in response.get_json()["error"]
|
||||
|
||||
def test_submit_bug_category(self, client):
|
||||
response = client.post("/api/contact/submit", json={
|
||||
"name": "Bug Reporter",
|
||||
"email": "bug@example.com",
|
||||
"category": "bug",
|
||||
"subject": "Found a bug",
|
||||
"message": "The merge tool crashes on large files.",
|
||||
})
|
||||
assert response.status_code == 201
|
||||
|
||||
def test_submit_invalid_category_defaults_to_general(self, client):
|
||||
response = client.post("/api/contact/submit", json={
|
||||
"name": "User",
|
||||
"email": "test@example.com",
|
||||
"category": "hacking",
|
||||
"subject": "Test",
|
||||
"message": "Body text here.",
|
||||
})
|
||||
assert response.status_code == 201
|
||||
460
backend/tests/test_phase2_tools.py
Normal file
460
backend/tests/test_phase2_tools.py
Normal file
@@ -0,0 +1,460 @@
|
||||
"""Tests for Phase 2 routes — PDF Conversion, PDF Extra, Image Extra, Barcode."""
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def _barcode_available():
|
||||
"""Check if python-barcode is installed."""
|
||||
try:
|
||||
import barcode # noqa: F401
|
||||
return True
|
||||
except ImportError:
|
||||
return False
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Helpers
|
||||
# =========================================================================
|
||||
|
||||
def _make_pdf():
|
||||
"""Minimal valid PDF bytes."""
|
||||
return (
|
||||
b"%PDF-1.4\n1 0 obj<</Type/Catalog/Pages 2 0 R>>endobj\n"
|
||||
b"2 0 obj<</Type/Pages/Count 1/Kids[3 0 R]>>endobj\n"
|
||||
b"3 0 obj<</Type/Page/MediaBox[0 0 612 792]/Parent 2 0 R>>endobj\n"
|
||||
b"xref\n0 4\n0000000000 65535 f \n0000000009 00000 n \n"
|
||||
b"0000000058 00000 n \n0000000115 00000 n \n"
|
||||
b"trailer<</Root 1 0 R/Size 4>>\nstartxref\n190\n%%EOF"
|
||||
)
|
||||
|
||||
|
||||
def _make_png():
|
||||
"""Minimal valid PNG bytes."""
|
||||
return (
|
||||
b"\x89PNG\r\n\x1a\n"
|
||||
b"\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01"
|
||||
b"\x08\x02\x00\x00\x00\x90wS\xde"
|
||||
b"\x00\x00\x00\x0cIDATx\x9cc\xf8\x0f\x00\x00\x01\x01\x00\x05"
|
||||
b"\x18\xd8N\x00\x00\x00\x00IEND\xaeB`\x82"
|
||||
)
|
||||
|
||||
|
||||
def _mock_route(monkeypatch, route_module, task_name, validator_name='validate_actor_file'):
|
||||
"""Mock validate + generate_safe_path + celery task for a route module."""
|
||||
mock_task = MagicMock()
|
||||
mock_task.id = 'mock-task-id'
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
save_path = os.path.join(tmp_dir, 'mock_file')
|
||||
|
||||
monkeypatch.setattr(
|
||||
f'app.routes.{route_module}.validate_actor_file',
|
||||
lambda f, allowed_types, actor: ('test_file', 'pdf'),
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
f'app.routes.{route_module}.generate_safe_path',
|
||||
lambda ext, folder_type: ('mock-task-id', save_path),
|
||||
)
|
||||
mock_delay = MagicMock(return_value=mock_task)
|
||||
monkeypatch.setattr(f'app.routes.{route_module}.{task_name}.delay', mock_delay)
|
||||
return mock_task, mock_delay
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# PDF Convert Routes — /api/convert
|
||||
# =========================================================================
|
||||
|
||||
class TestPdfToPptx:
|
||||
def test_no_file(self, client):
|
||||
resp = client.post('/api/convert/pdf-to-pptx')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success(self, client, monkeypatch):
|
||||
_, mock_delay = _mock_route(monkeypatch, 'pdf_convert', 'pdf_to_pptx_task')
|
||||
resp = client.post('/api/convert/pdf-to-pptx', data={
|
||||
'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 202
|
||||
data = resp.get_json()
|
||||
assert data['task_id'] == 'mock-task-id'
|
||||
mock_delay.assert_called_once()
|
||||
|
||||
|
||||
class TestExcelToPdf:
|
||||
def test_no_file(self, client):
|
||||
resp = client.post('/api/convert/excel-to-pdf')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success(self, client, monkeypatch):
|
||||
mock_task = MagicMock()
|
||||
mock_task.id = 'mock-task-id'
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
save_path = os.path.join(tmp_dir, 'mock.xlsx')
|
||||
|
||||
monkeypatch.setattr(
|
||||
'app.routes.pdf_convert.validate_actor_file',
|
||||
lambda f, allowed_types, actor: ('test.xlsx', 'xlsx'),
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
'app.routes.pdf_convert.generate_safe_path',
|
||||
lambda ext, folder_type: ('mock-task-id', save_path),
|
||||
)
|
||||
mock_delay = MagicMock(return_value=mock_task)
|
||||
monkeypatch.setattr('app.routes.pdf_convert.excel_to_pdf_task.delay', mock_delay)
|
||||
|
||||
# Create a file with xlsx content type
|
||||
resp = client.post('/api/convert/excel-to-pdf', data={
|
||||
'file': (io.BytesIO(b'PK\x03\x04' + b'\x00' * 100), 'test.xlsx'),
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 202
|
||||
assert resp.get_json()['task_id'] == 'mock-task-id'
|
||||
|
||||
|
||||
class TestPptxToPdf:
|
||||
def test_no_file(self, client):
|
||||
resp = client.post('/api/convert/pptx-to-pdf')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success(self, client, monkeypatch):
|
||||
mock_task = MagicMock()
|
||||
mock_task.id = 'mock-task-id'
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
save_path = os.path.join(tmp_dir, 'mock.pptx')
|
||||
|
||||
monkeypatch.setattr(
|
||||
'app.routes.pdf_convert.validate_actor_file',
|
||||
lambda f, allowed_types, actor: ('test.pptx', 'pptx'),
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
'app.routes.pdf_convert.generate_safe_path',
|
||||
lambda ext, folder_type: ('mock-task-id', save_path),
|
||||
)
|
||||
mock_delay = MagicMock(return_value=mock_task)
|
||||
monkeypatch.setattr('app.routes.pdf_convert.pptx_to_pdf_task.delay', mock_delay)
|
||||
|
||||
resp = client.post('/api/convert/pptx-to-pdf', data={
|
||||
'file': (io.BytesIO(b'PK\x03\x04' + b'\x00' * 100), 'test.pptx'),
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 202
|
||||
assert resp.get_json()['task_id'] == 'mock-task-id'
|
||||
|
||||
|
||||
class TestSignPdf:
|
||||
def test_no_files(self, client):
|
||||
resp = client.post('/api/convert/sign')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_missing_signature(self, client):
|
||||
resp = client.post('/api/convert/sign', data={
|
||||
'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success(self, client, monkeypatch):
|
||||
mock_task = MagicMock()
|
||||
mock_task.id = 'mock-task-id'
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
|
||||
monkeypatch.setattr(
|
||||
'app.routes.pdf_convert.validate_actor_file',
|
||||
lambda f, allowed_types, actor: ('test.pdf', 'pdf'),
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
'app.routes.pdf_convert.generate_safe_path',
|
||||
lambda ext, folder_type: ('mock-task-id', os.path.join(tmp_dir, f'mock.{ext}')),
|
||||
)
|
||||
mock_delay = MagicMock(return_value=mock_task)
|
||||
monkeypatch.setattr('app.routes.pdf_convert.sign_pdf_task.delay', mock_delay)
|
||||
|
||||
resp = client.post('/api/convert/sign', data={
|
||||
'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
|
||||
'signature': (io.BytesIO(_make_png()), 'sig.png'),
|
||||
'page': '1',
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 202
|
||||
assert resp.get_json()['task_id'] == 'mock-task-id'
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# PDF Extra Routes — /api/pdf-tools
|
||||
# =========================================================================
|
||||
|
||||
class TestCropPdf:
|
||||
def test_no_file(self, client):
|
||||
resp = client.post('/api/pdf-tools/crop')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success(self, client, monkeypatch):
|
||||
_, mock_delay = _mock_route(monkeypatch, 'pdf_extra', 'crop_pdf_task')
|
||||
resp = client.post('/api/pdf-tools/crop', data={
|
||||
'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
|
||||
'left': '10', 'right': '10', 'top': '20', 'bottom': '20',
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 202
|
||||
assert resp.get_json()['task_id'] == 'mock-task-id'
|
||||
mock_delay.assert_called_once()
|
||||
|
||||
|
||||
class TestFlattenPdf:
|
||||
def test_no_file(self, client):
|
||||
resp = client.post('/api/pdf-tools/flatten')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success(self, client, monkeypatch):
|
||||
_, mock_delay = _mock_route(monkeypatch, 'pdf_extra', 'flatten_pdf_task')
|
||||
resp = client.post('/api/pdf-tools/flatten', data={
|
||||
'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 202
|
||||
mock_delay.assert_called_once()
|
||||
|
||||
|
||||
class TestRepairPdf:
|
||||
def test_no_file(self, client):
|
||||
resp = client.post('/api/pdf-tools/repair')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success(self, client, monkeypatch):
|
||||
_, mock_delay = _mock_route(monkeypatch, 'pdf_extra', 'repair_pdf_task')
|
||||
resp = client.post('/api/pdf-tools/repair', data={
|
||||
'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 202
|
||||
mock_delay.assert_called_once()
|
||||
|
||||
|
||||
class TestEditMetadata:
|
||||
def test_no_file(self, client):
|
||||
resp = client.post('/api/pdf-tools/metadata')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success(self, client, monkeypatch):
|
||||
_, mock_delay = _mock_route(monkeypatch, 'pdf_extra', 'edit_metadata_task')
|
||||
resp = client.post('/api/pdf-tools/metadata', data={
|
||||
'file': (io.BytesIO(_make_pdf()), 'test.pdf'),
|
||||
'title': 'Test Title',
|
||||
'author': 'Test Author',
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 202
|
||||
mock_delay.assert_called_once()
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Image Extra Routes — /api/image
|
||||
# =========================================================================
|
||||
|
||||
class TestImageCrop:
|
||||
def test_no_file(self, client):
|
||||
resp = client.post('/api/image/crop')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success(self, client, monkeypatch):
|
||||
mock_task = MagicMock()
|
||||
mock_task.id = 'mock-task-id'
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
save_path = os.path.join(tmp_dir, 'mock.png')
|
||||
|
||||
monkeypatch.setattr(
|
||||
'app.routes.image_extra.validate_actor_file',
|
||||
lambda f, allowed_types, actor: ('test.png', 'png'),
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
'app.routes.image_extra.generate_safe_path',
|
||||
lambda ext, folder_type: ('mock-task-id', save_path),
|
||||
)
|
||||
mock_delay = MagicMock(return_value=mock_task)
|
||||
monkeypatch.setattr('app.routes.image_extra.crop_image_task.delay', mock_delay)
|
||||
|
||||
resp = client.post('/api/image/crop', data={
|
||||
'file': (io.BytesIO(_make_png()), 'test.png'),
|
||||
'left': '0', 'top': '0', 'right': '100', 'bottom': '100',
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 202
|
||||
assert resp.get_json()['task_id'] == 'mock-task-id'
|
||||
|
||||
|
||||
class TestImageRotateFlip:
|
||||
def test_no_file(self, client):
|
||||
resp = client.post('/api/image/rotate-flip')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success(self, client, monkeypatch):
|
||||
mock_task = MagicMock()
|
||||
mock_task.id = 'mock-task-id'
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
save_path = os.path.join(tmp_dir, 'mock.png')
|
||||
|
||||
monkeypatch.setattr(
|
||||
'app.routes.image_extra.validate_actor_file',
|
||||
lambda f, allowed_types, actor: ('test.png', 'png'),
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
'app.routes.image_extra.generate_safe_path',
|
||||
lambda ext, folder_type: ('mock-task-id', save_path),
|
||||
)
|
||||
mock_delay = MagicMock(return_value=mock_task)
|
||||
monkeypatch.setattr('app.routes.image_extra.rotate_flip_image_task.delay', mock_delay)
|
||||
|
||||
resp = client.post('/api/image/rotate-flip', data={
|
||||
'file': (io.BytesIO(_make_png()), 'test.png'),
|
||||
'rotation': '90',
|
||||
'flip_horizontal': 'true',
|
||||
}, content_type='multipart/form-data')
|
||||
assert resp.status_code == 202
|
||||
assert resp.get_json()['task_id'] == 'mock-task-id'
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Barcode Routes — /api/barcode
|
||||
# =========================================================================
|
||||
|
||||
class TestBarcodeGenerate:
|
||||
def test_no_data(self, client):
|
||||
resp = client.post('/api/barcode/generate',
|
||||
data=json.dumps({}),
|
||||
content_type='application/json')
|
||||
assert resp.status_code == 400
|
||||
|
||||
def test_success_json(self, client, monkeypatch):
|
||||
mock_task = MagicMock()
|
||||
mock_task.id = 'mock-task-id'
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
|
||||
monkeypatch.setattr(
|
||||
'app.routes.barcode.generate_safe_path',
|
||||
lambda ext, folder_type: ('mock-task-id', os.path.join(tmp_dir, 'mock.png')),
|
||||
)
|
||||
mock_delay = MagicMock(return_value=mock_task)
|
||||
monkeypatch.setattr('app.routes.barcode.generate_barcode_task.delay', mock_delay)
|
||||
|
||||
resp = client.post('/api/barcode/generate',
|
||||
data=json.dumps({'data': '12345', 'barcode_type': 'code128'}),
|
||||
content_type='application/json')
|
||||
assert resp.status_code == 202
|
||||
assert resp.get_json()['task_id'] == 'mock-task-id'
|
||||
|
||||
def test_invalid_barcode_type(self, client):
|
||||
resp = client.post('/api/barcode/generate',
|
||||
data=json.dumps({'data': '12345', 'type': 'invalid_type'}),
|
||||
content_type='application/json')
|
||||
assert resp.status_code == 400
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Service unit tests
|
||||
# =========================================================================
|
||||
|
||||
class TestBarcodeService:
|
||||
@pytest.mark.skipif(
|
||||
not _barcode_available(),
|
||||
reason='python-barcode not installed'
|
||||
)
|
||||
def test_generate_barcode(self, app):
|
||||
from app.services.barcode_service import generate_barcode
|
||||
with app.app_context():
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
output_path = os.path.join(tmp_dir, 'test_barcode')
|
||||
result = generate_barcode('12345678', 'code128', output_path, 'png')
|
||||
assert 'output_path' in result
|
||||
assert os.path.exists(result['output_path'])
|
||||
|
||||
def test_invalid_barcode_type(self, app):
|
||||
from app.services.barcode_service import generate_barcode, BarcodeGenerationError
|
||||
with app.app_context():
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
output_path = os.path.join(tmp_dir, 'test_barcode')
|
||||
with pytest.raises(BarcodeGenerationError):
|
||||
generate_barcode('12345', 'nonexistent_type', output_path, 'png')
|
||||
|
||||
|
||||
class TestPdfExtraService:
|
||||
def test_edit_metadata(self, app):
|
||||
from app.services.pdf_extra_service import edit_pdf_metadata
|
||||
with app.app_context():
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
input_path = os.path.join(tmp_dir, 'input.pdf')
|
||||
output_path = os.path.join(tmp_dir, 'output.pdf')
|
||||
with open(input_path, 'wb') as f:
|
||||
f.write(_make_pdf())
|
||||
edit_pdf_metadata(input_path, output_path, title='Test Title', author='Test Author')
|
||||
assert os.path.exists(output_path)
|
||||
assert os.path.getsize(output_path) > 0
|
||||
|
||||
def test_flatten_pdf(self, app):
|
||||
from app.services.pdf_extra_service import flatten_pdf
|
||||
with app.app_context():
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
input_path = os.path.join(tmp_dir, 'input.pdf')
|
||||
output_path = os.path.join(tmp_dir, 'output.pdf')
|
||||
with open(input_path, 'wb') as f:
|
||||
f.write(_make_pdf())
|
||||
flatten_pdf(input_path, output_path)
|
||||
assert os.path.exists(output_path)
|
||||
|
||||
def test_repair_pdf(self, app):
|
||||
from app.services.pdf_extra_service import repair_pdf
|
||||
with app.app_context():
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
input_path = os.path.join(tmp_dir, 'input.pdf')
|
||||
output_path = os.path.join(tmp_dir, 'output.pdf')
|
||||
with open(input_path, 'wb') as f:
|
||||
f.write(_make_pdf())
|
||||
repair_pdf(input_path, output_path)
|
||||
assert os.path.exists(output_path)
|
||||
|
||||
def test_crop_pdf(self, app):
|
||||
from app.services.pdf_extra_service import crop_pdf
|
||||
with app.app_context():
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
input_path = os.path.join(tmp_dir, 'input.pdf')
|
||||
output_path = os.path.join(tmp_dir, 'output.pdf')
|
||||
with open(input_path, 'wb') as f:
|
||||
f.write(_make_pdf())
|
||||
crop_pdf(input_path, output_path, margin_left=10, margin_right=10, margin_top=10, margin_bottom=10)
|
||||
assert os.path.exists(output_path)
|
||||
|
||||
|
||||
class TestImageExtraService:
|
||||
def test_rotate_flip(self, app):
|
||||
from app.services.image_extra_service import rotate_flip_image
|
||||
from PIL import Image
|
||||
with app.app_context():
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
input_path = os.path.join(tmp_dir, 'input.png')
|
||||
output_path = os.path.join(tmp_dir, 'output.png')
|
||||
img = Image.new('RGB', (100, 100), color='red')
|
||||
img.save(input_path)
|
||||
rotate_flip_image(input_path, output_path, rotation=90)
|
||||
assert os.path.exists(output_path)
|
||||
result = Image.open(output_path)
|
||||
assert result.size == (100, 100)
|
||||
|
||||
def test_crop_image(self, app):
|
||||
from app.services.image_extra_service import crop_image
|
||||
from PIL import Image
|
||||
with app.app_context():
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
input_path = os.path.join(tmp_dir, 'input.png')
|
||||
output_path = os.path.join(tmp_dir, 'output.png')
|
||||
img = Image.new('RGB', (200, 200), color='blue')
|
||||
img.save(input_path)
|
||||
crop_image(input_path, output_path, left=10, top=10, right=100, bottom=100)
|
||||
assert os.path.exists(output_path)
|
||||
result = Image.open(output_path)
|
||||
assert result.size == (90, 90)
|
||||
|
||||
def test_crop_invalid_coords(self, app):
|
||||
from app.services.image_extra_service import crop_image, ImageExtraError
|
||||
from PIL import Image
|
||||
with app.app_context():
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
input_path = os.path.join(tmp_dir, 'input.png')
|
||||
output_path = os.path.join(tmp_dir, 'output.png')
|
||||
img = Image.new('RGB', (100, 100), color='blue')
|
||||
img.save(input_path)
|
||||
with __import__('pytest').raises(ImageExtraError):
|
||||
crop_image(input_path, output_path, left=100, top=0, right=50, bottom=100)
|
||||
52
backend/tests/test_stats.py
Normal file
52
backend/tests/test_stats.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""Tests for public stats summary endpoint."""
|
||||
|
||||
from app.services.account_service import create_user, record_file_history
|
||||
from app.services.rating_service import submit_rating
|
||||
|
||||
|
||||
class TestStatsSummary:
|
||||
def test_summary_returns_processing_and_rating_totals(self, app, client):
|
||||
with app.app_context():
|
||||
user = create_user("stats@example.com", "testpass123")
|
||||
|
||||
record_file_history(
|
||||
user_id=user["id"],
|
||||
tool="compress-pdf",
|
||||
original_filename="input.pdf",
|
||||
output_filename="output.pdf",
|
||||
status="completed",
|
||||
download_url="https://example.com/file.pdf",
|
||||
)
|
||||
record_file_history(
|
||||
user_id=user["id"],
|
||||
tool="compress-pdf",
|
||||
original_filename="input-2.pdf",
|
||||
output_filename="output-2.pdf",
|
||||
status="completed",
|
||||
download_url="https://example.com/file-2.pdf",
|
||||
)
|
||||
record_file_history(
|
||||
user_id=user["id"],
|
||||
tool="repair-pdf",
|
||||
original_filename="broken.pdf",
|
||||
output_filename=None,
|
||||
status="failed",
|
||||
download_url=None,
|
||||
metadata={"error": "Repair failed."},
|
||||
)
|
||||
|
||||
submit_rating("compress-pdf", 5, fingerprint="stats-a")
|
||||
submit_rating("repair-pdf", 4, fingerprint="stats-b")
|
||||
|
||||
response = client.get("/api/stats/summary")
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.get_json()
|
||||
assert data["total_files_processed"] == 3
|
||||
assert data["completed_files"] == 2
|
||||
assert data["failed_files"] == 1
|
||||
assert data["success_rate"] == 66.7
|
||||
assert data["files_last_24h"] == 3
|
||||
assert data["rating_count"] == 2
|
||||
assert data["average_rating"] == 4.5
|
||||
assert data["top_tools"][0] == {"tool": "compress-pdf", "count": 2}
|
||||
48
backend/tests/test_stripe.py
Normal file
48
backend/tests/test_stripe.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""Tests for Stripe payment routes."""
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
|
||||
class TestStripeRoutes:
|
||||
"""Tests for /api/stripe/ endpoints."""
|
||||
|
||||
def _login(self, client, email="stripe@test.com", password="testpass123"):
|
||||
"""Register and login a user."""
|
||||
client.post("/api/auth/register", json={
|
||||
"email": email, "password": password,
|
||||
})
|
||||
resp = client.post("/api/auth/login", json={
|
||||
"email": email, "password": password,
|
||||
})
|
||||
return resp.get_json()
|
||||
|
||||
def test_checkout_requires_auth(self, client):
|
||||
response = client.post("/api/stripe/create-checkout-session", json={
|
||||
"billing": "monthly",
|
||||
})
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_checkout_no_stripe_key(self, client, app):
|
||||
"""When STRIPE_PRICE_ID_PRO_MONTHLY is not set, return 503."""
|
||||
self._login(client)
|
||||
app.config["STRIPE_PRICE_ID_PRO_MONTHLY"] = ""
|
||||
app.config["STRIPE_PRICE_ID_PRO_YEARLY"] = ""
|
||||
response = client.post("/api/stripe/create-checkout-session", json={
|
||||
"billing": "monthly",
|
||||
})
|
||||
assert response.status_code == 503
|
||||
|
||||
def test_portal_requires_auth(self, client):
|
||||
response = client.post("/api/stripe/create-portal-session")
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_webhook_missing_signature(self, client):
|
||||
"""Webhook without config returns ignored status."""
|
||||
response = client.post(
|
||||
"/api/stripe/webhook",
|
||||
data=b'{}',
|
||||
headers={"Stripe-Signature": "test_sig"},
|
||||
)
|
||||
data = response.get_json()
|
||||
# Without webhook secret, it should be ignored
|
||||
assert data["status"] in ("ignored", "error")
|
||||
@@ -114,6 +114,8 @@ services:
|
||||
- VITE_FEATURE_EDITOR=${VITE_FEATURE_EDITOR:-true}
|
||||
- VITE_FEATURE_OCR=${VITE_FEATURE_OCR:-true}
|
||||
- VITE_FEATURE_REMOVEBG=${VITE_FEATURE_REMOVEBG:-true}
|
||||
- VITE_SITE_DOMAIN=${VITE_SITE_DOMAIN:-}
|
||||
- VITE_SENTRY_DSN=${VITE_SENTRY_DSN:-}
|
||||
volumes:
|
||||
- frontend_build:/app/dist
|
||||
|
||||
|
||||
@@ -114,6 +114,8 @@ services:
|
||||
- VITE_FEATURE_EDITOR=${VITE_FEATURE_EDITOR:-true}
|
||||
- VITE_FEATURE_OCR=${VITE_FEATURE_OCR:-true}
|
||||
- VITE_FEATURE_REMOVEBG=${VITE_FEATURE_REMOVEBG:-true}
|
||||
- VITE_SITE_DOMAIN=${VITE_SITE_DOMAIN:-}
|
||||
- VITE_SENTRY_DSN=${VITE_SENTRY_DSN:-}
|
||||
|
||||
# --- Nginx Reverse Proxy ---
|
||||
nginx:
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
"i18next-browser-languagedetector": "^8.0.0",
|
||||
"lucide-react": "^0.400.0",
|
||||
"pdf-lib": "^1.17.1",
|
||||
"pdfjs-dist": "^4.4.168",
|
||||
"react": "^18.3.0",
|
||||
"react-dom": "^18.3.0",
|
||||
"react-dropzone": "^14.2.0",
|
||||
|
||||
@@ -22,6 +22,9 @@ const ForgotPasswordPage = lazy(() => import('@/pages/ForgotPasswordPage'));
|
||||
const ResetPasswordPage = lazy(() => import('@/pages/ResetPasswordPage'));
|
||||
const PricingPage = lazy(() => import('@/pages/PricingPage'));
|
||||
const BlogPage = lazy(() => import('@/pages/BlogPage'));
|
||||
const BlogPostPage = lazy(() => import('@/pages/BlogPostPage'));
|
||||
const DevelopersPage = lazy(() => import('@/pages/DevelopersPage'));
|
||||
const InternalAdminPage = lazy(() => import('@/pages/InternalAdminPage'));
|
||||
|
||||
// Tool Pages
|
||||
const PdfToWord = lazy(() => import('@/components/tools/PdfToWord'));
|
||||
@@ -57,6 +60,19 @@ const SummarizePdf = lazy(() => import('@/components/tools/SummarizePdf'));
|
||||
const TranslatePdf = lazy(() => import('@/components/tools/TranslatePdf'));
|
||||
const TableExtractor = lazy(() => import('@/components/tools/TableExtractor'));
|
||||
|
||||
// Phase 2 lazy imports
|
||||
const PdfToPptx = lazy(() => import('@/components/tools/PdfToPptx'));
|
||||
const ExcelToPdf = lazy(() => import('@/components/tools/ExcelToPdf'));
|
||||
const PptxToPdf = lazy(() => import('@/components/tools/PptxToPdf'));
|
||||
const SignPdf = lazy(() => import('@/components/tools/SignPdf'));
|
||||
const CropPdf = lazy(() => import('@/components/tools/CropPdf'));
|
||||
const FlattenPdf = lazy(() => import('@/components/tools/FlattenPdf'));
|
||||
const RepairPdf = lazy(() => import('@/components/tools/RepairPdf'));
|
||||
const PdfMetadata = lazy(() => import('@/components/tools/PdfMetadata'));
|
||||
const ImageCrop = lazy(() => import('@/components/tools/ImageCrop'));
|
||||
const ImageRotateFlip = lazy(() => import('@/components/tools/ImageRotateFlip'));
|
||||
const BarcodeGenerator = lazy(() => import('@/components/tools/BarcodeGenerator'));
|
||||
|
||||
function LoadingFallback() {
|
||||
return (
|
||||
<div className="flex min-h-[40vh] items-center justify-center">
|
||||
@@ -98,6 +114,9 @@ export default function App() {
|
||||
<Route path="/contact" element={<ContactPage />} />
|
||||
<Route path="/pricing" element={<PricingPage />} />
|
||||
<Route path="/blog" element={<BlogPage />} />
|
||||
<Route path="/blog/:slug" element={<BlogPostPage />} />
|
||||
<Route path="/developers" element={<DevelopersPage />} />
|
||||
<Route path="/internal/admin" element={<InternalAdminPage />} />
|
||||
|
||||
{/* PDF Tools */}
|
||||
<Route path="/tools/pdf-to-word" element={<ToolLandingPage slug="pdf-to-word"><PdfToWord /></ToolLandingPage>} />
|
||||
@@ -147,6 +166,23 @@ export default function App() {
|
||||
<Route path="/tools/word-counter" element={<ToolLandingPage slug="word-counter"><WordCounter /></ToolLandingPage>} />
|
||||
<Route path="/tools/text-cleaner" element={<ToolLandingPage slug="text-cleaner"><TextCleaner /></ToolLandingPage>} />
|
||||
|
||||
{/* Phase 2 – PDF Conversion */}
|
||||
<Route path="/tools/pdf-to-pptx" element={<ToolLandingPage slug="pdf-to-pptx"><PdfToPptx /></ToolLandingPage>} />
|
||||
<Route path="/tools/excel-to-pdf" element={<ToolLandingPage slug="excel-to-pdf"><ExcelToPdf /></ToolLandingPage>} />
|
||||
<Route path="/tools/pptx-to-pdf" element={<ToolLandingPage slug="pptx-to-pdf"><PptxToPdf /></ToolLandingPage>} />
|
||||
<Route path="/tools/sign-pdf" element={<ToolLandingPage slug="sign-pdf"><SignPdf /></ToolLandingPage>} />
|
||||
|
||||
{/* Phase 2 – PDF Extra */}
|
||||
<Route path="/tools/crop-pdf" element={<ToolLandingPage slug="crop-pdf"><CropPdf /></ToolLandingPage>} />
|
||||
<Route path="/tools/flatten-pdf" element={<ToolLandingPage slug="flatten-pdf"><FlattenPdf /></ToolLandingPage>} />
|
||||
<Route path="/tools/repair-pdf" element={<ToolLandingPage slug="repair-pdf"><RepairPdf /></ToolLandingPage>} />
|
||||
<Route path="/tools/pdf-metadata" element={<ToolLandingPage slug="pdf-metadata"><PdfMetadata /></ToolLandingPage>} />
|
||||
|
||||
{/* Phase 2 – Image & Utility */}
|
||||
<Route path="/tools/image-crop" element={<ToolLandingPage slug="image-crop"><ImageCrop /></ToolLandingPage>} />
|
||||
<Route path="/tools/image-rotate-flip" element={<ToolLandingPage slug="image-rotate-flip"><ImageRotateFlip /></ToolLandingPage>} />
|
||||
<Route path="/tools/barcode-generator" element={<ToolLandingPage slug="barcode-generator"><BarcodeGenerator /></ToolLandingPage>} />
|
||||
|
||||
{/* 404 */}
|
||||
<Route path="*" element={<NotFoundPage />} />
|
||||
</Routes>
|
||||
|
||||
@@ -111,6 +111,12 @@ export default function Footer() {
|
||||
>
|
||||
{t('common.blog')}
|
||||
</Link>
|
||||
<Link
|
||||
to="/developers"
|
||||
className="text-sm text-slate-500 transition-colors hover:text-primary-600 dark:text-slate-400 dark:hover:text-primary-400"
|
||||
>
|
||||
{t('common.developers')}
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -92,6 +92,12 @@ export default function Header() {
|
||||
>
|
||||
{t('common.account')}
|
||||
</Link>
|
||||
<Link
|
||||
to="/developers"
|
||||
className="text-sm font-medium text-slate-600 transition-colors hover:text-primary-600 dark:text-slate-300 dark:hover:text-primary-400"
|
||||
>
|
||||
{t('common.developers')}
|
||||
</Link>
|
||||
</nav>
|
||||
|
||||
{/* Actions */}
|
||||
@@ -189,6 +195,13 @@ export default function Header() {
|
||||
>
|
||||
{user?.email || t('common.account')}
|
||||
</Link>
|
||||
<Link
|
||||
to="/developers"
|
||||
onClick={() => setMobileOpen(false)}
|
||||
className="block rounded-lg px-3 py-2.5 text-sm font-medium text-slate-600 transition-colors hover:bg-slate-50 dark:text-slate-300 dark:hover:bg-slate-800"
|
||||
>
|
||||
{t('common.developers')}
|
||||
</Link>
|
||||
</nav>
|
||||
)}
|
||||
</header>
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { buildLanguageAlternates, getOgLocale } from '@/utils/seo';
|
||||
|
||||
const SITE_NAME = 'SaaS-PDF';
|
||||
|
||||
@@ -23,9 +25,12 @@ interface SEOHeadProps {
|
||||
* - Optional JSON-LD structured data
|
||||
*/
|
||||
export default function SEOHead({ title, description, path, type = 'website', jsonLd }: SEOHeadProps) {
|
||||
const { i18n } = useTranslation();
|
||||
const origin = typeof window !== 'undefined' ? window.location.origin : '';
|
||||
const canonicalUrl = `${origin}${path}`;
|
||||
const fullTitle = `${title} — ${SITE_NAME}`;
|
||||
const languageAlternates = buildLanguageAlternates(origin, path);
|
||||
const currentOgLocale = getOgLocale(i18n.language);
|
||||
|
||||
const schemas = jsonLd ? (Array.isArray(jsonLd) ? jsonLd : [jsonLd]) : [];
|
||||
|
||||
@@ -34,6 +39,15 @@ export default function SEOHead({ title, description, path, type = 'website', js
|
||||
<title>{fullTitle}</title>
|
||||
<meta name="description" content={description} />
|
||||
<link rel="canonical" href={canonicalUrl} />
|
||||
{languageAlternates.map((alternate) => (
|
||||
<link
|
||||
key={alternate.hrefLang}
|
||||
rel="alternate"
|
||||
hrefLang={alternate.hrefLang}
|
||||
href={alternate.href}
|
||||
/>
|
||||
))}
|
||||
<link rel="alternate" hrefLang="x-default" href={canonicalUrl} />
|
||||
|
||||
{/* OpenGraph */}
|
||||
<meta property="og:title" content={fullTitle} />
|
||||
@@ -41,9 +55,12 @@ export default function SEOHead({ title, description, path, type = 'website', js
|
||||
<meta property="og:url" content={canonicalUrl} />
|
||||
<meta property="og:type" content={type} />
|
||||
<meta property="og:site_name" content={SITE_NAME} />
|
||||
<meta property="og:locale" content="en_US" />
|
||||
<meta property="og:locale:alternate" content="ar_SA" />
|
||||
<meta property="og:locale:alternate" content="fr_FR" />
|
||||
<meta property="og:locale" content={currentOgLocale} />
|
||||
{languageAlternates
|
||||
.filter((alternate) => alternate.ogLocale !== currentOgLocale)
|
||||
.map((alternate) => (
|
||||
<meta key={alternate.ogLocale} property="og:locale:alternate" content={alternate.ogLocale} />
|
||||
))}
|
||||
|
||||
{/* Twitter */}
|
||||
<meta name="twitter:card" content="summary" />
|
||||
|
||||
74
frontend/src/components/seo/SuggestedTools.tsx
Normal file
74
frontend/src/components/seo/SuggestedTools.tsx
Normal file
@@ -0,0 +1,74 @@
|
||||
import { ArrowRight } from 'lucide-react';
|
||||
import { Link } from 'react-router-dom';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { getToolSEO } from '@/config/seoData';
|
||||
|
||||
interface SuggestedToolsProps {
|
||||
currentSlug: string;
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
const CATEGORY_COLORS: Record<string, string> = {
|
||||
PDF: 'bg-red-50 text-red-700 dark:bg-red-900/20 dark:text-red-400',
|
||||
Image: 'bg-emerald-50 text-emerald-700 dark:bg-emerald-900/20 dark:text-emerald-400',
|
||||
AI: 'bg-violet-50 text-violet-700 dark:bg-violet-900/20 dark:text-violet-400',
|
||||
Convert: 'bg-blue-50 text-blue-700 dark:bg-blue-900/20 dark:text-blue-400',
|
||||
Utility: 'bg-amber-50 text-amber-700 dark:bg-amber-900/20 dark:text-amber-400',
|
||||
};
|
||||
|
||||
export default function SuggestedTools({ currentSlug, limit = 3 }: SuggestedToolsProps) {
|
||||
const { t } = useTranslation();
|
||||
const currentTool = getToolSEO(currentSlug);
|
||||
|
||||
if (!currentTool) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const relatedTools = currentTool.relatedSlugs
|
||||
.map((slug) => getToolSEO(slug))
|
||||
.filter(Boolean)
|
||||
.slice(0, limit);
|
||||
|
||||
if (relatedTools.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<section className="mt-6 rounded-2xl border border-slate-200 bg-white p-5 dark:border-slate-700 dark:bg-slate-900/60">
|
||||
<div className="mb-4">
|
||||
<h3 className="text-base font-semibold text-slate-900 dark:text-white">
|
||||
{t('home.suggestedTools')}
|
||||
</h3>
|
||||
<p className="mt-1 text-sm text-slate-600 dark:text-slate-400">
|
||||
{t('home.suggestedToolsDesc')}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="grid gap-3 sm:grid-cols-3">
|
||||
{relatedTools.map((tool) => (
|
||||
<Link
|
||||
key={tool!.slug}
|
||||
to={`/tools/${tool!.slug}`}
|
||||
className="group rounded-xl border border-slate-200 bg-slate-50 p-4 transition-colors hover:border-primary-300 hover:bg-white dark:border-slate-700 dark:bg-slate-800 dark:hover:border-primary-600"
|
||||
>
|
||||
<div className="flex items-start justify-between gap-2">
|
||||
<h4 className="text-sm font-semibold text-slate-800 group-hover:text-primary-600 dark:text-slate-100 dark:group-hover:text-primary-400">
|
||||
{t(`tools.${tool!.i18nKey}.title`)}
|
||||
</h4>
|
||||
<span className={`rounded-full px-2 py-0.5 text-[11px] font-medium ${CATEGORY_COLORS[tool!.category] || ''}`}>
|
||||
{tool!.category}
|
||||
</span>
|
||||
</div>
|
||||
<p className="mt-2 text-xs leading-5 text-slate-600 dark:text-slate-400">
|
||||
{t(`tools.${tool!.i18nKey}.shortDesc`)}
|
||||
</p>
|
||||
<span className="mt-3 inline-flex items-center gap-1 text-xs font-medium text-primary-600 dark:text-primary-400">
|
||||
{t('common.tryOtherTools')}
|
||||
<ArrowRight className="h-3.5 w-3.5" />
|
||||
</span>
|
||||
</Link>
|
||||
))}
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
@@ -2,11 +2,12 @@ import { Helmet } from 'react-helmet-async';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { CheckCircle } from 'lucide-react';
|
||||
import { getToolSEO } from '@/config/seoData';
|
||||
import { generateToolSchema, generateBreadcrumbs, generateFAQ } from '@/utils/seo';
|
||||
import { buildLanguageAlternates, generateToolSchema, generateBreadcrumbs, generateFAQ, getOgLocale } from '@/utils/seo';
|
||||
import FAQSection from './FAQSection';
|
||||
import RelatedTools from './RelatedTools';
|
||||
import ToolRating from '@/components/shared/ToolRating';
|
||||
import SharePanel from '@/components/shared/SharePanel';
|
||||
import ToolWorkflowPanel from '@/components/shared/ToolWorkflowPanel';
|
||||
import { useToolRating } from '@/hooks/useToolRating';
|
||||
import { dispatchRatingPrompt } from '@/utils/ratingPrompt';
|
||||
|
||||
@@ -27,7 +28,7 @@ interface ToolLandingPageProps {
|
||||
* feature bullets, and proper meta tags around any tool component.
|
||||
*/
|
||||
export default function ToolLandingPage({ slug, children }: ToolLandingPageProps) {
|
||||
const { t } = useTranslation();
|
||||
const { t, i18n } = useTranslation();
|
||||
const seo = getToolSEO(slug);
|
||||
const ratingData = useToolRating(slug);
|
||||
|
||||
@@ -37,7 +38,10 @@ export default function ToolLandingPage({ slug, children }: ToolLandingPageProps
|
||||
const toolTitle = t(`tools.${seo.i18nKey}.title`);
|
||||
const toolDesc = t(`tools.${seo.i18nKey}.description`);
|
||||
const origin = typeof window !== 'undefined' ? window.location.origin : '';
|
||||
const canonicalUrl = `${origin}/tools/${slug}`;
|
||||
const path = `/tools/${slug}`;
|
||||
const canonicalUrl = `${origin}${path}`;
|
||||
const languageAlternates = buildLanguageAlternates(origin, path);
|
||||
const currentOgLocale = getOgLocale(i18n.language);
|
||||
|
||||
const toolSchema = generateToolSchema({
|
||||
name: toolTitle,
|
||||
@@ -63,12 +67,27 @@ export default function ToolLandingPage({ slug, children }: ToolLandingPageProps
|
||||
<meta name="description" content={seo.metaDescription} />
|
||||
<meta name="keywords" content={seo.keywords} />
|
||||
<link rel="canonical" href={canonicalUrl} />
|
||||
{languageAlternates.map((alternate) => (
|
||||
<link
|
||||
key={alternate.hrefLang}
|
||||
rel="alternate"
|
||||
hrefLang={alternate.hrefLang}
|
||||
href={alternate.href}
|
||||
/>
|
||||
))}
|
||||
<link rel="alternate" hrefLang="x-default" href={canonicalUrl} />
|
||||
|
||||
{/* Open Graph */}
|
||||
<meta property="og:title" content={`${toolTitle} — ${seo.titleSuffix}`} />
|
||||
<meta property="og:description" content={seo.metaDescription} />
|
||||
<meta property="og:url" content={canonicalUrl} />
|
||||
<meta property="og:type" content="website" />
|
||||
<meta property="og:locale" content={currentOgLocale} />
|
||||
{languageAlternates
|
||||
.filter((alternate) => alternate.ogLocale !== currentOgLocale)
|
||||
.map((alternate) => (
|
||||
<meta key={alternate.ogLocale} property="og:locale:alternate" content={alternate.ogLocale} />
|
||||
))}
|
||||
|
||||
{/* Twitter */}
|
||||
<meta name="twitter:card" content="summary" />
|
||||
@@ -109,6 +128,8 @@ export default function ToolLandingPage({ slug, children }: ToolLandingPageProps
|
||||
|
||||
{/* SEO Content Below Tool */}
|
||||
<div className="mx-auto mt-16 max-w-3xl">
|
||||
<ToolWorkflowPanel />
|
||||
|
||||
{/* What this tool does */}
|
||||
<section className="mb-12">
|
||||
<h2 className="mb-4 text-xl font-bold text-slate-900 dark:text-white">
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { Download, RotateCcw, Clock } from 'lucide-react';
|
||||
import type { TaskResult } from '@/services/api';
|
||||
import { formatFileSize } from '@/utils/textTools';
|
||||
import { trackEvent } from '@/services/analytics';
|
||||
import { dispatchCurrentToolRatingPrompt } from '@/utils/ratingPrompt';
|
||||
import SharePanel from '@/components/shared/SharePanel';
|
||||
import SuggestedTools from '@/components/seo/SuggestedTools';
|
||||
|
||||
interface DownloadButtonProps {
|
||||
/** Task result containing download URL */
|
||||
@@ -15,6 +17,10 @@ interface DownloadButtonProps {
|
||||
|
||||
export default function DownloadButton({ result, onStartOver }: DownloadButtonProps) {
|
||||
const { t } = useTranslation();
|
||||
const location = useLocation();
|
||||
const currentToolSlug = location.pathname.startsWith('/tools/')
|
||||
? location.pathname.replace('/tools/', '')
|
||||
: null;
|
||||
|
||||
const handleDownloadClick = () => {
|
||||
trackEvent('download_clicked', { filename: result.filename || 'unknown' });
|
||||
@@ -103,6 +109,8 @@ export default function DownloadButton({ result, onStartOver }: DownloadButtonPr
|
||||
<RotateCcw className="h-4 w-4" />
|
||||
{t('common.startOver')}
|
||||
</button>
|
||||
|
||||
{currentToolSlug && <SuggestedTools currentSlug={currentToolSlug} />}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
97
frontend/src/components/shared/SocialProofStrip.tsx
Normal file
97
frontend/src/components/shared/SocialProofStrip.tsx
Normal file
@@ -0,0 +1,97 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import { Link } from 'react-router-dom';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Star } from 'lucide-react';
|
||||
import { getToolSEO } from '@/config/seoData';
|
||||
import { getPublicStats, type PublicStatsSummary } from '@/services/api';
|
||||
|
||||
interface SocialProofStripProps {
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export default function SocialProofStrip({ className = '' }: SocialProofStripProps) {
|
||||
const { t } = useTranslation();
|
||||
const [stats, setStats] = useState<PublicStatsSummary | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
let cancelled = false;
|
||||
|
||||
getPublicStats()
|
||||
.then((data) => {
|
||||
if (!cancelled) {
|
||||
setStats(data);
|
||||
}
|
||||
})
|
||||
.catch(() => {
|
||||
if (!cancelled) {
|
||||
setStats(null);
|
||||
}
|
||||
});
|
||||
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, []);
|
||||
|
||||
if (!stats) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const topTools = stats.top_tools.slice(0, 3).map((tool) => {
|
||||
const seo = getToolSEO(tool.tool);
|
||||
return seo ? t(`tools.${seo.i18nKey}.title`) : tool.tool;
|
||||
});
|
||||
|
||||
const cards = [
|
||||
{ label: t('socialProof.processedFiles'), value: stats.total_files_processed.toLocaleString() },
|
||||
{ label: t('socialProof.successRate'), value: `${stats.success_rate}%` },
|
||||
{ label: t('socialProof.last24h'), value: stats.files_last_24h.toLocaleString() },
|
||||
{ label: t('socialProof.averageRating'), value: `${stats.average_rating.toFixed(1)} / 5` },
|
||||
];
|
||||
|
||||
return (
|
||||
<section className={`rounded-[2rem] border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70 ${className}`.trim()}>
|
||||
<div className="flex flex-col gap-6 lg:flex-row lg:items-center lg:justify-between">
|
||||
<div className="max-w-2xl">
|
||||
<p className="text-sm font-semibold uppercase tracking-[0.2em] text-primary-600 dark:text-primary-400">
|
||||
{t('socialProof.badge')}
|
||||
</p>
|
||||
<h2 className="mt-2 text-2xl font-bold text-slate-900 dark:text-white">
|
||||
{t('socialProof.title')}
|
||||
</h2>
|
||||
<p className="mt-2 text-slate-600 dark:text-slate-400">
|
||||
{t('socialProof.subtitle')}
|
||||
</p>
|
||||
{topTools.length > 0 && (
|
||||
<div className="mt-4 flex flex-wrap gap-2">
|
||||
{topTools.map((tool) => (
|
||||
<span key={tool} className="rounded-full bg-slate-100 px-3 py-1 text-xs font-medium text-slate-700 dark:bg-slate-800 dark:text-slate-200">
|
||||
{tool}
|
||||
</span>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="grid gap-3 sm:grid-cols-2 lg:min-w-[420px]">
|
||||
{cards.map((card) => (
|
||||
<div key={card.label} className="rounded-2xl bg-slate-50 p-4 dark:bg-slate-800/70">
|
||||
<p className="text-xs font-semibold uppercase tracking-widest text-slate-400 dark:text-slate-500">{card.label}</p>
|
||||
<p className="mt-2 text-2xl font-bold text-slate-900 dark:text-white">{card.value}</p>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mt-5 flex flex-col gap-3 border-t border-slate-200 pt-4 sm:flex-row sm:items-center sm:justify-between dark:border-slate-700">
|
||||
<p className="inline-flex items-center gap-2 text-sm text-slate-600 dark:text-slate-400">
|
||||
<Star className="h-4 w-4 text-amber-500" />
|
||||
{t('socialProof.basedOnRatings', { count: stats.rating_count })}
|
||||
</p>
|
||||
<Link to="/developers" className="text-sm font-semibold text-primary-600 hover:text-primary-700 dark:text-primary-400 dark:hover:text-primary-300">
|
||||
{t('socialProof.viewDevelopers')}
|
||||
</Link>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
84
frontend/src/components/shared/ToolWorkflowPanel.tsx
Normal file
84
frontend/src/components/shared/ToolWorkflowPanel.tsx
Normal file
@@ -0,0 +1,84 @@
|
||||
import { FolderClock, KeyRound, ShieldCheck } from 'lucide-react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Link } from 'react-router-dom';
|
||||
|
||||
const workflowCards = [
|
||||
{
|
||||
key: 'history',
|
||||
icon: FolderClock,
|
||||
titleKey: 'account.onboardingFirstTaskTitle',
|
||||
descriptionKey: 'account.onboardingFirstTaskDesc',
|
||||
href: '/account',
|
||||
ctaKey: 'common.account',
|
||||
},
|
||||
{
|
||||
key: 'limits',
|
||||
icon: ShieldCheck,
|
||||
titleKey: 'account.onboardingUpgradeTitle',
|
||||
descriptionKey: 'account.onboardingUpgradeDesc',
|
||||
href: '/pricing',
|
||||
ctaKey: 'common.pricing',
|
||||
},
|
||||
{
|
||||
key: 'api',
|
||||
icon: KeyRound,
|
||||
titleKey: 'account.onboardingApiTitle',
|
||||
descriptionKey: 'account.onboardingApiDesc',
|
||||
href: '/developers',
|
||||
ctaKey: 'pages.developers.getApiKey',
|
||||
},
|
||||
] as const;
|
||||
|
||||
export default function ToolWorkflowPanel() {
|
||||
const { t } = useTranslation();
|
||||
|
||||
return (
|
||||
<section className="mb-12 rounded-3xl border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="flex flex-col gap-3 border-b border-slate-200 pb-5 dark:border-slate-700 sm:flex-row sm:items-end sm:justify-between">
|
||||
<div>
|
||||
<p className="text-sm font-semibold uppercase tracking-[0.2em] text-primary-600 dark:text-primary-300">
|
||||
{t('account.onboardingTitle')}
|
||||
</p>
|
||||
<h2 className="mt-2 text-2xl font-bold text-slate-900 dark:text-white">
|
||||
{t('account.onboardingSubtitle')}
|
||||
</h2>
|
||||
</div>
|
||||
<Link
|
||||
to="/account"
|
||||
className="inline-flex items-center justify-center rounded-full bg-primary-600 px-5 py-2 text-sm font-semibold text-white transition-colors hover:bg-primary-700"
|
||||
>
|
||||
{t('common.account')}
|
||||
</Link>
|
||||
</div>
|
||||
|
||||
<div className="mt-6 grid gap-4 md:grid-cols-3">
|
||||
{workflowCards.map((card) => {
|
||||
const Icon = card.icon;
|
||||
|
||||
return (
|
||||
<article
|
||||
key={card.key}
|
||||
className="rounded-2xl border border-slate-200 bg-slate-50 p-5 dark:border-slate-700 dark:bg-slate-950/50"
|
||||
>
|
||||
<div className="flex h-11 w-11 items-center justify-center rounded-2xl bg-primary-100 text-primary-700 dark:bg-primary-500/15 dark:text-primary-200">
|
||||
<Icon className="h-5 w-5" />
|
||||
</div>
|
||||
<h3 className="mt-4 text-lg font-semibold text-slate-900 dark:text-white">
|
||||
{t(card.titleKey)}
|
||||
</h3>
|
||||
<p className="mt-2 text-sm leading-6 text-slate-600 dark:text-slate-300">
|
||||
{t(card.descriptionKey)}
|
||||
</p>
|
||||
<Link
|
||||
to={card.href}
|
||||
className="mt-4 inline-flex items-center text-sm font-semibold text-primary-700 transition-colors hover:text-primary-800 dark:text-primary-300 dark:hover:text-primary-200"
|
||||
>
|
||||
{t(card.ctaKey)}
|
||||
</Link>
|
||||
</article>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
118
frontend/src/components/tools/BarcodeGenerator.tsx
Normal file
118
frontend/src/components/tools/BarcodeGenerator.tsx
Normal file
@@ -0,0 +1,118 @@
|
||||
import { useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { Barcode } from 'lucide-react';
|
||||
import ProgressBar from '@/components/shared/ProgressBar';
|
||||
import AdSlot from '@/components/layout/AdSlot';
|
||||
import { useTaskPolling } from '@/hooks/useTaskPolling';
|
||||
import { generateToolSchema } from '@/utils/seo';
|
||||
import api, { type TaskResponse } from '@/services/api';
|
||||
|
||||
// Barcode symbologies accepted by the backend /barcode/generate endpoint.
const BARCODE_TYPES = ['code128', 'code39', 'ean13', 'ean8', 'upca', 'isbn13', 'isbn10', 'issn', 'pzn'] as const;

/**
 * Barcode generator tool page: the user enters data, picks a symbology and an
 * output format (PNG/SVG), then the component submits an async task and polls
 * it until completion, finally showing a preview plus a download link.
 *
 * Phases: 'input' (form) -> 'processing' (polling) -> 'done' (result or error).
 */
export default function BarcodeGenerator() {
  const { t } = useTranslation();
  // Current UI phase; drives which of the three views below is rendered.
  const [phase, setPhase] = useState<'input' | 'processing' | 'done'>('input');
  const [data, setData] = useState('');
  const [barcodeType, setBarcodeType] = useState('code128');
  const [format, setFormat] = useState('png');
  // Backend task id; non-null starts the polling hook below.
  const [taskId, setTaskId] = useState<string | null>(null);
  // Submission (pre-task) error, distinct from the polling hook's taskError.
  const [error, setError] = useState<string | null>(null);

  // Polls the task; either callback moves the UI to the 'done' phase.
  const { status, result, error: taskError } = useTaskPolling({
    taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
  });

  // Submit the generation request; on HTTP failure, surface the message and
  // jump straight to 'done' so the error view renders.
  const handleGenerate = async () => {
    if (!data.trim()) return;
    setError(null); setPhase('processing');
    try {
      const res = await api.post<TaskResponse>('/barcode/generate', {
        data: data.trim(), barcode_type: barcodeType, format,
      });
      setTaskId(res.data.task_id);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to generate barcode.');
      setPhase('done');
    }
  };

  // Restore all state to the initial form defaults.
  const handleReset = () => { setPhase('input'); setData(''); setBarcodeType('code128'); setFormat('png'); setTaskId(null); setError(null); };

  const downloadUrl = result?.download_url || null;

  // JSON-LD structured data for SEO, injected via Helmet below.
  const schema = generateToolSchema({ name: t('tools.barcode.title'), description: t('tools.barcode.description'), url: `${window.location.origin}/tools/barcode-generator` });

  return (
    <>
      <Helmet>
        <title>{t('tools.barcode.title')} — {t('common.appName')}</title>
        <meta name="description" content={t('tools.barcode.description')} />
        <link rel="canonical" href={`${window.location.origin}/tools/barcode-generator`} />
        <script type="application/ld+json">{JSON.stringify(schema)}</script>
      </Helmet>
      <div className="mx-auto max-w-2xl">
        {/* Page header: icon, title, description */}
        <div className="mb-8 text-center">
          <div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-amber-100 dark:bg-amber-900/30">
            <Barcode className="h-8 w-8 text-amber-600 dark:text-amber-400" />
          </div>
          <h1 className="section-heading">{t('tools.barcode.title')}</h1>
          <p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.barcode.description')}</p>
        </div>
        <AdSlot slot="top-banner" format="horizontal" className="mb-6" />
        {/* Phase 1: input form (data, symbology, output format) */}
        {phase === 'input' && (
          <div className="space-y-4">
            <div className="rounded-2xl bg-white p-5 ring-1 ring-slate-200 dark:bg-slate-800 dark:ring-slate-700 space-y-4">
              <div>
                <label className="mb-2 block text-sm font-medium text-slate-700 dark:text-slate-300">{t('tools.barcode.dataLabel')}</label>
                <input type="text" value={data} onChange={(e) => setData(e.target.value)}
                  placeholder={t('tools.barcode.dataPlaceholder')}
                  className="w-full rounded-lg border border-slate-300 px-3 py-2 text-sm dark:border-slate-600 dark:bg-slate-700 dark:text-slate-200" />
              </div>
              <div>
                <label className="mb-2 block text-sm font-medium text-slate-700 dark:text-slate-300">{t('tools.barcode.typeLabel')}</label>
                <select value={barcodeType} onChange={(e) => setBarcodeType(e.target.value)}
                  className="w-full rounded-lg border border-slate-300 px-3 py-2 text-sm dark:border-slate-600 dark:bg-slate-700 dark:text-slate-200">
                  {BARCODE_TYPES.map((bt) => <option key={bt} value={bt}>{bt.toUpperCase()}</option>)}
                </select>
              </div>
              <div>
                <label className="mb-2 block text-sm font-medium text-slate-700 dark:text-slate-300">{t('tools.barcode.formatLabel')}</label>
                <select value={format} onChange={(e) => setFormat(e.target.value)}
                  className="w-full rounded-lg border border-slate-300 px-3 py-2 text-sm dark:border-slate-600 dark:bg-slate-700 dark:text-slate-200">
                  <option value="png">PNG</option>
                  <option value="svg">SVG</option>
                </select>
              </div>
            </div>
            <button onClick={handleGenerate} disabled={!data.trim()}
              className="btn-primary w-full disabled:opacity-50 disabled:cursor-not-allowed">
              {t('tools.barcode.shortDesc')}
            </button>
          </div>
        )}
        {/* Phase 2: polling progress */}
        {phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
        {/* Phase 3a: success — preview image + download/reset actions */}
        {phase === 'done' && downloadUrl && (
          <div className="space-y-4 text-center">
            <div className="rounded-2xl bg-white p-6 ring-1 ring-slate-200 dark:bg-slate-800 dark:ring-slate-700">
              <img src={downloadUrl} alt="Barcode" className="mx-auto max-w-full" />
            </div>
            <div className="flex gap-3">
              <a href={downloadUrl} download className="btn-primary flex-1">{t('common.download')}</a>
              <button onClick={handleReset} className="btn-secondary flex-1">{t('common.startOver')}</button>
            </div>
          </div>
        )}
        {/* Phase 3b: failure — show whichever error (task or submit) occurred */}
        {phase === 'done' && (taskError || error) && (
          <div className="space-y-4">
            <div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
              <p className="text-sm text-red-700 dark:text-red-400">{taskError || error}</p>
            </div>
            <button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
          </div>
        )}
        <AdSlot slot="bottom-banner" className="mt-8" />
      </div>
    </>
  );
}
|
||||
103
frontend/src/components/tools/CropPdf.tsx
Normal file
103
frontend/src/components/tools/CropPdf.tsx
Normal file
@@ -0,0 +1,103 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { Scissors } from 'lucide-react';
|
||||
import FileUploader from '@/components/shared/FileUploader';
|
||||
import ProgressBar from '@/components/shared/ProgressBar';
|
||||
import DownloadButton from '@/components/shared/DownloadButton';
|
||||
import AdSlot from '@/components/layout/AdSlot';
|
||||
import { useTaskPolling } from '@/hooks/useTaskPolling';
|
||||
import { generateToolSchema } from '@/utils/seo';
|
||||
import { useFileStore } from '@/stores/fileStore';
|
||||
import api, { type TaskResponse } from '@/services/api';
|
||||
|
||||
/**
 * PDF crop tool page: the user uploads a PDF, sets per-side margins to trim,
 * then the component submits an async crop task and polls it to completion.
 *
 * Phases: 'upload' (file + margins form) -> 'processing' (polling) -> 'done'
 * (DownloadButton on success, error panel on failure).
 */
export default function CropPdf() {
  const { t } = useTranslation();
  const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
  const [file, setFile] = useState<File | null>(null);
  // Backend task id; non-null starts the polling hook below.
  const [taskId, setTaskId] = useState<string | null>(null);
  // Submission (pre-task) error, distinct from the polling hook's taskError.
  const [error, setError] = useState<string | null>(null);
  // Margins to trim from each side. Units are whatever the backend expects —
  // presumably points; confirm against the /pdf-tools/crop handler.
  const [margins, setMargins] = useState({ left: 0, right: 0, top: 0, bottom: 0 });

  // Polls the task; either callback moves the UI to the 'done' phase.
  const { status, result, error: taskError } = useTaskPolling({
    taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
  });

  // Accept a file handed off from another page via the shared file store,
  // then clear the store so it is consumed exactly once (mount-only effect).
  const storeFile = useFileStore((s) => s.file);
  const clearStoreFile = useFileStore((s) => s.clearFile);
  useEffect(() => { if (storeFile) { setFile(storeFile); clearStoreFile(); } }, []); // eslint-disable-line react-hooks/exhaustive-deps

  // Submit the crop request as multipart form data; on HTTP failure, surface
  // the message and jump straight to 'done' so the error view renders.
  const handleUpload = async () => {
    if (!file) return;
    setError(null); setPhase('processing');
    try {
      const fd = new FormData();
      fd.append('file', file);
      fd.append('left', String(margins.left));
      fd.append('right', String(margins.right));
      fd.append('top', String(margins.top));
      fd.append('bottom', String(margins.bottom));
      const res = await api.post<TaskResponse>('/pdf-tools/crop', fd);
      setTaskId(res.data.task_id);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to crop PDF.');
      setPhase('done');
    }
  };

  // Restore all state to the initial upload-form defaults.
  const handleReset = () => { setPhase('upload'); setFile(null); setTaskId(null); setError(null); setMargins({ left: 0, right: 0, top: 0, bottom: 0 }); };

  // JSON-LD structured data for SEO, injected via Helmet below.
  const schema = generateToolSchema({ name: t('tools.cropPdf.title'), description: t('tools.cropPdf.description'), url: `${window.location.origin}/tools/crop-pdf` });

  return (
    <>
      <Helmet>
        <title>{t('tools.cropPdf.title')} — {t('common.appName')}</title>
        <meta name="description" content={t('tools.cropPdf.description')} />
        <link rel="canonical" href={`${window.location.origin}/tools/crop-pdf`} />
        <script type="application/ld+json">{JSON.stringify(schema)}</script>
      </Helmet>
      <div className="mx-auto max-w-2xl">
        {/* Page header: icon, title, description */}
        <div className="mb-8 text-center">
          <div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-yellow-100 dark:bg-yellow-900/30">
            <Scissors className="h-8 w-8 text-yellow-600 dark:text-yellow-400" />
          </div>
          <h1 className="section-heading">{t('tools.cropPdf.title')}</h1>
          <p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.cropPdf.description')}</p>
        </div>
        <AdSlot slot="top-banner" format="horizontal" className="mb-6" />
        {/* Phase 1: file picker + margin inputs (shown once a file is chosen) */}
        {phase === 'upload' && (
          <div className="space-y-4">
            <FileUploader onFileSelect={setFile} file={file} accept={{ 'application/pdf': ['.pdf'] }} maxSizeMB={20} acceptLabel="PDF (.pdf)" />
            {file && (
              <div className="rounded-2xl bg-white p-5 ring-1 ring-slate-200 dark:bg-slate-800 dark:ring-slate-700">
                <p className="mb-3 text-sm font-medium text-slate-700 dark:text-slate-300">{t('tools.cropPdf.marginsLabel')}</p>
                <div className="grid grid-cols-2 gap-3">
                  {(['top', 'bottom', 'left', 'right'] as const).map((side) => (
                    <div key={side}>
                      <label className="mb-1 block text-xs text-slate-500 dark:text-slate-400">{t(`tools.cropPdf.${side}`)}</label>
                      {/* Math.max clamps negative input to 0 before storing */}
                      <input type="number" min={0} value={margins[side]} onChange={(e) => setMargins((m) => ({ ...m, [side]: Math.max(0, Number(e.target.value)) }))}
                        className="w-full rounded-lg border border-slate-300 px-3 py-2 text-sm dark:border-slate-600 dark:bg-slate-700 dark:text-slate-200" />
                    </div>
                  ))}
                </div>
              </div>
            )}
            {file && <button onClick={handleUpload} className="btn-primary w-full">{t('tools.cropPdf.shortDesc')}</button>}
          </div>
        )}
        {/* Phase 2: polling progress */}
        {phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
        {/* Phase 3a: success — standard download/start-over panel */}
        {phase === 'done' && result && result.status === 'completed' && <DownloadButton result={result} onStartOver={handleReset} />}
        {/* Phase 3b: failure — show whichever error (task or submit) occurred */}
        {phase === 'done' && (taskError || error) && (
          <div className="space-y-4">
            <div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
              <p className="text-sm text-red-700 dark:text-red-400">{taskError || error}</p>
            </div>
            <button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
          </div>
        )}
        <AdSlot slot="bottom-banner" className="mt-8" />
      </div>
    </>
  );
}
|
||||
82
frontend/src/components/tools/ExcelToPdf.tsx
Normal file
82
frontend/src/components/tools/ExcelToPdf.tsx
Normal file
@@ -0,0 +1,82 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { Table2 } from 'lucide-react';
|
||||
import FileUploader from '@/components/shared/FileUploader';
|
||||
import ProgressBar from '@/components/shared/ProgressBar';
|
||||
import DownloadButton from '@/components/shared/DownloadButton';
|
||||
import AdSlot from '@/components/layout/AdSlot';
|
||||
import { useFileUpload } from '@/hooks/useFileUpload';
|
||||
import { useTaskPolling } from '@/hooks/useTaskPolling';
|
||||
import { generateToolSchema } from '@/utils/seo';
|
||||
import { useFileStore } from '@/stores/fileStore';
|
||||
|
||||
export default function ExcelToPdf() {
|
||||
const { t } = useTranslation();
|
||||
const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
|
||||
|
||||
const { file, uploadProgress, isUploading, taskId, error: uploadError, selectFile, startUpload, reset } =
|
||||
useFileUpload({ endpoint: '/convert/excel-to-pdf', maxSizeMB: 15, acceptedTypes: ['xlsx', 'xls'] });
|
||||
|
||||
const { status, result, error: taskError } = useTaskPolling({
|
||||
taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
|
||||
});
|
||||
|
||||
const storeFile = useFileStore((s) => s.file);
|
||||
const clearStoreFile = useFileStore((s) => s.clearFile);
|
||||
useEffect(() => { if (storeFile) { selectFile(storeFile); clearStoreFile(); } }, []); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
const handleUpload = async () => { const id = await startUpload(); if (id) setPhase('processing'); };
|
||||
const handleReset = () => { reset(); setPhase('upload'); };
|
||||
|
||||
const schema = generateToolSchema({
|
||||
name: t('tools.excelToPdf.title'), description: t('tools.excelToPdf.description'),
|
||||
url: `${window.location.origin}/tools/excel-to-pdf`,
|
||||
});
|
||||
|
||||
return (
|
||||
<>
|
||||
<Helmet>
|
||||
<title>{t('tools.excelToPdf.title')} — {t('common.appName')}</title>
|
||||
<meta name="description" content={t('tools.excelToPdf.description')} />
|
||||
<link rel="canonical" href={`${window.location.origin}/tools/excel-to-pdf`} />
|
||||
<script type="application/ld+json">{JSON.stringify(schema)}</script>
|
||||
</Helmet>
|
||||
<div className="mx-auto max-w-2xl">
|
||||
<div className="mb-8 text-center">
|
||||
<div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-green-100 dark:bg-green-900/30">
|
||||
<Table2 className="h-8 w-8 text-green-600 dark:text-green-400" />
|
||||
</div>
|
||||
<h1 className="section-heading">{t('tools.excelToPdf.title')}</h1>
|
||||
<p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.excelToPdf.description')}</p>
|
||||
</div>
|
||||
<AdSlot slot="top-banner" format="horizontal" className="mb-6" />
|
||||
{phase === 'upload' && (
|
||||
<div className="space-y-4">
|
||||
<FileUploader onFileSelect={selectFile} file={file}
|
||||
accept={{
|
||||
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': ['.xlsx'],
|
||||
'application/vnd.ms-excel': ['.xls'],
|
||||
}}
|
||||
maxSizeMB={15} isUploading={isUploading} uploadProgress={uploadProgress}
|
||||
error={uploadError} onReset={handleReset} acceptLabel="Excel (.xlsx, .xls)" />
|
||||
{file && !isUploading && (
|
||||
<button onClick={handleUpload} className="btn-primary w-full">{t('tools.excelToPdf.shortDesc')}</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
{phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
|
||||
{phase === 'done' && result && result.status === 'completed' && <DownloadButton result={result} onStartOver={handleReset} />}
|
||||
{phase === 'done' && taskError && (
|
||||
<div className="space-y-4">
|
||||
<div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
|
||||
<p className="text-sm text-red-700 dark:text-red-400">{taskError}</p>
|
||||
</div>
|
||||
<button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
|
||||
</div>
|
||||
)}
|
||||
<AdSlot slot="bottom-banner" className="mt-8" />
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
71
frontend/src/components/tools/FlattenPdf.tsx
Normal file
71
frontend/src/components/tools/FlattenPdf.tsx
Normal file
@@ -0,0 +1,71 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { FileCheck } from 'lucide-react';
|
||||
import FileUploader from '@/components/shared/FileUploader';
|
||||
import ProgressBar from '@/components/shared/ProgressBar';
|
||||
import DownloadButton from '@/components/shared/DownloadButton';
|
||||
import AdSlot from '@/components/layout/AdSlot';
|
||||
import { useFileUpload } from '@/hooks/useFileUpload';
|
||||
import { useTaskPolling } from '@/hooks/useTaskPolling';
|
||||
import { generateToolSchema } from '@/utils/seo';
|
||||
import { useFileStore } from '@/stores/fileStore';
|
||||
|
||||
export default function FlattenPdf() {
|
||||
const { t } = useTranslation();
|
||||
const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
|
||||
|
||||
const { file, uploadProgress, isUploading, taskId, error: uploadError, selectFile, startUpload, reset } =
|
||||
useFileUpload({ endpoint: '/pdf-tools/flatten', maxSizeMB: 20, acceptedTypes: ['pdf'] });
|
||||
|
||||
const { status, result, error: taskError } = useTaskPolling({
|
||||
taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
|
||||
});
|
||||
|
||||
const storeFile = useFileStore((s) => s.file);
|
||||
const clearStoreFile = useFileStore((s) => s.clearFile);
|
||||
useEffect(() => { if (storeFile) { selectFile(storeFile); clearStoreFile(); } }, []); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
const handleUpload = async () => { const id = await startUpload(); if (id) setPhase('processing'); };
|
||||
const handleReset = () => { reset(); setPhase('upload'); };
|
||||
|
||||
const schema = generateToolSchema({ name: t('tools.flattenPdf.title'), description: t('tools.flattenPdf.description'), url: `${window.location.origin}/tools/flatten-pdf` });
|
||||
|
||||
return (
|
||||
<>
|
||||
<Helmet>
|
||||
<title>{t('tools.flattenPdf.title')} — {t('common.appName')}</title>
|
||||
<meta name="description" content={t('tools.flattenPdf.description')} />
|
||||
<link rel="canonical" href={`${window.location.origin}/tools/flatten-pdf`} />
|
||||
<script type="application/ld+json">{JSON.stringify(schema)}</script>
|
||||
</Helmet>
|
||||
<div className="mx-auto max-w-2xl">
|
||||
<div className="mb-8 text-center">
|
||||
<div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-teal-100 dark:bg-teal-900/30">
|
||||
<FileCheck className="h-8 w-8 text-teal-600 dark:text-teal-400" />
|
||||
</div>
|
||||
<h1 className="section-heading">{t('tools.flattenPdf.title')}</h1>
|
||||
<p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.flattenPdf.description')}</p>
|
||||
</div>
|
||||
<AdSlot slot="top-banner" format="horizontal" className="mb-6" />
|
||||
{phase === 'upload' && (
|
||||
<div className="space-y-4">
|
||||
<FileUploader onFileSelect={selectFile} file={file} accept={{ 'application/pdf': ['.pdf'] }} maxSizeMB={20} acceptLabel="PDF (.pdf)" />
|
||||
{file && <button onClick={handleUpload} disabled={isUploading} className="btn-primary w-full">{t('tools.flattenPdf.shortDesc')}</button>}
|
||||
</div>
|
||||
)}
|
||||
{phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
|
||||
{phase === 'done' && result && result.status === 'completed' && <DownloadButton result={result} onStartOver={handleReset} />}
|
||||
{phase === 'done' && (taskError || uploadError) && (
|
||||
<div className="space-y-4">
|
||||
<div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
|
||||
<p className="text-sm text-red-700 dark:text-red-400">{taskError || uploadError}</p>
|
||||
</div>
|
||||
<button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
|
||||
</div>
|
||||
)}
|
||||
<AdSlot slot="bottom-banner" className="mt-8" />
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
105
frontend/src/components/tools/ImageCrop.tsx
Normal file
105
frontend/src/components/tools/ImageCrop.tsx
Normal file
@@ -0,0 +1,105 @@
|
||||
import { useState, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { Helmet } from 'react-helmet-async';
import { Crop } from 'lucide-react';
import FileUploader from '@/components/shared/FileUploader';
import ProgressBar from '@/components/shared/ProgressBar';
import DownloadButton from '@/components/shared/DownloadButton';
import AdSlot from '@/components/layout/AdSlot';
import { useTaskPolling } from '@/hooks/useTaskPolling';
import { generateToolSchema } from '@/utils/seo';
import { useFileStore } from '@/stores/fileStore';
import api, { type TaskResponse } from '@/services/api';

/**
 * Image-crop tool page.
 *
 * Lets the user pick an image, enter crop-box coordinates
 * (left/top/right/bottom), posts the file plus coordinates to /image/crop,
 * then polls the returned task until the cropped image can be downloaded.
 * Unlike the converter pages it posts the form manually via `api` instead of
 * using the useFileUpload hook, because extra form fields are required.
 */
export default function ImageCrop() {
  const { t } = useTranslation();
  // UI phase: choosing a file -> task running -> result or error shown.
  const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
  const [file, setFile] = useState<File | null>(null);
  const [taskId, setTaskId] = useState<string | null>(null);
  // Error from the upload request itself (task failures arrive via taskError).
  const [error, setError] = useState<string | null>(null);
  // Crop box. NOTE(review): inputs are clamped to >= 0 but there is no check
  // that right > left / bottom > top — presumably the backend rejects an
  // inverted box; confirm.
  const [coords, setCoords] = useState({ left: 0, top: 0, right: 100, bottom: 100 });

  const { status, result, error: taskError } = useTaskPolling({
    taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
  });

  // One-shot handoff: consume a file placed in the shared store by another page.
  const storeFile = useFileStore((s) => s.file);
  const clearStoreFile = useFileStore((s) => s.clearFile);
  useEffect(() => { if (storeFile) { setFile(storeFile); clearStoreFile(); } }, []); // eslint-disable-line react-hooks/exhaustive-deps

  // Post the file and crop coordinates; the returned task id starts polling.
  const handleUpload = async () => {
    if (!file) return;
    setError(null); setPhase('processing');
    try {
      const fd = new FormData();
      fd.append('file', file);
      fd.append('left', String(coords.left));
      fd.append('top', String(coords.top));
      fd.append('right', String(coords.right));
      fd.append('bottom', String(coords.bottom));
      const res = await api.post<TaskResponse>('/image/crop', fd);
      setTaskId(res.data.task_id);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to crop image.');
      setPhase('done');
    }
  };

  // Restore initial state so another image can be cropped.
  const handleReset = () => { setPhase('upload'); setFile(null); setTaskId(null); setError(null); setCoords({ left: 0, top: 0, right: 100, bottom: 100 }); };

  // JSON-LD structured data for search engines.
  const schema = generateToolSchema({ name: t('tools.imageCrop.title'), description: t('tools.imageCrop.description'), url: `${window.location.origin}/tools/image-crop` });

  return (
    <>
      <Helmet>
        <title>{t('tools.imageCrop.title')} — {t('common.appName')}</title>
        <meta name="description" content={t('tools.imageCrop.description')} />
        <link rel="canonical" href={`${window.location.origin}/tools/image-crop`} />
        <script type="application/ld+json">{JSON.stringify(schema)}</script>
      </Helmet>
      <div className="mx-auto max-w-2xl">
        <div className="mb-8 text-center">
          <div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-pink-100 dark:bg-pink-900/30">
            <Crop className="h-8 w-8 text-pink-600 dark:text-pink-400" />
          </div>
          <h1 className="section-heading">{t('tools.imageCrop.title')}</h1>
          <p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.imageCrop.description')}</p>
        </div>
        <AdSlot slot="top-banner" format="horizontal" className="mb-6" />
        {phase === 'upload' && (
          <div className="space-y-4">
            <FileUploader onFileSelect={setFile} file={file}
              accept={{ 'image/png': ['.png'], 'image/jpeg': ['.jpg', '.jpeg'], 'image/webp': ['.webp'] }}
              maxSizeMB={10} acceptLabel="Image (.png, .jpg, .webp)" />
            {file && (
              <div className="rounded-2xl bg-white p-5 ring-1 ring-slate-200 dark:bg-slate-800 dark:ring-slate-700">
                <p className="mb-3 text-sm font-medium text-slate-700 dark:text-slate-300">{t('tools.imageCrop.coordsLabel')}</p>
                {/* One numeric input per crop-box side, clamped to non-negative values. */}
                <div className="grid grid-cols-2 gap-3">
                  {(['left', 'top', 'right', 'bottom'] as const).map((side) => (
                    <div key={side}>
                      <label className="mb-1 block text-xs text-slate-500 dark:text-slate-400">{t(`tools.imageCrop.${side}`)}</label>
                      <input type="number" min={0} value={coords[side]} onChange={(e) => setCoords((c) => ({ ...c, [side]: Math.max(0, Number(e.target.value)) }))}
                        className="w-full rounded-lg border border-slate-300 px-3 py-2 text-sm dark:border-slate-600 dark:bg-slate-700 dark:text-slate-200" />
                    </div>
                  ))}
                </div>
              </div>
            )}
            {file && <button onClick={handleUpload} className="btn-primary w-full">{t('tools.imageCrop.shortDesc')}</button>}
          </div>
        )}
        {phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
        {phase === 'done' && result && result.status === 'completed' && <DownloadButton result={result} onStartOver={handleReset} />}
        {phase === 'done' && (taskError || error) && (
          <div className="space-y-4">
            <div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
              <p className="text-sm text-red-700 dark:text-red-400">{taskError || error}</p>
            </div>
            <button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
          </div>
        )}
        <AdSlot slot="bottom-banner" className="mt-8" />
      </div>
    </>
  );
}
|
||||
116
frontend/src/components/tools/ImageRotateFlip.tsx
Normal file
116
frontend/src/components/tools/ImageRotateFlip.tsx
Normal file
@@ -0,0 +1,116 @@
|
||||
import { useState, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { Helmet } from 'react-helmet-async';
import { RotateCw } from 'lucide-react';
import FileUploader from '@/components/shared/FileUploader';
import ProgressBar from '@/components/shared/ProgressBar';
import DownloadButton from '@/components/shared/DownloadButton';
import AdSlot from '@/components/layout/AdSlot';
import { useTaskPolling } from '@/hooks/useTaskPolling';
import { generateToolSchema } from '@/utils/seo';
import { useFileStore } from '@/stores/fileStore';
import api, { type TaskResponse } from '@/services/api';

/**
 * Image rotate/flip tool page.
 *
 * Lets the user pick an image, choose a rotation (0/90/180/270 degrees) and
 * optional horizontal/vertical flips, posts the form to /image/rotate-flip,
 * then polls the returned task until the transformed image can be downloaded.
 * Posts manually via `api` (not useFileUpload) because extra fields are sent.
 */
export default function ImageRotateFlip() {
  const { t } = useTranslation();
  // UI phase: choosing a file -> task running -> result or error shown.
  const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
  const [file, setFile] = useState<File | null>(null);
  const [taskId, setTaskId] = useState<string | null>(null);
  // Error from the upload request itself (task failures arrive via taskError).
  const [error, setError] = useState<string | null>(null);
  // Rotation in degrees; the select restricts it to 0, 90, 180, or 270.
  const [rotation, setRotation] = useState(0);
  const [flipH, setFlipH] = useState(false);
  const [flipV, setFlipV] = useState(false);

  const { status, result, error: taskError } = useTaskPolling({
    taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
  });

  // One-shot handoff: consume a file placed in the shared store by another page.
  const storeFile = useFileStore((s) => s.file);
  const clearStoreFile = useFileStore((s) => s.clearFile);
  useEffect(() => { if (storeFile) { setFile(storeFile); clearStoreFile(); } }, []); // eslint-disable-line react-hooks/exhaustive-deps

  // Post the file and transform options; the returned task id starts polling.
  // NOTE: booleans are serialized as the strings "true"/"false" — presumably
  // the backend parses those; confirm.
  const handleUpload = async () => {
    if (!file) return;
    setError(null); setPhase('processing');
    try {
      const fd = new FormData();
      fd.append('file', file);
      fd.append('rotation', String(rotation));
      fd.append('flip_horizontal', String(flipH));
      fd.append('flip_vertical', String(flipV));
      const res = await api.post<TaskResponse>('/image/rotate-flip', fd);
      setTaskId(res.data.task_id);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to transform image.');
      setPhase('done');
    }
  };

  // Restore initial state so another image can be transformed.
  const handleReset = () => { setPhase('upload'); setFile(null); setTaskId(null); setError(null); setRotation(0); setFlipH(false); setFlipV(false); };

  // JSON-LD structured data for search engines.
  const schema = generateToolSchema({ name: t('tools.imageRotateFlip.title'), description: t('tools.imageRotateFlip.description'), url: `${window.location.origin}/tools/image-rotate-flip` });

  return (
    <>
      <Helmet>
        <title>{t('tools.imageRotateFlip.title')} — {t('common.appName')}</title>
        <meta name="description" content={t('tools.imageRotateFlip.description')} />
        <link rel="canonical" href={`${window.location.origin}/tools/image-rotate-flip`} />
        <script type="application/ld+json">{JSON.stringify(schema)}</script>
      </Helmet>
      <div className="mx-auto max-w-2xl">
        <div className="mb-8 text-center">
          <div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-cyan-100 dark:bg-cyan-900/30">
            <RotateCw className="h-8 w-8 text-cyan-600 dark:text-cyan-400" />
          </div>
          <h1 className="section-heading">{t('tools.imageRotateFlip.title')}</h1>
          <p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.imageRotateFlip.description')}</p>
        </div>
        <AdSlot slot="top-banner" format="horizontal" className="mb-6" />
        {phase === 'upload' && (
          <div className="space-y-4">
            <FileUploader onFileSelect={setFile} file={file}
              accept={{ 'image/png': ['.png'], 'image/jpeg': ['.jpg', '.jpeg'], 'image/webp': ['.webp'] }}
              maxSizeMB={10} acceptLabel="Image (.png, .jpg, .webp)" />
            {file && (
              <div className="rounded-2xl bg-white p-5 ring-1 ring-slate-200 dark:bg-slate-800 dark:ring-slate-700 space-y-4">
                <div>
                  <label className="mb-2 block text-sm font-medium text-slate-700 dark:text-slate-300">{t('tools.imageRotateFlip.rotationLabel')}</label>
                  <select value={rotation} onChange={(e) => setRotation(Number(e.target.value))}
                    className="w-full rounded-lg border border-slate-300 px-3 py-2 text-sm dark:border-slate-600 dark:bg-slate-700 dark:text-slate-200">
                    <option value={0}>0°</option>
                    <option value={90}>90°</option>
                    <option value={180}>180°</option>
                    <option value={270}>270°</option>
                  </select>
                </div>
                <div className="flex gap-4">
                  <label className="flex items-center gap-2 text-sm text-slate-700 dark:text-slate-300">
                    <input type="checkbox" checked={flipH} onChange={(e) => setFlipH(e.target.checked)} className="rounded" />
                    {t('tools.imageRotateFlip.flipHorizontal')}
                  </label>
                  <label className="flex items-center gap-2 text-sm text-slate-700 dark:text-slate-300">
                    <input type="checkbox" checked={flipV} onChange={(e) => setFlipV(e.target.checked)} className="rounded" />
                    {t('tools.imageRotateFlip.flipVertical')}
                  </label>
                </div>
              </div>
            )}
            {file && <button onClick={handleUpload} className="btn-primary w-full">{t('tools.imageRotateFlip.shortDesc')}</button>}
          </div>
        )}
        {phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
        {phase === 'done' && result && result.status === 'completed' && <DownloadButton result={result} onStartOver={handleReset} />}
        {phase === 'done' && (taskError || error) && (
          <div className="space-y-4">
            <div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
              <p className="text-sm text-red-700 dark:text-red-400">{taskError || error}</p>
            </div>
            <button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
          </div>
        )}
        <AdSlot slot="bottom-banner" className="mt-8" />
      </div>
    </>
  );
}
|
||||
101
frontend/src/components/tools/PdfMetadata.tsx
Normal file
101
frontend/src/components/tools/PdfMetadata.tsx
Normal file
@@ -0,0 +1,101 @@
|
||||
import { useState, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { Helmet } from 'react-helmet-async';
import { FileText } from 'lucide-react';
import FileUploader from '@/components/shared/FileUploader';
import ProgressBar from '@/components/shared/ProgressBar';
import DownloadButton from '@/components/shared/DownloadButton';
import AdSlot from '@/components/layout/AdSlot';
import { useTaskPolling } from '@/hooks/useTaskPolling';
import { generateToolSchema } from '@/utils/seo';
import { useFileStore } from '@/stores/fileStore';
import api, { type TaskResponse } from '@/services/api';

/**
 * PDF-metadata editor tool page.
 *
 * Lets the user pick a PDF and fill in title/author/subject/keywords/creator
 * fields; only non-empty fields are sent to /pdf-tools/metadata. The returned
 * task is polled until the updated PDF can be downloaded. Posts manually via
 * `api` (not useFileUpload) because extra form fields are required.
 */
export default function PdfMetadata() {
  const { t } = useTranslation();
  // UI phase: choosing a file -> task running -> result or error shown.
  const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
  const [file, setFile] = useState<File | null>(null);
  const [taskId, setTaskId] = useState<string | null>(null);
  // Error from the upload request itself (task failures arrive via taskError).
  const [error, setError] = useState<string | null>(null);
  // Metadata form values, all optional.
  const [meta, setMeta] = useState({ title: '', author: '', subject: '', keywords: '', creator: '' });

  const { status, result, error: taskError } = useTaskPolling({
    taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
  });

  // One-shot handoff: consume a file placed in the shared store by another page.
  const storeFile = useFileStore((s) => s.file);
  const clearStoreFile = useFileStore((s) => s.clearFile);
  useEffect(() => { if (storeFile) { setFile(storeFile); clearStoreFile(); } }, []); // eslint-disable-line react-hooks/exhaustive-deps

  // Post the file plus only the metadata fields the user actually filled in.
  const handleUpload = async () => {
    if (!file) return;
    setError(null); setPhase('processing');
    try {
      const fd = new FormData();
      fd.append('file', file);
      // Blank fields are omitted so the backend leaves them untouched.
      Object.entries(meta).forEach(([k, v]) => { if (v.trim()) fd.append(k, v.trim()); });
      const res = await api.post<TaskResponse>('/pdf-tools/metadata', fd);
      setTaskId(res.data.task_id);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to edit metadata.');
      setPhase('done');
    }
  };

  // Restore initial state so another PDF can be edited.
  const handleReset = () => { setPhase('upload'); setFile(null); setTaskId(null); setError(null); setMeta({ title: '', author: '', subject: '', keywords: '', creator: '' }); };

  // JSON-LD structured data for search engines.
  const schema = generateToolSchema({ name: t('tools.pdfMetadata.title'), description: t('tools.pdfMetadata.description'), url: `${window.location.origin}/tools/pdf-metadata` });

  // Form fields in render order; label keys diverge from the field names only
  // for "title" (i18n key "titleField", avoiding a clash with the page title key).
  const fields = ['title', 'author', 'subject', 'keywords', 'creator'] as const;
  const fieldLabelKeys: Record<string, string> = { title: 'titleField', author: 'author', subject: 'subject', keywords: 'keywords', creator: 'creator' };

  return (
    <>
      <Helmet>
        <title>{t('tools.pdfMetadata.title')} — {t('common.appName')}</title>
        <meta name="description" content={t('tools.pdfMetadata.description')} />
        <link rel="canonical" href={`${window.location.origin}/tools/pdf-metadata`} />
        <script type="application/ld+json">{JSON.stringify(schema)}</script>
      </Helmet>
      <div className="mx-auto max-w-2xl">
        <div className="mb-8 text-center">
          <div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-indigo-100 dark:bg-indigo-900/30">
            <FileText className="h-8 w-8 text-indigo-600 dark:text-indigo-400" />
          </div>
          <h1 className="section-heading">{t('tools.pdfMetadata.title')}</h1>
          <p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.pdfMetadata.description')}</p>
        </div>
        <AdSlot slot="top-banner" format="horizontal" className="mb-6" />
        {phase === 'upload' && (
          <div className="space-y-4">
            <FileUploader onFileSelect={setFile} file={file} accept={{ 'application/pdf': ['.pdf'] }} maxSizeMB={20} acceptLabel="PDF (.pdf)" />
            {file && (
              <div className="rounded-2xl bg-white p-5 ring-1 ring-slate-200 dark:bg-slate-800 dark:ring-slate-700 space-y-3">
                {fields.map((f) => (
                  <div key={f}>
                    <label className="mb-1 block text-sm font-medium text-slate-700 dark:text-slate-300">{t(`tools.pdfMetadata.${fieldLabelKeys[f]}`)}</label>
                    <input type="text" value={meta[f]} onChange={(e) => setMeta((m) => ({ ...m, [f]: e.target.value }))}
                      className="w-full rounded-lg border border-slate-300 px-3 py-2 text-sm dark:border-slate-600 dark:bg-slate-700 dark:text-slate-200"
                      placeholder={t(`tools.pdfMetadata.${f}Placeholder`)} />
                  </div>
                ))}
              </div>
            )}
            {file && <button onClick={handleUpload} className="btn-primary w-full">{t('tools.pdfMetadata.shortDesc')}</button>}
          </div>
        )}
        {phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
        {phase === 'done' && result && result.status === 'completed' && <DownloadButton result={result} onStartOver={handleReset} />}
        {phase === 'done' && (taskError || error) && (
          <div className="space-y-4">
            <div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
              <p className="text-sm text-red-700 dark:text-red-400">{taskError || error}</p>
            </div>
            <button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
          </div>
        )}
        <AdSlot slot="bottom-banner" className="mt-8" />
      </div>
    </>
  );
}
|
||||
78
frontend/src/components/tools/PdfToPptx.tsx
Normal file
78
frontend/src/components/tools/PdfToPptx.tsx
Normal file
@@ -0,0 +1,78 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { FileText } from 'lucide-react';
|
||||
import FileUploader from '@/components/shared/FileUploader';
|
||||
import ProgressBar from '@/components/shared/ProgressBar';
|
||||
import DownloadButton from '@/components/shared/DownloadButton';
|
||||
import AdSlot from '@/components/layout/AdSlot';
|
||||
import { useFileUpload } from '@/hooks/useFileUpload';
|
||||
import { useTaskPolling } from '@/hooks/useTaskPolling';
|
||||
import { generateToolSchema } from '@/utils/seo';
|
||||
import { useFileStore } from '@/stores/fileStore';
|
||||
|
||||
export default function PdfToPptx() {
|
||||
const { t } = useTranslation();
|
||||
const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
|
||||
|
||||
const { file, uploadProgress, isUploading, taskId, error: uploadError, selectFile, startUpload, reset } =
|
||||
useFileUpload({ endpoint: '/convert/pdf-to-pptx', maxSizeMB: 20, acceptedTypes: ['pdf'] });
|
||||
|
||||
const { status, result, error: taskError } = useTaskPolling({
|
||||
taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
|
||||
});
|
||||
|
||||
const storeFile = useFileStore((s) => s.file);
|
||||
const clearStoreFile = useFileStore((s) => s.clearFile);
|
||||
useEffect(() => { if (storeFile) { selectFile(storeFile); clearStoreFile(); } }, []); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
const handleUpload = async () => { const id = await startUpload(); if (id) setPhase('processing'); };
|
||||
const handleReset = () => { reset(); setPhase('upload'); };
|
||||
|
||||
const schema = generateToolSchema({
|
||||
name: t('tools.pdfToPptx.title'), description: t('tools.pdfToPptx.description'),
|
||||
url: `${window.location.origin}/tools/pdf-to-pptx`,
|
||||
});
|
||||
|
||||
return (
|
||||
<>
|
||||
<Helmet>
|
||||
<title>{t('tools.pdfToPptx.title')} — {t('common.appName')}</title>
|
||||
<meta name="description" content={t('tools.pdfToPptx.description')} />
|
||||
<link rel="canonical" href={`${window.location.origin}/tools/pdf-to-pptx`} />
|
||||
<script type="application/ld+json">{JSON.stringify(schema)}</script>
|
||||
</Helmet>
|
||||
<div className="mx-auto max-w-2xl">
|
||||
<div className="mb-8 text-center">
|
||||
<div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-orange-100 dark:bg-orange-900/30">
|
||||
<FileText className="h-8 w-8 text-orange-600 dark:text-orange-400" />
|
||||
</div>
|
||||
<h1 className="section-heading">{t('tools.pdfToPptx.title')}</h1>
|
||||
<p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.pdfToPptx.description')}</p>
|
||||
</div>
|
||||
<AdSlot slot="top-banner" format="horizontal" className="mb-6" />
|
||||
{phase === 'upload' && (
|
||||
<div className="space-y-4">
|
||||
<FileUploader onFileSelect={selectFile} file={file} accept={{ 'application/pdf': ['.pdf'] }}
|
||||
maxSizeMB={20} isUploading={isUploading} uploadProgress={uploadProgress}
|
||||
error={uploadError} onReset={handleReset} acceptLabel="PDF (.pdf)" />
|
||||
{file && !isUploading && (
|
||||
<button onClick={handleUpload} className="btn-primary w-full">{t('tools.pdfToPptx.shortDesc')}</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
{phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
|
||||
{phase === 'done' && result && result.status === 'completed' && <DownloadButton result={result} onStartOver={handleReset} />}
|
||||
{phase === 'done' && taskError && (
|
||||
<div className="space-y-4">
|
||||
<div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
|
||||
<p className="text-sm text-red-700 dark:text-red-400">{taskError}</p>
|
||||
</div>
|
||||
<button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
|
||||
</div>
|
||||
)}
|
||||
<AdSlot slot="bottom-banner" className="mt-8" />
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
82
frontend/src/components/tools/PptxToPdf.tsx
Normal file
82
frontend/src/components/tools/PptxToPdf.tsx
Normal file
@@ -0,0 +1,82 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { Presentation } from 'lucide-react';
|
||||
import FileUploader from '@/components/shared/FileUploader';
|
||||
import ProgressBar from '@/components/shared/ProgressBar';
|
||||
import DownloadButton from '@/components/shared/DownloadButton';
|
||||
import AdSlot from '@/components/layout/AdSlot';
|
||||
import { useFileUpload } from '@/hooks/useFileUpload';
|
||||
import { useTaskPolling } from '@/hooks/useTaskPolling';
|
||||
import { generateToolSchema } from '@/utils/seo';
|
||||
import { useFileStore } from '@/stores/fileStore';
|
||||
|
||||
export default function PptxToPdf() {
|
||||
const { t } = useTranslation();
|
||||
const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
|
||||
|
||||
const { file, uploadProgress, isUploading, taskId, error: uploadError, selectFile, startUpload, reset } =
|
||||
useFileUpload({ endpoint: '/convert/pptx-to-pdf', maxSizeMB: 20, acceptedTypes: ['pptx', 'ppt'] });
|
||||
|
||||
const { status, result, error: taskError } = useTaskPolling({
|
||||
taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
|
||||
});
|
||||
|
||||
const storeFile = useFileStore((s) => s.file);
|
||||
const clearStoreFile = useFileStore((s) => s.clearFile);
|
||||
useEffect(() => { if (storeFile) { selectFile(storeFile); clearStoreFile(); } }, []); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
const handleUpload = async () => { const id = await startUpload(); if (id) setPhase('processing'); };
|
||||
const handleReset = () => { reset(); setPhase('upload'); };
|
||||
|
||||
const schema = generateToolSchema({
|
||||
name: t('tools.pptxToPdf.title'), description: t('tools.pptxToPdf.description'),
|
||||
url: `${window.location.origin}/tools/pptx-to-pdf`,
|
||||
});
|
||||
|
||||
return (
|
||||
<>
|
||||
<Helmet>
|
||||
<title>{t('tools.pptxToPdf.title')} — {t('common.appName')}</title>
|
||||
<meta name="description" content={t('tools.pptxToPdf.description')} />
|
||||
<link rel="canonical" href={`${window.location.origin}/tools/pptx-to-pdf`} />
|
||||
<script type="application/ld+json">{JSON.stringify(schema)}</script>
|
||||
</Helmet>
|
||||
<div className="mx-auto max-w-2xl">
|
||||
<div className="mb-8 text-center">
|
||||
<div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-orange-100 dark:bg-orange-900/30">
|
||||
<Presentation className="h-8 w-8 text-orange-600 dark:text-orange-400" />
|
||||
</div>
|
||||
<h1 className="section-heading">{t('tools.pptxToPdf.title')}</h1>
|
||||
<p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.pptxToPdf.description')}</p>
|
||||
</div>
|
||||
<AdSlot slot="top-banner" format="horizontal" className="mb-6" />
|
||||
{phase === 'upload' && (
|
||||
<div className="space-y-4">
|
||||
<FileUploader onFileSelect={selectFile} file={file}
|
||||
accept={{
|
||||
'application/vnd.openxmlformats-officedocument.presentationml.presentation': ['.pptx'],
|
||||
'application/vnd.ms-powerpoint': ['.ppt'],
|
||||
}}
|
||||
maxSizeMB={20} isUploading={isUploading} uploadProgress={uploadProgress}
|
||||
error={uploadError} onReset={handleReset} acceptLabel="PowerPoint (.pptx, .ppt)" />
|
||||
{file && !isUploading && (
|
||||
<button onClick={handleUpload} className="btn-primary w-full">{t('tools.pptxToPdf.shortDesc')}</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
{phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
|
||||
{phase === 'done' && result && result.status === 'completed' && <DownloadButton result={result} onStartOver={handleReset} />}
|
||||
{phase === 'done' && taskError && (
|
||||
<div className="space-y-4">
|
||||
<div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
|
||||
<p className="text-sm text-red-700 dark:text-red-400">{taskError}</p>
|
||||
</div>
|
||||
<button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
|
||||
</div>
|
||||
)}
|
||||
<AdSlot slot="bottom-banner" className="mt-8" />
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -2,8 +2,6 @@ import { useState, useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { ArrowUpDown } from 'lucide-react';
|
||||
import { getDocument, GlobalWorkerOptions } from 'pdfjs-dist';
|
||||
import pdfWorker from 'pdfjs-dist/build/pdf.worker.min.mjs?url';
|
||||
import FileUploader from '@/components/shared/FileUploader';
|
||||
import ProgressBar from '@/components/shared/ProgressBar';
|
||||
import DownloadButton from '@/components/shared/DownloadButton';
|
||||
@@ -11,10 +9,9 @@ import AdSlot from '@/components/layout/AdSlot';
|
||||
import { useFileUpload } from '@/hooks/useFileUpload';
|
||||
import { useTaskPolling } from '@/hooks/useTaskPolling';
|
||||
import { generateToolSchema } from '@/utils/seo';
|
||||
import { getPdfPageCount } from '@/utils/pdfClient';
|
||||
import { useFileStore } from '@/stores/fileStore';
|
||||
|
||||
GlobalWorkerOptions.workerSrc = pdfWorker;
|
||||
|
||||
export default function ReorderPdf() {
|
||||
const { t } = useTranslation();
|
||||
const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
|
||||
@@ -47,7 +44,6 @@ export default function ReorderPdf() {
|
||||
|
||||
useEffect(() => {
|
||||
let cancelled = false;
|
||||
let loadingTask: ReturnType<typeof getDocument> | null = null;
|
||||
|
||||
async function detectPageCount(selectedFile: File) {
|
||||
setIsReadingPageCount(true);
|
||||
@@ -55,12 +51,9 @@ export default function ReorderPdf() {
|
||||
setPageCountError(null);
|
||||
|
||||
try {
|
||||
const data = new Uint8Array(await selectedFile.arrayBuffer());
|
||||
loadingTask = getDocument({ data });
|
||||
const pdf = await loadingTask.promise;
|
||||
|
||||
const count = await getPdfPageCount(selectedFile);
|
||||
if (!cancelled) {
|
||||
setPageCount(pdf.numPages);
|
||||
setPageCount(count);
|
||||
}
|
||||
} catch {
|
||||
if (!cancelled) {
|
||||
@@ -70,7 +63,6 @@ export default function ReorderPdf() {
|
||||
if (!cancelled) {
|
||||
setIsReadingPageCount(false);
|
||||
}
|
||||
void loadingTask?.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -85,7 +77,6 @@ export default function ReorderPdf() {
|
||||
|
||||
return () => {
|
||||
cancelled = true;
|
||||
void loadingTask?.destroy();
|
||||
};
|
||||
}, [file, t]);
|
||||
|
||||
|
||||
71
frontend/src/components/tools/RepairPdf.tsx
Normal file
71
frontend/src/components/tools/RepairPdf.tsx
Normal file
@@ -0,0 +1,71 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { Wrench } from 'lucide-react';
|
||||
import FileUploader from '@/components/shared/FileUploader';
|
||||
import ProgressBar from '@/components/shared/ProgressBar';
|
||||
import DownloadButton from '@/components/shared/DownloadButton';
|
||||
import AdSlot from '@/components/layout/AdSlot';
|
||||
import { useFileUpload } from '@/hooks/useFileUpload';
|
||||
import { useTaskPolling } from '@/hooks/useTaskPolling';
|
||||
import { generateToolSchema } from '@/utils/seo';
|
||||
import { useFileStore } from '@/stores/fileStore';
|
||||
|
||||
export default function RepairPdf() {
|
||||
const { t } = useTranslation();
|
||||
const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
|
||||
|
||||
const { file, uploadProgress, isUploading, taskId, error: uploadError, selectFile, startUpload, reset } =
|
||||
useFileUpload({ endpoint: '/pdf-tools/repair', maxSizeMB: 20, acceptedTypes: ['pdf'] });
|
||||
|
||||
const { status, result, error: taskError } = useTaskPolling({
|
||||
taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
|
||||
});
|
||||
|
||||
const storeFile = useFileStore((s) => s.file);
|
||||
const clearStoreFile = useFileStore((s) => s.clearFile);
|
||||
useEffect(() => { if (storeFile) { selectFile(storeFile); clearStoreFile(); } }, []); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
const handleUpload = async () => { const id = await startUpload(); if (id) setPhase('processing'); };
|
||||
const handleReset = () => { reset(); setPhase('upload'); };
|
||||
|
||||
const schema = generateToolSchema({ name: t('tools.repairPdf.title'), description: t('tools.repairPdf.description'), url: `${window.location.origin}/tools/repair-pdf` });
|
||||
|
||||
return (
|
||||
<>
|
||||
<Helmet>
|
||||
<title>{t('tools.repairPdf.title')} — {t('common.appName')}</title>
|
||||
<meta name="description" content={t('tools.repairPdf.description')} />
|
||||
<link rel="canonical" href={`${window.location.origin}/tools/repair-pdf`} />
|
||||
<script type="application/ld+json">{JSON.stringify(schema)}</script>
|
||||
</Helmet>
|
||||
<div className="mx-auto max-w-2xl">
|
||||
<div className="mb-8 text-center">
|
||||
<div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-red-100 dark:bg-red-900/30">
|
||||
<Wrench className="h-8 w-8 text-red-600 dark:text-red-400" />
|
||||
</div>
|
||||
<h1 className="section-heading">{t('tools.repairPdf.title')}</h1>
|
||||
<p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.repairPdf.description')}</p>
|
||||
</div>
|
||||
<AdSlot slot="top-banner" format="horizontal" className="mb-6" />
|
||||
{phase === 'upload' && (
|
||||
<div className="space-y-4">
|
||||
<FileUploader onFileSelect={selectFile} file={file} accept={{ 'application/pdf': ['.pdf'] }} maxSizeMB={20} acceptLabel="PDF (.pdf)" />
|
||||
{file && <button onClick={handleUpload} disabled={isUploading} className="btn-primary w-full">{t('tools.repairPdf.shortDesc')}</button>}
|
||||
</div>
|
||||
)}
|
||||
{phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
|
||||
{phase === 'done' && result && result.status === 'completed' && <DownloadButton result={result} onStartOver={handleReset} />}
|
||||
{phase === 'done' && (taskError || uploadError) && (
|
||||
<div className="space-y-4">
|
||||
<div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
|
||||
<p className="text-sm text-red-700 dark:text-red-400">{taskError || uploadError}</p>
|
||||
</div>
|
||||
<button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
|
||||
</div>
|
||||
)}
|
||||
<AdSlot slot="bottom-banner" className="mt-8" />
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
121
frontend/src/components/tools/SignPdf.tsx
Normal file
121
frontend/src/components/tools/SignPdf.tsx
Normal file
@@ -0,0 +1,121 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { PenTool } from 'lucide-react';
|
||||
import FileUploader from '@/components/shared/FileUploader';
|
||||
import ProgressBar from '@/components/shared/ProgressBar';
|
||||
import DownloadButton from '@/components/shared/DownloadButton';
|
||||
import AdSlot from '@/components/layout/AdSlot';
|
||||
import { useTaskPolling } from '@/hooks/useTaskPolling';
|
||||
import { generateToolSchema } from '@/utils/seo';
|
||||
import { useFileStore } from '@/stores/fileStore';
|
||||
import api, { type TaskResponse } from '@/services/api';
|
||||
|
||||
export default function SignPdf() {
|
||||
const { t } = useTranslation();
|
||||
const [phase, setPhase] = useState<'upload' | 'processing' | 'done'>('upload');
|
||||
const [pdfFile, setPdfFile] = useState<File | null>(null);
|
||||
const [sigFile, setSigFile] = useState<File | null>(null);
|
||||
const [taskId, setTaskId] = useState<string | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [page, setPage] = useState(1);
|
||||
|
||||
const { status, result, error: taskError } = useTaskPolling({
|
||||
taskId, onComplete: () => setPhase('done'), onError: () => setPhase('done'),
|
||||
});
|
||||
|
||||
const storeFile = useFileStore((s) => s.file);
|
||||
const clearStoreFile = useFileStore((s) => s.clearFile);
|
||||
useEffect(() => { if (storeFile) { setPdfFile(storeFile); clearStoreFile(); } }, []); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
const handleUpload = async () => {
|
||||
if (!pdfFile || !sigFile) return;
|
||||
setError(null);
|
||||
setPhase('processing');
|
||||
try {
|
||||
const fd = new FormData();
|
||||
fd.append('file', pdfFile);
|
||||
fd.append('signature', sigFile);
|
||||
fd.append('page', String(page));
|
||||
const res = await api.post<TaskResponse>('/pdf-tools/sign', fd);
|
||||
setTaskId(res.data.task_id);
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to sign PDF.');
|
||||
setPhase('done');
|
||||
}
|
||||
};
|
||||
|
||||
const handleReset = () => {
|
||||
setPhase('upload'); setPdfFile(null); setSigFile(null);
|
||||
setTaskId(null); setError(null); setPage(1);
|
||||
};
|
||||
|
||||
const schema = generateToolSchema({
|
||||
name: t('tools.signPdf.title'), description: t('tools.signPdf.description'),
|
||||
url: `${window.location.origin}/tools/sign-pdf`,
|
||||
});
|
||||
|
||||
return (
|
||||
<>
|
||||
<Helmet>
|
||||
<title>{t('tools.signPdf.title')} — {t('common.appName')}</title>
|
||||
<meta name="description" content={t('tools.signPdf.description')} />
|
||||
<link rel="canonical" href={`${window.location.origin}/tools/sign-pdf`} />
|
||||
<script type="application/ld+json">{JSON.stringify(schema)}</script>
|
||||
</Helmet>
|
||||
<div className="mx-auto max-w-2xl">
|
||||
<div className="mb-8 text-center">
|
||||
<div className="mx-auto mb-4 flex h-16 w-16 items-center justify-center rounded-2xl bg-blue-100 dark:bg-blue-900/30">
|
||||
<PenTool className="h-8 w-8 text-blue-600 dark:text-blue-400" />
|
||||
</div>
|
||||
<h1 className="section-heading">{t('tools.signPdf.title')}</h1>
|
||||
<p className="mt-2 text-slate-500 dark:text-slate-400">{t('tools.signPdf.description')}</p>
|
||||
</div>
|
||||
<AdSlot slot="top-banner" format="horizontal" className="mb-6" />
|
||||
{phase === 'upload' && (
|
||||
<div className="space-y-4">
|
||||
<div className="rounded-2xl bg-white p-5 ring-1 ring-slate-200 dark:bg-slate-800 dark:ring-slate-700 space-y-4">
|
||||
<div>
|
||||
<label className="mb-2 block text-sm font-medium text-slate-700 dark:text-slate-300">
|
||||
{t('tools.signPdf.pdfLabel')}
|
||||
</label>
|
||||
<FileUploader onFileSelect={setPdfFile} file={pdfFile}
|
||||
accept={{ 'application/pdf': ['.pdf'] }} maxSizeMB={20}
|
||||
acceptLabel="PDF (.pdf)" />
|
||||
</div>
|
||||
<div>
|
||||
<label className="mb-2 block text-sm font-medium text-slate-700 dark:text-slate-300">
|
||||
{t('tools.signPdf.signatureLabel')}
|
||||
</label>
|
||||
<FileUploader onFileSelect={setSigFile} file={sigFile}
|
||||
accept={{ 'image/png': ['.png'], 'image/jpeg': ['.jpg', '.jpeg'] }} maxSizeMB={5}
|
||||
acceptLabel="Image (.png, .jpg)" />
|
||||
</div>
|
||||
<div>
|
||||
<label className="mb-2 block text-sm font-medium text-slate-700 dark:text-slate-300">
|
||||
{t('tools.signPdf.pageLabel')}
|
||||
</label>
|
||||
<input type="number" min={1} value={page} onChange={(e) => setPage(Math.max(1, Number(e.target.value)))}
|
||||
className="w-24 rounded-lg border border-slate-300 px-3 py-2 text-sm dark:border-slate-600 dark:bg-slate-700 dark:text-slate-200" />
|
||||
</div>
|
||||
</div>
|
||||
{pdfFile && sigFile && (
|
||||
<button onClick={handleUpload} className="btn-primary w-full">{t('tools.signPdf.shortDesc')}</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
{phase === 'processing' && !result && <ProgressBar state={status?.state || 'PENDING'} message={status?.progress} />}
|
||||
{phase === 'done' && result && result.status === 'completed' && <DownloadButton result={result} onStartOver={handleReset} />}
|
||||
{phase === 'done' && (taskError || error) && (
|
||||
<div className="space-y-4">
|
||||
<div className="rounded-xl bg-red-50 p-4 ring-1 ring-red-200 dark:bg-red-900/20 dark:ring-red-800">
|
||||
<p className="text-sm text-red-700 dark:text-red-400">{taskError || error}</p>
|
||||
</div>
|
||||
<button onClick={handleReset} className="btn-secondary w-full">{t('common.startOver')}</button>
|
||||
</div>
|
||||
)}
|
||||
<AdSlot slot="bottom-banner" className="mt-8" />
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -18,6 +18,9 @@ export const PAGE_ROUTES = [
|
||||
'/contact',
|
||||
'/pricing',
|
||||
'/blog',
|
||||
'/blog/:slug',
|
||||
'/developers',
|
||||
'/internal/admin',
|
||||
] as const;
|
||||
|
||||
// ─── Tool routes ─────────────────────────────────────────────────
|
||||
@@ -63,6 +66,23 @@ export const TOOL_ROUTES = [
|
||||
'/tools/video-to-gif',
|
||||
'/tools/word-counter',
|
||||
'/tools/text-cleaner',
|
||||
|
||||
// Phase 2 – PDF Conversion
|
||||
'/tools/pdf-to-pptx',
|
||||
'/tools/excel-to-pdf',
|
||||
'/tools/pptx-to-pdf',
|
||||
'/tools/sign-pdf',
|
||||
|
||||
// Phase 2 – PDF Extra Tools
|
||||
'/tools/crop-pdf',
|
||||
'/tools/flatten-pdf',
|
||||
'/tools/repair-pdf',
|
||||
'/tools/pdf-metadata',
|
||||
|
||||
// Phase 2 – Image & Utility
|
||||
'/tools/image-crop',
|
||||
'/tools/image-rotate-flip',
|
||||
'/tools/barcode-generator',
|
||||
] as const;
|
||||
|
||||
// ─── All routes combined ─────────────────────────────────────────
|
||||
|
||||
@@ -664,6 +664,221 @@ export const TOOLS_SEO: ToolSEO[] = [
|
||||
{ question: 'Can I convert text to uppercase?', answer: 'Yes, you can convert text to uppercase, lowercase, or title case with a single click.' },
|
||||
],
|
||||
},
|
||||
|
||||
// ─── PHASE 2 – PDF CONVERSION ──────────────────────────────
|
||||
{
|
||||
i18nKey: 'pdfToPptx',
|
||||
slug: 'pdf-to-pptx',
|
||||
titleSuffix: 'Free Online PDF to PowerPoint Converter',
|
||||
metaDescription: 'Convert PDF files to PowerPoint (PPTX) presentations online for free. Each PDF page becomes a slide.',
|
||||
category: 'Convert',
|
||||
relatedSlugs: ['pptx-to-pdf', 'pdf-to-word', 'pdf-to-excel', 'pdf-to-images'],
|
||||
keywords: 'pdf to pptx, pdf to powerpoint, convert pdf to pptx, pdf to slides',
|
||||
features: [
|
||||
'Convert each PDF page to a PowerPoint slide',
|
||||
'High-quality image rendering',
|
||||
'No software installation needed',
|
||||
'Files auto-deleted after 30 minutes',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'How do I convert PDF to PowerPoint?', answer: 'Upload your PDF and our tool converts each page into a slide in a PPTX file. Download the result when ready.' },
|
||||
{ question: 'Is formatting preserved?', answer: 'Each page is rendered as a high-quality image on its own slide, preserving the visual layout perfectly.' },
|
||||
],
|
||||
},
|
||||
{
|
||||
i18nKey: 'excelToPdf',
|
||||
slug: 'excel-to-pdf',
|
||||
titleSuffix: 'Free Online Excel to PDF Converter',
|
||||
metaDescription: 'Convert Excel spreadsheets (XLSX, XLS) to PDF documents online for free. Preserve your table formatting.',
|
||||
category: 'Convert',
|
||||
relatedSlugs: ['pdf-to-excel', 'word-to-pdf', 'pptx-to-pdf'],
|
||||
keywords: 'excel to pdf, xlsx to pdf, convert excel to pdf, spreadsheet to pdf',
|
||||
features: [
|
||||
'Convert XLSX and XLS files to PDF',
|
||||
'Preserves table formatting and layout',
|
||||
'Powered by LibreOffice for accurate conversion',
|
||||
'Free with no signup required',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'Which Excel formats are supported?', answer: 'We support both XLSX (modern) and XLS (legacy) Excel formats.' },
|
||||
{ question: 'Will my formulas be visible?', answer: 'The PDF will show the computed values, not the formulas, just like a print preview.' },
|
||||
],
|
||||
},
|
||||
{
|
||||
i18nKey: 'pptxToPdf',
|
||||
slug: 'pptx-to-pdf',
|
||||
titleSuffix: 'Free Online PowerPoint to PDF Converter',
|
||||
metaDescription: 'Convert PowerPoint presentations (PPTX, PPT) to PDF online for free. Perfect for sharing slides.',
|
||||
category: 'Convert',
|
||||
relatedSlugs: ['pdf-to-pptx', 'word-to-pdf', 'excel-to-pdf'],
|
||||
keywords: 'pptx to pdf, powerpoint to pdf, convert pptx to pdf, ppt to pdf',
|
||||
features: [
|
||||
'Convert PPTX and PPT files to PDF',
|
||||
'Preserves slide layout and graphics',
|
||||
'Ideal for sharing presentations',
|
||||
'No account needed',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'Which PowerPoint formats work?', answer: 'Both PPTX and legacy PPT formats are supported.' },
|
||||
{ question: 'Are animations preserved?', answer: 'PDF is a static format, so animations are not included, but all slide content and layout are preserved.' },
|
||||
],
|
||||
},
|
||||
{
|
||||
i18nKey: 'signPdf',
|
||||
slug: 'sign-pdf',
|
||||
titleSuffix: 'Free Online PDF Signer',
|
||||
metaDescription: 'Add your signature image to any PDF document online for free. Sign PDF files without printing.',
|
||||
category: 'PDF',
|
||||
relatedSlugs: ['protect-pdf', 'watermark-pdf', 'pdf-editor', 'flatten-pdf'],
|
||||
keywords: 'sign pdf, add signature to pdf, pdf signer, electronic signature pdf',
|
||||
features: [
|
||||
'Upload your signature image (PNG/JPG)',
|
||||
'Place signature on any page',
|
||||
'No printing or scanning needed',
|
||||
'Secure — files deleted after 30 minutes',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'How do I sign a PDF?', answer: 'Upload your PDF and a signature image (PNG or JPG). Choose the page and position, then download the signed PDF.' },
|
||||
{ question: 'Is this a legal electronic signature?', answer: 'This tool places a visual signature image on the PDF. For legally binding digital signatures, a certificate-based solution may be required depending on your jurisdiction.' },
|
||||
],
|
||||
},
|
||||
|
||||
// ─── PHASE 2 – PDF EXTRA TOOLS ─────────────────────────────
|
||||
{
|
||||
i18nKey: 'cropPdf',
|
||||
slug: 'crop-pdf',
|
||||
titleSuffix: 'Free Online PDF Cropper',
|
||||
metaDescription: 'Crop PDF pages by adjusting margins online for free. Trim unwanted whitespace from your documents.',
|
||||
category: 'PDF',
|
||||
relatedSlugs: ['rotate-pdf', 'split-pdf', 'pdf-editor', 'flatten-pdf'],
|
||||
keywords: 'crop pdf, trim pdf, pdf cropper, remove pdf margins, resize pdf pages',
|
||||
features: [
|
||||
'Adjust margins (top, bottom, left, right)',
|
||||
'Crop all or specific pages',
|
||||
'Remove unnecessary whitespace',
|
||||
'Free and no signup required',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'How do I crop a PDF?', answer: 'Upload your PDF, set the margin values to trim from each side, and download the cropped version.' },
|
||||
{ question: 'Can I crop specific pages?', answer: 'Yes, you can specify which pages to crop or apply cropping to all pages at once.' },
|
||||
],
|
||||
},
|
||||
{
|
||||
i18nKey: 'flattenPdf',
|
||||
slug: 'flatten-pdf',
|
||||
titleSuffix: 'Free Online PDF Flattener',
|
||||
metaDescription: 'Flatten PDF forms and annotations online for free. Convert interactive form fields into fixed content.',
|
||||
category: 'PDF',
|
||||
relatedSlugs: ['protect-pdf', 'sign-pdf', 'repair-pdf', 'pdf-editor'],
|
||||
keywords: 'flatten pdf, pdf flattener, remove form fields, flatten annotations',
|
||||
features: [
|
||||
'Remove interactive form fields',
|
||||
'Flatten annotations into fixed content',
|
||||
'Prevent further editing of form data',
|
||||
'Ideal for archiving filled forms',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'What does flattening a PDF mean?', answer: 'Flattening converts interactive elements (form fields, annotations) into static content that cannot be edited further.' },
|
||||
{ question: 'Why should I flatten a PDF?', answer: 'Flattening is useful for archiving filled forms, reducing file size, and preventing accidental changes to form data.' },
|
||||
],
|
||||
},
|
||||
{
|
||||
i18nKey: 'repairPdf',
|
||||
slug: 'repair-pdf',
|
||||
titleSuffix: 'Free Online PDF Repair Tool',
|
||||
metaDescription: 'Repair corrupted or damaged PDF files online for free. Fix broken PDFs and recover content.',
|
||||
category: 'PDF',
|
||||
relatedSlugs: ['flatten-pdf', 'compress-pdf', 'unlock-pdf', 'pdf-metadata'],
|
||||
keywords: 'repair pdf, fix pdf, broken pdf, corrupted pdf, pdf recovery',
|
||||
features: [
|
||||
'Fix corrupted PDF structures',
|
||||
'Recover readable pages from damaged files',
|
||||
'Re-write clean PDF output',
|
||||
'Free with no file size limits',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'Can this fix any broken PDF?', answer: 'Our tool attempts to recover as many pages as possible. Severely corrupted files may only be partially recoverable.' },
|
||||
{ question: 'Is my data safe?', answer: 'Yes, all files are processed securely and deleted within 30 minutes.' },
|
||||
],
|
||||
},
|
||||
{
|
||||
i18nKey: 'pdfMetadata',
|
||||
slug: 'pdf-metadata',
|
||||
titleSuffix: 'Free Online PDF Metadata Editor',
|
||||
metaDescription: 'View and edit PDF metadata (title, author, subject, keywords) online for free.',
|
||||
category: 'PDF',
|
||||
relatedSlugs: ['pdf-editor', 'repair-pdf', 'protect-pdf', 'flatten-pdf'],
|
||||
keywords: 'pdf metadata, edit pdf properties, pdf title, pdf author, pdf info editor',
|
||||
features: [
|
||||
'Edit title, author, subject, keywords',
|
||||
'Set custom creator information',
|
||||
'View existing metadata before editing',
|
||||
'Free online tool — no installation',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'What is PDF metadata?', answer: 'PDF metadata includes properties like title, author, subject, and keywords embedded in the document. Search engines and document managers use this information.' },
|
||||
{ question: 'Can I remove metadata?', answer: 'Yes, leave fields blank to remove existing metadata values.' },
|
||||
],
|
||||
},
|
||||
|
||||
// ─── PHASE 2 – IMAGE & UTILITY ─────────────────────────────
|
||||
{
|
||||
i18nKey: 'imageCrop',
|
||||
slug: 'image-crop',
|
||||
titleSuffix: 'Free Online Image Cropper',
|
||||
metaDescription: 'Crop images online for free. Specify exact pixel coordinates to trim your photos and graphics.',
|
||||
category: 'Image',
|
||||
relatedSlugs: ['image-resize', 'image-rotate-flip', 'compress-image', 'image-converter'],
|
||||
keywords: 'crop image, image cropper, trim image, cut image, photo crop',
|
||||
features: [
|
||||
'Specify exact crop coordinates in pixels',
|
||||
'Supports PNG, JPG, and WebP',
|
||||
'High-quality output',
|
||||
'Free — no watermarks added',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'How do I crop an image?', answer: 'Upload your image, enter the left, top, right, and bottom pixel coordinates for the crop area, then download the result.' },
|
||||
{ question: 'What formats are supported?', answer: 'PNG, JPG/JPEG, and WebP images are supported.' },
|
||||
],
|
||||
},
|
||||
{
|
||||
i18nKey: 'imageRotateFlip',
|
||||
slug: 'image-rotate-flip',
|
||||
titleSuffix: 'Free Online Image Rotate & Flip Tool',
|
||||
metaDescription: 'Rotate and flip images online for free. Rotate by 90°, 180°, or 270° and flip horizontally or vertically.',
|
||||
category: 'Image',
|
||||
relatedSlugs: ['image-crop', 'image-resize', 'compress-image', 'image-converter'],
|
||||
keywords: 'rotate image, flip image, image rotator, mirror image, image orientation',
|
||||
features: [
|
||||
'Rotate images by 90°, 180°, or 270°',
|
||||
'Flip horizontally or vertically',
|
||||
'Combine rotation and flip in one step',
|
||||
'Supports PNG, JPG, and WebP',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'Can I rotate and flip at the same time?', answer: 'Yes, you can combine rotation and flip operations in a single step.' },
|
||||
{ question: 'Does rotating affect quality?', answer: 'No, rotation is lossless for PNG. For JPG, the quality is preserved as closely as possible.' },
|
||||
],
|
||||
},
|
||||
{
|
||||
i18nKey: 'barcode',
|
||||
slug: 'barcode-generator',
|
||||
titleSuffix: 'Free Online Barcode Generator',
|
||||
metaDescription: 'Generate barcodes online for free. Supports Code128, Code39, EAN-13, UPC-A, ISBN, and more formats.',
|
||||
category: 'Utility',
|
||||
relatedSlugs: ['qr-code'],
|
||||
keywords: 'barcode generator, create barcode, code128, ean13, upc barcode, free barcode',
|
||||
features: [
|
||||
'Support for Code128, Code39, EAN-13, UPC-A, ISBN and more',
|
||||
'Output as PNG or SVG',
|
||||
'Instant generation with preview',
|
||||
'Free with no limits',
|
||||
],
|
||||
faqs: [
|
||||
{ question: 'What barcode formats are supported?', answer: 'We support Code128, Code39, EAN-13, EAN-8, UPC-A, ISBN-13, ISBN-10, ISSN, and PZN barcode formats.' },
|
||||
{ question: 'What is the difference between a barcode and a QR code?', answer: 'Barcodes are one-dimensional (linear) and hold less data. QR codes are two-dimensional and can store more information. Use both from our tools.' },
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
/** Look up a tool's SEO data by slug */
|
||||
|
||||
533
frontend/src/content/blogArticles.ts
Normal file
533
frontend/src/content/blogArticles.ts
Normal file
@@ -0,0 +1,533 @@
|
||||
export type BlogLocale = 'en' | 'ar' | 'fr';
|
||||
|
||||
interface LocalizedText {
|
||||
en: string;
|
||||
ar: string;
|
||||
fr: string;
|
||||
}
|
||||
|
||||
interface BlogArticleSection {
|
||||
heading: LocalizedText;
|
||||
paragraphs: LocalizedText[];
|
||||
bullets?: LocalizedText[];
|
||||
}
|
||||
|
||||
export interface BlogArticle {
|
||||
slug: string;
|
||||
category: 'PDF' | 'Image' | 'AI';
|
||||
publishedAt: string;
|
||||
readingMinutes: number;
|
||||
toolSlugs: string[];
|
||||
title: LocalizedText;
|
||||
excerpt: LocalizedText;
|
||||
seoDescription: LocalizedText;
|
||||
keyTakeaways: LocalizedText[];
|
||||
sections: BlogArticleSection[];
|
||||
}
|
||||
|
||||
export interface LocalizedBlogArticle {
|
||||
slug: string;
|
||||
category: BlogArticle['category'];
|
||||
publishedAt: string;
|
||||
readingMinutes: number;
|
||||
toolSlugs: string[];
|
||||
title: string;
|
||||
excerpt: string;
|
||||
seoDescription: string;
|
||||
keyTakeaways: string[];
|
||||
sections: Array<{
|
||||
heading: string;
|
||||
paragraphs: string[];
|
||||
bullets: string[];
|
||||
}>;
|
||||
}
|
||||
|
||||
export const BLOG_ARTICLES: BlogArticle[] = [
|
||||
{
|
||||
slug: 'how-to-compress-pdf-online',
|
||||
category: 'PDF',
|
||||
publishedAt: '2025-01-15',
|
||||
readingMinutes: 4,
|
||||
toolSlugs: ['compress-pdf', 'merge-pdf', 'pdf-to-word'],
|
||||
title: {
|
||||
en: 'How to Compress PDFs Without Losing Quality',
|
||||
ar: 'كيف تضغط ملفات PDF دون فقدان الجودة',
|
||||
fr: 'Comment compresser des PDF sans perte de qualité',
|
||||
},
|
||||
excerpt: {
|
||||
en: 'Learn the best techniques to reduce PDF file size while maintaining document quality for sharing and uploading.',
|
||||
ar: 'تعلّم أفضل الطرق لتقليل حجم ملفات PDF مع الحفاظ على جودة المستند للمشاركة والرفع.',
|
||||
fr: 'Découvrez les meilleures techniques pour réduire la taille des PDF tout en conservant la qualité du document.',
|
||||
},
|
||||
seoDescription: {
|
||||
en: 'A practical guide to reducing PDF size without ruining text clarity, image fidelity, or upload readiness.',
|
||||
ar: 'دليل عملي لتقليل حجم PDF بدون الإضرار بوضوح النص أو جودة الصور أو جاهزية الرفع.',
|
||||
fr: 'Guide pratique pour réduire la taille d’un PDF sans dégrader la netteté du texte ni la qualité des images.',
|
||||
},
|
||||
keyTakeaways: [
|
||||
{
|
||||
en: 'Start with balanced compression before trying aggressive settings.',
|
||||
ar: 'ابدأ دائماً بضغط متوازن قبل تجربة الإعدادات القوية.',
|
||||
fr: 'Commencez par une compression équilibrée avant les réglages agressifs.',
|
||||
},
|
||||
{
|
||||
en: 'Image-heavy PDFs shrink the most, while text-heavy files often need less compression.',
|
||||
ar: 'ملفات PDF الغنية بالصور تنخفض أكثر، بينما الملفات النصية تحتاج عادةً ضغطاً أقل.',
|
||||
fr: 'Les PDF riches en images gagnent le plus, alors que les fichiers textuels nécessitent souvent moins de compression.',
|
||||
},
|
||||
{
|
||||
en: 'Review the final file before sending it to clients or uploading it to portals.',
|
||||
ar: 'راجع الملف النهائي قبل إرساله للعملاء أو رفعه إلى أي بوابة.',
|
||||
fr: 'Vérifiez toujours le fichier final avant de l’envoyer à un client ou de le téléverser.',
|
||||
},
|
||||
],
|
||||
sections: [
|
||||
{
|
||||
heading: {
|
||||
en: 'Why PDFs become too large',
|
||||
ar: 'لماذا تصبح ملفات PDF كبيرة جداً',
|
||||
fr: 'Pourquoi certains PDF deviennent trop lourds',
|
||||
},
|
||||
paragraphs: [
|
||||
{
|
||||
en: 'Most oversized PDFs are caused by embedded images, repeated scans, or exported documents that keep unnecessary metadata. The file may look simple, but it can still contain large assets behind the scenes.',
|
||||
ar: 'معظم ملفات PDF الكبيرة يكون سببها الصور المضمّنة، أو المسح الضوئي المتكرر، أو التصدير من برامج تحتفظ ببيانات وصفية غير ضرورية. قد يبدو الملف بسيطاً لكنه يحمل عناصر ثقيلة في الخلفية.',
|
||||
fr: 'La plupart des PDF volumineux sont dus aux images intégrées, aux scans répétés ou aux exports qui conservent trop de métadonnées. Le document peut sembler simple tout en contenant des éléments lourds en arrière-plan.',
|
||||
},
|
||||
{
|
||||
en: 'If your goal is email delivery, portal upload, or faster downloads, the right compression level matters more than chasing the smallest possible file.',
|
||||
ar: 'إذا كان هدفك هو الإرسال بالبريد أو الرفع إلى بوابة أو تسريع التنزيل، فإن اختيار مستوى الضغط المناسب أهم من مطاردة أصغر حجم ممكن.',
|
||||
fr: 'Si votre objectif est l’envoi par e-mail, le dépôt sur un portail ou un téléchargement plus rapide, le bon niveau de compression compte davantage que la taille minimale absolue.',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
heading: {
|
||||
en: 'A safer compression workflow',
|
||||
ar: 'سير عمل أكثر أماناً للضغط',
|
||||
fr: 'Une méthode de compression plus sûre',
|
||||
},
|
||||
paragraphs: [
|
||||
{
|
||||
en: 'Use balanced compression first, inspect pages with small text or charts, then decide whether you need a stronger setting. This prevents quality regressions that are hard to notice until a document reaches a customer.',
|
||||
ar: 'استخدم الضغط المتوازن أولاً، ثم افحص الصفحات التي تحتوي على نص صغير أو مخططات، وبعدها قرر إن كنت بحاجة إلى ضغط أقوى. بهذه الطريقة تتجنب تدهور الجودة الذي قد لا تلاحظه إلا بعد وصول الملف إلى العميل.',
|
||||
fr: 'Appliquez d’abord une compression équilibrée, vérifiez les pages contenant du petit texte ou des graphiques, puis décidez si un niveau plus fort est nécessaire. Cela évite les régressions de qualité détectées trop tard.',
|
||||
},
|
||||
],
|
||||
bullets: [
|
||||
{
|
||||
en: 'Compress before merging large reports to keep the final package smaller.',
|
||||
ar: 'اضغط الملفات قبل دمج التقارير الكبيرة للحفاظ على حجم الناتج النهائي أصغر.',
|
||||
fr: 'Compressez avant de fusionner de gros rapports afin de réduire le poids final.',
|
||||
},
|
||||
{
|
||||
en: 'Keep an original copy when handling signed or compliance documents.',
|
||||
ar: 'احتفظ بنسخة أصلية عند التعامل مع مستندات موقعة أو مرتبطة بالامتثال.',
|
||||
fr: 'Conservez une copie originale pour les documents signés ou réglementaires.',
|
||||
},
|
||||
{
|
||||
en: 'If a portal still rejects the file, remove metadata after compression.',
|
||||
ar: 'إذا استمرت البوابة في رفض الملف، فاحذف البيانات الوصفية بعد الضغط.',
|
||||
fr: 'Si le portail refuse encore le fichier, supprimez les métadonnées après compression.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
slug: 'convert-images-without-losing-quality',
|
||||
category: 'Image',
|
||||
publishedAt: '2025-01-10',
|
||||
readingMinutes: 4,
|
||||
toolSlugs: ['image-converter', 'image-resize', 'compress-image'],
|
||||
title: {
|
||||
en: 'Convert Images Between Formats Losslessly',
|
||||
ar: 'تحويل الصور بين الصيغ دون فقدان',
|
||||
fr: 'Convertir des images entre formats sans perte',
|
||||
},
|
||||
excerpt: {
|
||||
en: 'A complete guide to converting between PNG, JPG, WebP and other image formats while preserving quality.',
|
||||
ar: 'دليل كامل للتحويل بين PNG وJPG وWebP وغيرها مع الحفاظ على الجودة.',
|
||||
fr: 'Guide complet pour convertir entre PNG, JPG, WebP et d’autres formats tout en préservant la qualité.',
|
||||
},
|
||||
seoDescription: {
|
||||
en: 'Choose the right image format for screenshots, product photos, transparent graphics, and web performance.',
|
||||
ar: 'اختر صيغة الصورة المناسبة للقطات الشاشة وصور المنتجات والعناصر الشفافة وأداء الويب.',
|
||||
fr: 'Choisissez le bon format pour les captures d’écran, les photos produit, les graphismes transparents et la performance web.',
|
||||
},
|
||||
keyTakeaways: [
|
||||
{
|
||||
en: 'PNG is best for transparency and interface graphics.',
|
||||
ar: 'PNG هو الأفضل للشفافية ورسومات الواجهات.',
|
||||
fr: 'Le PNG reste idéal pour la transparence et les interfaces.',
|
||||
},
|
||||
{
|
||||
en: 'JPG is efficient for photos, while WebP often gives the best web balance.',
|
||||
ar: 'JPG مناسب للصور الفوتوغرافية، بينما WebP يقدّم غالباً أفضل توازن للويب.',
|
||||
fr: 'Le JPG convient aux photos, tandis que le WebP offre souvent le meilleur compromis pour le web.',
|
||||
},
|
||||
{
|
||||
en: 'Resize before compressing if you need smaller files for publishing.',
|
||||
ar: 'غيّر الأبعاد قبل الضغط إذا كنت تحتاج ملفات أصغر للنشر.',
|
||||
fr: 'Redimensionnez avant de compresser si vous visez des fichiers plus légers pour la publication.',
|
||||
},
|
||||
],
|
||||
sections: [
|
||||
{
|
||||
heading: {
|
||||
en: 'Pick the format for the job',
|
||||
ar: 'اختر الصيغة بحسب المهمة',
|
||||
fr: 'Choisir le format selon l’usage',
|
||||
},
|
||||
paragraphs: [
|
||||
{
|
||||
en: 'Image conversion is not only about compatibility. It affects loading speed, transparency support, print quality, and how clean the asset looks after repeated editing.',
|
||||
ar: 'تحويل الصور لا يتعلق بالتوافق فقط. بل يؤثر في سرعة التحميل، ودعم الشفافية، وجودة الطباعة، ومدى نظافة الملف بعد التعديل المتكرر.',
|
||||
fr: 'La conversion d’image ne concerne pas seulement la compatibilité. Elle influence la vitesse de chargement, la transparence, la qualité d’impression et la tenue de l’image après plusieurs éditions.',
|
||||
},
|
||||
{
|
||||
en: 'Screenshots and diagrams usually benefit from PNG or WebP, while camera photos often work better as JPG or WebP with moderate compression.',
|
||||
ar: 'لقطات الشاشة والرسومات تستفيد غالباً من PNG أو WebP، بينما صور الكاميرا تعمل عادةً بشكل أفضل مع JPG أو WebP بضغط متوسط.',
|
||||
fr: 'Les captures d’écran et schémas profitent souvent du PNG ou du WebP, alors que les photos d’appareil se prêtent mieux au JPG ou au WebP avec une compression modérée.',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
heading: {
|
||||
en: 'Avoid hidden quality loss',
|
||||
ar: 'تجنب فقدان الجودة غير الملحوظ',
|
||||
fr: 'Éviter les pertes de qualité invisibles',
|
||||
},
|
||||
paragraphs: [
|
||||
{
|
||||
en: 'Repeated conversions between lossy formats degrade sharp edges and text overlays. Keep one high-quality master file, then export the version you need for delivery.',
|
||||
ar: 'التحويل المتكرر بين الصيغ الضائعة يضعف الحواف الحادة والنصوص فوق الصور. احتفظ بنسخة رئيسية عالية الجودة ثم صدّر النسخة المناسبة للتسليم.',
|
||||
fr: 'Les conversions répétées entre formats avec perte dégradent les bords nets et les textes incrustés. Conservez un original de qualité, puis exportez uniquement la version de diffusion.',
|
||||
},
|
||||
],
|
||||
bullets: [
|
||||
{
|
||||
en: 'Resize hero images before uploading them to your website.',
|
||||
ar: 'غيّر أبعاد الصور الرئيسية قبل رفعها إلى موقعك.',
|
||||
fr: 'Redimensionnez les visuels principaux avant de les envoyer sur votre site.',
|
||||
},
|
||||
{
|
||||
en: 'Use transparent PNG or WebP for logos and overlays.',
|
||||
ar: 'استخدم PNG أو WebP شفافاً للشعارات والعناصر المركبة.',
|
||||
fr: 'Utilisez du PNG ou du WebP transparent pour les logos et surimpressions.',
|
||||
},
|
||||
{
|
||||
en: 'Choose compression after you confirm the final dimensions.',
|
||||
ar: 'اختر الضغط بعد التأكد من الأبعاد النهائية للصورة.',
|
||||
fr: 'Choisissez la compression après validation des dimensions finales.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
slug: 'ocr-extract-text-from-images',
|
||||
category: 'AI',
|
||||
publishedAt: '2025-01-05',
|
||||
readingMinutes: 5,
|
||||
toolSlugs: ['ocr', 'pdf-to-word', 'extract-tables'],
|
||||
title: {
|
||||
en: 'Extract Text from Scanned Documents with OCR',
|
||||
ar: 'استخراج النص من المستندات الممسوحة بـ OCR',
|
||||
fr: 'Extraire du texte de documents numérisés avec l’OCR',
|
||||
},
|
||||
excerpt: {
|
||||
en: 'Turn scanned PDFs and images into editable, searchable text using our AI-powered OCR technology.',
|
||||
ar: 'حوّل ملفات PDF الممسوحة والصور إلى نص قابل للتعديل والبحث باستخدام OCR المدعوم بالذكاء الاصطناعي.',
|
||||
fr: 'Transformez les PDF numérisés et les images en texte modifiable et recherchable grâce à l’OCR.',
|
||||
},
|
||||
seoDescription: {
|
||||
en: 'Improve OCR accuracy with better scans, language selection, and post-processing workflows for editable output.',
|
||||
ar: 'ارفع دقة OCR من خلال تحسين المسح الضوئي واختيار اللغة وسير العمل اللاحق للحصول على نص قابل للتحرير.',
|
||||
fr: 'Améliorez la précision OCR grâce à de meilleurs scans, au bon choix de langue et à un workflow de correction.',
|
||||
},
|
||||
keyTakeaways: [
|
||||
{
|
||||
en: 'Clean scans and the right OCR language dramatically improve accuracy.',
|
||||
ar: 'المسح النظيف واختيار لغة OCR الصحيحة يرفعان الدقة بشكل كبير.',
|
||||
fr: 'Un scan propre et la bonne langue OCR améliorent fortement la précision.',
|
||||
},
|
||||
{
|
||||
en: 'OCR is ideal for searchable archives, not perfect page design recreation.',
|
||||
ar: 'OCR مناسب للأرشفة القابلة للبحث، وليس لإعادة تصميم الصفحة بدقة كاملة.',
|
||||
fr: 'L’OCR sert surtout à rendre les archives recherchables, pas à reproduire parfaitement la mise en page.',
|
||||
},
|
||||
{
|
||||
en: 'Use table extraction or Word conversion after OCR when structure matters.',
|
||||
ar: 'استخدم استخراج الجداول أو التحويل إلى Word بعد OCR عندما تكون البنية مهمة.',
|
||||
fr: 'Utilisez ensuite l’extraction de tableaux ou la conversion Word si la structure compte.',
|
||||
},
|
||||
],
|
||||
sections: [
|
||||
{
|
||||
heading: {
|
||||
en: 'What OCR is good at',
|
||||
ar: 'ما الذي يتقنه OCR',
|
||||
fr: 'Ce que l’OCR fait bien',
|
||||
},
|
||||
paragraphs: [
|
||||
{
|
||||
en: 'OCR turns image-based text into selectable text that you can search, copy, and reuse. It is especially useful for scanned contracts, invoices, receipts, and photographed notes.',
|
||||
ar: 'يقوم OCR بتحويل النص الموجود داخل الصور إلى نص يمكن تحديده والبحث فيه ونسخه وإعادة استخدامه. وهو مفيد خصوصاً للعقود الممسوحة والفواتير والإيصالات والملاحظات المصوّرة.',
|
||||
fr: 'L’OCR transforme le texte présent dans une image en texte sélectionnable, copiable et recherchable. Il est particulièrement utile pour les contrats scannés, factures, reçus et notes photographiées.',
|
||||
},
|
||||
{
|
||||
en: 'It works best when text is high contrast, upright, and captured at a readable resolution. Blurry or skewed pages still work, but you should expect more cleanup afterward.',
|
||||
ar: 'يعمل بأفضل شكل عندما يكون النص واضح التباين ومستقيماً وبدقة مناسبة. يمكنه التعامل مع الصفحات المشوشة أو المائلة، لكنك ستحتاج عادةً إلى تنظيف أكبر بعد الاستخراج.',
|
||||
fr: 'Il fonctionne mieux quand le texte est net, bien contrasté, droit et d’une résolution suffisante. Les pages floues ou inclinées restent possibles, mais demandent plus de corrections ensuite.',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
heading: {
|
||||
en: 'How to improve the final output',
|
||||
ar: 'كيف تحسن النتيجة النهائية',
|
||||
fr: 'Comment améliorer le résultat final',
|
||||
},
|
||||
paragraphs: [
|
||||
{
|
||||
en: 'Before running OCR, crop noisy margins and rotate crooked images. After extraction, move structured content into Word or a spreadsheet if you need real editing rather than plain text.',
|
||||
ar: 'قبل تشغيل OCR، قص الحواف المزعجة ودوّر الصور المائلة. وبعد الاستخراج، انقل المحتوى المنظم إلى Word أو جدول إذا كنت تحتاج تحريراً فعلياً وليس نصاً مجرداً فقط.',
|
||||
fr: 'Avant de lancer l’OCR, rognez les marges inutiles et redressez les images. Après extraction, envoyez le contenu structuré vers Word ou un tableur si vous avez besoin d’édition réelle.',
|
||||
},
|
||||
],
|
||||
bullets: [
|
||||
{
|
||||
en: 'Use the exact OCR language whenever possible.',
|
||||
ar: 'استخدم لغة OCR الدقيقة كلما أمكن.',
|
||||
fr: 'Choisissez la langue OCR exacte dès que possible.',
|
||||
},
|
||||
{
|
||||
en: 'Split mixed documents when only a few pages need OCR.',
|
||||
ar: 'قسّم المستندات المختلطة عندما تكون بضع صفحات فقط بحاجة إلى OCR.',
|
||||
fr: 'Découpez les documents mixtes si seules quelques pages nécessitent l’OCR.',
|
||||
},
|
||||
{
|
||||
en: 'Review numbers and names manually before final delivery.',
|
||||
ar: 'راجع الأرقام والأسماء يدوياً قبل التسليم النهائي.',
|
||||
fr: 'Vérifiez manuellement les nombres et noms avant livraison.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
slug: 'merge-split-pdf-files',
|
||||
category: 'PDF',
|
||||
publishedAt: '2024-12-28',
|
||||
readingMinutes: 4,
|
||||
toolSlugs: ['merge-pdf', 'split-pdf', 'extract-pages'],
|
||||
title: {
|
||||
en: 'Master Merging and Splitting PDF Files',
|
||||
ar: 'إتقان دمج وتقسيم ملفات PDF',
|
||||
fr: 'Maîtriser la fusion et la division de fichiers PDF',
|
||||
},
|
||||
excerpt: {
|
||||
en: 'Step-by-step guide to combining multiple PDFs into one or splitting a large PDF into separate files.',
|
||||
ar: 'دليل خطوة بخطوة لدمج عدة ملفات PDF في ملف واحد أو تقسيم ملف كبير إلى ملفات منفصلة.',
|
||||
fr: 'Guide pas à pas pour combiner plusieurs PDF en un seul ou découper un gros document en plusieurs fichiers.',
|
||||
},
|
||||
seoDescription: {
|
||||
en: 'Organize reports, contracts, and attachments faster by choosing the right merge, split, or extract workflow.',
|
||||
ar: 'نظّم التقارير والعقود والمرفقات بسرعة أكبر باختيار سير العمل المناسب للدمج أو التقسيم أو الاستخراج.',
|
||||
fr: 'Organisez plus vite rapports, contrats et pièces jointes en choisissant le bon workflow de fusion, division ou extraction.',
|
||||
},
|
||||
keyTakeaways: [
|
||||
{
|
||||
en: 'Merge for delivery packages, split for review and routing.',
|
||||
ar: 'استخدم الدمج لحزم التسليم، والتقسيم للمراجعة والتوزيع.',
|
||||
fr: 'Fusionnez pour livrer un dossier complet, découpez pour relire ou distribuer.',
|
||||
},
|
||||
{
|
||||
en: 'Extract only the pages you need instead of duplicating large files.',
|
||||
ar: 'استخرج الصفحات المطلوبة فقط بدلاً من تكرار الملفات الكبيرة كاملة.',
|
||||
fr: 'Extrayez uniquement les pages utiles plutôt que de dupliquer de gros fichiers.',
|
||||
},
|
||||
{
|
||||
en: 'Reorder pages before final export when document sequence matters.',
|
||||
ar: 'أعد ترتيب الصفحات قبل التصدير النهائي عندما يكون التسلسل مهماً.',
|
||||
fr: 'Réorganisez les pages avant export si l’ordre du document est critique.',
|
||||
},
|
||||
],
|
||||
sections: [
|
||||
{
|
||||
heading: {
|
||||
en: 'When to merge and when to split',
|
||||
ar: 'متى تدمج ومتى تقسّم',
|
||||
fr: 'Quand fusionner et quand séparer',
|
||||
},
|
||||
paragraphs: [
|
||||
{
|
||||
en: 'Merging is useful when you need one clean delivery package for a client, regulator, or internal archive. Splitting helps when each stakeholder only needs specific pages or sections.',
|
||||
ar: 'الدمج مفيد عندما تحتاج حزمة تسليم نظيفة واحدة للعميل أو للجهة التنظيمية أو للأرشفة الداخلية. أما التقسيم فيفيد عندما يحتاج كل طرف صفحات أو أقساماً محددة فقط.',
|
||||
fr: 'La fusion est utile pour produire un dossier unique à transmettre à un client, un régulateur ou un archivage interne. La division aide quand chaque destinataire n’a besoin que de certaines pages.',
|
||||
},
|
||||
{
|
||||
en: 'A good workflow often combines both: extract or split first, then merge only the pages that belong together.',
|
||||
ar: 'وغالباً ما يجمع سير العمل الجيد بين الاثنين: استخرج أو قسّم أولاً، ثم ادمج الصفحات التي يجب أن تبقى معاً فقط.',
|
||||
fr: 'Le meilleur workflow combine souvent les deux: extraire ou découper d’abord, puis fusionner uniquement les pages réellement liées.',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
heading: {
|
||||
en: 'Reduce mistakes in document assembly',
|
||||
ar: 'قلّل أخطاء تجميع المستندات',
|
||||
fr: 'Réduire les erreurs d’assemblage',
|
||||
},
|
||||
paragraphs: [
|
||||
{
|
||||
en: 'Before sending a compiled PDF, verify page order, duplicate pages, and section breaks. A fast visual scan after assembly is cheaper than reissuing the wrong document later.',
|
||||
ar: 'قبل إرسال PDF مجمّع، تأكد من ترتيب الصفحات وتكرارها وفواصل الأقسام. المراجعة البصرية السريعة بعد التجميع أقل كلفة بكثير من إعادة إصدار المستند لاحقاً بشكل خاطئ.',
|
||||
fr: 'Avant d’envoyer un PDF assemblé, vérifiez l’ordre des pages, les doublons et les ruptures de section. Un contrôle visuel rapide coûte moins cher qu’une réémission ultérieure.',
|
||||
},
|
||||
],
|
||||
bullets: [
|
||||
{
|
||||
en: 'Name output files by audience or purpose.',
|
||||
ar: 'سمِّ الملفات الناتجة وفق الجمهور أو الغرض.',
|
||||
fr: 'Nommez les fichiers selon le destinataire ou l’usage.',
|
||||
},
|
||||
{
|
||||
en: 'Compress final bundles only after page order is locked.',
|
||||
ar: 'اضغط الحزم النهائية فقط بعد تثبيت ترتيب الصفحات.',
|
||||
fr: 'Compressez le lot final uniquement une fois l’ordre figé.',
|
||||
},
|
||||
{
|
||||
en: 'Use page extraction when only annexes or signatures are required.',
|
||||
ar: 'استخدم استخراج الصفحات عندما تحتاج فقط الملاحق أو صفحات التوقيع.',
|
||||
fr: 'Utilisez l’extraction si vous n’avez besoin que des annexes ou signatures.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
slug: 'ai-chat-with-pdf-documents',
|
||||
category: 'AI',
|
||||
publishedAt: '2024-12-20',
|
||||
readingMinutes: 5,
|
||||
toolSlugs: ['chat-pdf', 'summarize-pdf', 'translate-pdf'],
|
||||
title: {
|
||||
en: 'Chat with Your PDF Documents Using AI',
|
||||
ar: 'تحدث مع مستندات PDF باستخدام الذكاء الاصطناعي',
|
||||
fr: 'Discutez avec vos documents PDF grâce à l’IA',
|
||||
},
|
||||
excerpt: {
|
||||
en: 'Discover how AI can help you ask questions and get instant answers from any PDF document.',
|
||||
ar: 'اكتشف كيف يساعدك الذكاء الاصطناعي على طرح الأسئلة والحصول على إجابات فورية من أي مستند PDF.',
|
||||
fr: 'Découvrez comment l’IA peut vous aider à poser des questions et obtenir des réponses instantanées à partir de n’importe quel PDF.',
|
||||
},
|
||||
seoDescription: {
|
||||
en: 'Use AI chat, summaries, and translation together to move from document reading to faster decisions.',
|
||||
ar: 'استخدم المحادثة والملخصات والترجمة بالذكاء الاصطناعي معاً للانتقال من القراءة إلى القرار بشكل أسرع.',
|
||||
fr: 'Combinez chat IA, résumés et traduction pour passer plus vite de la lecture à la décision.',
|
||||
},
|
||||
keyTakeaways: [
|
||||
{
|
||||
en: 'AI chat is strongest when you ask narrow, contextual questions.',
|
||||
ar: 'المحادثة الذكية تكون أقوى عندما تطرح أسئلة ضيقة ومحددة بالسياق.',
|
||||
fr: 'Le chat IA est plus performant avec des questions précises et contextualisées.',
|
||||
},
|
||||
{
|
||||
en: 'Summaries help you orient quickly before deeper analysis.',
|
||||
ar: 'الملخصات تساعدك على التوجيه السريع قبل التحليل المتعمق.',
|
||||
fr: 'Les résumés aident à s’orienter rapidement avant une analyse plus profonde.',
|
||||
},
|
||||
{
|
||||
en: 'Translation expands access, but the original document should still be reviewed for critical decisions.',
|
||||
ar: 'الترجمة توسّع الوصول، لكن يجب مراجعة المستند الأصلي عند اتخاذ قرارات حساسة.',
|
||||
fr: 'La traduction élargit l’accès, mais le document source doit être relu pour les décisions critiques.',
|
||||
},
|
||||
],
|
||||
sections: [
|
||||
{
|
||||
heading: {
|
||||
en: 'Turn long documents into answers',
|
||||
ar: 'حوّل المستندات الطويلة إلى إجابات',
|
||||
fr: 'Transformer de longs documents en réponses',
|
||||
},
|
||||
paragraphs: [
|
||||
{
|
||||
en: 'AI chat becomes useful when documents are long, repetitive, or packed with details. Instead of scrolling for one clause or number, you can ask targeted questions and move faster.',
|
||||
ar: 'تصبح محادثة الذكاء الاصطناعي مفيدة عندما تكون المستندات طويلة أو متكررة أو مليئة بالتفاصيل. بدلاً من التمرير بحثاً عن بند أو رقم واحد، يمكنك طرح أسئلة محددة والتحرك بسرعة أكبر.',
|
||||
fr: 'Le chat IA devient particulièrement utile lorsque les documents sont longs, répétitifs ou très denses. Plutôt que de parcourir chaque page, vous posez une question ciblée et avancez plus vite.',
|
||||
},
|
||||
{
|
||||
en: 'This works especially well for policy manuals, proposals, research PDFs, and contract drafts that need quick understanding before deeper review.',
|
||||
ar: 'ويناسب ذلك بشكل خاص أدلة السياسات، والعروض، والأبحاث، ومسودات العقود التي تحتاج إلى فهم سريع قبل المراجعة المتعمقة.',
|
||||
fr: 'Cela fonctionne particulièrement bien pour les manuels, propositions, recherches PDF et projets de contrat nécessitant une compréhension rapide avant relecture détaillée.',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
heading: {
|
||||
en: 'Build a practical AI workflow',
|
||||
ar: 'ابنِ سير عمل عملياً مع الذكاء الاصطناعي',
|
||||
fr: 'Construire un workflow IA pratique',
|
||||
},
|
||||
paragraphs: [
|
||||
{
|
||||
en: 'Start with a summary, ask follow-up questions about the exact section you care about, then translate or export only what needs to be shared. This keeps the workflow focused and auditable.',
|
||||
ar: 'ابدأ بملخص، ثم اطرح أسئلة متابعة حول القسم الذي يهمك فعلاً، ثم ترجم أو صدّر فقط ما يلزم مشاركته. هكذا يبقى سير العمل مركزاً وقابلاً للمراجعة.',
|
||||
fr: 'Commencez par un résumé, posez ensuite des questions ciblées sur la section utile, puis traduisez ou partagez seulement ce qui doit l’être. Le flux de travail reste ainsi concentré et contrôlable.',
|
||||
},
|
||||
],
|
||||
bullets: [
|
||||
{
|
||||
en: 'Ask for page references when you need validation.',
|
||||
ar: 'اطلب الإشارة إلى الصفحات عندما تحتاج إلى تحقق إضافي.',
|
||||
fr: 'Demandez des références de pages lorsque vous avez besoin de validation.',
|
||||
},
|
||||
{
|
||||
en: 'Use summaries before meetings or handoffs.',
|
||||
ar: 'استخدم الملخصات قبل الاجتماعات أو التسليمات.',
|
||||
fr: 'Utilisez les résumés avant une réunion ou un transfert.',
|
||||
},
|
||||
{
|
||||
en: 'Keep human review in the loop for legal or financial material.',
|
||||
ar: 'أبقِ المراجعة البشرية حاضرة مع المواد القانونية أو المالية.',
|
||||
fr: 'Gardez toujours une relecture humaine pour les contenus juridiques ou financiers.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
export function normalizeBlogLocale(language: string): BlogLocale {
|
||||
const baseLanguage = language.split('-')[0] as BlogLocale;
|
||||
return baseLanguage === 'ar' || baseLanguage === 'fr' ? baseLanguage : 'en';
|
||||
}
|
||||
|
||||
function localizeText(text: LocalizedText, locale: BlogLocale): string {
|
||||
return text[locale] || text.en;
|
||||
}
|
||||
|
||||
export function getLocalizedBlogArticle(article: BlogArticle, locale: BlogLocale): LocalizedBlogArticle {
|
||||
return {
|
||||
slug: article.slug,
|
||||
category: article.category,
|
||||
publishedAt: article.publishedAt,
|
||||
readingMinutes: article.readingMinutes,
|
||||
toolSlugs: article.toolSlugs,
|
||||
title: localizeText(article.title, locale),
|
||||
excerpt: localizeText(article.excerpt, locale),
|
||||
seoDescription: localizeText(article.seoDescription, locale),
|
||||
keyTakeaways: article.keyTakeaways.map((item) => localizeText(item, locale)),
|
||||
sections: article.sections.map((section) => ({
|
||||
heading: localizeText(section.heading, locale),
|
||||
paragraphs: section.paragraphs.map((paragraph) => localizeText(paragraph, locale)),
|
||||
bullets: (section.bullets || []).map((bullet) => localizeText(bullet, locale)),
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
export function getBlogArticleBySlug(slug: string): BlogArticle | undefined {
|
||||
return BLOG_ARTICLES.find((article) => article.slug === slug);
|
||||
}
|
||||
@@ -17,6 +17,8 @@
|
||||
"privacy": "سياسة الخصوصية",
|
||||
"terms": "شروط الاستخدام",
|
||||
"language": "اللغة",
|
||||
"search": "بحث",
|
||||
"clear": "مسح",
|
||||
"allTools": "كل الأدوات",
|
||||
"account": "الحساب",
|
||||
"signIn": "تسجيل الدخول",
|
||||
@@ -27,6 +29,7 @@
|
||||
"contact": "اتصل بنا",
|
||||
"pricing": "الأسعار",
|
||||
"blog": "المدونة",
|
||||
"developers": "للمطورين",
|
||||
"send": "إرسال",
|
||||
"subject": "الموضوع",
|
||||
"message": "الرسالة",
|
||||
@@ -121,6 +124,8 @@
|
||||
"editNowTooltip": "افتح أداة تحسين PDF السريعة لإنشاء نسخة نظيفة قابلة للتنزيل",
|
||||
"suggestedTools": "الأدوات المقترحة لملفك",
|
||||
"suggestedToolsDesc": "بعد رفع الملف سنعرض الأدوات المتوافقة تلقائيًا: تحرير نص، تمييز، دمج/تقسيم، ضغط، تحويل إلى Word/صورة، تحويل فيديو إلى GIF، والمزيد.",
|
||||
"searchToolsPlaceholder": "ابحث عن الأدوات حسب الاسم أو الاستخدام",
|
||||
"noSearchResults": "لم نعثر على أدوات مطابقة لعبارة البحث بعد. جرّب كلمات أوسع مثل PDF أو صورة أو OCR أو ضغط.",
|
||||
"selectTool": "اختر أداة",
|
||||
"fileDetected": "اكتشفنا ملف {{type}}",
|
||||
"unsupportedFile": "نوع الملف غير مدعوم. جرب PDF أو Word أو صور أو فيديو.",
|
||||
@@ -137,6 +142,17 @@
|
||||
"feature3Title": "أمان مدمج",
|
||||
"feature3Desc": "قم بالوصول إلى ملفاتك بأمان، محمية بتشفير تلقائي."
|
||||
},
|
||||
"socialProof": {
|
||||
"badge": "موثوق من فرق نشطة",
|
||||
"title": "نشاط حي للمنتج من عمليات حقيقية وتقييمات فعلية",
|
||||
"subtitle": "هذه الأرقام تأتي مباشرة من المهام المكتملة وتقييمات الأدوات عبر المنصة.",
|
||||
"processedFiles": "الملفات المعالجة",
|
||||
"successRate": "معدل النجاح",
|
||||
"last24h": "آخر 24 ساعة",
|
||||
"averageRating": "متوسط التقييم",
|
||||
"basedOnRatings": "استنادًا إلى {{count}} تقييمًا مرسلاً",
|
||||
"viewDevelopers": "عرض وثائق المطورين"
|
||||
},
|
||||
"pages": {
|
||||
"about": {
|
||||
"metaDescription": "تعرّف على SaaS-PDF — أدوات ملفات مجانية وسريعة وآمنة عبر الإنترنت لملفات PDF والصور والفيديو والنصوص. لا حاجة للتسجيل.",
|
||||
@@ -311,14 +327,62 @@
|
||||
"q": "ما طرق الدفع المقبولة؟",
|
||||
"a": "نقبل جميع بطاقات الائتمان/الخصم الرئيسية وPayPal. تتم معالجة جميع المدفوعات بأمان عبر Stripe."
|
||||
}
|
||||
]
|
||||
],
|
||||
"trustTitle": "مصمم للفرق التي تحتاج سرعة ووضوحًا في النتائج",
|
||||
"trustSubtitle": "المنصة نفسها تدعم الاستخدام السريع من المتصفح، والعمل المتكرر عبر الحساب، وتدفقات المستندات عبر API.",
|
||||
"trustFastTitle": "معالجة سريعة",
|
||||
"trustFastDesc": "تنفيذ المهام غير المتزامنة والعمال المحسّنون يساعدان على استمرار الأعمال الثقيلة دون تعطيل الواجهة.",
|
||||
"trustPrivateTitle": "الخصوصية افتراضيًا",
|
||||
"trustPrivateDesc": "يتم التحقق من الملفات ومعالجتها بأمان ثم تنظيفها تلقائيًا بعد نافذة الاحتفاظ.",
|
||||
"trustApiTitle": "جاهز للتكامل",
|
||||
"trustApiDesc": "يمكن لمساحات العمل الاحترافية إنشاء مفاتيح API وربط الأدوات نفسها مع الأتمتة الداخلية أو تدفقات العملاء."
|
||||
},
|
||||
"developers": {
|
||||
"metaDescription": "استكشف بوابة مطوري SaaS-PDF، وتدفق API غير المتزامن، والنقاط الجاهزة لأتمتة المستندات.",
|
||||
"badge": "بوابة المطورين",
|
||||
"title": "ابنِ تدفقات المستندات فوق واجهة SaaS-PDF البرمجية",
|
||||
"subtitle": "استخدم نقاط التحويل والضغط وOCR والذكاء الاصطناعي نفسها الموجودة خلف التطبيق داخل أنظمتك الخاصة.",
|
||||
"getApiKey": "احصل على مفتاح API",
|
||||
"comparePlans": "قارن الخطط",
|
||||
"openDocs": "افتح وثائق المطورين",
|
||||
"ctaTitle": "هل تحتاج وصول API لمهام مستندات متكررة؟",
|
||||
"ctaSubtitle": "انتقل من الاستخدام الفردي في المتصفح إلى تدفقات قائمة على الحساب مع نقاط غير متزامنة ومفاتيح API.",
|
||||
"authExampleTitle": "مثال: رفع ملف",
|
||||
"authExampleSubtitle": "أرسل ملفًا إلى نقطة v1 باستخدام مفتاح API ثم استلم معرف مهمة للمعالجة غير المتزامنة.",
|
||||
"pollExampleTitle": "مثال: متابعة حالة المهمة",
|
||||
"pollExampleSubtitle": "افحص نقطة حالة المهمة حتى تكتمل المعالجة، ثم استخدم رابط التحميل المعاد.",
|
||||
"endpointsTitle": "مجموعات النقاط الحالية",
|
||||
"endpointsSubtitle": "هذه المجموعات تعكس مسارات API الحقيقية المتاحة حاليًا في الباكند.",
|
||||
"groupConvert": "التحويلات",
|
||||
"groupPdf": "عمليات PDF",
|
||||
"groupAi": "الذكاء الاصطناعي والاستخراج",
|
||||
"steps": {
|
||||
"createKey": {
|
||||
"title": "أنشئ المفتاح",
|
||||
"description": "ولّد مفتاح API احترافي من مساحة عمل حسابك حتى تتمكن الأتمتة من المصادقة بأمان."
|
||||
},
|
||||
"sendFile": {
|
||||
"title": "أرسل الملف",
|
||||
"description": "أرسل الملف المصدر إلى النقطة المطلوبة واحفظ معرف المهمة المعاد داخل نظامك."
|
||||
},
|
||||
"pollStatus": {
|
||||
"title": "تابع ثم حمّل",
|
||||
"description": "تتبّع اكتمال المهمة عبر نقطة الحالة ثم نزّل الملف المعالج عندما يصبح جاهزًا."
|
||||
}
|
||||
}
|
||||
},
|
||||
"blog": {
|
||||
"metaTitle": "المدونة — نصائح ودروس وتحديثات",
|
||||
"metaDescription": "تعلم كيفية ضغط وتحويل وتعديل وإدارة ملفات PDF مع أدلتنا ودروسنا الاحترافية.",
|
||||
"title": "المدونة",
|
||||
"subtitle": "نصائح ودروس تعليمية وتحديثات المنتج لمساعدتك على العمل بذكاء.",
|
||||
"searchPlaceholder": "ابحث في المقالات حسب الموضوع أو الأداة أو سير العمل",
|
||||
"readMore": "اقرأ المزيد",
|
||||
"backToBlog": "العودة إلى المدونة",
|
||||
"readTime": "{{count}} دقيقة قراءة",
|
||||
"keyTakeaways": "أهم النقاط",
|
||||
"featuredTools": "الأدوات المذكورة في هذا الدليل",
|
||||
"noResults": "لم نجد مقالات مطابقة لعبارة البحث. جرّب PDF أو OCR أو AI أو صورة أو تحويل.",
|
||||
"comingSoon": "مقالات أخرى قادمة قريبًا — تابعنا!",
|
||||
"posts": {
|
||||
"compressPdf": {
|
||||
@@ -758,6 +822,91 @@
|
||||
"processingFailed": "فشل استخراج الجداول. يرجى تجربة ملف PDF مختلف.",
|
||||
"invalidFile": "ملف PDF غير صالح أو تالف. يرجى رفع ملف PDF صحيح."
|
||||
}
|
||||
},
|
||||
"pdfToPptx": {
|
||||
"title": "PDF إلى باوربوينت",
|
||||
"description": "حوّل ملفات PDF إلى عروض تقديمية (PPTX). كل صفحة تصبح شريحة.",
|
||||
"shortDesc": "تحويل إلى PPTX"
|
||||
},
|
||||
"excelToPdf": {
|
||||
"title": "إكسل إلى PDF",
|
||||
"description": "حوّل جداول بيانات إكسل (XLSX, XLS) إلى مستندات PDF.",
|
||||
"shortDesc": "تحويل إلى PDF"
|
||||
},
|
||||
"pptxToPdf": {
|
||||
"title": "باوربوينت إلى PDF",
|
||||
"description": "حوّل عروض باوربوينت التقديمية (PPTX, PPT) إلى PDF.",
|
||||
"shortDesc": "تحويل إلى PDF"
|
||||
},
|
||||
"signPdf": {
|
||||
"title": "توقيع PDF",
|
||||
"description": "أضف صورة توقيعك إلى أي مستند PDF. وقّع بدون طباعة.",
|
||||
"shortDesc": "توقيع PDF",
|
||||
"pdfLabel": "مستند PDF",
|
||||
"signatureLabel": "صورة التوقيع",
|
||||
"pageLabel": "رقم الصفحة"
|
||||
},
|
||||
"cropPdf": {
|
||||
"title": "قص PDF",
|
||||
"description": "قص صفحات PDF عبر تعديل الهوامش. أزل المساحات البيضاء غير المرغوبة.",
|
||||
"shortDesc": "قص PDF",
|
||||
"marginsLabel": "الهوامش المراد قصها (نقاط)",
|
||||
"top": "أعلى",
|
||||
"bottom": "أسفل",
|
||||
"left": "يسار",
|
||||
"right": "يمين"
|
||||
},
|
||||
"flattenPdf": {
|
||||
"title": "تسطيح PDF",
|
||||
"description": "سطّح نماذج PDF والتعليقات التوضيحية. حوّل الحقول التفاعلية إلى محتوى ثابت.",
|
||||
"shortDesc": "تسطيح PDF"
|
||||
},
|
||||
"repairPdf": {
|
||||
"title": "إصلاح PDF",
|
||||
"description": "أصلح ملفات PDF التالفة أو المعطوبة. استعد المحتوى القابل للقراءة.",
|
||||
"shortDesc": "إصلاح PDF"
|
||||
},
|
||||
"pdfMetadata": {
|
||||
"title": "محرر بيانات PDF الوصفية",
|
||||
"description": "عرض وتحرير البيانات الوصفية لملفات PDF بما في ذلك العنوان والمؤلف والموضوع.",
|
||||
"shortDesc": "تحرير البيانات الوصفية",
|
||||
"titleField": "العنوان",
|
||||
"titlePlaceholder": "عنوان المستند",
|
||||
"author": "المؤلف",
|
||||
"authorPlaceholder": "اسم المؤلف",
|
||||
"subject": "الموضوع",
|
||||
"subjectPlaceholder": "موضوع المستند",
|
||||
"keywords": "الكلمات المفتاحية",
|
||||
"keywordsPlaceholder": "كلمة1، كلمة2، ...",
|
||||
"creator": "المنشئ",
|
||||
"creatorPlaceholder": "اسم التطبيق"
|
||||
},
|
||||
"imageCrop": {
|
||||
"title": "قص الصورة",
|
||||
"description": "قص الصور عبر تحديد إحداثيات البكسل.",
|
||||
"shortDesc": "قص الصورة",
|
||||
"coordsLabel": "إحداثيات القص (بكسل)",
|
||||
"left": "يسار",
|
||||
"top": "أعلى",
|
||||
"right": "يمين",
|
||||
"bottom": "أسفل"
|
||||
},
|
||||
"imageRotateFlip": {
|
||||
"title": "تدوير وقلب الصورة",
|
||||
"description": "دوّر الصور 90° أو 180° أو 270° واقلبها أفقياً أو عمودياً.",
|
||||
"shortDesc": "تحويل الصورة",
|
||||
"rotationLabel": "التدوير",
|
||||
"flipHorizontal": "قلب أفقي",
|
||||
"flipVertical": "قلب عمودي"
|
||||
},
|
||||
"barcode": {
|
||||
"title": "مولد الباركود",
|
||||
"description": "أنشئ باركود بتنسيقات متعددة: Code128، EAN-13، UPC-A، ISBN والمزيد.",
|
||||
"shortDesc": "إنشاء باركود",
|
||||
"dataLabel": "بيانات الباركود",
|
||||
"dataPlaceholder": "أدخل البيانات للترميز...",
|
||||
"typeLabel": "نوع الباركود",
|
||||
"formatLabel": "تنسيق الإخراج"
|
||||
}
|
||||
},
|
||||
"account": {
|
||||
@@ -805,6 +954,24 @@
|
||||
"historySubtitle": "ستظهر هنا تلقائيًا كل المهام الناجحة أو الفاشلة المرتبطة بحسابك.",
|
||||
"historyLoading": "جارٍ تحميل النشاط الأخير...",
|
||||
"historyEmpty": "لا يوجد سجل ملفات بعد. عالج أي ملف أثناء تسجيل الدخول وسيظهر هنا.",
|
||||
"dashboardTitle": "نظرة عامة على مساحة العمل",
|
||||
"dashboardSubtitle": "عرض سريع لأنماط المعالجة الحديثة، والإخفاقات، وخطوات الإعداد التالية.",
|
||||
"metricProcessed": "الملفات المعالجة",
|
||||
"metricSuccessRate": "معدل النجاح",
|
||||
"metricFavoriteTool": "الأداة الأكثر استخدامًا",
|
||||
"metricFavoriteToolEmpty": "لا يوجد نشاط بعد",
|
||||
"metricFailures": "المهام الفاشلة",
|
||||
"topToolsTitle": "أكثر الأدوات استخدامًا",
|
||||
"issuesTitle": "المشكلات الأخيرة",
|
||||
"issuesEmpty": "لا توجد إخفاقات حديثة. آخر عملياتك اكتملت بنجاح.",
|
||||
"onboardingTitle": "الخطوات التالية",
|
||||
"onboardingSubtitle": "استخدم هذه القائمة لتحويل الحساب الجديد إلى سير عمل متكرر.",
|
||||
"onboardingFirstTaskTitle": "أكمل أول مهمة بعد تسجيل الدخول",
|
||||
"onboardingFirstTaskDesc": "عالج ملفًا واحدًا على الأقل أثناء تسجيل الدخول حتى يبدأ سجل مساحة العمل والقياسات بالامتلاء.",
|
||||
"onboardingUpgradeTitle": "حدّد إن كنت تحتاج حدود Pro",
|
||||
"onboardingUpgradeDesc": "قم بالترقية عندما تحتاج حصصًا أعلى أو وصول API أو سير عمل أنظف للفريق.",
|
||||
"onboardingApiTitle": "أنشئ مفتاح API",
|
||||
"onboardingApiDesc": "يمكن لمستخدمي Pro إنشاء مفاتيح API لربط معالجة المستندات بالأدوات الداخلية أو تدفقات العملاء.",
|
||||
"downloadResult": "تحميل النتيجة",
|
||||
"createdAt": "تاريخ الإنشاء",
|
||||
"originalFile": "الملف الأصلي",
|
||||
@@ -1188,6 +1355,106 @@
|
||||
{"q": "هل يمكنني تحويل النص إلى أحرف كبيرة؟", "a": "نعم، يمكنك التحويل إلى أحرف كبيرة أو صغيرة أو حالة العنوان أو حالة الجملة بنقرة واحدة."},
|
||||
{"q": "هل يدعم النص العربي؟", "a": "نعم، يتضمن خياراً خاصاً لإزالة التشكيل العربي (الحركات) من النص."}
|
||||
]
|
||||
},
|
||||
"pdfToPptx": {
|
||||
"whatItDoes": "حوّل ملفات PDF إلى عروض تقديمية باوربوينت. كل صفحة تُعرض كصورة عالية الجودة على شريحة منفصلة.",
|
||||
"howToUse": ["ارفع ملف PDF.", "انتظر حتى يكتمل التحويل.", "حمّل عرض PPTX التقديمي."],
|
||||
"benefits": ["كل صفحة تصبح شريحة", "عرض صور عالية الجودة", "لا حاجة لبرامج", "سريع ومجاني"],
|
||||
"useCases": ["تحويل التقارير إلى شرائح عرض تقديمي", "إعادة استخدام محتوى PDF للاجتماعات"],
|
||||
"faq": [
|
||||
{"q": "كيف يعمل تحويل PDF إلى باوربوينت؟", "a": "يتم عرض كل صفحة PDF كصورة عالية الجودة ووضعها على شريحة باوربوينت خاصة بها."}
|
||||
]
|
||||
},
|
||||
"excelToPdf": {
|
||||
"whatItDoes": "حوّل جداول بيانات إكسل إلى مستندات PDF مع الحفاظ على تنسيق الجداول والخطوط.",
|
||||
"howToUse": ["ارفع ملف XLSX أو XLS.", "انتظر التحويل.", "حمّل PDF."],
|
||||
"benefits": ["يدعم XLSX و XLS", "يحافظ على تنسيق الجداول", "دقة عالية", "مجاني وفوري"],
|
||||
"useCases": ["مشاركة الجداول كملفات PDF غير قابلة للتعديل", "إنشاء نسخ جاهزة للطباعة"],
|
||||
"faq": [
|
||||
{"q": "ما تنسيقات إكسل المدعومة؟", "a": "يدعم تنسيقي XLSX الحديث و XLS القديم."}
|
||||
]
|
||||
},
|
||||
"pptxToPdf": {
|
||||
"whatItDoes": "حوّل عروض باوربوينت التقديمية إلى PDF للمشاركة والطباعة بسهولة.",
|
||||
"howToUse": ["ارفع ملف PPTX أو PPT.", "انتظر التحويل.", "حمّل PDF."],
|
||||
"benefits": ["يدعم PPTX و PPT", "يحافظ على تخطيط الشرائح", "ممتاز للمشاركة", "لا حاجة لحساب"],
|
||||
"useCases": ["مشاركة العروض التقديمية كملفات PDF", "إنشاء عروض جاهزة للطباعة"],
|
||||
"faq": [
|
||||
{"q": "هل تُحفظ الانتقالات؟", "a": "PDF تنسيق ثابت، لذا لا تُضمّن الانتقالات والرسوم المتحركة، لكن كل المحتوى المرئي يُحفظ."}
|
||||
]
|
||||
},
|
||||
"signPdf": {
|
||||
"whatItDoes": "أضف صورة توقيعك إلى مستندات PDF بدون طباعة.",
|
||||
"howToUse": ["ارفع مستند PDF.", "ارفع صورة التوقيع (PNG أو JPG).", "اختر رقم الصفحة.", "حمّل PDF الموقّع."],
|
||||
"benefits": ["لا طباعة أو مسح ضوئي", "يدعم PNG و JPG", "ضع التوقيع على أي صفحة", "معالجة آمنة"],
|
||||
"useCases": ["توقيع العقود إلكترونياً", "إضافة توقيعات الموافقة إلى المستندات"],
|
||||
"faq": [
|
||||
{"q": "هل هذا توقيع إلكتروني قانوني؟", "a": "يضع صورة توقيع مرئية. للتوقيعات الرقمية الملزمة قانونياً، قد تكون هناك حاجة لحلول قائمة على الشهادات."}
|
||||
]
|
||||
},
|
||||
"cropPdf": {
|
||||
"whatItDoes": "قص صفحات PDF بتعديل الهوامش. أزل المساحات البيضاء غير المرغوبة.",
|
||||
"howToUse": ["ارفع ملف PDF.", "حدد قيم الهوامش.", "حمّل PDF المقصوص."],
|
||||
"benefits": ["تحكم دقيق بالهوامش", "قص جميع الصفحات", "إزالة المساحات البيضاء", "مجاني وسريع"],
|
||||
"useCases": ["إزالة الهوامش العريضة", "تعديل أحجام الصفحات"],
|
||||
"faq": [
|
||||
{"q": "ما وحدات الهوامش؟", "a": "الهوامش بنقاط PDF (نقطة واحدة = 1/72 بوصة)."}
|
||||
]
|
||||
},
|
||||
"flattenPdf": {
|
||||
"whatItDoes": "سطّح نماذج وتعليقات PDF، وحوّل العناصر التفاعلية إلى محتوى ثابت.",
|
||||
"howToUse": ["ارفع PDF مع نماذج أو تعليقات.", "الأداة تسطّح جميع العناصر التفاعلية.", "حمّل PDF المسطّح."],
|
||||
"benefits": ["إزالة تفاعلية حقول النماذج", "تسطيح التعليقات", "منع التحرير", "مثالي للأرشفة"],
|
||||
"useCases": ["أرشفة النماذج المعبأة", "إرسال النماذج المكتملة بأمان"],
|
||||
"faq": [
|
||||
{"q": "ماذا يعني التسطيح؟", "a": "يحوّل العناصر التفاعلية مثل حقول النماذج والتعليقات إلى محتوى دائم غير قابل للتحرير."}
|
||||
]
|
||||
},
|
||||
"repairPdf": {
|
||||
"whatItDoes": "أصلح ملفات PDF التالفة بإعادة كتابة بنية المستند واستعادة الصفحات المقروءة.",
|
||||
"howToUse": ["ارفع PDF التالف.", "الأداة تحاول إصلاح المستند.", "حمّل PDF المُصلح."],
|
||||
"benefits": ["إصلاح بنى PDF المعطوبة", "استعادة المحتوى", "إخراج نظيف", "مجاني"],
|
||||
"useCases": ["استعادة ملفات PDF التالفة", "إصلاح ملفات لا تفتح"],
|
||||
"faq": [
|
||||
{"q": "هل يمكنه إصلاح أي PDF معطوب؟", "a": "الأداة تستعيد أكبر عدد ممكن من الصفحات. الملفات شديدة التلف قد تكون قابلة للاستعادة جزئياً فقط."}
|
||||
]
|
||||
},
|
||||
"pdfMetadata": {
|
||||
"whatItDoes": "عرض وتحرير خصائص مستند PDF بما في ذلك العنوان والمؤلف والموضوع والكلمات المفتاحية.",
|
||||
"howToUse": ["ارفع ملف PDF.", "املأ حقول البيانات الوصفية.", "حمّل PDF المحدّث."],
|
||||
"benefits": ["تحرير العنوان والمؤلف والمزيد", "تحسين قابلية البحث", "تنظيف خصائص المستند", "لا حاجة لبرامج"],
|
||||
"useCases": ["تعيين عناوين مناسبة لإدارة المستندات", "إضافة معلومات المؤلف"],
|
||||
"faq": [
|
||||
{"q": "ما هي البيانات الوصفية لـ PDF؟", "a": "تشمل خصائص المستند مثل العنوان والمؤلف والكلمات المفتاحية التي تساعد في تنظيم وبحث ملفات PDF."}
|
||||
]
|
||||
},
|
||||
"imageCrop": {
|
||||
"whatItDoes": "قص الصور بتحديد إحداثيات البكسل الدقيقة. يدعم PNG و JPG و WebP.",
|
||||
"howToUse": ["ارفع صورتك.", "أدخل إحداثيات القص.", "حمّل الصورة المقصوصة."],
|
||||
"benefits": ["قص دقيق على مستوى البكسل", "يدعم PNG و JPG و WebP", "مخرجات عالية الجودة", "بدون علامات مائية"],
|
||||
"useCases": ["قص صور المنتجات", "إزالة الحواف غير المرغوبة"],
|
||||
"faq": [
|
||||
{"q": "كيف تعمل الإحداثيات؟", "a": "اليسار والأعلى يحددان نقطة البداية، واليمين والأسفل يحددان نقطة النهاية لمنطقة القص بالبكسل."}
|
||||
]
|
||||
},
|
||||
"imageRotateFlip": {
|
||||
"whatItDoes": "دوّر الصور 90° أو 180° أو 270° واقلبها أفقياً أو عمودياً. ادمج العمليات في خطوة واحدة.",
|
||||
"howToUse": ["ارفع صورتك.", "اختر زاوية التدوير وخيارات القلب.", "حمّل الصورة المحوّلة."],
|
||||
"benefits": ["تدوير 90° أو 180° أو 270°", "قلب أفقي أو عمودي", "دمج العمليات", "يدعم عدة تنسيقات"],
|
||||
"useCases": ["تصحيح اتجاه الصور", "إنشاء صور معكوسة"],
|
||||
"faq": [
|
||||
{"q": "هل يمكنني دمج التدوير والقلب؟", "a": "نعم، يتم تطبيق التدوير والقلب معاً في عملية واحدة."}
|
||||
]
|
||||
},
|
||||
"barcode": {
|
||||
"whatItDoes": "أنشئ باركود بتنسيقات متعددة بما في ذلك Code128 و Code39 و EAN-13 و UPC-A و ISBN والمزيد. حمّل كـ PNG أو SVG.",
|
||||
"howToUse": ["أدخل البيانات للترميز.", "اختر نوع الباركود.", "اختر تنسيق الإخراج (PNG أو SVG).", "أنشئ وحمّل الباركود."],
|
||||
"benefits": ["تنسيقات باركود متعددة", "إخراج PNG و SVG", "إنشاء فوري", "مجاني وبلا حدود"],
|
||||
"useCases": ["إنشاء باركود المنتجات", "إنشاء ملصقات المخزون", "إنتاج باركود ISBN للكتب"],
|
||||
"faq": [
|
||||
{"q": "ما تنسيقات الباركود المتاحة؟", "a": "Code128, Code39, EAN-13, EAN-8, UPC-A, ISBN-13, ISBN-10, ISSN, و PZN."},
|
||||
{"q": "ما الفرق عن رمز QR؟", "a": "الباركود خطي (أحادي البعد) بسعة بيانات أقل. رموز QR ثنائية الأبعاد وتخزن معلومات أكثر."}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,8 @@
|
||||
"privacy": "Privacy Policy",
|
||||
"terms": "Terms of Service",
|
||||
"language": "Language",
|
||||
"search": "Search",
|
||||
"clear": "Clear",
|
||||
"allTools": "All Tools",
|
||||
"account": "Account",
|
||||
"signIn": "Sign In",
|
||||
@@ -27,6 +29,7 @@
|
||||
"contact": "Contact",
|
||||
"pricing": "Pricing",
|
||||
"blog": "Blog",
|
||||
"developers": "Developers",
|
||||
"send": "Send",
|
||||
"subject": "Subject",
|
||||
"message": "Message",
|
||||
@@ -121,6 +124,8 @@
|
||||
"editNowTooltip": "Open quick PDF optimization for a cleaner downloadable copy",
|
||||
"suggestedTools": "Suggested Tools for Your File",
|
||||
"suggestedToolsDesc": "After uploading, we automatically show compatible tools: text editing, highlighting, merge/split, compress, convert to Word/image, video to GIF, and more.",
|
||||
"searchToolsPlaceholder": "Search tools by name or use case",
|
||||
"noSearchResults": "No tools matched your search yet. Try a broader term like PDF, image, OCR, or compress.",
|
||||
"selectTool": "Choose a Tool",
|
||||
"fileDetected": "We detected a {{type}} file",
|
||||
"unsupportedFile": "This file type is not supported. Try PDF, Word, images, or video.",
|
||||
@@ -137,6 +142,17 @@
|
||||
"feature3Title": "Built-in security",
|
||||
"feature3Desc": "Access files securely, protected by automatic encryption."
|
||||
},
|
||||
"socialProof": {
|
||||
"badge": "Trusted by active teams",
|
||||
"title": "Live product activity from real processing and ratings",
|
||||
"subtitle": "These numbers come directly from completed tasks and tool feedback across the platform.",
|
||||
"processedFiles": "Files processed",
|
||||
"successRate": "Success rate",
|
||||
"last24h": "Last 24 hours",
|
||||
"averageRating": "Average rating",
|
||||
"basedOnRatings": "Based on {{count}} submitted ratings",
|
||||
"viewDevelopers": "View developer docs"
|
||||
},
|
||||
"pages": {
|
||||
"about": {
|
||||
"metaDescription": "Learn about SaaS-PDF — free, fast, and secure online file tools for PDFs, images, video, and text. No registration required.",
|
||||
@@ -311,14 +327,62 @@
|
||||
"q": "What payment methods do you accept?",
|
||||
"a": "We accept all major credit/debit cards and PayPal. All payments are securely processed via Stripe."
|
||||
}
|
||||
]
|
||||
],
|
||||
"trustTitle": "Built for teams that need speed and predictability",
|
||||
"trustSubtitle": "The same platform powers quick browser workflows, recurring account usage, and API-driven document pipelines.",
|
||||
"trustFastTitle": "Fast processing",
|
||||
"trustFastDesc": "Async task handling and optimized workers keep heavy jobs moving without blocking the interface.",
|
||||
"trustPrivateTitle": "Private by default",
|
||||
"trustPrivateDesc": "Uploads are validated, processed securely, and cleaned up automatically after the retention window.",
|
||||
"trustApiTitle": "Ready for integration",
|
||||
"trustApiDesc": "Pro workspaces can generate API keys and connect the same tools to internal automations and client flows."
|
||||
},
|
||||
"developers": {
|
||||
"metaDescription": "Explore the SaaS-PDF developer portal, async API flow, and production-ready endpoints for document automation.",
|
||||
"badge": "Developer Portal",
|
||||
"title": "Build document workflows on top of the SaaS-PDF API",
|
||||
"subtitle": "Use the same conversion, compression, OCR, and AI endpoints behind the web app inside your own systems.",
|
||||
"getApiKey": "Get an API key",
|
||||
"comparePlans": "Compare plans",
|
||||
"openDocs": "Open developer docs",
|
||||
"ctaTitle": "Need API access for repeated document jobs?",
|
||||
"ctaSubtitle": "Move from one-off browser usage to account-based workflows with async endpoints and API keys.",
|
||||
"authExampleTitle": "Example: upload a file",
|
||||
"authExampleSubtitle": "Send a file to a v1 endpoint with your API key and receive a task identifier for async processing.",
|
||||
"pollExampleTitle": "Example: poll task status",
|
||||
"pollExampleSubtitle": "Check the task endpoint until processing finishes, then use the returned download URL.",
|
||||
"endpointsTitle": "Current endpoint groups",
|
||||
"endpointsSubtitle": "These groups reflect real API routes currently available in the backend.",
|
||||
"groupConvert": "Conversions",
|
||||
"groupPdf": "PDF operations",
|
||||
"groupAi": "AI and extraction",
|
||||
"steps": {
|
||||
"createKey": {
|
||||
"title": "Create your key",
|
||||
"description": "Generate a Pro API key from your account workspace so your automation can authenticate securely."
|
||||
},
|
||||
"sendFile": {
|
||||
"title": "Send a file",
|
||||
"description": "Post the source file to the target endpoint and store the returned task ID in your system."
|
||||
},
|
||||
"pollStatus": {
|
||||
"title": "Poll and download",
|
||||
"description": "Track task completion through the status endpoint and download the processed file when ready."
|
||||
}
|
||||
}
|
||||
},
|
||||
"blog": {
|
||||
"metaTitle": "Blog — Tips, Tutorials & Updates",
|
||||
"metaDescription": "Learn how to compress, convert, edit, and manage PDF files with our expert guides and tutorials.",
|
||||
"title": "Blog",
|
||||
"subtitle": "Tips, tutorials, and product updates to help you work smarter.",
|
||||
"searchPlaceholder": "Search articles by topic, tool, or workflow",
|
||||
"readMore": "Read more",
|
||||
"backToBlog": "Back to blog",
|
||||
"readTime": "{{count}} min read",
|
||||
"keyTakeaways": "Key takeaways",
|
||||
"featuredTools": "Tools mentioned in this guide",
|
||||
"noResults": "No articles matched your search. Try PDF, OCR, AI, image, or convert.",
|
||||
"comingSoon": "More articles coming soon — stay tuned!",
|
||||
"posts": {
|
||||
"compressPdf": {
|
||||
@@ -758,6 +822,91 @@
|
||||
"processingFailed": "Failed to extract tables. Please try a different PDF.",
|
||||
"invalidFile": "Invalid or corrupted PDF file. Please upload a valid PDF."
|
||||
}
|
||||
},
|
||||
"pdfToPptx": {
|
||||
"title": "PDF to PowerPoint",
|
||||
"description": "Convert PDF files to PowerPoint (PPTX) presentations. Each page becomes a slide.",
|
||||
"shortDesc": "Convert to PPTX"
|
||||
},
|
||||
"excelToPdf": {
|
||||
"title": "Excel to PDF",
|
||||
"description": "Convert Excel spreadsheets (XLSX, XLS) to PDF documents online.",
|
||||
"shortDesc": "Convert to PDF"
|
||||
},
|
||||
"pptxToPdf": {
|
||||
"title": "PowerPoint to PDF",
|
||||
"description": "Convert PowerPoint presentations (PPTX, PPT) to PDF format.",
|
||||
"shortDesc": "Convert to PDF"
|
||||
},
|
||||
"signPdf": {
|
||||
"title": "Sign PDF",
|
||||
"description": "Add your signature image to any PDF document. Sign PDFs without printing.",
|
||||
"shortDesc": "Sign PDF",
|
||||
"pdfLabel": "PDF Document",
|
||||
"signatureLabel": "Signature Image",
|
||||
"pageLabel": "Page Number"
|
||||
},
|
||||
"cropPdf": {
|
||||
"title": "Crop PDF",
|
||||
"description": "Crop PDF pages by adjusting margins. Remove unwanted whitespace.",
|
||||
"shortDesc": "Crop PDF",
|
||||
"marginsLabel": "Margins to trim (points)",
|
||||
"top": "Top",
|
||||
"bottom": "Bottom",
|
||||
"left": "Left",
|
||||
"right": "Right"
|
||||
},
|
||||
"flattenPdf": {
|
||||
"title": "Flatten PDF",
|
||||
"description": "Flatten PDF forms and annotations. Convert interactive fields to fixed content.",
|
||||
"shortDesc": "Flatten PDF"
|
||||
},
|
||||
"repairPdf": {
|
||||
"title": "Repair PDF",
|
||||
"description": "Repair corrupted or damaged PDF files. Fix broken documents and recover content.",
|
||||
"shortDesc": "Repair PDF"
|
||||
},
|
||||
"pdfMetadata": {
|
||||
"title": "PDF Metadata Editor",
|
||||
"description": "View and edit PDF metadata including title, author, subject, and keywords.",
|
||||
"shortDesc": "Edit Metadata",
|
||||
"titleField": "Title",
|
||||
"titlePlaceholder": "Document title",
|
||||
"author": "Author",
|
||||
"authorPlaceholder": "Author name",
|
||||
"subject": "Subject",
|
||||
"subjectPlaceholder": "Document subject",
|
||||
"keywords": "Keywords",
|
||||
"keywordsPlaceholder": "keyword1, keyword2, ...",
|
||||
"creator": "Creator",
|
||||
"creatorPlaceholder": "Application name"
|
||||
},
|
||||
"imageCrop": {
|
||||
"title": "Crop Image",
|
||||
"description": "Crop images online by specifying exact pixel coordinates.",
|
||||
"shortDesc": "Crop Image",
|
||||
"coordsLabel": "Crop coordinates (pixels)",
|
||||
"left": "Left",
|
||||
"top": "Top",
|
||||
"right": "Right",
|
||||
"bottom": "Bottom"
|
||||
},
|
||||
"imageRotateFlip": {
|
||||
"title": "Rotate & Flip Image",
|
||||
"description": "Rotate images by 90°, 180°, or 270° and flip horizontally or vertically.",
|
||||
"shortDesc": "Transform Image",
|
||||
"rotationLabel": "Rotation",
|
||||
"flipHorizontal": "Flip Horizontal",
|
||||
"flipVertical": "Flip Vertical"
|
||||
},
|
||||
"barcode": {
|
||||
"title": "Barcode Generator",
|
||||
"description": "Generate barcodes in various formats: Code128, EAN-13, UPC-A, ISBN, and more.",
|
||||
"shortDesc": "Generate Barcode",
|
||||
"dataLabel": "Barcode Data",
|
||||
"dataPlaceholder": "Enter data to encode...",
|
||||
"typeLabel": "Barcode Type",
|
||||
"formatLabel": "Output Format"
|
||||
}
|
||||
},
|
||||
"account": {
|
||||
@@ -805,6 +954,24 @@
|
||||
"historySubtitle": "Completed and failed tasks tied to your account appear here automatically.",
|
||||
"historyLoading": "Loading recent activity...",
|
||||
"historyEmpty": "No file history yet. Process a file while signed in and it will appear here.",
|
||||
"dashboardTitle": "Workspace overview",
|
||||
"dashboardSubtitle": "A quick view of your recent processing patterns, failures, and next setup steps.",
|
||||
"metricProcessed": "Processed files",
|
||||
"metricSuccessRate": "Success rate",
|
||||
"metricFavoriteTool": "Most used tool",
|
||||
"metricFavoriteToolEmpty": "No activity yet",
|
||||
"metricFailures": "Failed tasks",
|
||||
"topToolsTitle": "Top tools",
|
||||
"issuesTitle": "Recent issues",
|
||||
"issuesEmpty": "No recent failures. Your latest runs completed successfully.",
|
||||
"onboardingTitle": "Next steps",
|
||||
"onboardingSubtitle": "Use this checklist to turn a new account into a repeatable workflow.",
|
||||
"onboardingFirstTaskTitle": "Complete your first signed-in task",
|
||||
"onboardingFirstTaskDesc": "Process at least one file while logged in so your workspace history and metrics start filling in.",
|
||||
"onboardingUpgradeTitle": "Decide if you need Pro limits",
|
||||
"onboardingUpgradeDesc": "Upgrade when you need higher quotas, API access, or a cleaner team workflow.",
|
||||
"onboardingApiTitle": "Create an API key",
|
||||
"onboardingApiDesc": "Pro users can generate API keys to connect document processing to internal tools or customer flows.",
|
||||
"downloadResult": "Download Result",
|
||||
"createdAt": "Created",
|
||||
"originalFile": "Original file",
|
||||
@@ -1188,6 +1355,116 @@
|
||||
{"q": "Can I convert text to uppercase?", "a": "Yes, you can convert to uppercase, lowercase, title case, or sentence case with a single click."},
|
||||
{"q": "Does it support Arabic text?", "a": "Yes, it includes a special option to remove Arabic diacritics (tashkeel/harakat) from text."}
|
||||
]
|
||||
},
|
||||
"pdfToPptx": {
|
||||
"whatItDoes": "Convert PDF files to PowerPoint presentations. Each page is rendered as a high-quality slide image, preserving the visual layout perfectly.",
|
||||
"howToUse": ["Upload your PDF file.", "Wait for the conversion to process.", "Download your PPTX presentation."],
|
||||
"benefits": ["Each PDF page becomes a slide", "High-quality image rendering", "No software required", "Fast and free"],
|
||||
"useCases": ["Turning reports into presentation slides", "Repurposing PDF content for meetings", "Creating editable presentations from static documents"],
|
||||
"faq": [
|
||||
{"q": "How does PDF to PowerPoint conversion work?", "a": "Each PDF page is rendered as a high-quality image and placed on its own PowerPoint slide."},
|
||||
{"q": "Can I edit the text in the resulting slides?", "a": "The slides contain page images. For text editing, convert to Word first."}
|
||||
]
|
||||
},
|
||||
"excelToPdf": {
|
||||
"whatItDoes": "Convert Excel spreadsheets to PDF documents while preserving table formatting, fonts, and layout.",
|
||||
"howToUse": ["Upload your XLSX or XLS file.", "Wait for the conversion.", "Download the PDF."],
|
||||
"benefits": ["Supports XLSX and XLS", "Preserves table formatting", "Uses LibreOffice for accuracy", "Free and instant"],
|
||||
"useCases": ["Sharing spreadsheets as non-editable PDFs", "Creating printable versions of reports", "Archiving financial documents"],
|
||||
"faq": [
|
||||
{"q": "Which Excel formats are supported?", "a": "Both modern XLSX and legacy XLS formats are fully supported."},
|
||||
{"q": "Are charts preserved?", "a": "Yes, charts and formatting are preserved in the PDF output."}
|
||||
]
|
||||
},
|
||||
"pptxToPdf": {
|
||||
"whatItDoes": "Convert PowerPoint presentations to PDF format for easy sharing and printing. Preserves slide layout and graphics.",
|
||||
"howToUse": ["Upload your PPTX or PPT file.", "Wait for the conversion to complete.", "Download the PDF."],
|
||||
"benefits": ["Supports PPTX and PPT", "Preserves slide layout", "Great for sharing", "No account needed"],
|
||||
"useCases": ["Sharing presentations as PDFs", "Creating print-ready slide decks", "Archiving presentations"],
|
||||
"faq": [
|
||||
{"q": "Are slide transitions preserved?", "a": "PDF is static, so transitions and animations are not included, but all visual content is preserved."},
|
||||
{"q": "Can I convert PPT files too?", "a": "Yes, both PPTX and legacy PPT formats are supported."}
|
||||
]
|
||||
},
|
||||
"signPdf": {
|
||||
"whatItDoes": "Add your signature image to PDF documents without printing. Upload a signature image and place it on any page.",
|
||||
"howToUse": ["Upload your PDF document.", "Upload your signature image (PNG or JPG).", "Choose the page number.", "Download the signed PDF."],
|
||||
"benefits": ["No printing or scanning needed", "Supports PNG and JPG signatures", "Place on any page", "Secure processing"],
|
||||
"useCases": ["Signing contracts electronically", "Adding approval signatures to documents", "Signing forms without printing"],
|
||||
"faq": [
|
||||
{"q": "Is this a legal electronic signature?", "a": "This places a visual signature image. For legally binding digital signatures, certificate-based solutions may be required."},
|
||||
{"q": "What image formats work for signatures?", "a": "PNG (recommended for transparency) and JPG formats are supported."}
|
||||
]
|
||||
},
|
||||
"cropPdf": {
|
||||
"whatItDoes": "Crop PDF pages by adjusting margins. Remove unwanted whitespace and trim documents to the desired size.",
|
||||
"howToUse": ["Upload your PDF.", "Set margin values for each side.", "Download the cropped PDF."],
|
||||
"benefits": ["Precise margin control", "Trim all pages at once", "Remove unwanted whitespace", "Free and fast"],
|
||||
"useCases": ["Removing wide margins from scanned documents", "Trimming PDFs for better screen reading", "Adjusting page sizes for printing"],
|
||||
"faq": [
|
||||
{"q": "What units are the margins in?", "a": "Margins are specified in PDF points (1 point = 1/72 inch)."},
|
||||
{"q": "Can I crop specific pages only?", "a": "Currently all pages are cropped with the same margins."}
|
||||
]
|
||||
},
|
||||
"flattenPdf": {
|
||||
"whatItDoes": "Flatten PDF forms and annotations, converting interactive elements into static content that cannot be edited.",
|
||||
"howToUse": ["Upload your PDF with forms or annotations.", "The tool flattens all interactive elements.", "Download the flattened PDF."],
|
||||
"benefits": ["Remove form field interactivity", "Flatten annotations", "Prevent further editing", "Ideal for archiving"],
|
||||
"useCases": ["Archiving filled forms", "Sending completed forms securely", "Reducing file complexity"],
|
||||
"faq": [
|
||||
{"q": "What does flattening mean?", "a": "Flattening converts interactive elements like form fields and annotations into permanent, non-editable content."},
|
||||
{"q": "Can I undo flattening?", "a": "No, flattening is permanent. Keep a backup of the original file."}
|
||||
]
|
||||
},
|
||||
"repairPdf": {
|
||||
"whatItDoes": "Repair corrupted or damaged PDF files by re-writing the document structure and recovering readable pages.",
|
||||
"howToUse": ["Upload your corrupted PDF.", "The tool attempts to fix the document.", "Download the repaired PDF."],
|
||||
"benefits": ["Fix broken PDF structures", "Recover readable content", "Re-write clean output", "Free to use"],
|
||||
"useCases": ["Recovering damaged PDFs from old storage", "Fixing PDFs that won't open", "Repairing partially downloaded files"],
|
||||
"faq": [
|
||||
{"q": "Can it fix any broken PDF?", "a": "The tool recovers as many pages as possible. Severely corrupted files may only be partially recoverable."},
|
||||
{"q": "Will the repaired file look the same?", "a": "In most cases yes, though some complex elements may be simplified during repair."}
|
||||
]
|
||||
},
|
||||
"pdfMetadata": {
|
||||
"whatItDoes": "View and edit PDF document properties including title, author, subject, keywords, and creator information.",
|
||||
"howToUse": ["Upload your PDF.", "Fill in the metadata fields you want to change.", "Download the updated PDF."],
|
||||
"benefits": ["Edit title, author, and more", "Improve document searchability", "Clean up document properties", "No software needed"],
|
||||
"useCases": ["Setting proper titles for document management", "Adding author information", "Preparing PDFs for archival systems"],
|
||||
"faq": [
|
||||
{"q": "What is PDF metadata?", "a": "Metadata includes document properties like title, author, and keywords that help organize and search PDFs."},
|
||||
{"q": "Can I remove all metadata?", "a": "Yes, leave all fields blank to clear existing metadata."}
|
||||
]
|
||||
},
|
||||
"imageCrop": {
|
||||
"whatItDoes": "Crop images by specifying exact pixel coordinates. Supports PNG, JPG, and WebP formats.",
|
||||
"howToUse": ["Upload your image.", "Enter crop coordinates (left, top, right, bottom).", "Download the cropped image."],
|
||||
"benefits": ["Precise pixel-level cropping", "Supports PNG, JPG, WebP", "High-quality output", "No watermarks"],
|
||||
"useCases": ["Trimming product photos", "Removing unwanted borders", "Creating thumbnails"],
|
||||
"faq": [
|
||||
{"q": "How do coordinates work?", "a": "Left and top define the starting point, right and bottom define the ending point of the crop area in pixels."},
|
||||
{"q": "What formats are supported?", "a": "PNG, JPG/JPEG, and WebP are supported."}
|
||||
]
|
||||
},
|
||||
"imageRotateFlip": {
|
||||
"whatItDoes": "Rotate images by 90°, 180°, or 270° and flip them horizontally or vertically. Combine operations in one step.",
|
||||
"howToUse": ["Upload your image.", "Choose rotation angle and flip options.", "Download the transformed image."],
|
||||
"benefits": ["Rotate by 90°, 180°, or 270°", "Flip horizontal or vertical", "Combine operations", "Supports multiple formats"],
|
||||
"useCases": ["Fixing photo orientation", "Creating mirror images", "Adjusting scanned document rotation"],
|
||||
"faq": [
|
||||
{"q": "Can I combine rotation and flip?", "a": "Yes, rotation and flip are applied together in a single operation."},
|
||||
{"q": "Does rotation affect quality?", "a": "Rotation is lossless for PNG. JPG quality is preserved as closely as possible."}
|
||||
]
|
||||
},
|
||||
"barcode": {
|
||||
"whatItDoes": "Generate barcodes in various formats including Code128, Code39, EAN-13, UPC-A, ISBN, and more. Download as PNG or SVG.",
|
||||
"howToUse": ["Enter the data to encode.", "Select a barcode type.", "Choose output format (PNG or SVG).", "Generate and download your barcode."],
|
||||
"benefits": ["Multiple barcode formats", "PNG and SVG output", "Instant generation", "Free and unlimited"],
|
||||
"useCases": ["Generating product barcodes", "Creating inventory labels", "Producing ISBN barcodes for books"],
|
||||
"faq": [
|
||||
{"q": "What barcode formats are available?", "a": "Code128, Code39, EAN-13, EAN-8, UPC-A, ISBN-13, ISBN-10, ISSN, and PZN."},
|
||||
{"q": "What is the difference from a QR code?", "a": "Barcodes are linear (1D) with less data capacity. QR codes are 2D and store more information."}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,8 @@
|
||||
"privacy": "Politique de confidentialité",
|
||||
"terms": "Conditions d'utilisation",
|
||||
"language": "Langue",
|
||||
"search": "Rechercher",
|
||||
"clear": "Effacer",
|
||||
"allTools": "Tous les outils",
|
||||
"account": "Compte",
|
||||
"signIn": "Se connecter",
|
||||
@@ -27,6 +29,7 @@
|
||||
"contact": "Contact",
|
||||
"pricing": "Tarifs",
|
||||
"blog": "Blog",
|
||||
"developers": "Développeurs",
|
||||
"send": "Envoyer",
|
||||
"subject": "Sujet",
|
||||
"message": "Message",
|
||||
@@ -121,6 +124,8 @@
|
||||
"editNowTooltip": "Ouvrir l'optimiseur PDF rapide pour générer une copie propre téléchargeable",
|
||||
"suggestedTools": "Outils suggérés pour votre fichier",
|
||||
"suggestedToolsDesc": "Après le téléchargement, nous affichons automatiquement les outils compatibles : édition de texte, surlignage, fusion/division, compression, conversion en Word/image, vidéo en GIF, et plus.",
|
||||
"searchToolsPlaceholder": "Rechercher un outil par nom ou usage",
|
||||
"noSearchResults": "Aucun outil ne correspond encore à votre recherche. Essayez un terme plus large comme PDF, image, OCR ou compression.",
|
||||
"selectTool": "Choisir un outil",
|
||||
"fileDetected": "Nous avons détecté un fichier {{type}}",
|
||||
"unsupportedFile": "Ce type de fichier n'est pas pris en charge. Essayez PDF, Word, images ou vidéo.",
|
||||
@@ -137,6 +142,17 @@
|
||||
"feature3Title": "Sécurité intégrée",
|
||||
"feature3Desc": "Accédez aux fichiers en toute sécurité, protégés par un cryptage automatique."
|
||||
},
|
||||
"socialProof": {
|
||||
"badge": "Adopté par des équipes actives",
|
||||
"title": "Une activité produit réelle issue des traitements et des avis",
|
||||
"subtitle": "Ces chiffres proviennent directement des tâches terminées et des retours sur les outils dans toute la plateforme.",
|
||||
"processedFiles": "Fichiers traités",
|
||||
"successRate": "Taux de réussite",
|
||||
"last24h": "Dernières 24 h",
|
||||
"averageRating": "Note moyenne",
|
||||
"basedOnRatings": "Basé sur {{count}} avis envoyés",
|
||||
"viewDevelopers": "Voir la documentation développeur"
|
||||
},
|
||||
"pages": {
|
||||
"about": {
|
||||
"metaDescription": "Découvrez SaaS-PDF — outils en ligne gratuits, rapides et sécurisés pour les PDF, images, vidéos et textes. Aucune inscription requise.",
|
||||
@@ -311,14 +327,62 @@
|
||||
"q": "Quels moyens de paiement acceptez-vous ?",
|
||||
"a": "Nous acceptons toutes les cartes de crédit/débit principales et PayPal. Tous les paiements sont traités de manière sécurisée via Stripe."
|
||||
}
|
||||
]
|
||||
],
|
||||
"trustTitle": "Conçu pour les équipes qui ont besoin de vitesse et de prévisibilité",
|
||||
"trustSubtitle": "La même plateforme prend en charge les usages rapides dans le navigateur, les workflows récurrents liés au compte et les pipelines documentaires via API.",
|
||||
"trustFastTitle": "Traitement rapide",
|
||||
"trustFastDesc": "Les tâches asynchrones et les workers optimisés permettent aux traitements lourds d'avancer sans bloquer l'interface.",
|
||||
"trustPrivateTitle": "Privé par défaut",
|
||||
"trustPrivateDesc": "Les fichiers sont validés, traités de manière sécurisée puis supprimés automatiquement après la fenêtre de rétention.",
|
||||
"trustApiTitle": "Prêt pour l'intégration",
|
||||
"trustApiDesc": "Les espaces Pro peuvent générer des clés API et connecter les mêmes outils à des automatisations internes ou à des parcours clients."
|
||||
},
|
||||
"developers": {
|
||||
"metaDescription": "Explorez le portail développeur SaaS-PDF, le flux API asynchrone et les endpoints prêts pour l'automatisation documentaire.",
|
||||
"badge": "Portail développeur",
|
||||
"title": "Construisez des workflows documentaires sur l'API SaaS-PDF",
|
||||
"subtitle": "Utilisez dans vos propres systèmes les mêmes endpoints de conversion, compression, OCR et IA que ceux de l'application web.",
|
||||
"getApiKey": "Obtenir une clé API",
|
||||
"comparePlans": "Comparer les forfaits",
|
||||
"openDocs": "Ouvrir la documentation développeur",
|
||||
"ctaTitle": "Besoin d'un accès API pour des traitements documentaires récurrents ?",
|
||||
"ctaSubtitle": "Passez d'un usage ponctuel dans le navigateur à des workflows basés sur un compte avec endpoints asynchrones et clés API.",
|
||||
"authExampleTitle": "Exemple : envoyer un fichier",
|
||||
"authExampleSubtitle": "Envoyez un fichier à un endpoint v1 avec votre clé API et récupérez un identifiant de tâche pour le traitement asynchrone.",
|
||||
"pollExampleTitle": "Exemple : interroger l'état d'une tâche",
|
||||
"pollExampleSubtitle": "Vérifiez l'endpoint d'état jusqu'à la fin du traitement, puis utilisez l'URL de téléchargement renvoyée.",
|
||||
"endpointsTitle": "Groupes d'endpoints actuels",
|
||||
"endpointsSubtitle": "Ces groupes reflètent les routes API réellement disponibles aujourd'hui dans le backend.",
|
||||
"groupConvert": "Conversions",
|
||||
"groupPdf": "Opérations PDF",
|
||||
"groupAi": "IA et extraction",
|
||||
"steps": {
|
||||
"createKey": {
|
||||
"title": "Créez votre clé",
|
||||
"description": "Générez une clé API Pro depuis votre espace compte afin que votre automatisation puisse s'authentifier de façon sécurisée."
|
||||
},
|
||||
"sendFile": {
|
||||
"title": "Envoyez un fichier",
|
||||
"description": "Postez le fichier source vers l'endpoint cible et enregistrez l'identifiant de tâche retourné dans votre système."
|
||||
},
|
||||
"pollStatus": {
|
||||
"title": "Vérifiez puis téléchargez",
|
||||
"description": "Suivez la progression via l'endpoint d'état puis téléchargez le fichier traité dès qu'il est prêt."
|
||||
}
|
||||
}
|
||||
},
|
||||
"blog": {
|
||||
"metaTitle": "Blog — Conseils, tutoriels et mises à jour",
|
||||
"metaDescription": "Apprenez à compresser, convertir, éditer et gérer des fichiers PDF avec nos guides et tutoriels experts.",
|
||||
"title": "Blog",
|
||||
"subtitle": "Conseils, tutoriels et mises à jour produit pour vous aider à travailler plus intelligemment.",
|
||||
"searchPlaceholder": "Rechercher des articles par sujet, outil ou workflow",
|
||||
"readMore": "Lire la suite",
|
||||
"backToBlog": "Retour au blog",
|
||||
"readTime": "{{count}} min de lecture",
|
||||
"keyTakeaways": "À retenir",
|
||||
"featuredTools": "Outils mentionnés dans ce guide",
|
||||
"noResults": "Aucun article ne correspond à votre recherche. Essayez PDF, OCR, IA, image ou conversion.",
|
||||
"comingSoon": "D'autres articles arrivent bientôt — restez connecté !",
|
||||
"posts": {
|
||||
"compressPdf": {
|
||||
@@ -758,6 +822,91 @@
|
||||
"processingFailed": "Échec de l'extraction des tableaux. Veuillez essayer un autre PDF.",
|
||||
"invalidFile": "Fichier PDF invalide ou corrompu. Veuillez télécharger un PDF valide."
|
||||
}
|
||||
},
|
||||
"pdfToPptx": {
|
||||
"title": "PDF vers PowerPoint",
|
||||
"description": "Convertissez des fichiers PDF en présentations PowerPoint (PPTX). Chaque page devient une diapositive.",
|
||||
"shortDesc": "Convertir en PPTX"
|
||||
},
|
||||
"excelToPdf": {
|
||||
"title": "Excel vers PDF",
|
||||
"description": "Convertissez des feuilles de calcul Excel (XLSX, XLS) en documents PDF.",
|
||||
"shortDesc": "Convertir en PDF"
|
||||
},
|
||||
"pptxToPdf": {
|
||||
"title": "PowerPoint vers PDF",
|
||||
"description": "Convertissez des présentations PowerPoint (PPTX, PPT) au format PDF.",
|
||||
"shortDesc": "Convertir en PDF"
|
||||
},
|
||||
"signPdf": {
|
||||
"title": "Signer un PDF",
|
||||
"description": "Ajoutez votre image de signature à n'importe quel document PDF. Signez sans imprimer.",
|
||||
"shortDesc": "Signer le PDF",
|
||||
"pdfLabel": "Document PDF",
|
||||
"signatureLabel": "Image de signature",
|
||||
"pageLabel": "Numéro de page"
|
||||
},
|
||||
"cropPdf": {
|
||||
"title": "Rogner un PDF",
|
||||
"description": "Rognez les pages PDF en ajustant les marges. Supprimez les espaces blancs indésirables.",
|
||||
"shortDesc": "Rogner le PDF",
|
||||
"marginsLabel": "Marges à rogner (points)",
|
||||
"top": "Haut",
|
||||
"bottom": "Bas",
|
||||
"left": "Gauche",
|
||||
"right": "Droite"
|
||||
},
|
||||
"flattenPdf": {
|
||||
"title": "Aplatir un PDF",
|
||||
"description": "Aplatissez les formulaires et annotations PDF. Convertissez les champs interactifs en contenu fixe.",
|
||||
"shortDesc": "Aplatir le PDF"
|
||||
},
|
||||
"repairPdf": {
|
||||
"title": "Réparer un PDF",
|
||||
"description": "Réparez les fichiers PDF corrompus ou endommagés. Récupérez le contenu lisible.",
|
||||
"shortDesc": "Réparer le PDF"
|
||||
},
|
||||
"pdfMetadata": {
|
||||
"title": "Éditeur de métadonnées PDF",
|
||||
"description": "Affichez et modifiez les métadonnées PDF : titre, auteur, sujet et mots-clés.",
|
||||
"shortDesc": "Modifier les métadonnées",
|
||||
"titleField": "Titre",
|
||||
"titlePlaceholder": "Titre du document",
|
||||
"author": "Auteur",
|
||||
"authorPlaceholder": "Nom de l'auteur",
|
||||
"subject": "Sujet",
|
||||
"subjectPlaceholder": "Sujet du document",
|
||||
"keywords": "Mots-clés",
|
||||
"keywordsPlaceholder": "mot1, mot2, ...",
|
||||
"creator": "Créateur",
|
||||
"creatorPlaceholder": "Nom de l'application"
|
||||
},
|
||||
"imageCrop": {
|
||||
"title": "Rogner une image",
|
||||
"description": "Rognez des images en spécifiant les coordonnées exactes en pixels.",
|
||||
"shortDesc": "Rogner l'image",
|
||||
"coordsLabel": "Coordonnées de rognage (pixels)",
|
||||
"left": "Gauche",
|
||||
"top": "Haut",
|
||||
"right": "Droite",
|
||||
"bottom": "Bas"
|
||||
},
|
||||
"imageRotateFlip": {
|
||||
"title": "Rotation et retournement d'image",
|
||||
"description": "Faites pivoter les images de 90°, 180° ou 270° et retournez-les horizontalement ou verticalement.",
|
||||
"shortDesc": "Transformer l'image",
|
||||
"rotationLabel": "Rotation",
|
||||
"flipHorizontal": "Retournement horizontal",
|
||||
"flipVertical": "Retournement vertical"
|
||||
},
|
||||
"barcode": {
|
||||
"title": "Générateur de code-barres",
|
||||
"description": "Générez des codes-barres en plusieurs formats : Code128, EAN-13, UPC-A, ISBN et plus.",
|
||||
"shortDesc": "Générer un code-barres",
|
||||
"dataLabel": "Données du code-barres",
|
||||
"dataPlaceholder": "Entrez les données à encoder...",
|
||||
"typeLabel": "Type de code-barres",
|
||||
"formatLabel": "Format de sortie"
|
||||
}
|
||||
},
|
||||
"account": {
|
||||
@@ -805,6 +954,24 @@
|
||||
"historySubtitle": "Les tâches réussies et échouées liées à votre compte apparaissent ici automatiquement.",
|
||||
"historyLoading": "Chargement de l'activité récente...",
|
||||
"historyEmpty": "Aucun historique pour l'instant. Traitez un fichier en étant connecté et il apparaîtra ici.",
|
||||
"dashboardTitle": "Vue d'ensemble de l'espace",
|
||||
"dashboardSubtitle": "Un aperçu rapide de vos usages récents, des échecs et des prochaines étapes de configuration.",
|
||||
"metricProcessed": "Fichiers traités",
|
||||
"metricSuccessRate": "Taux de réussite",
|
||||
"metricFavoriteTool": "Outil le plus utilisé",
|
||||
"metricFavoriteToolEmpty": "Aucune activité pour le moment",
|
||||
"metricFailures": "Tâches en échec",
|
||||
"topToolsTitle": "Outils les plus utilisés",
|
||||
"issuesTitle": "Problèmes récents",
|
||||
"issuesEmpty": "Aucun échec récent. Vos derniers traitements se sont terminés avec succès.",
|
||||
"onboardingTitle": "Étapes suivantes",
|
||||
"onboardingSubtitle": "Utilisez cette checklist pour transformer un nouveau compte en workflow réutilisable.",
|
||||
"onboardingFirstTaskTitle": "Terminez votre première tâche connectée",
|
||||
"onboardingFirstTaskDesc": "Traitez au moins un fichier en étant connecté pour commencer à alimenter l'historique et les indicateurs de votre espace.",
|
||||
"onboardingUpgradeTitle": "Évaluez si vous avez besoin de Pro",
|
||||
"onboardingUpgradeDesc": "Passez à Pro si vous avez besoin de quotas plus élevés, d'un accès API ou d'un workflow d'équipe plus propre.",
|
||||
"onboardingApiTitle": "Créez une clé API",
|
||||
"onboardingApiDesc": "Les utilisateurs Pro peuvent générer des clés API pour relier le traitement documentaire à des outils internes ou à des parcours clients.",
|
||||
"downloadResult": "Télécharger le résultat",
|
||||
"createdAt": "Créé le",
|
||||
"originalFile": "Fichier source",
|
||||
@@ -1188,6 +1355,86 @@
|
||||
{"q": "Puis-je convertir le texte en majuscules ?", "a": "Oui, vous pouvez convertir en majuscules, minuscules, casse de titre ou casse de phrase en un clic."},
|
||||
{"q": "Est-ce que ça prend en charge le texte arabe ?", "a": "Oui, il inclut une option spéciale pour supprimer les diacritiques arabes (tachkil) du texte."}
|
||||
]
|
||||
},
|
||||
"pdfToPptx": {
|
||||
"whatItDoes": "Convertissez des fichiers PDF en présentations PowerPoint. Chaque page est rendue comme une image haute qualité sur une diapositive séparée.",
|
||||
"howToUse": ["Téléchargez votre fichier PDF.", "Attendez la fin de la conversion.", "Téléchargez votre présentation PPTX."],
|
||||
"benefits": ["Chaque page devient une diapositive", "Rendu d'images haute qualité", "Aucun logiciel requis", "Rapide et gratuit"],
|
||||
"useCases": ["Transformer des rapports en diapositives de présentation", "Réutiliser le contenu PDF pour des réunions"],
|
||||
"faq": [{"q": "Comment fonctionne la conversion PDF vers PowerPoint ?", "a": "Chaque page PDF est rendue comme image haute qualité et placée sur sa propre diapositive PowerPoint."}]
|
||||
},
|
||||
"excelToPdf": {
|
||||
"whatItDoes": "Convertissez des feuilles de calcul Excel en documents PDF en préservant le formatage des tableaux.",
|
||||
"howToUse": ["Téléchargez votre fichier XLSX ou XLS.", "Attendez la conversion.", "Téléchargez le PDF."],
|
||||
"benefits": ["Supporte XLSX et XLS", "Préserve le formatage", "Haute précision", "Gratuit et instantané"],
|
||||
"useCases": ["Partager des feuilles de calcul en PDF non modifiables", "Créer des versions imprimables"],
|
||||
"faq": [{"q": "Quels formats Excel sont supportés ?", "a": "Les formats XLSX moderne et XLS hérité sont entièrement supportés."}]
|
||||
},
|
||||
"pptxToPdf": {
|
||||
"whatItDoes": "Convertissez des présentations PowerPoint en format PDF pour un partage et une impression faciles.",
|
||||
"howToUse": ["Téléchargez votre fichier PPTX ou PPT.", "Attendez la conversion.", "Téléchargez le PDF."],
|
||||
"benefits": ["Supporte PPTX et PPT", "Préserve la mise en page des diapositives", "Idéal pour le partage", "Aucun compte nécessaire"],
|
||||
"useCases": ["Partager des présentations en PDF", "Créer des dossiers de diapositives prêts à imprimer"],
|
||||
"faq": [{"q": "Les transitions sont-elles préservées ?", "a": "Le PDF est un format statique, donc les transitions et animations ne sont pas incluses, mais tout le contenu visuel est préservé."}]
|
||||
},
|
||||
"signPdf": {
|
||||
"whatItDoes": "Ajoutez votre image de signature à des documents PDF sans imprimer.",
|
||||
"howToUse": ["Téléchargez votre document PDF.", "Téléchargez votre image de signature (PNG ou JPG).", "Choisissez le numéro de page.", "Téléchargez le PDF signé."],
|
||||
"benefits": ["Pas d'impression ni de numérisation", "Supporte PNG et JPG", "Placez sur n'importe quelle page", "Traitement sécurisé"],
|
||||
"useCases": ["Signer des contrats électroniquement", "Ajouter des signatures d'approbation"],
|
||||
"faq": [{"q": "Est-ce une signature électronique légale ?", "a": "Cela place une image de signature visuelle. Pour des signatures numériques juridiquement contraignantes, des solutions basées sur des certificats peuvent être nécessaires."}]
|
||||
},
|
||||
"cropPdf": {
|
||||
"whatItDoes": "Rognez les pages PDF en ajustant les marges. Supprimez les espaces blancs indésirables.",
|
||||
"howToUse": ["Téléchargez votre PDF.", "Définissez les valeurs de marge.", "Téléchargez le PDF rogné."],
|
||||
"benefits": ["Contrôle précis des marges", "Rogner toutes les pages", "Supprimer les espaces blancs", "Gratuit et rapide"],
|
||||
"useCases": ["Supprimer les grandes marges", "Ajuster les tailles de page"],
|
||||
"faq": [{"q": "Quelles sont les unités des marges ?", "a": "Les marges sont spécifiées en points PDF (1 point = 1/72 pouce)."}]
|
||||
},
|
||||
"flattenPdf": {
|
||||
"whatItDoes": "Aplatissez les formulaires et annotations PDF, convertissant les éléments interactifs en contenu statique.",
|
||||
"howToUse": ["Téléchargez votre PDF avec formulaires ou annotations.", "L'outil aplatit tous les éléments interactifs.", "Téléchargez le PDF aplati."],
|
||||
"benefits": ["Supprimer l'interactivité des formulaires", "Aplatir les annotations", "Empêcher la modification", "Idéal pour l'archivage"],
|
||||
"useCases": ["Archiver des formulaires remplis", "Envoyer des formulaires complétés en toute sécurité"],
|
||||
"faq": [{"q": "Que signifie aplatir ?", "a": "L'aplatissement convertit les éléments interactifs en contenu permanent non modifiable."}]
|
||||
},
|
||||
"repairPdf": {
|
||||
"whatItDoes": "Réparez les fichiers PDF corrompus en réécrivant la structure du document.",
|
||||
"howToUse": ["Téléchargez votre PDF corrompu.", "L'outil tente de réparer le document.", "Téléchargez le PDF réparé."],
|
||||
"benefits": ["Réparer les structures PDF cassées", "Récupérer le contenu", "Sortie propre", "Gratuit"],
|
||||
"useCases": ["Récupérer des PDF endommagés", "Réparer des fichiers qui ne s'ouvrent pas"],
|
||||
"faq": [{"q": "Peut-il réparer n'importe quel PDF cassé ?", "a": "L'outil récupère autant de pages que possible. Les fichiers gravement corrompus peuvent n'être que partiellement récupérables."}]
|
||||
},
|
||||
"pdfMetadata": {
|
||||
"whatItDoes": "Affichez et modifiez les propriétés du document PDF : titre, auteur, sujet et mots-clés.",
|
||||
"howToUse": ["Téléchargez votre PDF.", "Remplissez les champs de métadonnées.", "Téléchargez le PDF mis à jour."],
|
||||
"benefits": ["Modifier le titre, l'auteur et plus", "Améliorer la recherchabilité", "Nettoyer les propriétés", "Aucun logiciel nécessaire"],
|
||||
"useCases": ["Définir des titres appropriés pour la gestion documentaire", "Ajouter des informations d'auteur"],
|
||||
"faq": [{"q": "Que sont les métadonnées PDF ?", "a": "Les métadonnées incluent les propriétés du document comme le titre, l'auteur et les mots-clés qui aident à organiser et rechercher les PDF."}]
|
||||
},
|
||||
"imageCrop": {
|
||||
"whatItDoes": "Rognez des images en spécifiant des coordonnées exactes en pixels. Supporte PNG, JPG et WebP.",
|
||||
"howToUse": ["Téléchargez votre image.", "Entrez les coordonnées de rognage.", "Téléchargez l'image rognée."],
|
||||
"benefits": ["Rognage précis au pixel", "Supporte PNG, JPG, WebP", "Sortie haute qualité", "Sans filigrane"],
|
||||
"useCases": ["Rogner des photos de produits", "Supprimer les bordures indésirables"],
|
||||
"faq": [{"q": "Comment fonctionnent les coordonnées ?", "a": "Gauche et haut définissent le point de départ, droite et bas définissent le point de fin de la zone de rognage en pixels."}]
|
||||
},
|
||||
"imageRotateFlip": {
|
||||
"whatItDoes": "Faites pivoter les images de 90°, 180° ou 270° et retournez-les horizontalement ou verticalement.",
|
||||
"howToUse": ["Téléchargez votre image.", "Choisissez l'angle de rotation et les options de retournement.", "Téléchargez l'image transformée."],
|
||||
"benefits": ["Rotation de 90°, 180° ou 270°", "Retournement horizontal ou vertical", "Combiner les opérations", "Supporte plusieurs formats"],
|
||||
"useCases": ["Corriger l'orientation des photos", "Créer des images miroir"],
|
||||
"faq": [{"q": "Puis-je combiner rotation et retournement ?", "a": "Oui, la rotation et le retournement sont appliqués ensemble en une seule opération."}]
|
||||
},
|
||||
"barcode": {
|
||||
"whatItDoes": "Générez des codes-barres en plusieurs formats : Code128, Code39, EAN-13, UPC-A, ISBN et plus. Téléchargez en PNG ou SVG.",
|
||||
"howToUse": ["Entrez les données à encoder.", "Sélectionnez un type de code-barres.", "Choisissez le format de sortie (PNG ou SVG).", "Générez et téléchargez votre code-barres."],
|
||||
"benefits": ["Plusieurs formats de code-barres", "Sortie PNG et SVG", "Génération instantanée", "Gratuit et illimité"],
|
||||
"useCases": ["Générer des codes-barres de produits", "Créer des étiquettes d'inventaire", "Produire des codes-barres ISBN pour les livres"],
|
||||
"faq": [
|
||||
{"q": "Quels formats de code-barres sont disponibles ?", "a": "Code128, Code39, EAN-13, EAN-8, UPC-A, ISBN-13, ISBN-10, ISSN et PZN."},
|
||||
{"q": "Quelle est la différence avec un code QR ?", "a": "Les codes-barres sont linéaires (1D) avec moins de capacité de données. Les codes QR sont bidimensionnels (2D) et stockent plus d'informations."}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@ import { useEffect, useMemo, useState, type FormEvent } from 'react';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import {
|
||||
AlertTriangle,
|
||||
BarChart3,
|
||||
BadgeCheck,
|
||||
Check,
|
||||
Copy,
|
||||
@@ -33,11 +35,27 @@ const toolKeyMap: Record<string, string> = {
|
||||
'pdf-to-word': 'tools.pdfToWord.title',
|
||||
'word-to-pdf': 'tools.wordToPdf.title',
|
||||
'compress-pdf': 'tools.compressPdf.title',
|
||||
'compress-image': 'tools.compressImage.title',
|
||||
'crop-pdf': 'tools.cropPdf.title',
|
||||
'crop-image': 'tools.imageCrop.title',
|
||||
'edit-metadata': 'tools.pdfMetadata.title',
|
||||
'excel-to-pdf': 'tools.excelToPdf.title',
|
||||
'extract-pages': 'tools.extractPages.title',
|
||||
'extract-tables': 'tools.tableExtractor.title',
|
||||
'flatten-pdf': 'tools.flattenPdf.title',
|
||||
'html-to-pdf': 'tools.htmlToPdf.title',
|
||||
'image-convert': 'tools.imageConvert.title',
|
||||
'image-converter': 'tools.imageConvert.title',
|
||||
'image-crop': 'tools.imageCrop.title',
|
||||
'image-resize': 'tools.imageConvert.title',
|
||||
'image-rotate-flip': 'tools.imageRotateFlip.title',
|
||||
'video-to-gif': 'tools.videoToGif.title',
|
||||
'merge-pdf': 'tools.mergePdf.title',
|
||||
'ocr': 'tools.ocr.title',
|
||||
'split-pdf': 'tools.splitPdf.title',
|
||||
'pdf-metadata': 'tools.pdfMetadata.title',
|
||||
'pdf-to-excel': 'tools.pdfToExcel.title',
|
||||
'pdf-to-pptx': 'tools.pdfToPptx.title',
|
||||
'rotate-pdf': 'tools.rotatePdf.title',
|
||||
'page-numbers': 'tools.pageNumbers.title',
|
||||
'pdf-to-images': 'tools.pdfToImages.title',
|
||||
@@ -45,8 +63,21 @@ const toolKeyMap: Record<string, string> = {
|
||||
'watermark-pdf': 'tools.watermarkPdf.title',
|
||||
'protect-pdf': 'tools.protectPdf.title',
|
||||
'unlock-pdf': 'tools.unlockPdf.title',
|
||||
'repair-pdf': 'tools.repairPdf.title',
|
||||
'remove-background': 'tools.removeBg.title',
|
||||
'remove-bg': 'tools.removeBg.title',
|
||||
'remove-watermark-pdf': 'tools.removeWatermark.title',
|
||||
'reorder-pdf': 'tools.reorderPdf.title',
|
||||
'sign-pdf': 'tools.signPdf.title',
|
||||
'summarize-pdf': 'tools.summarizePdf.title',
|
||||
'translate-pdf': 'tools.translatePdf.title',
|
||||
'chat-pdf': 'tools.chatPdf.title',
|
||||
'barcode': 'tools.barcode.title',
|
||||
'barcode-generator': 'tools.barcode.title',
|
||||
'pptx-to-pdf': 'tools.pptxToPdf.title',
|
||||
'pdf-flowchart': 'tools.pdfFlowchart.title',
|
||||
'pdf-flowchart-sample': 'tools.pdfFlowchart.title',
|
||||
'qr-code': 'tools.qrCode.title',
|
||||
};
|
||||
|
||||
function formatHistoryTool(tool: string, t: (key: string) => string) {
|
||||
@@ -93,6 +124,50 @@ export default function AccountPage() {
|
||||
[i18n.language]
|
||||
);
|
||||
|
||||
const dashboardMetrics = useMemo(() => {
|
||||
const completedItems = historyItems.filter((item) => item.status === 'completed');
|
||||
const failedItems = historyItems.filter((item) => item.status !== 'completed');
|
||||
const toolCounts = historyItems.reduce<Record<string, number>>((acc, item) => {
|
||||
acc[item.tool] = (acc[item.tool] || 0) + 1;
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
const favoriteToolSlug = Object.entries(toolCounts)
|
||||
.sort((left, right) => right[1] - left[1])[0]?.[0] || null;
|
||||
|
||||
return {
|
||||
totalProcessed: historyItems.length,
|
||||
completedCount: completedItems.length,
|
||||
failedCount: failedItems.length,
|
||||
favoriteToolSlug,
|
||||
successRate: historyItems.length ? Math.round((completedItems.length / historyItems.length) * 100) : 0,
|
||||
topTools: Object.entries(toolCounts)
|
||||
.sort((left, right) => right[1] - left[1])
|
||||
.slice(0, 4),
|
||||
recentFailures: failedItems.slice(0, 3),
|
||||
onboardingItems: [
|
||||
{
|
||||
key: 'firstTask',
|
||||
done: historyItems.length > 0,
|
||||
title: t('account.onboardingFirstTaskTitle'),
|
||||
description: t('account.onboardingFirstTaskDesc'),
|
||||
},
|
||||
{
|
||||
key: 'upgrade',
|
||||
done: user?.plan === 'pro',
|
||||
title: t('account.onboardingUpgradeTitle'),
|
||||
description: t('account.onboardingUpgradeDesc'),
|
||||
},
|
||||
{
|
||||
key: 'apiKey',
|
||||
done: user?.plan !== 'pro' ? false : apiKeys.some((key) => !key.revoked_at),
|
||||
title: t('account.onboardingApiTitle'),
|
||||
description: t('account.onboardingApiDesc'),
|
||||
},
|
||||
],
|
||||
};
|
||||
}, [apiKeys, historyItems, t, user?.plan]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!user) {
|
||||
setHistoryItems([]);
|
||||
@@ -314,6 +389,106 @@ export default function AccountPage() {
|
||||
</section>
|
||||
)}
|
||||
|
||||
<section className="card rounded-[2rem] p-0">
|
||||
<div className="border-b border-slate-200 px-6 py-5 dark:border-slate-700">
|
||||
<div className="flex items-center gap-3">
|
||||
<BarChart3 className="h-5 w-5 text-primary-600 dark:text-primary-400" />
|
||||
<div>
|
||||
<h2 className="text-xl font-semibold text-slate-900 dark:text-white">
|
||||
{t('account.dashboardTitle')}
|
||||
</h2>
|
||||
<p className="text-sm text-slate-500 dark:text-slate-400">
|
||||
{t('account.dashboardSubtitle')}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="space-y-6 p-6">
|
||||
<div className="grid gap-4 md:grid-cols-2 xl:grid-cols-4">
|
||||
<div className="rounded-[1.5rem] bg-slate-50 p-5 dark:bg-slate-800/80">
|
||||
<p className="text-xs font-semibold uppercase tracking-widest text-slate-400 dark:text-slate-500">{t('account.metricProcessed')}</p>
|
||||
<p className="mt-2 text-3xl font-bold text-slate-900 dark:text-white">{dashboardMetrics.totalProcessed}</p>
|
||||
</div>
|
||||
<div className="rounded-[1.5rem] bg-slate-50 p-5 dark:bg-slate-800/80">
|
||||
<p className="text-xs font-semibold uppercase tracking-widest text-slate-400 dark:text-slate-500">{t('account.metricSuccessRate')}</p>
|
||||
<p className="mt-2 text-3xl font-bold text-slate-900 dark:text-white">{dashboardMetrics.successRate}%</p>
|
||||
</div>
|
||||
<div className="rounded-[1.5rem] bg-slate-50 p-5 dark:bg-slate-800/80">
|
||||
<p className="text-xs font-semibold uppercase tracking-widest text-slate-400 dark:text-slate-500">{t('account.metricFavoriteTool')}</p>
|
||||
<p className="mt-2 text-lg font-semibold text-slate-900 dark:text-white">
|
||||
{dashboardMetrics.favoriteToolSlug
|
||||
? formatHistoryTool(dashboardMetrics.favoriteToolSlug, t)
|
||||
: t('account.metricFavoriteToolEmpty')}
|
||||
</p>
|
||||
</div>
|
||||
<div className="rounded-[1.5rem] bg-slate-50 p-5 dark:bg-slate-800/80">
|
||||
<p className="text-xs font-semibold uppercase tracking-widest text-slate-400 dark:text-slate-500">{t('account.metricFailures')}</p>
|
||||
<p className="mt-2 text-3xl font-bold text-slate-900 dark:text-white">{dashboardMetrics.failedCount}</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="grid gap-4 lg:grid-cols-[1fr_1fr_1.1fr]">
|
||||
<div className="rounded-[1.5rem] border border-slate-200 p-5 dark:border-slate-700">
|
||||
<h3 className="text-base font-semibold text-slate-900 dark:text-white">{t('account.topToolsTitle')}</h3>
|
||||
<div className="mt-4 space-y-3">
|
||||
{dashboardMetrics.topTools.length === 0 ? (
|
||||
<p className="text-sm text-slate-500 dark:text-slate-400">{t('account.historyEmpty')}</p>
|
||||
) : (
|
||||
dashboardMetrics.topTools.map(([tool, count]) => (
|
||||
<div key={tool} className="flex items-center justify-between rounded-xl bg-slate-50 px-4 py-3 dark:bg-slate-800/70">
|
||||
<span className="text-sm font-medium text-slate-800 dark:text-slate-100">{formatHistoryTool(tool, t)}</span>
|
||||
<span className="text-sm font-semibold text-primary-600 dark:text-primary-400">{count}</span>
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="rounded-[1.5rem] border border-slate-200 p-5 dark:border-slate-700">
|
||||
<h3 className="text-base font-semibold text-slate-900 dark:text-white">{t('account.issuesTitle')}</h3>
|
||||
<div className="mt-4 space-y-3">
|
||||
{dashboardMetrics.recentFailures.length === 0 ? (
|
||||
<p className="text-sm text-slate-500 dark:text-slate-400">{t('account.issuesEmpty')}</p>
|
||||
) : (
|
||||
dashboardMetrics.recentFailures.map((item) => (
|
||||
<div key={item.id} className="rounded-xl bg-red-50 px-4 py-3 dark:bg-red-950/30">
|
||||
<div className="flex items-start gap-3">
|
||||
<AlertTriangle className="mt-0.5 h-4 w-4 text-red-500" />
|
||||
<div>
|
||||
<p className="text-sm font-semibold text-red-800 dark:text-red-300">{formatHistoryTool(item.tool, t)}</p>
|
||||
<p className="mt-1 text-xs text-red-700 dark:text-red-400">{typeof item.metadata?.error === 'string' ? item.metadata.error : t('account.statusFailed')}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="rounded-[1.5rem] border border-slate-200 p-5 dark:border-slate-700">
|
||||
<h3 className="text-base font-semibold text-slate-900 dark:text-white">{t('account.onboardingTitle')}</h3>
|
||||
<p className="mt-1 text-sm text-slate-500 dark:text-slate-400">{t('account.onboardingSubtitle')}</p>
|
||||
<div className="mt-4 space-y-3">
|
||||
{dashboardMetrics.onboardingItems.map((item) => (
|
||||
<div key={item.key} className="rounded-xl bg-slate-50 px-4 py-3 dark:bg-slate-800/70">
|
||||
<div className="flex items-start gap-3">
|
||||
<span className={`mt-0.5 inline-flex h-5 w-5 items-center justify-center rounded-full ${item.done ? 'bg-emerald-100 text-emerald-700 dark:bg-emerald-900/30 dark:text-emerald-300' : 'bg-slate-200 text-slate-600 dark:bg-slate-700 dark:text-slate-300'}`}>
|
||||
{item.done ? <Check className="h-3.5 w-3.5" /> : <span className="text-[10px] font-bold">•</span>}
|
||||
</span>
|
||||
<div>
|
||||
<p className="text-sm font-semibold text-slate-900 dark:text-white">{item.title}</p>
|
||||
<p className="mt-1 text-xs leading-5 text-slate-500 dark:text-slate-400">{item.description}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
{/* API Key Management — Pro only */}
|
||||
{user.plan === 'pro' && (
|
||||
<section className="card rounded-[2rem] p-0">
|
||||
|
||||
@@ -1,57 +1,41 @@
|
||||
import { useDeferredValue } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Link } from 'react-router-dom';
|
||||
import { useSearchParams } from 'react-router-dom';
|
||||
import SEOHead from '@/components/seo/SEOHead';
|
||||
import { generateWebPage } from '@/utils/seo';
|
||||
import { BookOpen, Calendar, ArrowRight } from 'lucide-react';
|
||||
|
||||
interface BlogPost {
|
||||
slug: string;
|
||||
titleKey: string;
|
||||
excerptKey: string;
|
||||
date: string;
|
||||
category: string;
|
||||
}
|
||||
|
||||
const BLOG_POSTS: BlogPost[] = [
|
||||
{
|
||||
slug: 'how-to-compress-pdf-online',
|
||||
titleKey: 'pages.blog.posts.compressPdf.title',
|
||||
excerptKey: 'pages.blog.posts.compressPdf.excerpt',
|
||||
date: '2025-01-15',
|
||||
category: 'PDF',
|
||||
},
|
||||
{
|
||||
slug: 'convert-images-without-losing-quality',
|
||||
titleKey: 'pages.blog.posts.imageConvert.title',
|
||||
excerptKey: 'pages.blog.posts.imageConvert.excerpt',
|
||||
date: '2025-01-10',
|
||||
category: 'Image',
|
||||
},
|
||||
{
|
||||
slug: 'ocr-extract-text-from-images',
|
||||
titleKey: 'pages.blog.posts.ocrGuide.title',
|
||||
excerptKey: 'pages.blog.posts.ocrGuide.excerpt',
|
||||
date: '2025-01-05',
|
||||
category: 'AI',
|
||||
},
|
||||
{
|
||||
slug: 'merge-split-pdf-files',
|
||||
titleKey: 'pages.blog.posts.mergeSplit.title',
|
||||
excerptKey: 'pages.blog.posts.mergeSplit.excerpt',
|
||||
date: '2024-12-28',
|
||||
category: 'PDF',
|
||||
},
|
||||
{
|
||||
slug: 'ai-chat-with-pdf-documents',
|
||||
titleKey: 'pages.blog.posts.aiChat.title',
|
||||
excerptKey: 'pages.blog.posts.aiChat.excerpt',
|
||||
date: '2024-12-20',
|
||||
category: 'AI',
|
||||
},
|
||||
];
|
||||
import { BookOpen, Calendar, ArrowRight, Search, X } from 'lucide-react';
|
||||
import {
|
||||
BLOG_ARTICLES,
|
||||
getLocalizedBlogArticle,
|
||||
normalizeBlogLocale,
|
||||
} from '@/content/blogArticles';
|
||||
|
||||
export default function BlogPage() {
|
||||
const { t } = useTranslation();
|
||||
const { t, i18n } = useTranslation();
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
const query = searchParams.get('q') || '';
|
||||
const deferredQuery = useDeferredValue(query.trim().toLowerCase());
|
||||
const locale = normalizeBlogLocale(i18n.language);
|
||||
|
||||
const posts = BLOG_ARTICLES.map((article) => getLocalizedBlogArticle(article, locale));
|
||||
|
||||
const filteredPosts = !deferredQuery
|
||||
? posts
|
||||
: posts.filter((post) => {
|
||||
const haystack = `${post.title} ${post.excerpt} ${post.category}`.toLowerCase();
|
||||
return haystack.includes(deferredQuery);
|
||||
});
|
||||
|
||||
const updateQuery = (value: string) => {
|
||||
const nextParams = new URLSearchParams(searchParams);
|
||||
if (value.trim()) {
|
||||
nextParams.set('q', value);
|
||||
} else {
|
||||
nextParams.delete('q');
|
||||
}
|
||||
setSearchParams(nextParams, { replace: true });
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -79,8 +63,32 @@ export default function BlogPage() {
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="mb-8 rounded-2xl border border-slate-200 bg-white p-4 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="flex flex-col gap-3 sm:flex-row sm:items-center">
|
||||
<label className="relative flex-1">
|
||||
<Search className="pointer-events-none absolute start-3 top-1/2 h-4 w-4 -translate-y-1/2 text-slate-400" />
|
||||
<input
|
||||
value={query}
|
||||
onChange={(event) => updateQuery(event.target.value)}
|
||||
placeholder={t('pages.blog.searchPlaceholder')}
|
||||
className="w-full rounded-xl border border-slate-200 bg-slate-50 py-3 pl-10 pr-4 text-sm text-slate-900 outline-none transition-colors focus:border-primary-400 focus:bg-white dark:border-slate-700 dark:bg-slate-800 dark:text-white dark:focus:border-primary-500"
|
||||
/>
|
||||
</label>
|
||||
{query && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => updateQuery('')}
|
||||
className="inline-flex items-center justify-center gap-2 rounded-xl border border-slate-200 px-4 py-3 text-sm font-medium text-slate-700 hover:bg-slate-50 dark:border-slate-700 dark:text-slate-200 dark:hover:bg-slate-800"
|
||||
>
|
||||
<X className="h-4 w-4" />
|
||||
{t('common.clear')}
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="space-y-6">
|
||||
{BLOG_POSTS.map((post) => (
|
||||
{filteredPosts.map((post) => (
|
||||
<article
|
||||
key={post.slug}
|
||||
className="rounded-xl border border-slate-200 bg-white p-6 shadow-sm transition-shadow hover:shadow-md dark:border-slate-700 dark:bg-slate-800"
|
||||
@@ -91,15 +99,15 @@ export default function BlogPage() {
|
||||
</span>
|
||||
<span className="flex items-center gap-1 text-sm text-slate-500 dark:text-slate-400">
|
||||
<Calendar className="h-3.5 w-3.5" />
|
||||
{post.date}
|
||||
{post.publishedAt}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<h2 className="mb-2 text-xl font-semibold text-slate-900 dark:text-white">
|
||||
{t(post.titleKey)}
|
||||
{post.title}
|
||||
</h2>
|
||||
<p className="mb-4 text-slate-600 dark:text-slate-400 leading-relaxed">
|
||||
{t(post.excerptKey)}
|
||||
{post.excerpt}
|
||||
</p>
|
||||
|
||||
<Link
|
||||
@@ -112,6 +120,14 @@ export default function BlogPage() {
|
||||
))}
|
||||
</div>
|
||||
|
||||
{filteredPosts.length === 0 && (
|
||||
<div className="mt-10 rounded-2xl border border-dashed border-slate-300 bg-slate-50 p-8 text-center dark:border-slate-600 dark:bg-slate-800/50">
|
||||
<p className="text-base font-medium text-slate-700 dark:text-slate-200">
|
||||
{t('pages.blog.noResults')}
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Coming Soon */}
|
||||
<div className="mt-10 rounded-xl border-2 border-dashed border-slate-300 bg-slate-50 p-8 text-center dark:border-slate-600 dark:bg-slate-800/50">
|
||||
<p className="text-lg font-medium text-slate-600 dark:text-slate-400">
|
||||
|
||||
189
frontend/src/pages/BlogPostPage.tsx
Normal file
189
frontend/src/pages/BlogPostPage.tsx
Normal file
@@ -0,0 +1,189 @@
|
||||
import { Calendar, ChevronLeft, Clock } from 'lucide-react';
|
||||
import { Link, useParams } from 'react-router-dom';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import SEOHead from '@/components/seo/SEOHead';
|
||||
import { getToolSEO } from '@/config/seoData';
|
||||
import {
|
||||
BLOG_ARTICLES,
|
||||
getBlogArticleBySlug,
|
||||
getLocalizedBlogArticle,
|
||||
normalizeBlogLocale,
|
||||
} from '@/content/blogArticles';
|
||||
import { generateBlogPosting, generateBreadcrumbs, generateWebPage } from '@/utils/seo';
|
||||
import NotFoundPage from './NotFoundPage';
|
||||
|
||||
export default function BlogPostPage() {
|
||||
const { slug } = useParams();
|
||||
const { t, i18n } = useTranslation();
|
||||
const locale = normalizeBlogLocale(i18n.language);
|
||||
const article = slug ? getBlogArticleBySlug(slug) : undefined;
|
||||
|
||||
if (!article) {
|
||||
return <NotFoundPage />;
|
||||
}
|
||||
|
||||
const localizedArticle = getLocalizedBlogArticle(article, locale);
|
||||
const path = `/blog/${localizedArticle.slug}`;
|
||||
const url = `${window.location.origin}${path}`;
|
||||
|
||||
const breadcrumbs = generateBreadcrumbs([
|
||||
{ name: t('common.home'), url: window.location.origin },
|
||||
{ name: t('common.blog'), url: `${window.location.origin}/blog` },
|
||||
{ name: localizedArticle.title, url },
|
||||
]);
|
||||
|
||||
const relatedArticles = BLOG_ARTICLES
|
||||
.filter((candidate) => candidate.slug !== article.slug)
|
||||
.slice(0, 3)
|
||||
.map((candidate) => getLocalizedBlogArticle(candidate, locale));
|
||||
|
||||
return (
|
||||
<>
|
||||
<SEOHead
|
||||
title={localizedArticle.title}
|
||||
description={localizedArticle.seoDescription}
|
||||
path={path}
|
||||
type="article"
|
||||
jsonLd={[
|
||||
generateWebPage({
|
||||
name: localizedArticle.title,
|
||||
description: localizedArticle.seoDescription,
|
||||
url,
|
||||
}),
|
||||
generateBlogPosting({
|
||||
headline: localizedArticle.title,
|
||||
description: localizedArticle.seoDescription,
|
||||
url,
|
||||
datePublished: localizedArticle.publishedAt,
|
||||
inLanguage: locale,
|
||||
}),
|
||||
breadcrumbs,
|
||||
]}
|
||||
/>
|
||||
|
||||
<article className="mx-auto max-w-4xl">
|
||||
<Link
|
||||
to="/blog"
|
||||
className="mb-6 inline-flex items-center gap-2 text-sm font-medium text-primary-600 hover:text-primary-700 dark:text-primary-400 dark:hover:text-primary-300"
|
||||
>
|
||||
<ChevronLeft className="h-4 w-4" />
|
||||
{t('pages.blog.backToBlog')}
|
||||
</Link>
|
||||
|
||||
<header className="rounded-3xl border border-slate-200 bg-white p-8 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="mb-4 flex flex-wrap items-center gap-3 text-sm text-slate-500 dark:text-slate-400">
|
||||
<span className="rounded-full bg-primary-100 px-3 py-1 font-medium text-primary-700 dark:bg-primary-900/30 dark:text-primary-300">
|
||||
{localizedArticle.category}
|
||||
</span>
|
||||
<span className="inline-flex items-center gap-1">
|
||||
<Calendar className="h-3.5 w-3.5" />
|
||||
{localizedArticle.publishedAt}
|
||||
</span>
|
||||
<span className="inline-flex items-center gap-1">
|
||||
<Clock className="h-3.5 w-3.5" />
|
||||
{t('pages.blog.readTime', { count: localizedArticle.readingMinutes })}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<h1 className="text-3xl font-bold tracking-tight text-slate-900 dark:text-white sm:text-4xl">
|
||||
{localizedArticle.title}
|
||||
</h1>
|
||||
<p className="mt-4 text-lg leading-8 text-slate-600 dark:text-slate-400">
|
||||
{localizedArticle.excerpt}
|
||||
</p>
|
||||
</header>
|
||||
|
||||
<div className="mt-8 grid gap-8 lg:grid-cols-[minmax(0,1fr)_300px]">
|
||||
<div className="space-y-8">
|
||||
<section className="rounded-2xl border border-slate-200 bg-slate-50 p-6 dark:border-slate-700 dark:bg-slate-800/60">
|
||||
<h2 className="text-lg font-semibold text-slate-900 dark:text-white">
|
||||
{t('pages.blog.keyTakeaways')}
|
||||
</h2>
|
||||
<ul className="mt-4 space-y-3">
|
||||
{localizedArticle.keyTakeaways.map((item) => (
|
||||
<li key={item} className="text-sm leading-6 text-slate-700 dark:text-slate-300">
|
||||
• {item}
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
{localizedArticle.sections.map((section) => (
|
||||
<section key={section.heading} className="rounded-2xl border border-slate-200 bg-white p-8 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<h2 className="text-2xl font-semibold text-slate-900 dark:text-white">
|
||||
{section.heading}
|
||||
</h2>
|
||||
<div className="mt-4 space-y-4">
|
||||
{section.paragraphs.map((paragraph) => (
|
||||
<p key={paragraph} className="leading-8 text-slate-700 dark:text-slate-300">
|
||||
{paragraph}
|
||||
</p>
|
||||
))}
|
||||
</div>
|
||||
{section.bullets.length > 0 && (
|
||||
<ul className="mt-5 space-y-3 rounded-2xl bg-slate-50 p-5 text-sm text-slate-700 dark:bg-slate-800 dark:text-slate-300">
|
||||
{section.bullets.map((bullet) => (
|
||||
<li key={bullet}>• {bullet}</li>
|
||||
))}
|
||||
</ul>
|
||||
)}
|
||||
</section>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<aside className="space-y-6">
|
||||
<section className="rounded-2xl border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<h2 className="text-lg font-semibold text-slate-900 dark:text-white">
|
||||
{t('pages.blog.featuredTools')}
|
||||
</h2>
|
||||
<div className="mt-4 space-y-3">
|
||||
{localizedArticle.toolSlugs.map((toolSlug) => {
|
||||
const tool = getToolSEO(toolSlug);
|
||||
if (!tool) {
|
||||
return null;
|
||||
}
|
||||
return (
|
||||
<Link
|
||||
key={toolSlug}
|
||||
to={`/tools/${toolSlug}`}
|
||||
className="block rounded-xl border border-slate-200 p-4 transition-colors hover:border-primary-300 hover:bg-slate-50 dark:border-slate-700 dark:hover:border-primary-600 dark:hover:bg-slate-800"
|
||||
>
|
||||
<p className="font-medium text-slate-900 dark:text-white">
|
||||
{t(`tools.${tool.i18nKey}.title`)}
|
||||
</p>
|
||||
<p className="mt-1 text-sm text-slate-600 dark:text-slate-400">
|
||||
{t(`tools.${tool.i18nKey}.shortDesc`)}
|
||||
</p>
|
||||
</Link>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section className="rounded-2xl border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<h2 className="text-lg font-semibold text-slate-900 dark:text-white">
|
||||
{t('common.blog')}
|
||||
</h2>
|
||||
<div className="mt-4 space-y-3">
|
||||
{relatedArticles.map((relatedArticle) => (
|
||||
<Link
|
||||
key={relatedArticle.slug}
|
||||
to={`/blog/${relatedArticle.slug}`}
|
||||
className="block rounded-xl border border-slate-200 p-4 transition-colors hover:border-primary-300 hover:bg-slate-50 dark:border-slate-700 dark:hover:border-primary-600 dark:hover:bg-slate-800"
|
||||
>
|
||||
<p className="font-medium text-slate-900 dark:text-white">
|
||||
{relatedArticle.title}
|
||||
</p>
|
||||
<p className="mt-1 text-sm text-slate-600 dark:text-slate-400">
|
||||
{relatedArticle.excerpt}
|
||||
</p>
|
||||
</Link>
|
||||
))}
|
||||
</div>
|
||||
</section>
|
||||
</aside>
|
||||
</div>
|
||||
</article>
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -1,11 +1,13 @@
|
||||
import { useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { Mail, Send, CheckCircle } from 'lucide-react';
|
||||
import { Mail, Send, CheckCircle, AlertCircle, Loader2 } from 'lucide-react';
|
||||
import SEOHead from '@/components/seo/SEOHead';
|
||||
import { generateWebPage } from '@/utils/seo';
|
||||
import axios from 'axios';
|
||||
|
||||
const CONTACT_EMAIL = 'support@saas-pdf.com';
|
||||
const API_BASE = import.meta.env.VITE_API_URL || '';
|
||||
|
||||
type Category = 'general' | 'bug' | 'feature';
|
||||
|
||||
@@ -13,21 +15,37 @@ export default function ContactPage() {
|
||||
const { t } = useTranslation();
|
||||
const [category, setCategory] = useState<Category>('general');
|
||||
const [submitted, setSubmitted] = useState(false);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [error, setError] = useState('');
|
||||
|
||||
const placeholderKey = `pages.contact.${category}Placeholder` as const;
|
||||
|
||||
function handleSubmit(e: React.FormEvent<HTMLFormElement>) {
|
||||
async function handleSubmit(e: React.FormEvent<HTMLFormElement>) {
|
||||
e.preventDefault();
|
||||
setError('');
|
||||
setLoading(true);
|
||||
|
||||
const form = e.currentTarget;
|
||||
const data = new FormData(form);
|
||||
const subject = data.get('subject') as string;
|
||||
const body = data.get('message') as string;
|
||||
const name = data.get('name') as string;
|
||||
|
||||
// Open user's email client with pre-filled fields
|
||||
const mailto = `mailto:${CONTACT_EMAIL}?subject=${encodeURIComponent(`[${category}] ${subject}`)}&body=${encodeURIComponent(`From: ${name}\n\n${body}`)}`;
|
||||
window.location.href = mailto;
|
||||
try {
|
||||
await axios.post(`${API_BASE}/api/contact/submit`, {
|
||||
name: data.get('name'),
|
||||
email: data.get('email'),
|
||||
category,
|
||||
subject: data.get('subject'),
|
||||
message: data.get('message'),
|
||||
});
|
||||
setSubmitted(true);
|
||||
} catch (err: unknown) {
|
||||
if (axios.isAxiosError(err) && err.response?.data?.error) {
|
||||
setError(err.response.data.error);
|
||||
} else {
|
||||
setError(t('pages.contact.errorMessage', 'Failed to send message. Please try again.'));
|
||||
}
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
if (submitted) {
|
||||
@@ -156,13 +174,22 @@ export default function ContactPage() {
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Error */}
|
||||
{error && (
|
||||
<div className="flex items-center gap-2 rounded-lg border border-red-200 bg-red-50 px-4 py-3 text-sm text-red-700 dark:border-red-800 dark:bg-red-900/30 dark:text-red-300">
|
||||
<AlertCircle className="h-4 w-4 shrink-0" />
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Submit */}
|
||||
<button
|
||||
type="submit"
|
||||
className="flex w-full items-center justify-center gap-2 rounded-lg bg-primary-600 px-6 py-3 font-medium text-white transition-colors hover:bg-primary-700"
|
||||
disabled={loading}
|
||||
className="flex w-full items-center justify-center gap-2 rounded-lg bg-primary-600 px-6 py-3 font-medium text-white transition-colors hover:bg-primary-700 disabled:opacity-50"
|
||||
>
|
||||
<Send className="h-4 w-4" />
|
||||
{t('common.send')}
|
||||
{loading ? <Loader2 className="h-4 w-4 animate-spin" /> : <Send className="h-4 w-4" />}
|
||||
{loading ? t('common.sending', 'Sending...') : t('common.send')}
|
||||
</button>
|
||||
</form>
|
||||
|
||||
|
||||
125
frontend/src/pages/DevelopersPage.tsx
Normal file
125
frontend/src/pages/DevelopersPage.tsx
Normal file
@@ -0,0 +1,125 @@
|
||||
import SEOHead from '@/components/seo/SEOHead';
|
||||
import SocialProofStrip from '@/components/shared/SocialProofStrip';
|
||||
import { generateWebPage } from '@/utils/seo';
|
||||
import { Link } from 'react-router-dom';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Code2, KeyRound, Rocket, Workflow } from 'lucide-react';
|
||||
|
||||
const QUICKSTART_STEPS = ['createKey', 'sendFile', 'pollStatus'] as const;
|
||||
|
||||
const ENDPOINT_GROUPS = [
|
||||
{
|
||||
titleKey: 'pages.developers.groupConvert',
|
||||
endpoints: ['/api/v1/convert/pdf-to-word', '/api/v1/convert/word-to-pdf', '/api/v1/convert/pdf-to-excel', '/api/v1/convert/pdf-to-pptx'],
|
||||
},
|
||||
{
|
||||
titleKey: 'pages.developers.groupPdf',
|
||||
endpoints: ['/api/v1/compress/pdf', '/api/v1/pdf-tools/merge', '/api/v1/pdf-tools/split', '/api/v1/pdf-tools/sign'],
|
||||
},
|
||||
{
|
||||
titleKey: 'pages.developers.groupAi',
|
||||
endpoints: ['/api/v1/pdf-ai/chat', '/api/v1/pdf-ai/summarize', '/api/v1/ocr/pdf', '/api/v1/image/remove-bg'],
|
||||
},
|
||||
];
|
||||
|
||||
const CURL_UPLOAD = `curl -X POST https://your-domain.example/api/v1/convert/pdf-to-word \\
|
||||
-H "X-API-Key: spdf_your_api_key" \\
|
||||
-F "file=@./sample.pdf"`;
|
||||
|
||||
const CURL_POLL = `curl https://your-domain.example/api/v1/tasks/<task_id>/status \\
|
||||
-H "X-API-Key: spdf_your_api_key"`;
|
||||
|
||||
export default function DevelopersPage() {
|
||||
const { t } = useTranslation();
|
||||
|
||||
return (
|
||||
<>
|
||||
<SEOHead
|
||||
title={t('pages.developers.title')}
|
||||
description={t('pages.developers.metaDescription')}
|
||||
path="/developers"
|
||||
jsonLd={generateWebPage({
|
||||
name: t('pages.developers.title'),
|
||||
description: t('pages.developers.metaDescription'),
|
||||
url: `${window.location.origin}/developers`,
|
||||
})}
|
||||
/>
|
||||
|
||||
<div className="mx-auto max-w-6xl space-y-10">
|
||||
<section className="rounded-[2.5rem] bg-gradient-to-br from-sky-100 via-white to-emerald-50 p-8 shadow-sm ring-1 ring-sky-200 dark:from-sky-950/40 dark:via-slate-950 dark:to-emerald-950/20 dark:ring-sky-900/40 sm:p-10">
|
||||
<div className="max-w-3xl">
|
||||
<div className="inline-flex items-center gap-2 rounded-full bg-white/80 px-4 py-2 text-sm font-semibold text-sky-900 ring-1 ring-sky-200 dark:bg-sky-400/10 dark:text-sky-200 dark:ring-sky-700/40">
|
||||
<Code2 className="h-4 w-4" />
|
||||
{t('pages.developers.badge')}
|
||||
</div>
|
||||
<h1 className="mt-5 text-3xl font-black tracking-tight text-slate-900 dark:text-white sm:text-5xl">
|
||||
{t('pages.developers.title')}
|
||||
</h1>
|
||||
<p className="mt-4 max-w-2xl text-lg leading-8 text-slate-600 dark:text-slate-300">
|
||||
{t('pages.developers.subtitle')}
|
||||
</p>
|
||||
<div className="mt-6 flex flex-col gap-3 sm:flex-row">
|
||||
<Link to="/account" className="inline-flex items-center justify-center rounded-xl bg-primary-600 px-5 py-3 text-sm font-semibold text-white transition-colors hover:bg-primary-700">
|
||||
{t('pages.developers.getApiKey')}
|
||||
</Link>
|
||||
<Link to="/pricing" className="inline-flex items-center justify-center rounded-xl border border-slate-200 px-5 py-3 text-sm font-semibold text-slate-700 transition-colors hover:bg-slate-50 dark:border-slate-700 dark:text-slate-200 dark:hover:bg-slate-800">
|
||||
{t('pages.developers.comparePlans')}
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<SocialProofStrip />
|
||||
|
||||
<section className="grid gap-4 lg:grid-cols-3">
|
||||
{QUICKSTART_STEPS.map((step, index) => {
|
||||
const Icon = step === 'createKey' ? KeyRound : step === 'sendFile' ? Rocket : Workflow;
|
||||
return (
|
||||
<article key={step} className="rounded-[1.75rem] border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="flex h-12 w-12 items-center justify-center rounded-2xl bg-slate-100 text-slate-800 dark:bg-slate-800 dark:text-slate-100">
|
||||
<Icon className="h-6 w-6" />
|
||||
</div>
|
||||
<p className="mt-4 text-xs font-semibold uppercase tracking-[0.2em] text-slate-400 dark:text-slate-500">0{index + 1}</p>
|
||||
<h2 className="mt-2 text-xl font-semibold text-slate-900 dark:text-white">{t(`pages.developers.steps.${step}.title`)}</h2>
|
||||
<p className="mt-2 text-sm leading-7 text-slate-600 dark:text-slate-400">{t(`pages.developers.steps.${step}.description`)}</p>
|
||||
</article>
|
||||
);
|
||||
})}
|
||||
</section>
|
||||
|
||||
<section className="grid gap-6 lg:grid-cols-2">
|
||||
<article className="rounded-[2rem] border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<h2 className="text-xl font-semibold text-slate-900 dark:text-white">{t('pages.developers.authExampleTitle')}</h2>
|
||||
<p className="mt-2 text-sm leading-7 text-slate-600 dark:text-slate-400">{t('pages.developers.authExampleSubtitle')}</p>
|
||||
<pre className="mt-4 overflow-x-auto rounded-2xl bg-slate-950 p-4 text-sm text-sky-100"><code>{CURL_UPLOAD}</code></pre>
|
||||
</article>
|
||||
<article className="rounded-[2rem] border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<h2 className="text-xl font-semibold text-slate-900 dark:text-white">{t('pages.developers.pollExampleTitle')}</h2>
|
||||
<p className="mt-2 text-sm leading-7 text-slate-600 dark:text-slate-400">{t('pages.developers.pollExampleSubtitle')}</p>
|
||||
<pre className="mt-4 overflow-x-auto rounded-2xl bg-slate-950 p-4 text-sm text-emerald-100"><code>{CURL_POLL}</code></pre>
|
||||
</article>
|
||||
</section>
|
||||
|
||||
<section className="rounded-[2rem] border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<h2 className="text-2xl font-semibold text-slate-900 dark:text-white">{t('pages.developers.endpointsTitle')}</h2>
|
||||
<p className="mt-2 text-sm leading-7 text-slate-600 dark:text-slate-400">{t('pages.developers.endpointsSubtitle')}</p>
|
||||
|
||||
<div className="mt-6 grid gap-4 lg:grid-cols-3">
|
||||
{ENDPOINT_GROUPS.map((group) => (
|
||||
<article key={group.titleKey} className="rounded-[1.5rem] bg-slate-50 p-5 dark:bg-slate-800/70">
|
||||
<h3 className="text-base font-semibold text-slate-900 dark:text-white">{t(group.titleKey)}</h3>
|
||||
<ul className="mt-4 space-y-2">
|
||||
{group.endpoints.map((endpoint) => (
|
||||
<li key={endpoint} className="rounded-xl bg-white px-3 py-2 font-mono text-xs text-slate-700 ring-1 ring-slate-200 dark:bg-slate-900 dark:text-slate-200 dark:ring-slate-700">
|
||||
{endpoint}
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</article>
|
||||
))}
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -1,4 +1,6 @@
|
||||
import { useDeferredValue } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useSearchParams } from 'react-router-dom';
|
||||
import SEOHead from '@/components/seo/SEOHead';
|
||||
import { generateOrganization } from '@/utils/seo';
|
||||
import {
|
||||
@@ -29,10 +31,13 @@ import {
|
||||
MessageSquare,
|
||||
Languages,
|
||||
Table,
|
||||
Search,
|
||||
X,
|
||||
} from 'lucide-react';
|
||||
import ToolCard from '@/components/shared/ToolCard';
|
||||
import HeroUploadZone from '@/components/shared/HeroUploadZone';
|
||||
import AdSlot from '@/components/layout/AdSlot';
|
||||
import SocialProofStrip from '@/components/shared/SocialProofStrip';
|
||||
|
||||
interface ToolInfo {
|
||||
key: string;
|
||||
@@ -81,6 +86,31 @@ const otherTools: ToolInfo[] = [
|
||||
|
||||
export default function HomePage() {
|
||||
const { t } = useTranslation();
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
const query = searchParams.get('q') || '';
|
||||
const deferredQuery = useDeferredValue(query.trim().toLowerCase());
|
||||
|
||||
const matchesTool = (tool: ToolInfo) => {
|
||||
if (!deferredQuery) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const haystack = `${t(`tools.${tool.key}.title`)} ${t(`tools.${tool.key}.shortDesc`)}`.toLowerCase();
|
||||
return haystack.includes(deferredQuery);
|
||||
};
|
||||
|
||||
const filteredPdfTools = pdfTools.filter(matchesTool);
|
||||
const filteredOtherTools = otherTools.filter(matchesTool);
|
||||
|
||||
const updateQuery = (value: string) => {
|
||||
const nextParams = new URLSearchParams(searchParams);
|
||||
if (value.trim()) {
|
||||
nextParams.set('q', value);
|
||||
} else {
|
||||
nextParams.delete('q');
|
||||
}
|
||||
setSearchParams(nextParams, { replace: true });
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -97,7 +127,7 @@ export default function HomePage() {
|
||||
description: t('home.heroSub'),
|
||||
potentialAction: {
|
||||
'@type': 'SearchAction',
|
||||
target: `${window.location.origin}/tools/{search_term_string}`,
|
||||
target: `${window.location.origin}/?q={search_term_string}`,
|
||||
'query-input': 'required name=search_term_string',
|
||||
},
|
||||
},
|
||||
@@ -123,13 +153,79 @@ export default function HomePage() {
|
||||
{/* Ad Slot */}
|
||||
<AdSlot slot="home-top" format="horizontal" className="mb-8" />
|
||||
|
||||
<SocialProofStrip className="mb-10" />
|
||||
|
||||
<section className="mb-10 rounded-3xl border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="flex flex-col gap-3 lg:flex-row lg:items-center lg:justify-between">
|
||||
<div>
|
||||
<h2 className="text-xl font-semibold text-slate-900 dark:text-white">
|
||||
{t('common.search')}
|
||||
</h2>
|
||||
<p className="mt-1 text-sm text-slate-600 dark:text-slate-400">
|
||||
{t('home.searchToolsPlaceholder')}
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex w-full flex-col gap-3 sm:flex-row lg:max-w-2xl">
|
||||
<label className="relative flex-1">
|
||||
<Search className="pointer-events-none absolute start-3 top-1/2 h-4 w-4 -translate-y-1/2 text-slate-400" />
|
||||
<input
|
||||
value={query}
|
||||
onChange={(event) => updateQuery(event.target.value)}
|
||||
placeholder={t('home.searchToolsPlaceholder')}
|
||||
className="w-full rounded-xl border border-slate-200 bg-slate-50 py-3 pl-10 pr-4 text-sm text-slate-900 outline-none transition-colors focus:border-primary-400 focus:bg-white dark:border-slate-700 dark:bg-slate-800 dark:text-white dark:focus:border-primary-500"
|
||||
/>
|
||||
</label>
|
||||
{query && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => updateQuery('')}
|
||||
className="inline-flex items-center justify-center gap-2 rounded-xl border border-slate-200 px-4 py-3 text-sm font-medium text-slate-700 hover:bg-slate-50 dark:border-slate-700 dark:text-slate-200 dark:hover:bg-slate-800"
|
||||
>
|
||||
<X className="h-4 w-4" />
|
||||
{t('common.clear')}
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section className="mb-12 rounded-[2rem] border border-slate-200 bg-white p-8 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="flex flex-col gap-6 lg:flex-row lg:items-center lg:justify-between">
|
||||
<div className="max-w-2xl">
|
||||
<p className="text-sm font-semibold uppercase tracking-[0.2em] text-primary-600 dark:text-primary-400">
|
||||
{t('common.developers')}
|
||||
</p>
|
||||
<h2 className="mt-2 text-2xl font-bold text-slate-900 dark:text-white">
|
||||
{t('pages.developers.ctaTitle')}
|
||||
</h2>
|
||||
<p className="mt-2 text-slate-600 dark:text-slate-400">
|
||||
{t('pages.developers.ctaSubtitle')}
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex flex-col gap-3 sm:flex-row">
|
||||
<a
|
||||
href="/developers"
|
||||
className="inline-flex items-center justify-center rounded-xl bg-primary-600 px-5 py-3 text-sm font-semibold text-white transition-colors hover:bg-primary-700"
|
||||
>
|
||||
{t('pages.developers.openDocs')}
|
||||
</a>
|
||||
<a
|
||||
href="/account"
|
||||
className="inline-flex items-center justify-center rounded-xl border border-slate-200 px-5 py-3 text-sm font-semibold text-slate-700 transition-colors hover:bg-slate-50 dark:border-slate-700 dark:text-slate-200 dark:hover:bg-slate-800"
|
||||
>
|
||||
{t('pages.developers.getApiKey')}
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
{/* Tools Grid */}
|
||||
<section>
|
||||
<h2 className="mb-6 text-center text-xl font-semibold text-slate-800 dark:text-slate-200">
|
||||
{t('home.pdfTools')}
|
||||
</h2>
|
||||
<div className="grid gap-4 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4 mb-10">
|
||||
{pdfTools.map((tool) => (
|
||||
{filteredPdfTools.map((tool) => (
|
||||
<ToolCard
|
||||
key={tool.key}
|
||||
to={tool.path}
|
||||
@@ -145,7 +241,7 @@ export default function HomePage() {
|
||||
{t('home.otherTools', 'Other Tools')}
|
||||
</h2>
|
||||
<div className="grid gap-4 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4 mb-12">
|
||||
{otherTools.map((tool) => (
|
||||
{filteredOtherTools.map((tool) => (
|
||||
<ToolCard
|
||||
key={tool.key}
|
||||
to={tool.path}
|
||||
@@ -156,6 +252,14 @@ export default function HomePage() {
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{filteredPdfTools.length + filteredOtherTools.length === 0 && (
|
||||
<div className="mb-12 rounded-2xl border border-dashed border-slate-300 bg-slate-50 p-8 text-center dark:border-slate-600 dark:bg-slate-800/50">
|
||||
<p className="text-base font-medium text-slate-700 dark:text-slate-200">
|
||||
{t('home.noSearchResults')}
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</section>
|
||||
|
||||
{/* Features / Why Choose Us */}
|
||||
|
||||
141
frontend/src/pages/InternalAdminPage.test.tsx
Normal file
141
frontend/src/pages/InternalAdminPage.test.tsx
Normal file
@@ -0,0 +1,141 @@
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||
import { HelmetProvider } from 'react-helmet-async';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import { beforeEach, describe, expect, it, vi, type Mock } from 'vitest';
|
||||
import InternalAdminPage from './InternalAdminPage';
|
||||
import { useAuthStore } from '@/stores/authStore';
|
||||
import {
|
||||
getInternalAdminContacts,
|
||||
getInternalAdminOverview,
|
||||
listInternalAdminUsers,
|
||||
markInternalAdminContactRead,
|
||||
updateInternalAdminUserPlan,
|
||||
updateInternalAdminUserRole,
|
||||
} from '@/services/api';
|
||||
|
||||
vi.mock('@/stores/authStore', () => ({
|
||||
useAuthStore: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('@/services/api', () => ({
|
||||
getInternalAdminContacts: vi.fn(),
|
||||
getInternalAdminOverview: vi.fn(),
|
||||
listInternalAdminUsers: vi.fn(),
|
||||
markInternalAdminContactRead: vi.fn(),
|
||||
updateInternalAdminUserPlan: vi.fn(),
|
||||
updateInternalAdminUserRole: vi.fn(),
|
||||
}));
|
||||
|
||||
const authState = {
|
||||
user: null as null | { email: string; role: string },
|
||||
initialized: true,
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
};
|
||||
|
||||
function renderPage() {
|
||||
return render(
|
||||
<HelmetProvider>
|
||||
<MemoryRouter>
|
||||
<InternalAdminPage />
|
||||
</MemoryRouter>
|
||||
</HelmetProvider>
|
||||
);
|
||||
}
|
||||
|
||||
describe('InternalAdminPage', () => {
|
||||
beforeEach(() => {
|
||||
authState.user = null;
|
||||
authState.initialized = true;
|
||||
authState.isLoading = false;
|
||||
authState.login = vi.fn();
|
||||
authState.logout = vi.fn();
|
||||
|
||||
((useAuthStore as unknown) as Mock).mockImplementation(
|
||||
(selector: (state: typeof authState) => unknown) => selector(authState)
|
||||
);
|
||||
(getInternalAdminOverview as Mock).mockReset();
|
||||
(listInternalAdminUsers as Mock).mockReset();
|
||||
(getInternalAdminContacts as Mock).mockReset();
|
||||
(markInternalAdminContactRead as Mock).mockReset();
|
||||
(updateInternalAdminUserPlan as Mock).mockReset();
|
||||
(updateInternalAdminUserRole as Mock).mockReset();
|
||||
});
|
||||
|
||||
it('shows the admin sign-in form for anonymous users', () => {
|
||||
renderPage();
|
||||
|
||||
expect(screen.getByText('Admin sign in')).toBeTruthy();
|
||||
expect(screen.getByPlaceholderText('admin@example.com')).toBeTruthy();
|
||||
});
|
||||
|
||||
it('shows the permission warning for signed-in non-admin users', () => {
|
||||
authState.user = { email: 'member@example.com', role: 'user' };
|
||||
|
||||
renderPage();
|
||||
|
||||
expect(screen.getByText('No admin permission')).toBeTruthy();
|
||||
expect(screen.getAllByText(/member@example.com/)).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('loads dashboard data for admins and allows promoting a user role', async () => {
|
||||
authState.user = { email: 'admin@example.com', role: 'admin' };
|
||||
(getInternalAdminOverview as Mock).mockResolvedValue({
|
||||
users: { total: 2, pro: 1, free: 1 },
|
||||
processing: {
|
||||
total_files_processed: 5,
|
||||
completed_files: 4,
|
||||
failed_files: 1,
|
||||
files_last_24h: 2,
|
||||
success_rate: 80,
|
||||
},
|
||||
ratings: { average_rating: 4.8, rating_count: 14 },
|
||||
ai_cost: { month: '2026-03', total_usd: 12.5, budget_usd: 50, percent_used: 25 },
|
||||
contacts: { total_messages: 1, unread_messages: 1, recent: [] },
|
||||
top_tools: [{ tool: 'compress-pdf', total_runs: 10, failed_runs: 1 }],
|
||||
recent_failures: [],
|
||||
recent_users: [],
|
||||
});
|
||||
(listInternalAdminUsers as Mock).mockResolvedValue([
|
||||
{
|
||||
id: 2,
|
||||
email: 'operator@example.com',
|
||||
plan: 'free',
|
||||
role: 'user',
|
||||
is_allowlisted_admin: false,
|
||||
created_at: '2026-03-16T10:00:00Z',
|
||||
total_tasks: 3,
|
||||
completed_tasks: 2,
|
||||
failed_tasks: 1,
|
||||
active_api_keys: 0,
|
||||
},
|
||||
]);
|
||||
(getInternalAdminContacts as Mock).mockResolvedValue({
|
||||
items: [],
|
||||
page: 1,
|
||||
per_page: 12,
|
||||
total: 0,
|
||||
unread: 0,
|
||||
});
|
||||
(updateInternalAdminUserRole as Mock).mockResolvedValue({
|
||||
id: 2,
|
||||
email: 'operator@example.com',
|
||||
plan: 'free',
|
||||
role: 'admin',
|
||||
created_at: '2026-03-16T10:00:00Z',
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Users and monetization')).toBeTruthy();
|
||||
});
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Set admin' }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(updateInternalAdminUserRole).toHaveBeenCalledWith(2, 'admin');
|
||||
});
|
||||
});
|
||||
});
|
||||
593
frontend/src/pages/InternalAdminPage.tsx
Normal file
593
frontend/src/pages/InternalAdminPage.tsx
Normal file
@@ -0,0 +1,593 @@
|
||||
import { useEffect, useMemo, useState, type FormEvent } from 'react';
|
||||
import { Helmet } from 'react-helmet-async';
|
||||
import { Link } from 'react-router-dom';
|
||||
import {
|
||||
AlertTriangle,
|
||||
BarChart3,
|
||||
Inbox,
|
||||
LogOut,
|
||||
RefreshCcw,
|
||||
Search,
|
||||
ShieldCheck,
|
||||
Users,
|
||||
Zap,
|
||||
} from 'lucide-react';
|
||||
import {
|
||||
getInternalAdminContacts,
|
||||
getInternalAdminOverview,
|
||||
listInternalAdminUsers,
|
||||
markInternalAdminContactRead,
|
||||
updateInternalAdminUserRole,
|
||||
updateInternalAdminUserPlan,
|
||||
type InternalAdminContact,
|
||||
type InternalAdminOverview,
|
||||
type InternalAdminUser,
|
||||
} from '@/services/api';
|
||||
import { useAuthStore } from '@/stores/authStore';
|
||||
|
||||
function formatMoney(value: number) {
|
||||
return new Intl.NumberFormat('en-US', {
|
||||
style: 'currency',
|
||||
currency: 'USD',
|
||||
maximumFractionDigits: 2,
|
||||
}).format(value);
|
||||
}
|
||||
|
||||
export default function InternalAdminPage() {
|
||||
const user = useAuthStore((state) => state.user);
|
||||
const initialized = useAuthStore((state) => state.initialized);
|
||||
const authLoading = useAuthStore((state) => state.isLoading);
|
||||
const login = useAuthStore((state) => state.login);
|
||||
const logout = useAuthStore((state) => state.logout);
|
||||
|
||||
const [email, setEmail] = useState('');
|
||||
const [password, setPassword] = useState('');
|
||||
const [overview, setOverview] = useState<InternalAdminOverview | null>(null);
|
||||
const [users, setUsers] = useState<InternalAdminUser[]>([]);
|
||||
const [contacts, setContacts] = useState<InternalAdminContact[]>([]);
|
||||
const [contactMeta, setContactMeta] = useState({ total: 0, unread: 0, page: 1, perPage: 12 });
|
||||
const [userQuery, setUserQuery] = useState('');
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [loginError, setLoginError] = useState<string | null>(null);
|
||||
const [updatingUserId, setUpdatingUserId] = useState<number | null>(null);
|
||||
const [updatingRoleUserId, setUpdatingRoleUserId] = useState<number | null>(null);
|
||||
const [markingMessageId, setMarkingMessageId] = useState<number | null>(null);
|
||||
|
||||
const isAdmin = user?.role === 'admin';
|
||||
|
||||
const metricCards = useMemo(() => {
|
||||
if (!overview) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return [
|
||||
{
|
||||
key: 'users',
|
||||
title: 'Total users',
|
||||
value: overview.users.total.toLocaleString(),
|
||||
caption: `${overview.users.pro} pro / ${overview.users.free} free`,
|
||||
icon: Users,
|
||||
},
|
||||
{
|
||||
key: 'processing',
|
||||
title: 'Files processed',
|
||||
value: overview.processing.total_files_processed.toLocaleString(),
|
||||
caption: `${overview.processing.files_last_24h} in the last 24h`,
|
||||
icon: BarChart3,
|
||||
},
|
||||
{
|
||||
key: 'success',
|
||||
title: 'Success rate',
|
||||
value: `${overview.processing.success_rate}%`,
|
||||
caption: `${overview.processing.failed_files} failures tracked`,
|
||||
icon: ShieldCheck,
|
||||
},
|
||||
{
|
||||
key: 'contacts',
|
||||
title: 'Unread contacts',
|
||||
value: overview.contacts.unread_messages.toLocaleString(),
|
||||
caption: `${overview.contacts.total_messages} total inbox items`,
|
||||
icon: Inbox,
|
||||
},
|
||||
{
|
||||
key: 'ai-cost',
|
||||
title: 'AI spend',
|
||||
value: formatMoney(overview.ai_cost.total_usd),
|
||||
caption: `${overview.ai_cost.percent_used}% of ${formatMoney(overview.ai_cost.budget_usd)} budget`,
|
||||
icon: Zap,
|
||||
},
|
||||
{
|
||||
key: 'ratings',
|
||||
title: 'Average rating',
|
||||
value: overview.ratings.average_rating.toFixed(1),
|
||||
caption: `${overview.ratings.rating_count} ratings collected`,
|
||||
icon: RefreshCcw,
|
||||
},
|
||||
];
|
||||
}, [overview]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isAdmin) {
|
||||
setOverview(null);
|
||||
setUsers([]);
|
||||
setContacts([]);
|
||||
return;
|
||||
}
|
||||
|
||||
void loadDashboard(userQuery);
|
||||
}, [isAdmin]);
|
||||
|
||||
async function loadDashboard(query = '') {
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
const [overviewData, usersData, contactsData] = await Promise.all([
|
||||
getInternalAdminOverview(),
|
||||
listInternalAdminUsers(query),
|
||||
getInternalAdminContacts(1, 12),
|
||||
]);
|
||||
|
||||
setOverview(overviewData);
|
||||
setUsers(usersData);
|
||||
setContacts(contactsData.items);
|
||||
setContactMeta({
|
||||
total: contactsData.total,
|
||||
unread: contactsData.unread,
|
||||
page: contactsData.page,
|
||||
perPage: contactsData.per_page,
|
||||
});
|
||||
} catch (loadError) {
|
||||
setError(loadError instanceof Error ? loadError.message : 'Unable to load internal admin dashboard.');
|
||||
setOverview(null);
|
||||
setUsers([]);
|
||||
setContacts([]);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
async function handleLogin(event: FormEvent<HTMLFormElement>) {
|
||||
event.preventDefault();
|
||||
setLoginError(null);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
const authenticatedUser = await login(email, password);
|
||||
if (authenticatedUser.role !== 'admin') {
|
||||
setLoginError('This account does not have internal admin access.');
|
||||
}
|
||||
setPassword('');
|
||||
} catch (loginAttemptError) {
|
||||
setLoginError(loginAttemptError instanceof Error ? loginAttemptError.message : 'Unable to sign in.');
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRefresh() {
|
||||
if (!isAdmin) {
|
||||
return;
|
||||
}
|
||||
await loadDashboard(userQuery);
|
||||
}
|
||||
|
||||
async function handleSearch(event: FormEvent<HTMLFormElement>) {
|
||||
event.preventDefault();
|
||||
if (!isAdmin) {
|
||||
return;
|
||||
}
|
||||
await loadDashboard(userQuery);
|
||||
}
|
||||
|
||||
async function handlePlanChange(userId: number, plan: 'free' | 'pro') {
|
||||
if (!isAdmin) {
|
||||
return;
|
||||
}
|
||||
|
||||
setUpdatingUserId(userId);
|
||||
setError(null);
|
||||
try {
|
||||
await updateInternalAdminUserPlan(userId, plan);
|
||||
await loadDashboard(userQuery);
|
||||
} catch (updateError) {
|
||||
setError(updateError instanceof Error ? updateError.message : 'Unable to update plan.');
|
||||
} finally {
|
||||
setUpdatingUserId(null);
|
||||
}
|
||||
}
|
||||
|
||||
async function handleMarkRead(messageId: number) {
|
||||
if (!isAdmin) {
|
||||
return;
|
||||
}
|
||||
|
||||
setMarkingMessageId(messageId);
|
||||
setError(null);
|
||||
try {
|
||||
await markInternalAdminContactRead(messageId);
|
||||
await loadDashboard(userQuery);
|
||||
} catch (markError) {
|
||||
setError(markError instanceof Error ? markError.message : 'Unable to update contact message.');
|
||||
} finally {
|
||||
setMarkingMessageId(null);
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRoleChange(userId: number, role: 'user' | 'admin') {
|
||||
if (!isAdmin) {
|
||||
return;
|
||||
}
|
||||
|
||||
setUpdatingRoleUserId(userId);
|
||||
setError(null);
|
||||
try {
|
||||
await updateInternalAdminUserRole(userId, role);
|
||||
await loadDashboard(userQuery);
|
||||
} catch (updateError) {
|
||||
setError(updateError instanceof Error ? updateError.message : 'Unable to update role.');
|
||||
} finally {
|
||||
setUpdatingRoleUserId(null);
|
||||
}
|
||||
}
|
||||
|
||||
async function handleLogout() {
|
||||
setError(null);
|
||||
setLoginError(null);
|
||||
await logout();
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="mx-auto max-w-7xl space-y-8">
|
||||
<Helmet>
|
||||
<title>Internal Admin | SaaS PDF</title>
|
||||
<meta name="robots" content="noindex,nofollow" />
|
||||
</Helmet>
|
||||
|
||||
<section className="rounded-3xl border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="flex flex-col gap-4 lg:flex-row lg:items-end lg:justify-between">
|
||||
<div>
|
||||
<p className="text-sm font-semibold uppercase tracking-[0.2em] text-primary-600 dark:text-primary-300">
|
||||
Internal operations
|
||||
</p>
|
||||
<h1 className="mt-2 text-3xl font-bold text-slate-900 dark:text-white">
|
||||
Admin control room
|
||||
</h1>
|
||||
<p className="mt-3 max-w-3xl text-sm leading-6 text-slate-600 dark:text-slate-300">
|
||||
This area now uses the normal app session plus admin permissions. Only signed-in allowlisted admins can
|
||||
inspect operations, edit plans, and process the support inbox.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{user ? (
|
||||
<div className="flex flex-col items-start gap-2 rounded-2xl border border-slate-200 bg-slate-50 px-4 py-3 text-sm dark:border-slate-700 dark:bg-slate-950/50">
|
||||
<span className="font-semibold text-slate-900 dark:text-white">{user.email}</span>
|
||||
<span className="text-slate-600 dark:text-slate-300">Role: {user.role}</span>
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
</section>
|
||||
|
||||
{error && (
|
||||
<div className="flex items-start gap-3 rounded-2xl border border-rose-200 bg-rose-50 p-4 text-sm text-rose-700 dark:border-rose-500/30 dark:bg-rose-500/10 dark:text-rose-200">
|
||||
<AlertTriangle className="mt-0.5 h-5 w-5 shrink-0" />
|
||||
<span>{error}</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!initialized || authLoading ? (
|
||||
<section className="rounded-3xl border border-slate-200 bg-white p-8 text-sm text-slate-600 shadow-sm dark:border-slate-700 dark:bg-slate-900/70 dark:text-slate-300">
|
||||
Checking admin session...
|
||||
</section>
|
||||
) : !user ? (
|
||||
<section className="rounded-3xl border border-slate-200 bg-white p-8 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="max-w-lg">
|
||||
<h2 className="text-2xl font-bold text-slate-900 dark:text-white">Admin sign in</h2>
|
||||
<p className="mt-3 text-sm leading-6 text-slate-600 dark:text-slate-300">
|
||||
Use an allowlisted internal account to start a normal authenticated session. Admin access is decided by
|
||||
server-side permissions, not a client-side secret.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<form onSubmit={handleLogin} className="mt-6 grid gap-4 md:max-w-xl">
|
||||
<input
|
||||
type="email"
|
||||
autoComplete="email"
|
||||
value={email}
|
||||
onChange={(event) => setEmail(event.target.value)}
|
||||
placeholder="admin@example.com"
|
||||
className="rounded-2xl border border-slate-300 bg-white px-4 py-3 text-sm text-slate-900 shadow-sm outline-none transition focus:border-primary-500 focus:ring-2 focus:ring-primary-200 dark:border-slate-600 dark:bg-slate-950 dark:text-slate-100 dark:focus:ring-primary-500/30"
|
||||
/>
|
||||
<input
|
||||
type="password"
|
||||
autoComplete="current-password"
|
||||
value={password}
|
||||
onChange={(event) => setPassword(event.target.value)}
|
||||
placeholder="Password"
|
||||
className="rounded-2xl border border-slate-300 bg-white px-4 py-3 text-sm text-slate-900 shadow-sm outline-none transition focus:border-primary-500 focus:ring-2 focus:ring-primary-200 dark:border-slate-600 dark:bg-slate-950 dark:text-slate-100 dark:focus:ring-primary-500/30"
|
||||
/>
|
||||
{loginError && (
|
||||
<div className="rounded-2xl border border-rose-200 bg-rose-50 px-4 py-3 text-sm text-rose-700 dark:border-rose-500/30 dark:bg-rose-500/10 dark:text-rose-200">
|
||||
{loginError}
|
||||
</div>
|
||||
)}
|
||||
<button
|
||||
type="submit"
|
||||
className="rounded-2xl bg-primary-600 px-5 py-3 text-sm font-semibold text-white transition-colors hover:bg-primary-700"
|
||||
>
|
||||
Sign in as admin
|
||||
</button>
|
||||
</form>
|
||||
</section>
|
||||
) : !isAdmin ? (
|
||||
<section className="rounded-3xl border border-amber-200 bg-amber-50 p-8 shadow-sm dark:border-amber-500/30 dark:bg-amber-500/10">
|
||||
<h2 className="text-2xl font-bold text-slate-900 dark:text-white">No admin permission</h2>
|
||||
<p className="mt-3 max-w-2xl text-sm leading-6 text-slate-700 dark:text-slate-300">
|
||||
You are signed in as {user.email}, but this account is not in the internal admin allowlist and does not
|
||||
carry the admin role.
|
||||
</p>
|
||||
<div className="mt-5 flex flex-wrap gap-3">
|
||||
<Link
|
||||
to="/account"
|
||||
className="rounded-2xl border border-slate-300 px-4 py-2.5 text-sm font-semibold text-slate-700 transition-colors hover:border-slate-400 hover:text-slate-900 dark:border-slate-600 dark:text-slate-200 dark:hover:border-slate-500"
|
||||
>
|
||||
Back to account
|
||||
</Link>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => void handleLogout()}
|
||||
className="inline-flex items-center gap-2 rounded-2xl bg-slate-900 px-4 py-2.5 text-sm font-semibold text-white transition-colors hover:bg-slate-800 dark:bg-white dark:text-slate-900 dark:hover:bg-slate-200"
|
||||
>
|
||||
<LogOut className="h-4 w-4" />
|
||||
Sign out
|
||||
</button>
|
||||
</div>
|
||||
</section>
|
||||
) : (
|
||||
<>
|
||||
<section className="grid gap-4 md:grid-cols-2 xl:grid-cols-3">
|
||||
{metricCards.map((card) => {
|
||||
const Icon = card.icon;
|
||||
|
||||
return (
|
||||
<article
|
||||
key={card.key}
|
||||
className="rounded-3xl border border-slate-200 bg-white p-5 shadow-sm dark:border-slate-700 dark:bg-slate-900/70"
|
||||
>
|
||||
<div className="flex items-start justify-between gap-4">
|
||||
<div>
|
||||
<p className="text-sm font-medium text-slate-500 dark:text-slate-400">{card.title}</p>
|
||||
<p className="mt-3 text-3xl font-bold text-slate-900 dark:text-white">{card.value}</p>
|
||||
<p className="mt-2 text-sm text-slate-600 dark:text-slate-300">{card.caption}</p>
|
||||
</div>
|
||||
<div className="flex h-11 w-11 items-center justify-center rounded-2xl bg-primary-100 text-primary-700 dark:bg-primary-500/15 dark:text-primary-200">
|
||||
<Icon className="h-5 w-5" />
|
||||
</div>
|
||||
</div>
|
||||
</article>
|
||||
);
|
||||
})}
|
||||
</section>
|
||||
|
||||
<section className="grid gap-6 xl:grid-cols-[1.3fr_0.9fr]">
|
||||
<div className="space-y-6">
|
||||
<article className="rounded-3xl border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="flex flex-col gap-4 lg:flex-row lg:items-center lg:justify-between">
|
||||
<div>
|
||||
<h2 className="text-xl font-bold text-slate-900 dark:text-white">Users and monetization</h2>
|
||||
<p className="mt-2 text-sm text-slate-600 dark:text-slate-300">
|
||||
Review plan mix, API adoption, and failed task concentration before support tickets pile up.
|
||||
</p>
|
||||
</div>
|
||||
<form onSubmit={handleSearch} className="flex w-full max-w-md items-center gap-2">
|
||||
<div className="relative flex-1">
|
||||
<Search className="pointer-events-none absolute left-3 top-1/2 h-4 w-4 -translate-y-1/2 text-slate-400" />
|
||||
<input
|
||||
type="search"
|
||||
value={userQuery}
|
||||
onChange={(event) => setUserQuery(event.target.value)}
|
||||
placeholder="Search user email"
|
||||
className="w-full rounded-2xl border border-slate-300 bg-white py-2.5 pl-10 pr-4 text-sm text-slate-900 outline-none transition focus:border-primary-500 focus:ring-2 focus:ring-primary-200 dark:border-slate-600 dark:bg-slate-950 dark:text-slate-100 dark:focus:ring-primary-500/30"
|
||||
/>
|
||||
</div>
|
||||
<button
|
||||
type="submit"
|
||||
className="rounded-2xl border border-slate-300 px-4 py-2.5 text-sm font-semibold text-slate-700 transition-colors hover:border-slate-400 hover:text-slate-900 dark:border-slate-600 dark:text-slate-200 dark:hover:border-slate-500"
|
||||
>
|
||||
Search
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div className="mt-6 overflow-x-auto">
|
||||
<table className="min-w-full divide-y divide-slate-200 text-sm dark:divide-slate-700">
|
||||
<thead>
|
||||
<tr className="text-left text-slate-500 dark:text-slate-400">
|
||||
<th className="py-3 pe-4 font-medium">User</th>
|
||||
<th className="py-3 pe-4 font-medium">Role</th>
|
||||
<th className="py-3 pe-4 font-medium">Plan</th>
|
||||
<th className="py-3 pe-4 font-medium">Tasks</th>
|
||||
<th className="py-3 pe-4 font-medium">API keys</th>
|
||||
<th className="py-3 font-medium">Action</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody className="divide-y divide-slate-100 dark:divide-slate-800">
|
||||
{users.map((user) => (
|
||||
<tr key={user.id} className="text-slate-700 dark:text-slate-200">
|
||||
<td className="py-4 pe-4">
|
||||
<div className="font-semibold text-slate-900 dark:text-white">{user.email}</div>
|
||||
<div className="mt-1 text-xs text-slate-500 dark:text-slate-400">Created {user.created_at}</div>
|
||||
</td>
|
||||
<td className="py-4 pe-4">
|
||||
<div className="flex flex-col gap-1">
|
||||
<span className="capitalize">{user.role}</span>
|
||||
{user.is_allowlisted_admin ? (
|
||||
<span className="text-xs text-primary-700 dark:text-primary-300">Bootstrap allowlist</span>
|
||||
) : null}
|
||||
</div>
|
||||
</td>
|
||||
<td className="py-4 pe-4 capitalize">{user.plan}</td>
|
||||
<td className="py-4 pe-4">{user.completed_tasks} complete / {user.failed_tasks} failed</td>
|
||||
<td className="py-4 pe-4">{user.active_api_keys}</td>
|
||||
<td className="py-4">
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<button
|
||||
type="button"
|
||||
disabled={updatingUserId === user.id || user.plan === 'free'}
|
||||
onClick={() => void handlePlanChange(user.id, 'free')}
|
||||
className="rounded-full border border-slate-300 px-3 py-1.5 text-xs font-semibold text-slate-700 transition-colors hover:border-slate-400 hover:text-slate-900 disabled:cursor-not-allowed disabled:opacity-50 dark:border-slate-600 dark:text-slate-200 dark:hover:border-slate-500"
|
||||
>
|
||||
Set free
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
disabled={updatingUserId === user.id || user.plan === 'pro'}
|
||||
onClick={() => void handlePlanChange(user.id, 'pro')}
|
||||
className="rounded-full bg-primary-600 px-3 py-1.5 text-xs font-semibold text-white transition-colors hover:bg-primary-700 disabled:cursor-not-allowed disabled:opacity-50"
|
||||
>
|
||||
Set pro
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
disabled={user.is_allowlisted_admin || updatingRoleUserId === user.id || user.role === 'user'}
|
||||
onClick={() => void handleRoleChange(user.id, 'user')}
|
||||
className="rounded-full border border-slate-300 px-3 py-1.5 text-xs font-semibold text-slate-700 transition-colors hover:border-slate-400 hover:text-slate-900 disabled:cursor-not-allowed disabled:opacity-50 dark:border-slate-600 dark:text-slate-200 dark:hover:border-slate-500"
|
||||
>
|
||||
Set user
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
disabled={user.is_allowlisted_admin || updatingRoleUserId === user.id || user.role === 'admin'}
|
||||
onClick={() => void handleRoleChange(user.id, 'admin')}
|
||||
className="rounded-full bg-slate-900 px-3 py-1.5 text-xs font-semibold text-white transition-colors hover:bg-slate-800 disabled:cursor-not-allowed disabled:opacity-50 dark:bg-white dark:text-slate-900 dark:hover:bg-slate-200"
|
||||
>
|
||||
Set admin
|
||||
</button>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</article>
|
||||
|
||||
<article className="rounded-3xl border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="flex items-start justify-between gap-4">
|
||||
<div>
|
||||
<h2 className="text-xl font-bold text-slate-900 dark:text-white">Recent failures</h2>
|
||||
<p className="mt-2 text-sm text-slate-600 dark:text-slate-300">
|
||||
These entries help isolate tool instability and prioritize support follow-up.
|
||||
</p>
|
||||
</div>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => void handleRefresh()}
|
||||
className="inline-flex items-center gap-2 rounded-full border border-slate-300 px-4 py-2 text-sm font-semibold text-slate-700 transition-colors hover:border-slate-400 hover:text-slate-900 dark:border-slate-600 dark:text-slate-200 dark:hover:border-slate-500"
|
||||
>
|
||||
<RefreshCcw className={`h-4 w-4${loading ? ' animate-spin' : ''}`} />
|
||||
Refresh
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="mt-6 space-y-3">
|
||||
{overview?.recent_failures.length ? overview.recent_failures.map((failure) => (
|
||||
<div
|
||||
key={failure.id}
|
||||
className="rounded-2xl border border-rose-100 bg-rose-50/80 p-4 dark:border-rose-500/20 dark:bg-rose-500/10"
|
||||
>
|
||||
<div className="flex flex-col gap-2 sm:flex-row sm:items-start sm:justify-between">
|
||||
<div>
|
||||
<p className="font-semibold text-slate-900 dark:text-white">{failure.tool}</p>
|
||||
<p className="mt-1 text-sm text-slate-600 dark:text-slate-300">
|
||||
{failure.original_filename || 'Unknown file'}
|
||||
{failure.email ? ` / ${failure.email}` : ''}
|
||||
</p>
|
||||
</div>
|
||||
<span className="text-xs text-slate-500 dark:text-slate-400">{failure.created_at}</span>
|
||||
</div>
|
||||
<p className="mt-3 text-sm text-rose-700 dark:text-rose-200">
|
||||
{typeof failure.metadata.error === 'string' ? failure.metadata.error : 'Processing failed without a structured error message.'}
|
||||
</p>
|
||||
</div>
|
||||
)) : (
|
||||
<p className="text-sm text-slate-600 dark:text-slate-300">No recent failures.</p>
|
||||
)}
|
||||
</div>
|
||||
</article>
|
||||
</div>
|
||||
|
||||
<div className="space-y-6">
|
||||
<article className="rounded-3xl border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<h2 className="text-xl font-bold text-slate-900 dark:text-white">Top tools</h2>
|
||||
<div className="mt-5 space-y-3">
|
||||
{overview?.top_tools.length ? overview.top_tools.map((tool) => (
|
||||
<div key={tool.tool} className="rounded-2xl border border-slate-200 p-4 dark:border-slate-700">
|
||||
<div className="flex items-center justify-between gap-4">
|
||||
<div>
|
||||
<p className="font-semibold text-slate-900 dark:text-white">{tool.tool}</p>
|
||||
<p className="mt-1 text-sm text-slate-500 dark:text-slate-400">{tool.total_runs} total runs</p>
|
||||
</div>
|
||||
<span className="rounded-full bg-rose-100 px-3 py-1 text-xs font-semibold text-rose-700 dark:bg-rose-500/10 dark:text-rose-200">
|
||||
{tool.failed_runs} failed
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
)) : (
|
||||
<p className="text-sm text-slate-600 dark:text-slate-300">No tool activity yet.</p>
|
||||
)}
|
||||
</div>
|
||||
</article>
|
||||
|
||||
<article className="rounded-3xl border border-slate-200 bg-white p-6 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="flex items-start justify-between gap-4">
|
||||
<div>
|
||||
<h2 className="text-xl font-bold text-slate-900 dark:text-white">Contact inbox</h2>
|
||||
<p className="mt-2 text-sm text-slate-600 dark:text-slate-300">
|
||||
{contactMeta.unread} unread of {contactMeta.total} total messages.
|
||||
</p>
|
||||
</div>
|
||||
<span className="rounded-full bg-primary-100 px-3 py-1 text-xs font-semibold text-primary-700 dark:bg-primary-500/10 dark:text-primary-200">
|
||||
Page {contactMeta.page}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div className="mt-5 space-y-3">
|
||||
{contacts.length ? contacts.map((contact) => (
|
||||
<div key={contact.id} className="rounded-2xl border border-slate-200 p-4 dark:border-slate-700">
|
||||
<div className="flex items-start justify-between gap-4">
|
||||
<div>
|
||||
<p className="font-semibold text-slate-900 dark:text-white">{contact.subject || 'No subject'}</p>
|
||||
<p className="mt-1 text-sm text-slate-500 dark:text-slate-400">
|
||||
{contact.name} / {contact.email} / {contact.category}
|
||||
</p>
|
||||
</div>
|
||||
<span className="text-xs text-slate-500 dark:text-slate-400">{contact.created_at}</span>
|
||||
</div>
|
||||
<p className="mt-3 text-sm leading-6 text-slate-600 dark:text-slate-300">{contact.message}</p>
|
||||
{!contact.is_read ? (
|
||||
<button
|
||||
type="button"
|
||||
disabled={markingMessageId === contact.id}
|
||||
onClick={() => void handleMarkRead(contact.id)}
|
||||
className="mt-4 rounded-full bg-slate-900 px-4 py-2 text-xs font-semibold text-white transition-colors hover:bg-slate-800 disabled:cursor-not-allowed disabled:opacity-50 dark:bg-white dark:text-slate-900 dark:hover:bg-slate-200"
|
||||
>
|
||||
Mark as read
|
||||
</button>
|
||||
) : (
|
||||
<span className="mt-4 inline-flex rounded-full bg-emerald-100 px-3 py-1 text-xs font-semibold text-emerald-700 dark:bg-emerald-500/10 dark:text-emerald-200">
|
||||
Read
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
)) : (
|
||||
<p className="text-sm text-slate-600 dark:text-slate-300">No contact messages found.</p>
|
||||
)}
|
||||
</div>
|
||||
</article>
|
||||
</div>
|
||||
</section>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,8 +1,14 @@
|
||||
import { useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Link } from 'react-router-dom';
|
||||
import SEOHead from '@/components/seo/SEOHead';
|
||||
import { generateWebPage } from '@/utils/seo';
|
||||
import { Check, X, Zap, Crown } from 'lucide-react';
|
||||
import { Check, X, Zap, Crown, Loader2 } from 'lucide-react';
|
||||
import axios from 'axios';
|
||||
import { useAuthStore } from '@/stores/authStore';
|
||||
import SocialProofStrip from '@/components/shared/SocialProofStrip';
|
||||
|
||||
const API_BASE = import.meta.env.VITE_API_URL || '';
|
||||
|
||||
interface PlanFeature {
|
||||
key: string;
|
||||
@@ -25,6 +31,29 @@ const FEATURES: PlanFeature[] = [
|
||||
|
||||
export default function PricingPage() {
|
||||
const { t } = useTranslation();
|
||||
const user = useAuthStore((s) => s.user);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
async function handleUpgrade(billing: 'monthly' | 'yearly') {
|
||||
if (!user) {
|
||||
window.location.href = '/account?redirect=pricing';
|
||||
return;
|
||||
}
|
||||
setLoading(true);
|
||||
try {
|
||||
const { data } = await axios.post(
|
||||
`${API_BASE}/api/stripe/create-checkout-session`,
|
||||
{ billing },
|
||||
{ withCredentials: true },
|
||||
);
|
||||
if (data.url) window.location.href = data.url;
|
||||
} catch {
|
||||
// Stripe not configured yet — show message
|
||||
alert(t('pages.pricing.stripeNotReady', 'Payment system is being set up. Please try again later.'));
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
function renderValue(val: boolean | string) {
|
||||
if (val === true) return <Check className="mx-auto h-5 w-5 text-green-500" />;
|
||||
@@ -56,6 +85,8 @@ export default function PricingPage() {
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<SocialProofStrip className="mb-12" />
|
||||
|
||||
{/* Plan Cards */}
|
||||
<div className="mb-16 grid gap-8 md:grid-cols-2">
|
||||
{/* Free Plan */}
|
||||
@@ -137,17 +168,49 @@ export default function PricingPage() {
|
||||
</ul>
|
||||
|
||||
<button
|
||||
disabled
|
||||
onClick={() => handleUpgrade('monthly')}
|
||||
disabled={loading || user?.plan === 'pro'}
|
||||
className="block w-full rounded-xl bg-primary-600 py-3 text-center text-sm font-semibold text-white transition-colors hover:bg-primary-700 disabled:cursor-not-allowed disabled:opacity-60"
|
||||
>
|
||||
{t('pages.pricing.comingSoon', 'Coming Soon')}
|
||||
{loading ? (
|
||||
<Loader2 className="mx-auto h-5 w-5 animate-spin" />
|
||||
) : user?.plan === 'pro' ? (
|
||||
t('pages.pricing.currentPlan', 'Current Plan')
|
||||
) : (
|
||||
t('pages.pricing.upgradeToPro', 'Upgrade to Pro')
|
||||
)}
|
||||
</button>
|
||||
<p className="mt-2 text-center text-xs text-slate-500 dark:text-slate-400">
|
||||
{t('pages.pricing.stripeNote', 'Stripe payment integration coming soon')}
|
||||
{t('pages.pricing.securePayment', 'Secure payment via Stripe')}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<section className="mb-16 rounded-[2rem] border border-slate-200 bg-white p-8 shadow-sm dark:border-slate-700 dark:bg-slate-900/70">
|
||||
<div className="max-w-3xl">
|
||||
<h2 className="text-2xl font-bold text-slate-900 dark:text-white">
|
||||
{t('pages.pricing.trustTitle')}
|
||||
</h2>
|
||||
<p className="mt-3 text-slate-600 dark:text-slate-400">
|
||||
{t('pages.pricing.trustSubtitle')}
|
||||
</p>
|
||||
</div>
|
||||
<div className="mt-8 grid gap-4 md:grid-cols-3">
|
||||
<div className="rounded-2xl bg-slate-50 p-5 dark:bg-slate-800/70">
|
||||
<h3 className="font-semibold text-slate-900 dark:text-white">{t('pages.pricing.trustFastTitle')}</h3>
|
||||
<p className="mt-2 text-sm leading-6 text-slate-600 dark:text-slate-400">{t('pages.pricing.trustFastDesc')}</p>
|
||||
</div>
|
||||
<div className="rounded-2xl bg-slate-50 p-5 dark:bg-slate-800/70">
|
||||
<h3 className="font-semibold text-slate-900 dark:text-white">{t('pages.pricing.trustPrivateTitle')}</h3>
|
||||
<p className="mt-2 text-sm leading-6 text-slate-600 dark:text-slate-400">{t('pages.pricing.trustPrivateDesc')}</p>
|
||||
</div>
|
||||
<div className="rounded-2xl bg-slate-50 p-5 dark:bg-slate-800/70">
|
||||
<h3 className="font-semibold text-slate-900 dark:text-white">{t('pages.pricing.trustApiTitle')}</h3>
|
||||
<p className="mt-2 text-sm leading-6 text-slate-600 dark:text-slate-400">{t('pages.pricing.trustApiDesc')}</p>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
{/* Comparison Table */}
|
||||
<div className="mb-16 overflow-hidden rounded-2xl border border-slate-200 dark:border-slate-700">
|
||||
<table className="w-full text-sm">
|
||||
@@ -210,7 +273,7 @@ export default function PricingPage() {
|
||||
{t('pages.pricing.faq3q', 'What payment methods do you accept?')}
|
||||
</h3>
|
||||
<p className="text-sm text-slate-600 dark:text-slate-400">
|
||||
{t('pages.pricing.faq3a', 'We will support credit/debit cards and PayPal via Stripe. Payment integration is launching soon.')}
|
||||
{t('pages.pricing.faq3a', 'We accept all major credit/debit cards via Stripe. Your payment information is securely processed — we never see your card details.')}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -94,6 +94,8 @@ export interface AuthUser {
|
||||
id: number;
|
||||
email: string;
|
||||
plan: string;
|
||||
role: 'user' | 'admin' | string;
|
||||
is_allowlisted_admin?: boolean;
|
||||
created_at: string;
|
||||
}
|
||||
|
||||
@@ -426,6 +428,164 @@ export async function checkHealth(): Promise<boolean> {
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Public, unauthenticated site statistics, served by `/stats/summary` and
 * used for social-proof widgets and developer onboarding.
 */
export interface PublicStatsSummary {
  // Lifetime counters over all processing tasks.
  total_files_processed: number;
  completed_files: number;
  failed_files: number;
  // Ratio of completed to total files — presumably a 0-100 percentage; confirm with backend.
  success_rate: number;
  // Files processed in the trailing 24-hour window.
  files_last_24h: number;
  // Aggregate user-submitted ratings.
  average_rating: number;
  rating_count: number;
  // Most-used tools as (tool slug, run count) pairs.
  top_tools: Array<{ tool: string; count: number }>;
}
|
||||
|
||||
/**
|
||||
* Return public site stats used for social proof and developer onboarding.
|
||||
*/
|
||||
export async function getPublicStats(): Promise<PublicStatsSummary> {
|
||||
const response = await api.get<PublicStatsSummary>('/stats/summary');
|
||||
return response.data;
|
||||
}
|
||||
|
||||
/**
 * One user row in the internal admin panel: account fields combined with
 * per-user task and API-key statistics.
 */
export interface InternalAdminUser {
  id: number;
  email: string;
  // Known plans are 'free' and 'pro'; the open union tolerates future plans.
  plan: 'free' | 'pro' | string;
  // Known roles are 'user' and 'admin'; the open union tolerates future roles.
  role: 'user' | 'admin' | string;
  // True when the email appears in the INTERNAL_ADMIN_EMAILS allowlist;
  // such accounts are protected from role changes in the admin UI.
  is_allowlisted_admin: boolean;
  created_at: string;
  // Processing-task counters for this user.
  total_tasks: number;
  completed_tasks: number;
  failed_tasks: number;
  active_api_keys: number;
}
|
||||
|
||||
/** A contact-form message as listed in the internal admin panel. */
export interface InternalAdminContact {
  id: number;
  // Submitter's name and email as entered in the contact form.
  name: string;
  email: string;
  category: string;
  // Optional subject line; null when the form was submitted without one.
  subject: string | null;
  message: string;
  created_at: string;
  // Whether an admin has already marked this message as read.
  is_read: boolean;
}
|
||||
|
||||
/**
 * Aggregated dashboard payload for the internal admin overview page
 * (`/internal/admin/overview`).
 */
export interface InternalAdminOverview {
  // User counts broken down by plan.
  users: {
    total: number;
    pro: number;
    free: number;
  };
  // File-processing counters (mirrors the public stats, plus internal detail).
  processing: {
    total_files_processed: number;
    completed_files: number;
    failed_files: number;
    files_last_24h: number;
    success_rate: number;
  };
  // Aggregate user-rating figures.
  ratings: {
    average_rating: number;
    rating_count: number;
  };
  // AI spend for the current month against the configured budget.
  ai_cost: {
    month: string;
    total_usd: number;
    budget_usd: number;
    // Presumably 0-100 percent of budget consumed; confirm with backend.
    percent_used: number;
  };
  // Contact-form inbox summary plus the most recent messages.
  contacts: {
    total_messages: number;
    unread_messages: number;
    recent: InternalAdminContact[];
  };
  // Per-tool run counts, including failures.
  top_tools: Array<{
    tool: string;
    total_runs: number;
    failed_runs: number;
  }>;
  // Recently failed tasks; user fields are null for anonymous runs.
  recent_failures: Array<{
    id: number;
    user_id: number | null;
    email: string | null;
    tool: string;
    original_filename: string | null;
    created_at: string;
    // Free-form task metadata; schema is server-defined.
    metadata: Record<string, unknown>;
  }>;
  // Newest signups with their activity counters.
  recent_users: Array<{
    id: number;
    email: string;
    plan: string;
    created_at: string;
    total_tasks: number;
    active_api_keys: number;
  }>;
}
|
||||
|
||||
export async function getInternalAdminOverview(): Promise<InternalAdminOverview> {
|
||||
const response = await api.get<InternalAdminOverview>('/internal/admin/overview');
|
||||
return response.data;
|
||||
}
|
||||
|
||||
export async function listInternalAdminUsers(query = '', limit = 25): Promise<InternalAdminUser[]> {
|
||||
const response = await api.get<{ items: InternalAdminUser[] }>('/internal/admin/users', {
|
||||
params: {
|
||||
query,
|
||||
limit,
|
||||
},
|
||||
});
|
||||
return response.data.items;
|
||||
}
|
||||
|
||||
export async function getInternalAdminContacts(page = 1, perPage = 20): Promise<{
|
||||
items: InternalAdminContact[];
|
||||
page: number;
|
||||
per_page: number;
|
||||
total: number;
|
||||
unread: number;
|
||||
}> {
|
||||
const response = await api.get<{
|
||||
items: InternalAdminContact[];
|
||||
page: number;
|
||||
per_page: number;
|
||||
total: number;
|
||||
unread: number;
|
||||
}>('/internal/admin/contacts', {
|
||||
params: {
|
||||
page,
|
||||
per_page: perPage,
|
||||
},
|
||||
});
|
||||
return response.data;
|
||||
}
|
||||
|
||||
/** Mark one contact message as read in the internal admin panel. */
export async function markInternalAdminContactRead(messageId: number): Promise<void> {
  await api.post(`/internal/admin/contacts/${messageId}/read`);
}
|
||||
|
||||
export async function updateInternalAdminUserPlan(
|
||||
userId: number,
|
||||
plan: 'free' | 'pro'
|
||||
): Promise<AuthUser> {
|
||||
const response = await api.post<{ message: string; user: AuthUser }>(
|
||||
`/internal/admin/users/${userId}/plan`,
|
||||
{ plan }
|
||||
);
|
||||
return response.data.user;
|
||||
}
|
||||
|
||||
export async function updateInternalAdminUserRole(
|
||||
userId: number,
|
||||
role: 'user' | 'admin'
|
||||
): Promise<AuthUser> {
|
||||
const response = await api.post<{ message: string; user: AuthUser }>(
|
||||
`/internal/admin/users/${userId}/role`,
|
||||
{ role }
|
||||
);
|
||||
return response.data.user;
|
||||
}
|
||||
|
||||
// --- Account / Usage / API Keys ---
|
||||
|
||||
export interface UsageSummary {
|
||||
|
||||
18
frontend/src/utils/pdfClient.ts
Normal file
18
frontend/src/utils/pdfClient.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
let pdfLibLoader: Promise<typeof import('pdf-lib')> | null = null;
|
||||
|
||||
async function loadPdfLib() {
|
||||
if (!pdfLibLoader) {
|
||||
pdfLibLoader = import('pdf-lib');
|
||||
}
|
||||
|
||||
return pdfLibLoader;
|
||||
}
|
||||
|
||||
export async function getPdfPageCount(file: File): Promise<number> {
|
||||
const { PDFDocument } = await loadPdfLib();
|
||||
const buffer = await file.arrayBuffer();
|
||||
const pdf = await PDFDocument.load(buffer, {
|
||||
ignoreEncryption: true,
|
||||
});
|
||||
return pdf.getPageCount();
|
||||
}
|
||||
@@ -11,6 +11,37 @@ export interface ToolSeoData {
|
||||
ratingCount?: number;
|
||||
}
|
||||
|
||||
/** One hreflang alternate link plus its matching Open Graph locale. */
export interface LanguageAlternate {
  // Value for the <link rel="alternate" hreflang="..."> attribute.
  hrefLang: string;
  // Absolute URL of the page in that language.
  href: string;
  // Matching og:locale value, e.g. "en_US".
  ogLocale: string;
}
|
||||
|
||||
// Supported site languages with their hreflang codes and Open Graph locales.
// Adding a language here extends normalizeSiteLanguage/buildLanguageAlternates.
const LANGUAGE_CONFIG: Record<'en' | 'ar' | 'fr', { hrefLang: string; ogLocale: string }> = {
  en: { hrefLang: 'en', ogLocale: 'en_US' },
  ar: { hrefLang: 'ar', ogLocale: 'ar_SA' },
  fr: { hrefLang: 'fr', ogLocale: 'fr_FR' },
};
|
||||
|
||||
export function normalizeSiteLanguage(language: string): 'en' | 'ar' | 'fr' {
|
||||
const baseLanguage = language.split('-')[0];
|
||||
return baseLanguage === 'ar' || baseLanguage === 'fr' ? baseLanguage : 'en';
|
||||
}
|
||||
|
||||
export function getOgLocale(language: string): string {
|
||||
return LANGUAGE_CONFIG[normalizeSiteLanguage(language)].ogLocale;
|
||||
}
|
||||
|
||||
export function buildLanguageAlternates(origin: string, path: string): LanguageAlternate[] {
|
||||
const separator = path.includes('?') ? '&' : '?';
|
||||
return (Object.entries(LANGUAGE_CONFIG) as Array<[keyof typeof LANGUAGE_CONFIG, (typeof LANGUAGE_CONFIG)[keyof typeof LANGUAGE_CONFIG]]>)
|
||||
.map(([language, config]) => ({
|
||||
hrefLang: config.hrefLang,
|
||||
href: `${origin}${path}${separator}lng=${language}`,
|
||||
ogLocale: config.ogLocale,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate WebApplication JSON-LD structured data for a tool page.
|
||||
*/
|
||||
@@ -122,3 +153,31 @@ export function generateWebPage(page: {
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function generateBlogPosting(post: {
|
||||
headline: string;
|
||||
description: string;
|
||||
url: string;
|
||||
datePublished: string;
|
||||
inLanguage: string;
|
||||
}): object {
|
||||
return {
|
||||
'@context': 'https://schema.org',
|
||||
'@type': 'BlogPosting',
|
||||
headline: post.headline,
|
||||
description: post.description,
|
||||
url: post.url,
|
||||
datePublished: post.datePublished,
|
||||
dateModified: post.datePublished,
|
||||
inLanguage: post.inLanguage,
|
||||
author: {
|
||||
'@type': 'Organization',
|
||||
name: 'SaaS-PDF',
|
||||
},
|
||||
publisher: {
|
||||
'@type': 'Organization',
|
||||
name: 'SaaS-PDF',
|
||||
},
|
||||
mainEntityOfPage: post.url,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -30,10 +30,44 @@ export default defineConfig({
|
||||
cssMinify: true,
|
||||
rollupOptions: {
|
||||
output: {
|
||||
manualChunks: {
|
||||
vendor: ['react', 'react-dom', 'react-router-dom'],
|
||||
i18n: ['i18next', 'react-i18next'],
|
||||
helmet: ['react-helmet-async'],
|
||||
manualChunks(id) {
|
||||
if (!id.includes('node_modules')) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (
|
||||
id.includes('react-dom') ||
|
||||
id.includes('react-router-dom') ||
|
||||
id.includes('/react/')
|
||||
) {
|
||||
return 'vendor';
|
||||
}
|
||||
|
||||
if (id.includes('i18next') || id.includes('react-i18next')) {
|
||||
return 'i18n';
|
||||
}
|
||||
|
||||
if (id.includes('react-helmet-async')) {
|
||||
return 'helmet';
|
||||
}
|
||||
|
||||
if (id.includes('lucide-react')) {
|
||||
return 'icons';
|
||||
}
|
||||
|
||||
if (id.includes('/axios/')) {
|
||||
return 'network';
|
||||
}
|
||||
|
||||
if (id.includes('/pdf-lib/')) {
|
||||
return 'pdf-core';
|
||||
}
|
||||
|
||||
if (id.includes('/fabric/')) {
|
||||
return 'editor';
|
||||
}
|
||||
|
||||
return undefined;
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
@@ -45,6 +45,12 @@ PDF_TOOLS = [
|
||||
{'slug': 'pdf-editor', 'priority': '0.8'},
|
||||
{'slug': 'pdf-flowchart', 'priority': '0.7'},
|
||||
{'slug': 'pdf-to-excel', 'priority': '0.8'},
|
||||
# Phase 2
|
||||
{'slug': 'sign-pdf', 'priority': '0.8'},
|
||||
{'slug': 'crop-pdf', 'priority': '0.7'},
|
||||
{'slug': 'flatten-pdf', 'priority': '0.7'},
|
||||
{'slug': 'repair-pdf', 'priority': '0.7'},
|
||||
{'slug': 'pdf-metadata', 'priority': '0.6'},
|
||||
]
|
||||
|
||||
# Image Tools
|
||||
@@ -53,6 +59,9 @@ IMAGE_TOOLS = [
|
||||
{'slug': 'image-resize', 'priority': '0.8'},
|
||||
{'slug': 'compress-image', 'priority': '0.8'},
|
||||
{'slug': 'remove-background', 'priority': '0.8'},
|
||||
# Phase 2
|
||||
{'slug': 'image-crop', 'priority': '0.7'},
|
||||
{'slug': 'image-rotate-flip', 'priority': '0.7'},
|
||||
]
|
||||
|
||||
# AI Tools
|
||||
@@ -71,6 +80,11 @@ UTILITY_TOOLS = [
|
||||
{'slug': 'video-to-gif', 'priority': '0.7'},
|
||||
{'slug': 'word-counter', 'priority': '0.6'},
|
||||
{'slug': 'text-cleaner', 'priority': '0.6'},
|
||||
# Phase 2
|
||||
{'slug': 'pdf-to-pptx', 'priority': '0.8'},
|
||||
{'slug': 'excel-to-pdf', 'priority': '0.8'},
|
||||
{'slug': 'pptx-to-pdf', 'priority': '0.8'},
|
||||
{'slug': 'barcode-generator', 'priority': '0.7'},
|
||||
]
|
||||
|
||||
TOOL_GROUPS = [
|
||||
|
||||
Reference in New Issue
Block a user