feat: Initialize frontend with React, Vite, and Tailwind CSS
- Set up main entry point for React application.
- Create About, Home, NotFound, Privacy, and Terms pages with SEO support.
- Implement API service for file uploads and task management.
- Add global styles using Tailwind CSS.
- Create utility functions for SEO and text processing.
- Configure Vite for development and production builds.
- Set up Nginx configuration for serving frontend and backend.
- Add scripts for cleanup of expired files and sitemap generation.
- Implement deployment script for production environment.
This commit is contained in:
1
backend/app/tasks/__init__.py
Normal file
1
backend/app/tasks/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Celery tasks for async file processing."""
|
||||
88
backend/app/tasks/compress_tasks.py
Normal file
88
backend/app/tasks/compress_tasks.py
Normal file
@@ -0,0 +1,88 @@
|
||||
"""Celery tasks for PDF compression."""
|
||||
import os
|
||||
import logging
|
||||
|
||||
from app.extensions import celery
|
||||
from app.services.compress_service import compress_pdf, PDFCompressionError
|
||||
from app.services.storage_service import storage
|
||||
from app.utils.sanitizer import cleanup_task_files
|
||||
|
||||
|
||||
def _cleanup(task_id: str):
    """Remove this task's temp files; keep outputs when storage is local."""
    keep_local_outputs = not storage.use_s3
    cleanup_task_files(task_id, keep_outputs=keep_local_outputs)


logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@celery.task(bind=True, name="app.tasks.compress_tasks.compress_pdf_task")
def compress_pdf_task(
    self,
    input_path: str,
    task_id: str,
    original_filename: str,
    quality: str = "medium",
):
    """
    Async task: Compress a PDF file.

    Args:
        input_path: Path to the uploaded PDF file
        task_id: Unique task identifier
        original_filename: Original filename for download
        quality: Compression quality ("low", "medium", "high")

    Returns:
        dict with download_url, compression stats, and file info
    """
    output_dir = os.path.join("/tmp/outputs", task_id)
    os.makedirs(output_dir, exist_ok=True)
    output_path = os.path.join(output_dir, f"{task_id}.pdf")

    try:
        self.update_state(
            state="PROCESSING",
            meta={"step": f"Compressing PDF ({quality} quality)..."},
        )

        # Compress using Ghostscript
        stats = compress_pdf(input_path, output_path, quality)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})

        # Upload to S3 (or local storage, depending on storage backend)
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")

        # Generate download filename: "<original stem>_compressed.pdf"
        name_without_ext = os.path.splitext(original_filename)[0]
        download_name = f"{name_without_ext}_compressed.pdf"

        download_url = storage.generate_presigned_url(
            s3_key, original_filename=download_name
        )

        result = {
            "status": "completed",
            "download_url": download_url,
            "filename": download_name,
            "original_size": stats["original_size"],
            "compressed_size": stats["compressed_size"],
            "reduction_percent": stats["reduction_percent"],
        }

        # Remove temp files only after the result dict is fully built.
        _cleanup(task_id)

        logger.info(
            "Task %s: PDF compression completed — %s%% reduction",
            task_id,
            stats["reduction_percent"],
        )
        return result

    except PDFCompressionError as e:
        # Known failure mode: record traceback, surface the message to the caller.
        logger.exception("Task %s: Compression error — %s", task_id, e)
        _cleanup(task_id)
        return {"status": "failed", "error": str(e)}

    except Exception:
        # Unexpected failure: log full traceback, but don't leak internals to users.
        logger.exception("Task %s: Unexpected error", task_id)
        _cleanup(task_id)
        return {"status": "failed", "error": "An unexpected error occurred."}
|
||||
128
backend/app/tasks/convert_tasks.py
Normal file
128
backend/app/tasks/convert_tasks.py
Normal file
@@ -0,0 +1,128 @@
|
||||
"""Celery tasks for PDF conversion (PDF↔Word)."""
|
||||
import os
|
||||
import logging
|
||||
|
||||
from app.extensions import celery
|
||||
from app.services.pdf_service import pdf_to_word, word_to_pdf, PDFConversionError
|
||||
from app.services.storage_service import storage
|
||||
from app.utils.sanitizer import cleanup_task_files
|
||||
|
||||
|
||||
def _cleanup(task_id: str):
    """Cleanup with local-aware flag."""
    keep_local_outputs = not storage.use_s3
    cleanup_task_files(task_id, keep_outputs=keep_local_outputs)


logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@celery.task(bind=True, name="app.tasks.convert_tasks.convert_pdf_to_word")
def convert_pdf_to_word(self, input_path: str, task_id: str, original_filename: str):
    """
    Async task: Convert PDF to Word document.

    Args:
        input_path: Path to the uploaded PDF file
        task_id: Unique task identifier
        original_filename: Original filename for download

    Returns:
        dict with download_url and file info
    """
    output_dir = os.path.join("/tmp/outputs", task_id)
    # Ensure the output dir exists, matching the other task modules.
    # (The converter may also create it — exist_ok makes this harmless.)
    os.makedirs(output_dir, exist_ok=True)

    try:
        self.update_state(state="PROCESSING", meta={"step": "Converting PDF to Word..."})

        # Convert using LibreOffice
        output_path = pdf_to_word(input_path, output_dir)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})

        # Upload to S3 (or local storage, depending on storage backend)
        s3_key = storage.upload_file(output_path, task_id, folder="outputs")

        # Generate download filename: "<original stem>.docx"
        name_without_ext = os.path.splitext(original_filename)[0]
        download_name = f"{name_without_ext}.docx"

        # Generate presigned URL
        download_url = storage.generate_presigned_url(
            s3_key, original_filename=download_name
        )

        result = {
            "status": "completed",
            "download_url": download_url,
            "filename": download_name,
            "output_size": os.path.getsize(output_path),
        }

        # Cleanup local files only after the result dict is fully built.
        _cleanup(task_id)

        logger.info("Task %s: PDF→Word conversion completed", task_id)
        return result

    except PDFConversionError as e:
        # Known failure mode: record traceback, surface the message to the caller.
        logger.exception("Task %s: Conversion error — %s", task_id, e)
        _cleanup(task_id)
        return {"status": "failed", "error": str(e)}

    except Exception:
        # Unexpected failure: log full traceback, but don't leak internals to users.
        logger.exception("Task %s: Unexpected error", task_id)
        _cleanup(task_id)
        return {"status": "failed", "error": "An unexpected error occurred."}
|
||||
|
||||
|
||||
@celery.task(bind=True, name="app.tasks.convert_tasks.convert_word_to_pdf")
def convert_word_to_pdf(self, input_path: str, task_id: str, original_filename: str):
    """
    Async task: Convert Word document to PDF.

    Args:
        input_path: Path to the uploaded Word file
        task_id: Unique task identifier
        original_filename: Original filename for download

    Returns:
        dict with download_url and file info
    """
    output_dir = os.path.join("/tmp/outputs", task_id)
    # Ensure the output dir exists, matching the other task modules.
    # (The converter may also create it — exist_ok makes this harmless.)
    os.makedirs(output_dir, exist_ok=True)

    try:
        self.update_state(state="PROCESSING", meta={"step": "Converting Word to PDF..."})

        output_path = word_to_pdf(input_path, output_dir)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})

        s3_key = storage.upload_file(output_path, task_id, folder="outputs")

        # Download name keeps the original stem with a .pdf extension.
        name_without_ext = os.path.splitext(original_filename)[0]
        download_name = f"{name_without_ext}.pdf"

        download_url = storage.generate_presigned_url(
            s3_key, original_filename=download_name
        )

        result = {
            "status": "completed",
            "download_url": download_url,
            "filename": download_name,
            "output_size": os.path.getsize(output_path),
        }

        # Remove temp files only after the result dict is fully built.
        _cleanup(task_id)

        logger.info("Task %s: Word→PDF conversion completed", task_id)
        return result

    except PDFConversionError as e:
        # Known failure mode: record traceback, surface the message to the caller.
        logger.exception("Task %s: Conversion error — %s", task_id, e)
        _cleanup(task_id)
        return {"status": "failed", "error": str(e)}

    except Exception:
        # Unexpected failure: log full traceback, but don't leak internals to users.
        logger.exception("Task %s: Unexpected error", task_id)
        _cleanup(task_id)
        return {"status": "failed", "error": "An unexpected error occurred."}
|
||||
160
backend/app/tasks/image_tasks.py
Normal file
160
backend/app/tasks/image_tasks.py
Normal file
@@ -0,0 +1,160 @@
|
||||
"""Celery tasks for image processing."""
|
||||
import os
|
||||
import logging
|
||||
|
||||
from app.extensions import celery
|
||||
from app.services.image_service import convert_image, resize_image, ImageProcessingError
|
||||
from app.services.storage_service import storage
|
||||
from app.utils.sanitizer import cleanup_task_files
|
||||
|
||||
|
||||
def _cleanup(task_id: str):
    """Remove this task's temp files; keep outputs when storage is local."""
    keep_local_outputs = not storage.use_s3
    cleanup_task_files(task_id, keep_outputs=keep_local_outputs)


logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@celery.task(bind=True, name="app.tasks.image_tasks.convert_image_task")
def convert_image_task(
    self,
    input_path: str,
    task_id: str,
    original_filename: str,
    output_format: str,
    quality: int = 85,
):
    """
    Async task: Convert an image to a different format.

    Args:
        input_path: Path to the uploaded image
        task_id: Unique task identifier
        original_filename: Original filename for download
        output_format: Target format ("jpg", "png", "webp")
        quality: Output quality 1-100

    Returns:
        dict with download_url and conversion stats
    """
    output_dir = os.path.join("/tmp/outputs", task_id)
    os.makedirs(output_dir, exist_ok=True)
    output_path = os.path.join(output_dir, f"{task_id}.{output_format}")

    try:
        self.update_state(
            state="PROCESSING",
            meta={"step": f"Converting image to {output_format.upper()}..."},
        )

        stats = convert_image(input_path, output_path, output_format, quality)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})

        s3_key = storage.upload_file(output_path, task_id, folder="outputs")

        # Download name keeps the original stem with the new extension.
        name_without_ext = os.path.splitext(original_filename)[0]
        download_name = f"{name_without_ext}.{output_format}"

        download_url = storage.generate_presigned_url(
            s3_key, original_filename=download_name
        )

        result = {
            "status": "completed",
            "download_url": download_url,
            "filename": download_name,
            "original_size": stats["original_size"],
            "converted_size": stats["converted_size"],
            "width": stats["width"],
            "height": stats["height"],
            "format": stats["format"],
        }

        # Remove temp files only after the result dict is fully built.
        _cleanup(task_id)

        logger.info("Task %s: Image conversion to %s completed", task_id, output_format)
        return result

    except ImageProcessingError as e:
        # Known failure mode: record traceback, surface the message to the caller.
        logger.exception("Task %s: Image error — %s", task_id, e)
        _cleanup(task_id)
        return {"status": "failed", "error": str(e)}

    except Exception:
        # Unexpected failure: log full traceback, but don't leak internals to users.
        logger.exception("Task %s: Unexpected error", task_id)
        _cleanup(task_id)
        return {"status": "failed", "error": "An unexpected error occurred."}
|
||||
|
||||
|
||||
@celery.task(bind=True, name="app.tasks.image_tasks.resize_image_task")
def resize_image_task(
    self,
    input_path: str,
    task_id: str,
    original_filename: str,
    width: int | None = None,
    height: int | None = None,
    quality: int = 85,
):
    """
    Async task: Resize an image.

    Args:
        input_path: Path to the uploaded image
        task_id: Unique task identifier
        original_filename: Original filename for download
        width: Target width
        height: Target height
        quality: Output quality 1-100

    Returns:
        dict with download_url and resize info
    """
    # Output keeps the original extension.
    # NOTE(review): if original_filename has no extension, ext is "" and the
    # output path ends with a bare dot — presumably upstream validation
    # guarantees an extension; confirm against the upload route.
    ext = os.path.splitext(original_filename)[1].lstrip(".")
    output_dir = os.path.join("/tmp/outputs", task_id)
    os.makedirs(output_dir, exist_ok=True)
    output_path = os.path.join(output_dir, f"{task_id}.{ext}")

    try:
        self.update_state(
            state="PROCESSING",
            meta={"step": "Resizing image..."},
        )

        stats = resize_image(input_path, output_path, width, height, quality)

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})

        s3_key = storage.upload_file(output_path, task_id, folder="outputs")

        # Download name: "<original stem>_resized.<original ext>"
        name_without_ext = os.path.splitext(original_filename)[0]
        download_name = f"{name_without_ext}_resized.{ext}"

        download_url = storage.generate_presigned_url(
            s3_key, original_filename=download_name
        )

        result = {
            "status": "completed",
            "download_url": download_url,
            "filename": download_name,
            "original_width": stats["original_width"],
            "original_height": stats["original_height"],
            "new_width": stats["new_width"],
            "new_height": stats["new_height"],
        }

        # Remove temp files only after the result dict is fully built.
        _cleanup(task_id)

        logger.info("Task %s: Image resize completed", task_id)
        return result

    except ImageProcessingError as e:
        # Known failure mode: record traceback, surface the message to the caller.
        logger.exception("Task %s: Image error — %s", task_id, e)
        _cleanup(task_id)
        return {"status": "failed", "error": str(e)}

    except Exception:
        # Unexpected failure: log full traceback, but don't leak internals to users.
        logger.exception("Task %s: Unexpected error", task_id)
        _cleanup(task_id)
        return {"status": "failed", "error": "An unexpected error occurred."}
|
||||
96
backend/app/tasks/video_tasks.py
Normal file
96
backend/app/tasks/video_tasks.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""Celery tasks for video processing."""
|
||||
import os
|
||||
import logging
|
||||
|
||||
from app.extensions import celery
|
||||
from app.services.video_service import video_to_gif, VideoProcessingError
|
||||
from app.services.storage_service import storage
|
||||
from app.utils.sanitizer import cleanup_task_files
|
||||
|
||||
|
||||
def _cleanup(task_id: str):
    """Remove this task's temp files; keep outputs when storage is local."""
    keep_local_outputs = not storage.use_s3
    cleanup_task_files(task_id, keep_outputs=keep_local_outputs)


logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@celery.task(bind=True, name="app.tasks.video_tasks.create_gif_task")
def create_gif_task(
    self,
    input_path: str,
    task_id: str,
    original_filename: str,
    start_time: float = 0,
    duration: float = 5,
    fps: int = 10,
    width: int = 480,
):
    """
    Async task: Convert video clip to animated GIF.

    Args:
        input_path: Path to the uploaded video
        task_id: Unique task identifier
        original_filename: Original filename for download
        start_time: Start time in seconds
        duration: Duration in seconds
        fps: Frames per second
        width: Output width in pixels

    Returns:
        dict with download_url and GIF info
    """
    output_dir = os.path.join("/tmp/outputs", task_id)
    os.makedirs(output_dir, exist_ok=True)
    output_path = os.path.join(output_dir, f"{task_id}.gif")

    try:
        self.update_state(
            state="PROCESSING",
            meta={"step": "Creating GIF from video..."},
        )

        stats = video_to_gif(
            input_path, output_path,
            start_time=start_time,
            duration=duration,
            fps=fps,
            width=width,
        )

        self.update_state(state="PROCESSING", meta={"step": "Uploading result..."})

        s3_key = storage.upload_file(output_path, task_id, folder="outputs")

        # Download name keeps the original stem with a .gif extension.
        name_without_ext = os.path.splitext(original_filename)[0]
        download_name = f"{name_without_ext}.gif"

        download_url = storage.generate_presigned_url(
            s3_key, original_filename=download_name
        )

        result = {
            "status": "completed",
            "download_url": download_url,
            "filename": download_name,
            "output_size": stats["output_size"],
            "duration": stats["duration"],
            "fps": stats["fps"],
            "width": stats["width"],
            "height": stats["height"],
        }

        # Remove temp files only after the result dict is fully built.
        _cleanup(task_id)

        logger.info("Task %s: Video→GIF creation completed", task_id)
        return result

    except VideoProcessingError as e:
        # Known failure mode: record traceback, surface the message to the caller.
        logger.exception("Task %s: Video error — %s", task_id, e)
        _cleanup(task_id)
        return {"status": "failed", "error": str(e)}

    except Exception:
        # Unexpected failure: log full traceback, but don't leak internals to users.
        logger.exception("Task %s: Unexpected error", task_id)
        _cleanup(task_id)
        return {"status": "failed", "error": "An unexpected error occurred."}
|
||||
Reference in New Issue
Block a user