feat: Initialize frontend with React, Vite, and Tailwind CSS
- Set up main entry point for React application.
- Create About, Home, NotFound, Privacy, and Terms pages with SEO support.
- Implement API service for file uploads and task management.
- Add global styles using Tailwind CSS.
- Create utility functions for SEO and text processing.
- Configure Vite for development and production builds.
- Set up Nginx configuration for serving frontend and backend.
- Add scripts for cleanup of expired files and sitemap generation.
- Implement deployment script for production environment.
This commit is contained in:
85
scripts/cleanup_expired_files.py
Normal file
85
scripts/cleanup_expired_files.py
Normal file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env python3
"""
cleanup_expired_files.py
Removes expired upload/output files older than FILE_EXPIRY_SECONDS.

Usage:
python scripts/cleanup_expired_files.py # Dry run
python scripts/cleanup_expired_files.py --execute # Actually delete
"""

import os
import sys  # NOTE(review): not referenced in this file — candidate for removal
import time
import shutil
import argparse

# Default to 2 hours
DEFAULT_EXPIRY_SECONDS = 7200
# Upload root is resolved relative to this script's location:
# scripts/../backend/uploads
UPLOAD_DIR = os.path.join(os.path.dirname(__file__), '..', 'backend', 'uploads')
def cleanup(upload_dir: str, expiry_seconds: int, dry_run: bool = True) -> dict:
    """Remove immediate subdirectories of *upload_dir* older than *expiry_seconds*.

    Age is measured against the directory's mtime. Non-directory entries
    are ignored.

    Args:
        upload_dir: Directory whose immediate subdirectories are scanned.
        expiry_seconds: Age threshold in seconds.
        dry_run: When True (default), only report what would be deleted.

    Returns:
        dict with 'scanned', 'deleted', 'freed_bytes' and 'errors' counters.
    """
    now = time.time()
    stats = {'scanned': 0, 'deleted': 0, 'freed_bytes': 0, 'errors': 0}

    if not os.path.isdir(upload_dir):
        print(f"Upload directory does not exist: {upload_dir}")
        return stats

    for entry in os.listdir(upload_dir):
        full_path = os.path.join(upload_dir, entry)
        if not os.path.isdir(full_path):
            continue

        stats['scanned'] += 1
        try:
            # A directory removed concurrently (e.g. by the request handler)
            # would raise here; skip it instead of aborting the whole sweep.
            mod_time = os.path.getmtime(full_path)
        except OSError as e:
            print(f"Error inspecting {entry}: {e}")
            stats['errors'] += 1
            continue
        age = now - mod_time

        if age > expiry_seconds:
            # Total size of regular files below the directory. Broken
            # symlinks or files vanishing mid-walk are counted as 0 rather
            # than raising — the size is informational only.
            dir_size = 0
            for dp, _, filenames in os.walk(full_path):
                for f in filenames:
                    try:
                        dir_size += os.path.getsize(os.path.join(dp, f))
                    except OSError:
                        pass  # best-effort accounting only

            if dry_run:
                print(f"[DRY RUN] Would delete: {entry} (age: {age:.0f}s, size: {dir_size / 1024:.1f} KB)")
            else:
                try:
                    shutil.rmtree(full_path)
                    print(f"Deleted: {entry} (age: {age:.0f}s, size: {dir_size / 1024:.1f} KB)")
                    stats['deleted'] += 1
                    stats['freed_bytes'] += dir_size
                except Exception as e:
                    print(f"Error deleting {entry}: {e}")
                    stats['errors'] += 1

    return stats
def main():
    """Command-line entry point: parse options, run the sweep, print a summary."""
    arg_parser = argparse.ArgumentParser(description='Cleanup expired upload files')
    arg_parser.add_argument('--execute', action='store_true', help='Actually delete files (default is dry run)')
    arg_parser.add_argument('--expiry', type=int, default=DEFAULT_EXPIRY_SECONDS, help='Expiry time in seconds')
    arg_parser.add_argument('--dir', type=str, default=UPLOAD_DIR, help='Upload directory path')
    opts = arg_parser.parse_args()

    # Deletion is opt-in via --execute; everything else is a dry run.
    is_dry_run = not opts.execute
    if is_dry_run:
        print("=== DRY RUN MODE (use --execute to delete) ===\n")

    summary = cleanup(opts.dir, opts.expiry, is_dry_run)

    print(f"\n--- Summary ---")
    print(f"Scanned: {summary['scanned']} directories")
    print(f"Deleted: {summary['deleted']} directories")
    print(f"Freed: {summary['freed_bytes'] / 1024 / 1024:.2f} MB")
    if summary['errors']:
        print(f"Errors: {summary['errors']}")


if __name__ == '__main__':
    main()
58
scripts/deploy.sh
Normal file
58
scripts/deploy.sh
Normal file
@@ -0,0 +1,58 @@
|
||||
#!/bin/bash
# deploy.sh — Production deployment script for SaaS-PDF
#
# Builds and (re)starts the production docker-compose stack, then verifies
# the /health endpoint. Exits non-zero on any missing prerequisite or on a
# failed health check.
set -euo pipefail

echo "========================================="
echo " SaaS-PDF Production Deployment"
echo "========================================="

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Check Docker
if ! command -v docker &> /dev/null; then
    echo -e "${RED}Docker is not installed.${NC}"
    exit 1
fi

# Pick whichever Compose flavour is present. The previous version accepted
# legacy docker-compose v1 in its check but then unconditionally invoked the
# `docker compose` v2 plugin, which fails on v1-only hosts.
if docker compose version &> /dev/null; then
    COMPOSE="docker compose"
elif command -v docker-compose &> /dev/null; then
    COMPOSE="docker-compose"
else
    echo -e "${RED}Docker Compose is not installed.${NC}"
    exit 1
fi

# Check .env
if [ ! -f ".env" ]; then
    echo -e "${RED}.env file not found! Copy .env.example and configure it.${NC}"
    exit 1
fi

echo -e "${YELLOW}1/5 — Pulling latest code...${NC}"
git pull origin main 2>/dev/null || echo "Not a git repo or no remote, skipping pull."

echo -e "${YELLOW}2/5 — Building Docker images...${NC}"
$COMPOSE -f docker-compose.prod.yml build --no-cache

echo -e "${YELLOW}3/5 — Stopping old containers...${NC}"
$COMPOSE -f docker-compose.prod.yml down --remove-orphans

echo -e "${YELLOW}4/5 — Starting services...${NC}"
$COMPOSE -f docker-compose.prod.yml up -d

echo -e "${YELLOW}5/5 — Waiting for health check...${NC}"
sleep 10

# Health check
if curl -sf http://localhost/health > /dev/null 2>&1; then
    echo -e "${GREEN}✓ Deployment successful! Service is healthy.${NC}"
else
    echo -e "${RED}✗ Health check failed. Check logs:${NC}"
    echo "  docker compose -f docker-compose.prod.yml logs backend"
    exit 1
fi

echo ""
echo -e "${GREEN}Deployment complete!${NC}"
echo " App: http://localhost"
echo " Logs: docker compose -f docker-compose.prod.yml logs -f"
86
scripts/generate_sitemap.py
Normal file
86
scripts/generate_sitemap.py
Normal file
@@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env python3
"""
generate_sitemap.py
Generates sitemap.xml for SEO.

Usage:
python scripts/generate_sitemap.py --domain https://yourdomain.com
"""

import argparse
from datetime import datetime


# Tool routes — each is emitted as a <url> with priority 0.9.
TOOLS = [
    '/tools/pdf-to-word',
    '/tools/word-to-pdf',
    '/tools/compress-pdf',
    '/tools/image-converter',
    '/tools/video-to-gif',
    '/tools/word-counter',
    '/tools/text-cleaner',
]

# Static pages. PAGES[0] ('/') is emitted separately with priority 1.0;
# the rest get priority 0.5.
PAGES = [
    '/',
    '/about',
    '/privacy',
]
def generate_sitemap(domain: str, tools=None, pages=None) -> str:
    """Build the sitemap.xml document for *domain*.

    Args:
        domain: Site origin without a trailing slash
            (e.g. 'https://example.com').
        tools: Tool routes, emitted with priority 0.9. Defaults to the
            module-level TOOLS list (backward compatible).
        pages: Static routes; the first entry is assumed to be '/' and is
            skipped because the home page is emitted separately with
            priority 1.0. Defaults to the module-level PAGES list.

    Returns:
        The complete sitemap XML document as a string.
    """
    tools = TOOLS if tools is None else tools
    pages = PAGES if pages is None else pages
    # lastmod is the generation date, applied uniformly to every URL.
    today = datetime.now().strftime('%Y-%m-%d')

    def entry(path: str, changefreq: str, priority: str) -> str:
        """Render one <url> element for *path*."""
        return (
            '  <url>\n'
            f'    <loc>{domain}{path}</loc>\n'
            f'    <lastmod>{today}</lastmod>\n'
            f'    <changefreq>{changefreq}</changefreq>\n'
            f'    <priority>{priority}</priority>\n'
            '  </url>'
        )

    urls = [entry('/', 'weekly', '1.0')]                     # home — highest priority
    urls += [entry(t, 'monthly', '0.9') for t in tools]      # tool pages
    urls += [entry(p, 'monthly', '0.5') for p in pages[1:]]  # static pages (skip '/')

    return (
        '<?xml version="1.0" encoding="UTF-8"?>\n'
        '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n'
        + '\n'.join(urls) + '\n'
        '</urlset>'
    )
def main():
    """Parse CLI options, render the sitemap and write it to disk."""
    cli = argparse.ArgumentParser(description='Generate sitemap.xml')
    cli.add_argument('--domain', type=str, required=True, help='Site domain (e.g. https://yourdomain.com)')
    cli.add_argument('--output', type=str, default='frontend/public/sitemap.xml', help='Output file path')
    options = cli.parse_args()

    # Normalise the origin so URL joins never produce '//path'.
    site_root = options.domain.rstrip('/')
    xml_text = generate_sitemap(site_root)

    with open(options.output, 'w', encoding='utf-8') as out_file:
        out_file.write(xml_text)

    print(f"Sitemap generated: {options.output}")
    print(f"URLs: {len(TOOLS) + len(PAGES)}")


if __name__ == '__main__':
    main()
Reference in New Issue
Block a user