# Source: "triflix" uploader Hugging Face Space — app/main.py
# (commit c56b549, verified)
# app/main.py
import uuid
import time
import shutil
import asyncio
from pathlib import Path
import aiofiles
from fastapi import FastAPI, Request, UploadFile, File, HTTPException, Depends
from fastapi.responses import JSONResponse, HTMLResponse, FileResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from . import utils
# --- App Setup ---
# FastAPI application instance; title shows up in the auto-generated docs.
app = FastAPI(title="Triflix Simple Uploader")
# Serve static assets (JS/CSS) from app/static under the /static URL prefix.
app.mount("/static", StaticFiles(directory="app/static"), name="static")
# Jinja2 templates used to render the HTML upload page.
templates = Jinja2Templates(directory="app/templates")
# Ensure base upload directory exists on startup
utils.UPLOAD_ROOT.mkdir(parents=True, exist_ok=True)
# --- Background Cleanup Task ---
async def cleanup_expired_uploads(interval_seconds: int = 600, max_age_seconds: int = 3600):
    """Periodically scan the upload root and remove expired uploads.

    Runs forever; intended to be launched as a background task at startup.

    Args:
        interval_seconds: Seconds to sleep between scans (default: 10 minutes,
            matching the original hard-coded value).
        max_age_seconds: Age in seconds after which an upload is considered
            expired (default: 1 hour).
    """
    while True:
        await asyncio.sleep(interval_seconds)
        # Anything created before this instant is expired.
        cutoff = time.time() - max_age_seconds
        for upload_dir in utils.UPLOAD_ROOT.iterdir():
            if not upload_dir.is_dir():
                continue
            try:
                meta = utils.load_meta(upload_dir.name)
                # A missing timestamp defaults to 0, i.e. always expired.
                if meta.get("created_at_ts", 0) < cutoff:
                    print(f"Cleaning up expired upload: {upload_dir.name}")
                    shutil.rmtree(upload_dir, ignore_errors=True)
            except Exception as e:
                # Best-effort cleanup: a corrupt or missing meta file for one
                # upload must not kill the whole loop.
                print(f"Error during cleanup of {upload_dir.name}: {e}")
@app.on_event("startup")
async def on_startup():
    """Launch the periodic cleanup loop when the application starts.

    The task handle is kept on app.state: the event loop holds only a weak
    reference to tasks, so a fire-and-forget create_task() result can be
    garbage-collected mid-flight. Storing it pins the task for the app's
    lifetime (and makes it reachable for cancellation on shutdown).
    """
    app.state.cleanup_task = asyncio.create_task(cleanup_expired_uploads())
# --- Dependency ---
def get_request_ip(request: Request) -> str:
    """FastAPI dependency returning the client IP, used for rate limiting.

    request.client is None when no transport info is available (e.g. some
    test clients), so fall back to a placeholder instead of raising
    AttributeError.
    """
    client = request.client
    return client.host if client is not None else "unknown"
# --- Routes ---
@app.get("/", response_class=HTMLResponse)
async def read_root(request: Request):
    """Render and return the HTML upload page."""
    context = {"request": request}
    return templates.TemplateResponse("index.html", context)
@app.post("/upload", status_code=201)
async def create_upload(file: UploadFile = File(...), ip: str = Depends(get_request_ip)):
    """Accept a single file upload, stream it to disk, and record metadata.

    Rate-limited per client IP (5 uploads per 60 seconds). Returns the new
    upload id, the stored filename/size, and a download URL.

    Raises:
        HTTPException 429: rate limit exceeded.
        HTTPException 400: missing/empty filename.
        HTTPException 500: failed to write the file to disk.
    """
    if not utils.enforce_rate_limit(ip, limit=5, per_seconds=60):
        raise HTTPException(status_code=429, detail="Upload limit exceeded.")
    # SECURITY: the filename is client-controlled. Keep only the final path
    # component so names like "../../etc/passwd" cannot escape upload_dir.
    safe_name = Path(file.filename or "").name
    if not safe_name:
        raise HTTPException(status_code=400, detail="Missing filename.")
    upload_id = uuid.uuid4().hex
    upload_dir = utils.get_upload_dir(upload_id)
    upload_dir.mkdir()
    file_path = upload_dir / safe_name
    # Stream the file to disk asynchronously in 1MB chunks (bounded memory).
    try:
        async with aiofiles.open(file_path, "wb") as f:
            while content := await file.read(1024 * 1024):
                await f.write(content)
    except Exception as e:
        # Clean up the partial upload so expired/failed dirs don't accumulate.
        shutil.rmtree(upload_dir, ignore_errors=True)
        raise HTTPException(status_code=500, detail=f"Failed to write file to disk: {e}")
    file_size = file_path.stat().st_size
    meta = {
        "upload_id": upload_id,
        "filename": safe_name,
        "size_bytes": file_size,
        "created_at_ts": time.time(),
        "owner_ip": ip,
        "file_path": str(file_path),
    }
    utils.save_meta(upload_id, meta)
    return {
        "upload_id": upload_id,
        "filename": safe_name,
        "size_bytes": file_size,
        "download_url": f"/download/{upload_id}",
    }
@app.get("/download/{upload_id}")
async def download_file(upload_id: str):
    """Return the stored file for *upload_id* as an attachment download.

    Raises:
        HTTPException 404: the upload session is unknown/expired (meta file
            missing or incomplete), or the payload file itself is gone.
    """
    # Keep the try narrow: only the metadata lookup should map to 404.
    # KeyError (corrupt meta without "file_path") previously leaked as a 500.
    try:
        meta = utils.load_meta(upload_id)
        file_path = Path(meta["file_path"])
    except (FileNotFoundError, KeyError):
        raise HTTPException(status_code=404, detail="Upload session not found.")
    if not file_path.exists():
        raise HTTPException(status_code=404, detail="File not found on server.")
    return FileResponse(
        path=file_path,
        filename=meta["filename"],
        media_type="application/octet-stream",
    )