from fastapi import FastAPI, File, UploadFile, HTTPException
from fastapi.responses import JSONResponse
from pathlib import Path
import shutil
import logging
from inference import inference, get_args
from utils import config_logger
from tools import load_pipeline
from configs import ModelConfig, InferenceConfig
# Module-level ASGI application instance; uvicorn serves this object (see __main__ guard).
app = FastAPI()
@app.post("/upload-video/")
async def upload_video(file: UploadFile = File(...)):
    """Accept an uploaded video, run the inference pipeline on it, and report success.

    The upload is spooled to a local temp file, the project's inference
    configuration is pointed at that file, and the pipeline is executed.

    Raises:
        HTTPException 400: filename does not carry a recognized video extension.
        HTTPException 500: the inference run itself failed.
    """
    # Case-insensitive extension check so uploads like ".MP4" are accepted too.
    if not file.filename or not file.filename.lower().endswith(('.mp4', '.avi', '.mov', '.mkv')):
        raise HTTPException(status_code=400, detail="Invalid file type. Only video files are allowed.")
    # Path(...).name strips any directory components a client might smuggle
    # into the filename, keeping the temp file inside the working directory.
    temp_file_path = Path(f"temp_{Path(file.filename).name}")
    try:
        # Spool the upload to disk so the pipeline can read it as a regular file.
        with temp_file_path.open("wb") as buffer:
            shutil.copyfileobj(file.file, buffer)
        # Load configurations
        args = get_args()
        model_config = args.model
        inference_config = args.inference
        # Point the pipeline at the uploaded file
        inference_config.source = temp_file_path
        # Configure logger (assumes inference_config.output_dir is a Path — TODO confirm)
        config_logger(inference_config.output_dir / "inference.log")
        # Load the pipeline
        pipeline = load_pipeline(model_config, inference_config)
        # Run inference; translate any failure into an opaque 500 for the client.
        try:
            inference(model_config, inference_config, pipeline)
        except Exception as e:
            logging.error(f"Error during inference: {str(e)}")
            raise HTTPException(status_code=500, detail="Error during video processing")
    finally:
        # Always remove the temp file, even when config loading or inference raises
        # (the original only cleaned up on the success path, leaking the upload).
        if temp_file_path.exists():
            temp_file_path.unlink()
    return JSONResponse(content={"message": "Video processed successfully"})
def _serve() -> None:
    """Run a local development server hosting this app on all interfaces."""
    # Imported lazily so merely importing this module never pulls in uvicorn.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)


if __name__ == "__main__":
    _serve()