"""FastAPI service exposing a YOLOv5 monument-detection endpoint."""

import os
import tempfile

import aiofiles
import cv2
import torch
from fastapi import FastAPI, File, HTTPException, UploadFile, status

from inference_utils import get_predictions

# Load the custom YOLOv5 weights once at import time (CPU inference).
weights_path = os.path.join('best.pt')
yolo_path = os.path.join('yolov5')
model = torch.hub.load(
    yolo_path,
    'custom',
    path=weights_path,
    source='local',
    device='cpu',
    force_reload=True,
)

app = FastAPI()

# Stream uploads to disk in 2 MiB chunks to bound memory use.
CHUNK_SIZE = 1024 * 1024 * 2


@app.get("/")
async def root():
    """Health-check / hello endpoint."""
    return {"message": "Hello World"}


@app.post("/detect-monument/")
async def upload(file: UploadFile = File(...)):
    """Accept an image upload, run YOLOv5 detection, and return predictions.

    Raises:
        HTTPException 500: if the upload cannot be written to disk.
        HTTPException 400: if the uploaded file is not a readable image.
    """
    # Write to a unique temp file instead of ./<client filename>: the original
    # reused the client-supplied name in the working directory, so concurrent
    # uploads with the same filename clobbered each other.
    suffix = os.path.splitext(os.path.basename(file.filename or ''))[1]
    fd, filepath = tempfile.mkstemp(suffix=suffix)
    os.close(fd)  # aiofiles reopens the path itself; the raw fd is not needed
    try:
        try:
            async with aiofiles.open(filepath, 'wb') as f:
                while chunk := await file.read(CHUNK_SIZE):
                    await f.write(chunk)
        except Exception as exc:
            # Chain the cause so the real I/O error appears in server logs.
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail='There was an error uploading the file',
            ) from exc
        finally:
            await file.close()

        image = cv2.imread(filepath)
        # cv2.imread returns None (it does not raise) on unreadable input;
        # without this guard cv2.cvtColor crashes with an opaque error.
        if image is None:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail='Uploaded file is not a readable image',
            )
        # OpenCV decodes as BGR; the model expects RGB.
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        predictions = get_predictions(model, image)
        return {"predictions": predictions}
    finally:
        # Always remove the temp file — the original leaked it whenever
        # decoding or inference failed after a successful upload.
        if os.path.exists(filepath):
            os.remove(filepath)