File size: 1,192 Bytes
d470f44
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
020ca35
 
 
 
d470f44
020ca35
 
d470f44
020ca35
d470f44
020ca35
d470f44
 
020ca35
d470f44
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
import os
import tempfile

import aiofiles
import cv2
import torch
from fastapi import FastAPI, File, HTTPException, UploadFile, status

from inference_utils import get_predictions

# Paths to the fine-tuned weights and the local YOLOv5 checkout, resolved
# relative to the working directory the server is started from.
weights_path = os.path.join('best.pt')
yolo_path = os.path.join('yolov5')
# Load the custom model once at import time (shared by all requests).
# source='local' loads from the yolov5 directory instead of GitHub;
# device='cpu' forces CPU inference; force_reload=True rebuilds the hub
# cache on every startup — slower boot, but avoids stale-cache errors.
model = torch.hub.load(yolo_path, 'custom', path = weights_path, source = 'local',device='cpu',force_reload=True)


app = FastAPI()
# Uploads are streamed to disk in 2 MiB chunks to bound memory use.
CHUNK_SIZE = 1024 * 1024 * 2

@app.get("/")
async def root():
    """Liveness endpoint: returns a static greeting to confirm the API is up."""
    payload = {"message": "Hello World"}
    return payload

@app.post("/detect-monument/")
async def upload(file: UploadFile = File(...)):
    """Accept an uploaded image, run the YOLOv5 monument detector on it,
    and return the predictions.

    The upload is streamed to a unique temporary file (so concurrent
    requests with the same client-side filename cannot clobber each
    other), decoded with OpenCV, converted BGR->RGB for the model, and
    the temporary file is always removed afterwards.

    Raises:
        HTTPException 500: the upload could not be written to disk.
        HTTPException 400: the uploaded file is not a decodable image.
    """
    # Preserve the original extension (if any); tempfile.mkstemp gives a
    # collision-free path instead of writing into './' under the
    # client-supplied name.
    suffix = os.path.splitext(os.path.basename(file.filename or ''))[1]
    fd, filepath = tempfile.mkstemp(suffix=suffix)
    os.close(fd)  # aiofiles reopens the path; the raw descriptor is not needed

    try:
        async with aiofiles.open(filepath, 'wb') as f:
            # Stream in bounded chunks so large uploads never sit fully in memory.
            while chunk := await file.read(CHUNK_SIZE):
                await f.write(chunk)
    except Exception:
        os.remove(filepath)  # drop the partial file before reporting failure
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail='There was an error uploading the file')
    finally:
        await file.close()

    try:
        image = cv2.imread(filepath)
        if image is None:
            # cv2.imread returns None (it does not raise) for unreadable
            # or non-image files — surface that as a client error instead
            # of letting cvtColor crash with an opaque 500.
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                detail='Uploaded file is not a valid image')
        # OpenCV decodes to BGR; the model expects RGB.
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        predictions = get_predictions(model, image)
    finally:
        # Clean up the temp file even when decoding or inference fails.
        os.remove(filepath)

    return {"predictions": predictions}