Spaces:
Sleeping
Sleeping
Update api/endpoints/unified.py
Browse files- api/endpoints/unified.py +43 -29
api/endpoints/unified.py
CHANGED
|
@@ -1,29 +1,43 @@
|
|
| 1 |
-
from fastapi import APIRouter, UploadFile, File, HTTPException
|
| 2 |
-
from models.loader import ModelLoader
|
| 3 |
-
from request_queue.queue_manager import RequestQueue
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, UploadFile, File, HTTPException
|
| 2 |
+
from models.loader import ModelLoader
|
| 3 |
+
from request_queue.queue_manager import RequestQueue
|
| 4 |
+
from queue import Full
|

# FastAPI sub-router for the unified prediction endpoint; mounted under
# /predict/unified and grouped under the "Unified" tag in the OpenAPI docs.
router = APIRouter(prefix="/predict/unified", tags=["Unified"])

# Shared loader that resolves model keys via config files in models/configs.
# NOTE(review): presumably caches loaded models across requests — confirm
# against ModelLoader's implementation.
model_loader = ModelLoader(config_dir="models/configs")

# Initialize a global request queue (can be tuned for maxsize)
# A Full exception from add_request() is mapped to HTTP 429 by the endpoint.
request_queue = RequestQueue(maxsize=10)
@router.post("")
async def predict_unified(file: UploadFile = File(...)):
    """Classify an uploaded image via the router model, then dispatch to the
    model the router selects, returning the combined prediction payload.

    The response dict contains ``filename``, ``router_info`` (the router
    model's raw result) and the selected model's prediction keys merged in.

    Raises:
        HTTPException 400: no file uploaded, or the uploaded file is empty.
        HTTPException 429: the request queue is at capacity.
        HTTPException 503: the router model or the selected model cannot load.
        HTTPException 500: any other unexpected failure.
    """
    if not file or not file.filename:
        raise HTTPException(status_code=400, detail="No file uploaded or file is invalid.")
    try:
        image_bytes = await file.read()
        if not image_bytes:
            raise HTTPException(status_code=400, detail="Uploaded file is empty.")

        def process_request():
            # Deferred unit of work: route the image, then run the chosen model.
            try:
                router_model = model_loader.load_model("router")
            except Exception:
                raise HTTPException(status_code=503, detail="Router model not loaded or unavailable.")
            router_result = router_model.predict(image_bytes)
            # Fall back to the plant-disease model when the router result
            # carries no "model_key".
            model_key = router_result.get("model_key", "plant_disease")
            try:
                model = model_loader.load_model(model_key)
            except Exception:
                raise HTTPException(status_code=503, detail=f"Model '{model_key}' not loaded or unavailable.")
            result = model.predict(image_bytes)
            return {"filename": file.filename, "router_info": router_result, **result}

        try:
            request_queue.add_request(process_request)
        except Full:
            raise HTTPException(status_code=429, detail="Request queue is full. Please try again later.")
        # NOTE(review): enqueueing then immediately dequeue-and-run means the
        # queue acts only as a capacity gate; under concurrent requests
        # get_request() may hand back a *different* request's callable —
        # confirm RequestQueue's intended semantics. The (presumably
        # blocking) predict() call also runs on the event loop; consider
        # run_in_executor if it is CPU-bound.
        response = request_queue.get_request()()
        return response
    except HTTPException:
        # Bare raise re-raises the original HTTPException with its traceback
        # intact (fixes the `raise e` anti-idiom).
        raise
    except Exception as e:
        # Surface anything unexpected as a 500, chaining the original cause.
        raise HTTPException(status_code=500, detail=str(e)) from e