|
|
from fastapi import FastAPI |
|
|
from pydantic import BaseModel |
|
|
from typing import List |
|
|
import uvicorn |
|
|
import logging |
|
|
from datetime import datetime |
|
|
import pytz |
|
|
import torch |
|
|
|
|
|
from main import ImageProcessor |
|
|
|
|
|
# Configure logging exactly once.  basicConfig installs a root FileHandler on
# drinksLog.log (filemode="w" truncates the previous run's log); records from
# the "drinks" logger reach it via propagation.  The original code ALSO
# attached a second FileHandler("drinksLog.log") directly to the logger, so
# every message was written to the file twice -- that duplicate is removed.
logging.basicConfig(filename="drinksLog.log", filemode="w")

# Module logger for this service; DEBUG so all levels are recorded.
logger = logging.getLogger("drinks")
logger.setLevel(logging.DEBUG)
|
|
|
|
|
app = FastAPI() |
|
|
|
|
|
class RequestBody(BaseModel):
    """Inner payload of a /drinks request: two lists of image URL strings."""

    # Image URLs for the "fdz" set -- presumably fridge-zone photos;
    # TODO(review): confirm semantics against the client.
    fdz: List[str]

    # Image URLs for the "cItem" set; camelCase name is kept so pydantic
    # matches the client's JSON key exactly.
    cItem: List[str]
|
|
|
|
|
class RequestData(BaseModel):
    """Top-level /drinks request schema: the client nests the payload under "body"."""

    # Wrapper object holding the fdz/cItem URL lists.
    body: RequestBody
|
|
|
|
|
@app.get("/status")
async def status():
    """Health-check endpoint: confirm the server is up and responding."""
    payload = {"status": "AI Server is running"}
    return payload
|
|
|
|
|
@app.post("/drinks")
async def detect_items(request_data: RequestData):
    """Run image detection over the URLs supplied in the request body.

    Returns ``{"response": <result>}`` on success or
    ``{"error": ...}`` on failure.  The HTTP status stays 200 in both
    cases, matching what existing clients already expect.
    """
    try:
        # NOTE(review): a fresh ImageProcessor is built per request -- if
        # construction loads a model this is expensive; consider a shared
        # instance once it is confirmed stateless across requests.
        image_processor = ImageProcessor()
        fdz_urls = request_data.body.fdz
        citem_urls = request_data.body.cItem

        result = await image_processor.process_images(fdz_urls, citem_urls)
        return {"response": result}

    except Exception:
        # logger.exception records the full traceback; the previous
        # logger.error(f"...{e}") logged only the message text, making
        # failures here very hard to diagnose.
        logger.exception("Error during detection")
        return {"error": "An error occurred during detection"}
|
|
|
|
|
def _serve() -> None:
    """Run the API on localhost:4444, releasing CUDA cache on shutdown."""
    try:
        uvicorn.run(app, host="127.0.0.1", port=4444)
    finally:
        # Free cached GPU memory once the server stops (no-op without CUDA).
        torch.cuda.empty_cache()


if __name__ == "__main__":
    _serve()
|
|
|