|
|
from fastapi import FastAPI, HTTPException |
|
|
from pydantic import BaseModel |
|
|
import asyncio |
|
|
from typing import List, Union |
|
|
from face_main import * |
|
|
from datetime import datetime |
|
|
from face_main import * |
|
|
import uvicorn |
|
|
import logging |
|
|
import pytz |
|
|
import torch |
|
|
import json |
|
|
|
|
|
# --- Logging setup ----------------------------------------------------------
# NOTE(review): the original configured "HDML-FaceDetection.log" twice — once
# via basicConfig(filename=..., filemode='w') on the root logger and again via
# an explicit FileHandler (default append mode) on the "HDML" logger — so
# every record written through ``logger`` was duplicated in the file.
# Configure a single handler on the named logger instead.
logger = logging.getLogger("HDML")
logger.setLevel(logging.DEBUG)

# mode='w' preserves the original basicConfig(filemode='w') behaviour of
# truncating the log file on each startup.
file_handler = logging.FileHandler("HDML-FaceDetection.log", mode='w')
logger.addHandler(file_handler)

# Running totals of request outcomes, updated by the /tech endpoint.
total_done = 0
total_error = 0
|
|
|
|
|
# FastAPI application instance; the /status and /tech routes below are
# registered on it, and __main__ hands it to uvicorn.
app = FastAPI()
|
|
|
|
|
class Item(BaseModel):
    """Request payload for a single face-detection job."""

    # URL of the image to run detection on (passed to mainDet).
    url: str
|
|
|
|
|
def get_bd_time():
    """Return the current Bangladesh (Asia/Dhaka) wall-clock time.

    Formatted as a 12-hour ``"HH:MM:SS AM/PM"`` string; used only for
    console and log timestamps.
    """
    dhaka_tz = pytz.timezone("Asia/Dhaka")
    return datetime.now(dhaka_tz).strftime("%I:%M:%S %p")
|
|
|
|
|
|
|
|
async def process_item(item: Item):
    """Run face detection on a single item and return the decoded result.

    Args:
        item: request payload whose ``url`` points at the image to process.

    Returns:
        The detection result, JSON-decoded into Python objects
        (``mainDet`` appears to return a JSON string — confirm against
        face_main).

    Raises:
        ValueError: wrapping any failure from detection or JSON parsing.
    """
    try:
        result = await mainDet(item.url)
        # mainDet's payload is a JSON-encoded string; decode for the caller.
        return json.loads(result)
    except Exception as e:
        # Fix: chain the original exception (``from e``) so the underlying
        # traceback is preserved instead of being discarded.
        raise ValueError(f"process_item ERROR : {str(e)}") from e
    finally:
        # Release cached GPU memory after every request (no-op without CUDA).
        torch.cuda.empty_cache()
|
|
|
|
|
async def process_items(items: Union[Item, List[Item]]):
    """Dispatch one or many detection requests.

    A list input is processed concurrently via ``asyncio.gather``; a single
    ``Item`` is processed directly.

    Returns:
        A list of per-item results for list input, or a single result
        otherwise.

    Raises:
        ValueError: wrapping any failure raised while processing.
    """
    try:
        # Fix: isinstance() instead of ``type(items)==list`` so list
        # subclasses are handled too (idiomatic type check).
        if isinstance(items, list):
            coroutines = [process_item(item) for item in items]
            results = await asyncio.gather(*coroutines)
            print("multi : ", results)
        else:
            results = await process_item(items)
            print("single : ", results)
        return results
    except Exception as e:
        # Fix: chain the cause so the original traceback is preserved.
        raise ValueError(f"process_items ERROR : {str(e)}") from e
    finally:
        # Release cached GPU memory whether or not processing succeeded.
        torch.cuda.empty_cache()
|
|
|
|
|
|
|
|
@app.get("/status")
async def status():
    """Health-check endpoint: confirms the AI server is up."""
    # Fix: original message had a typo — "AI Server in running".
    return "AI Server is running"
|
|
|
|
|
@app.post("/tech")
async def create_items(items: Union[Item, List[Item]]):
    """POST /tech — run face detection for one or many image URLs.

    Side effects: updates the module-level ``total_done`` / ``total_error``
    counters and writes an outcome line to the "HDML" logger per request.

    Returns:
        The detection result(s) from ``process_items``.

    Raises:
        HTTPException: status 500 when processing fails.
    """
    global total_done, total_error
    try:
        results = await process_items(items)
        print("Result Sent to User:", results)
        print("###################################################################################################")
        print(items)
        print("Last Execution Time : ", get_bd_time())
        # Fix: success accounting moved out of ``finally``.  The original
        # finally-block referenced ``results`` even when processing had
        # failed, raising a NameError that masked the real error, and it
        # incremented ``total_done`` (and logged "Execution Done") for
        # failed requests as well.
        total_done += 1
        logger.info(f"Time:{get_bd_time()}, Execution Done and Total Successfull Execution : {total_done}, Payload:{items}, Result:{results}")
        return results
    except Exception as e:
        total_error += 1
        logger.info(f"Time:{get_bd_time()}, Execution Failed and Total Failed Execution : {total_error}, Payload:{items}, Error:{str(e)}")
        logger.error(str(e))
        # Fix: raise the (previously imported but unused) HTTPException so
        # FastAPI returns a proper 500 response instead of an unhandled
        # ValueError crashing the request task.
        raise HTTPException(status_code=500, detail=f"process_item ERROR : {str(e)}") from e
    finally:
        # Always release cached GPU memory, success or failure.
        torch.cuda.empty_cache()
|
|
|
|
|
if __name__ == "__main__":
    try:
        # NOTE(review): ``faceModel`` presumably comes in via the
        # ``from face_main import *`` wildcard import.  Deleting it before
        # starting the server looks like a deliberate attempt to free a
        # module-level model object the request path does not use — confirm
        # against face_main.
        del faceModel
        # Serve on localhost only, port 8585 (blocking until shutdown).
        uvicorn.run(app, host="127.0.0.1", port=8585)
    except Exception as e:
        raise ValueError(f"face_api ERROR : {str(e)}")
    finally:
        # Release any cached GPU memory on shutdown or crash.
        torch.cuda.empty_cache()