import time
from fastapi import APIRouter, status, Body, Response
from fastapi.responses import JSONResponse
from loguru import logger
from src.misc.settings import ApiSettings
from src.misc.schemas import ProductNamedEntityExtractionSchema
from src.models import ModelBuilder
from src.misc.logger_handlers import FileHandler
from src.misc.create_unique_id import create_unique_user_id
# Module-level setup: settings, the router, and the NER model are constructed
# once at import time and shared by every request handler below.
settings = ApiSettings()
# Log (not print) the Duckling endpoint so it goes through the configured sinks
# instead of leaking raw debug output to stdout.
logger.debug(f"Duckling endpoint: {settings.duckling_host}:{settings.duckling_port}")
price_extractor_router = APIRouter(prefix="/price_extractor")
# Model construction may be expensive; it happens exactly once per process.
model = ModelBuilder.build_model(settings=settings)
@price_extractor_router.get("/healthcheck", tags=["Healthcheck"])
async def healthcheck():
    """Liveness probe: always answers 200 with a fixed JSON body."""
    payload = {"message": "I am alive"}
    return JSONResponse(payload, status_code=status.HTTP_200_OK)
@price_extractor_router.post("/predict", tags=["Predict"])
async def ner_predict(
    response: Response,
    input_query: str = Body(description="input query from the search bar"),
) -> ProductNamedEntityExtractionSchema:
    """Run named-entity extraction on a search-bar query.

    A per-request file sink is attached for the duration of the request so
    each request's logs land in their own file (keyed by a unique request id).
    Two timing headers are set on the response:

    * ``model-inference-time`` -- seconds spent inside the model call.
    * ``request-time``         -- total handler wall-clock time.
    """
    request_tic = time.time()
    request_unique_id = create_unique_user_id()
    session_logger = logger.bind(user_unique_id=request_unique_id)
    # BUG FIX: the original called ``add`` without keeping the sink id, so a
    # new handler was registered on loguru's (shared) handler table on every
    # request and never removed -- an unbounded handler/file leak. Capture the
    # id and detach it in ``finally``.
    sink_id = session_logger.add(sink=FileHandler(user_unique_id=request_unique_id))
    try:
        session_logger.info(f"REQUEST ID -> {request_unique_id} : Request Received")
        session_logger.info(
            f"REQUEST ID -> {request_unique_id} : Request body is '{input_query}'"
        )

        # Time only the model call, separately from total request time.
        pred_tic = time.time()
        product_extraction_result: ProductNamedEntityExtractionSchema = model(
            input_query=input_query
        )
        pred_toc = time.time()

        session_logger.info(
            f"REQUEST ID -> {request_unique_id} : Model output is {product_extraction_result.model_dump_json()}"
        )

        request_toc = time.time()
        response.headers["model-inference-time"] = str(pred_toc - pred_tic)
        response.headers["request-time"] = str(request_toc - request_tic)
        return product_extraction_result
    finally:
        # Detach the per-request sink so handlers do not accumulate across
        # requests (loguru's remove() works on ids returned by add()).
        session_logger.remove(sink_id)