|
|
from io import BytesIO |
|
|
|
|
|
import requests |
|
|
from fastapi import HTTPException |
|
|
from PIL import Image |
|
|
|
|
|
from app.config import get_settings |
|
|
from app.core.errors import BadRequestError, VendorError |
|
|
from app.schemas.requests import ExtractionRequest |
|
|
from app.schemas.responses import APIResponse |
|
|
from app.services.factory import AIServiceFactory |
|
|
from app.utils.logger import setup_logger |
|
|
|
|
|
# Module-level logger for this handler module.
logger = setup_logger(__name__)


# Cached application settings (model lists, supported-model names, etc.),
# shared by every request handled in this module.
settings = get_settings()
|
|
|
|
|
|
|
|
def _resolve_vendor(ai_model: str) -> str:
    """Map a model name to its vendor key ("openai" / "anthropic").

    Raises:
        ValueError: if *ai_model* is not in either configured model list.
    """
    if ai_model in settings.OPENAI_MODELS:
        return "openai"
    if ai_model in settings.ANTHROPIC_MODELS:
        return "anthropic"
    raise ValueError(
        f"Invalid AI model: {ai_model}, only support {settings.SUPPORTED_MODELS}"
    )


def _download_images(img_urls, attempt: int) -> list:
    """Download each URL and decode it into a PIL image.

    Raises:
        HTTPException: 400 if any URL fails to download or decode; the
            current attempt number is reported in the response headers.
    """
    # NOTE(review): requests is a blocking client inside an async handler;
    # consider httpx.AsyncClient or run_in_executor if this becomes a bottleneck.
    pil_images = []
    for url in img_urls:
        try:
            # Explicit timeout so a stalled CDN cannot hang the worker forever.
            response = requests.get(url, timeout=30)
            response.raise_for_status()
            pil_images.append(Image.open(BytesIO(response.content)))
        except Exception as e:
            logger.error("Failed to download or process image from %s: %s", url, e)
            raise HTTPException(
                status_code=400,
                detail=f"Failed to process image from {url}",
                # Header values must be strings, not ints.
                headers={"attempt": str(attempt)},
            ) from e
    return pil_images


async def handle_extract(request: ExtractionRequest):
    """Extract product attributes from images, retrying with vendor fallback.

    Clamps ``request.max_attempts`` to [1, 5], then repeatedly:
    resolves the vendor for ``request.ai_model``, downloads the request's
    images, and calls the vendor service. On a ``VendorError`` (or an
    "overload" error from Anthropic) the model is switched to the other
    vendor's first model before the next attempt.

    Returns:
        Tuple of (extracted JSON attributes, attempt number that succeeded).

    Raises:
        HTTPException: 400 for bad requests / invalid models / image
            failures; 500 when all attempts are exhausted. The attempt
            number is carried in the ``attempt`` response header.
    """
    # Clamp the retry budget to the inclusive range [1, 5].
    request.max_attempts = max(1, min(request.max_attempts, 5))

    for attempt in range(1, request.max_attempts + 1):
        try:
            logger.info("Attempt: %s", attempt)
            ai_vendor = _resolve_vendor(request.ai_model)
            service = AIServiceFactory.get_service(ai_vendor)
            pil_images = _download_images(request.img_urls, attempt)

            json_attributes = await service.extract_attributes_with_validation(
                request.attributes,
                request.ai_model,
                request.img_urls,
                request.product_taxonomy,
                request.product_data,
                pil_images=pil_images,
            )
            break
        except BadRequestError as e:
            logger.exception("Bad request error")
            raise HTTPException(
                status_code=400, detail=str(e), headers={"attempt": str(attempt)}
            ) from e
        except ValueError as e:
            logger.exception("Value error")
            raise HTTPException(
                status_code=400, detail=str(e), headers={"attempt": str(attempt)}
            ) from e
        except VendorError as e:
            logger.error("Vendor error: %s", e)
            if attempt == request.max_attempts:
                raise HTTPException(
                    status_code=500, detail=str(e), headers={"attempt": str(attempt)}
                ) from e
            # Retry with the other vendor's first model.
            if request.ai_model in settings.ANTHROPIC_MODELS:
                request.ai_model = settings.OPENAI_MODELS[0]
                logger.info(
                    "Switching from anthropic to %s for attempt %s",
                    request.ai_model,
                    attempt + 1,
                )
            elif request.ai_model in settings.OPENAI_MODELS:
                request.ai_model = settings.ANTHROPIC_MODELS[0]
                logger.info(
                    "Switching from OpenAI to %s for attempt %s",
                    request.ai_model,
                    attempt + 1,
                )
        except HTTPException:
            # Already an HTTP error (e.g. from _download_images); propagate as-is.
            logger.exception("HTTP exception")
            raise
        except Exception as e:
            logger.exception("Exception")
            # Anthropic "overloaded" errors: fall back to OpenAI for the retry.
            if (
                "overload" in str(e).lower()
                and request.ai_model in settings.ANTHROPIC_MODELS
            ):
                request.ai_model = settings.OPENAI_MODELS[0]
            if attempt == request.max_attempts:
                raise HTTPException(
                    status_code=500,
                    detail="Internal server error",
                    headers={"attempt": str(attempt)},
                ) from e

    # Only reachable via the `break` above, so json_attributes is bound:
    # every handler either re-raises on the final attempt or retries.
    return json_attributes, attempt
|
|
|