# Commit 76c3397 (sushilideaclan01): Add product scraping functionality and AI concept filling
"""
FastAPI backend for the Image Ad Essentials Researcher.
"""
import json
from typing import Literal
from pathlib import Path
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
from backend.pydantic_schema import ImageAdEssentials, TARGET_AUDIENCE_OPTIONS
from backend.gpt_method import researcher_gpt
from backend.claude_method import researcher_claude
from backend.scraper import scrape_product
from backend.ai_filler import fill_concept_fields
app = FastAPI(title="Image Ad Essentials Researcher")

# Allow the frontend to call the API from any origin.
# NOTE(review): allow_origins=["*"] together with allow_credentials=True is
# wide open — tighten the origin list before exposing this service publicly.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# --- Request / Response schemas ---
# --- Request / Response schemas ---
class ResearchRequest(BaseModel):
    """Payload for POST /api/research."""

    # One or more audience labels; the researcher runs once per audience.
    target_audience: list[str]
    product_category: str
    product_description: str
    count: int = 5  # number of concepts/angles (psychology triggers) to generate
    # Which backend implementation to use for the research run.
    method: Literal["gpt", "claude"]
class ScrapeProductRequest(BaseModel):
    """Payload for POST /api/scrape-product."""

    # Product page URL to scrape.
    url: str
class AudienceResult(BaseModel):
    """Research output for a single target audience."""

    target_audience: str
    # One ImageAdEssentials entry per generated concept/angle.
    output: list[ImageAdEssentials]
class ResearchResponse(BaseModel):
    """Response body for POST /api/research: one result group per audience."""

    results: list[AudienceResult]
# --- Endpoints ---
# --- Endpoints ---
@app.get("/api/target-audiences")
def get_target_audiences():
    """Expose the predefined, selectable target-audience options."""
    return dict(audiences=TARGET_AUDIENCE_OPTIONS)
@app.post("/api/scrape-product")
def scrape_and_fill_product(req: ScrapeProductRequest):
    """
    Scrape product data from a URL and use AI to fill concept fields.

    Returns the scraped product data merged with AI-suggested
    ``target_audience``, ``competitors``, and ``psychological_triggers``
    (the AI values win on key collisions).

    Raises:
        HTTPException: 400 when scraping/validation fails with ValueError,
            500 for any other error.
    """
    # Keys produced by fill_concept_fields that we surface to the caller.
    concept_keys = ("target_audience", "competitors", "psychological_triggers")
    try:
        product_data = scrape_product(req.url)
        concept_data = fill_concept_fields(product_data)
        return {
            **product_data,
            **{key: concept_data[key] for key in concept_keys},
        }
    except HTTPException:
        # Defensive: never re-wrap an already-built HTTP error as a generic 500.
        raise
    except ValueError as e:
        # Chain the cause (`from e`) so the original traceback is preserved in logs.
        raise HTTPException(status_code=400, detail=str(e)) from e
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"An error occurred while scraping: {str(e)}",
        ) from e
@app.post("/api/research", response_model=ResearchResponse)
def run_research(req: ResearchRequest):
    """
    Run the researcher for each target audience separately.

    Returns grouped results — one set of triggers per audience.

    Raises:
        HTTPException: 400 for an invalid ``method``, 500 when a
            researcher call fails.
    """
    try:
        all_results = []
        for audience in req.target_audience:
            if req.method == "gpt":
                result = researcher_gpt(
                    audience, req.product_category, req.product_description, req.count
                )
            elif req.method == "claude":
                result = researcher_claude(
                    audience, req.product_category, req.product_description, req.count
                )
            else:
                # Pydantic's Literal validation should make this unreachable,
                # but guard anyway.
                raise HTTPException(status_code=400, detail="Invalid method. Use 'gpt' or 'claude'.")
            all_results.append(AudienceResult(target_audience=audience, output=result))
        return ResearchResponse(results=all_results)
    except HTTPException:
        # BUG FIX: the broad `except Exception` below previously caught the
        # 400 HTTPException raised above and re-raised it as a 500.
        raise
    except ValueError as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
    except Exception as e:
        # Some exceptions stringify to "", fall back to repr for a usable detail.
        detail = str(e)
        if not detail.strip():
            detail = repr(e)
        raise HTTPException(status_code=500, detail=detail) from e
# --- Serve frontend static files (MUST be after API routes) ---
# Mounting at "/" catches all paths; registering it last keeps /api/* routes
# reachable. Assumes a sibling "frontend" directory next to this package.
FRONTEND_DIR = Path(__file__).resolve().parent.parent / "frontend"
app.mount("/", StaticFiles(directory=str(FRONTEND_DIR), html=True), name="frontend")
if __name__ == "__main__":
    # Direct-run convenience: start a dev server on all interfaces, port 7860
    # (7860 is the conventional Hugging Face Spaces port — TODO confirm target).
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)