import os
import logging
from fastapi import FastAPI, Request, HTTPException
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from src.services.graph_service import GraphInterviewProcessor
from langtrace_python_sdk import langtrace
# Initialize Langtrace LLM tracing before the app is constructed so the
# FastAPI/LLM calls below are instrumented.
# NOTE(review): if LANGTRACE_API_KEY is unset this passes api_key=None —
# confirm langtrace.init tolerates a missing key.
langtrace.init(api_key=os.getenv("LANGTRACE_API_KEY"))

# Module-level logger shared by the endpoint handlers below.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI(
    title="Interview Simulation API",
    description="API for interview simulations.",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc"
)

# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# the most permissive CORS configuration possible — confirm this is intended
# for production rather than a development-only setting.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
class HealthCheck(BaseModel):
    """Response model for the health-check endpoint."""

    # Fixed value reported when the service is up.
    status: str = "ok"
@app.get("/", response_model=HealthCheck, tags=["Status"])
async def health_check():
    """Liveness probe: report that the service is up and responding."""
    # Constructing with the explicit default yields {"status": "ok"}.
    return HealthCheck(status="ok")
@app.post("/simulate-interview/")
async def simulate_interview(request: Request):
    """
    Receive the interview data, instantiate the graph processor and start
    the conversation.

    Expects a JSON body containing ``user_id``, ``job_offer_id``,
    ``cv_document`` and ``job_offer`` (``messages`` is optional and
    defaults to an empty list).

    Returns:
        JSONResponse with the processor result on success; 400 on missing
        or invalid payload data; 500 on unexpected internal failures.
    """
    try:
        payload = await request.json()
        required = ("user_id", "job_offer_id", "cv_document", "job_offer")
        if not all(k in payload for k in required):
            raise HTTPException(status_code=400, detail="Missing data in payload (user_id, job_offer_id, cv_document, job_offer).")
        # Lazy %-style args avoid formatting when the level is disabled.
        logger.info("Starting simulation for user: %s", payload["user_id"])
        processor = GraphInterviewProcessor(payload)
        result = processor.invoke(payload.get("messages", []))
        return JSONResponse(content=result)
    except HTTPException:
        # Bug fix: the broad `except Exception` below previously swallowed
        # the 400 HTTPException raised above and returned a 500 instead.
        # Re-raise so FastAPI renders the intended status code.
        raise
    except ValueError as ve:
        logger.error("Data validation error: %s", ve, exc_info=True)
        return JSONResponse(content={"error": str(ve)}, status_code=400)
    except Exception as e:
        logger.error("Internal error in simulate-interview endpoint: %s", e, exc_info=True)
        return JSONResponse(
            content={"error": "An internal error occurred on the assistant's server."},
            status_code=500
        )
if __name__ == "__main__":
    import uvicorn

    # Bind port comes from the PORT env var (platform-injected); fall back
    # to 8002 for local runs.
    listen_port = int(os.getenv("PORT", 8002))
    uvicorn.run(app, host="0.0.0.0", port=listen_port)