# api.py
# Main web server file.
# Creates ONE instance of the counselor and serves it via FastAPI.
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from counselor import UltraAdvancedHybridCounselor  # Main counselor class
import logging
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
import os
from typing import Optional, Dict, Any
# ---------- Logging ----------
os.makedirs("logs", exist_ok=True)  # FileHandler raises if the directory is missing
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s",
    handlers=[logging.FileHandler("logs/api.log", encoding="utf-8"), logging.StreamHandler()],
)
logger = logging.getLogger(__name__)
# ---------- App ----------
app = FastAPI(title="DreamFlow-AI API")
# ---------- CORS ----------
ALLOWED_ORIGINS = [
    "http://localhost:3000",
    "http://127.0.0.1:3000",
    "http://localhost:5173",
    "http://127.0.0.1:5173",
]
ALLOWED_ORIGIN_REGEX = r"https://.*\.hf\.space$"  # matches https://<space>.hf.space
app.add_middleware(
    CORSMiddleware,
    allow_origins=ALLOWED_ORIGINS,
    allow_origin_regex=ALLOWED_ORIGIN_REGEX,
    allow_credentials=False,
    allow_methods=["GET", "POST", "OPTIONS"],
    allow_headers=["*"],
    expose_headers=["Content-Type"],
)
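
# Example (illustrative): a front end served from "https://my-space.hf.space"
# (hypothetical Space name) matches ALLOWED_ORIGIN_REGEX, so its fetch() calls
# pass the CORS check; an origin like "https://example.com" does not.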
# ---------- Model ----------
# Create ONE counselor instance when the app starts; this also starts the
# background fine-tuning thread.
logger.info("Starting UltraAdvancedHybridCounselor...")
counselor = UltraAdvancedHybridCounselor()
logger.info("Counselor instance created. Starting API.")
# ---------- Pydantic Models ----------
class CounselRequest(BaseModel):
    query: str
    session_id: str


class HealthResponse(BaseModel):
    status: str
    current_model: Optional[str]
    web_search_enabled: bool
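
# Example /counsel request body (illustrative values):
#   {"query": "How do I handle exam stress?", "session_id": "demo-1"}
# FastAPI validates incoming JSON against CounselRequest and rejects
# malformed bodies with a 422 response.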

# ---------- Routes ----------
@app.get("/")
def root_health():
    """Basic health check for the root."""
    return {"status": "ok", "service": "DreamFlow-AI", "port": int(os.getenv("PORT", "7860"))}
@app.get("/health", response_model=HealthResponse)
def get_health_status():
"""
Returns the detailed health status of the counselor.
"""
try:
status = counselor.get_health_status()
return {
"status": status.get("status", "error"),
"current_model": counselor.current_model,
"web_search_enabled": counselor.tavily is not None
}
except Exception as e:
logger.error(f"Error in /health: {e}")
return {"status": "error", "current_model": None, "web_search_enabled": False}
@app.get("/model-info")
def get_model_info() -> Dict[str, Any]:
"""
Returns detailed info on the model and its fine-tuning worker.
"""
try:
return counselor.get_current_model_info()
except Exception as e:
logger.error(f"Error in /model-info: {e}")
return {"error": f"An unexpected error occurred: {e}"}
@app.post("/counsel")
async def counsel(request: CounselRequest):
"""
Main endpoint for asking the counselor a question.
(Your streaming implementation is great, no changes)
"""
async def stream_gen():
try:
async for chunk in counselor.get_comprehensive_answer(request.query, request.session_id):
yield chunk.encode("utf-8")
except Exception as e:
logger.error(f"Streaming error: {e}")
yield b"An error occurred during streaming."
return StreamingResponse(stream_gen(), media_type="text/plain")
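
# Client sketch (illustrative; assumes the server is running locally and the
# `httpx` package is installed) for consuming the streamed response:
#
#   import httpx
#   with httpx.stream("POST", "http://localhost:7860/counsel",
#                     json={"query": "hello", "session_id": "demo-1"},
#                     timeout=None) as r:
#       for text in r.iter_text():
#           print(text, end="", flush=True)
#
# Equivalent curl (-N disables output buffering so chunks print as they arrive):
#   curl -N -X POST http://localhost:7860/counsel \
#     -H "Content-Type: application/json" -d '{"query":"hello","session_id":"demo-1"}'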

# ---------- Entrypoint ----------
if __name__ == "__main__":
    # Hugging Face Spaces (Docker) expects the app to listen on port 7860.
    port = int(os.getenv("PORT", "7860"))
    uvicorn.run(app, host="0.0.0.0", port=port)
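
# To run locally, either of the following works:
#   python api.py
#   uvicorn api:app --host 0.0.0.0 --port 7860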