File size: 3,943 Bytes
033e78f
4404e67
 
033e78f
f388a93
 
 
4404e67
f388a93
ce032e2
3679521
033e78f
4404e67
f388a93
033e78f
4404e67
f388a93
b25be4e
 
 
f388a93
 
 
033e78f
 
f388a93
033e78f
4404e67
033e78f
b25be4e
 
 
 
033e78f
b25be4e
f388a93
 
b25be4e
 
 
 
 
 
 
f388a93
 
033e78f
4404e67
 
 
 
f388a93
4404e67
f388a93
4404e67
ff7a7ac
b25be4e
 
4404e67
 
 
 
 
ff7a7ac
033e78f
4404e67
 
 
 
b25be4e
4404e67
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ff7a7ac
 
4404e67
 
 
 
b25be4e
 
 
 
 
 
 
 
033e78f
 
4404e67
3679521
b25be4e
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
# api.py
# This is your main web server file.
# It creates ONE instance of your counselor and serves it.

from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from counselor import UltraAdvancedHybridCounselor  # Your main counselor class
import logging
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
import os
from typing import Optional, Dict, Any # Added for new response models

# ---------- Logging ----------
# Create the log directory up front: logging.FileHandler raises
# FileNotFoundError if "logs/" does not exist (e.g. on a fresh container),
# which would crash the whole app at import time.
os.makedirs("logs", exist_ok=True)
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s",
    handlers=[logging.FileHandler("logs/api.log", encoding="utf-8"), logging.StreamHandler()],
)
logger = logging.getLogger(__name__)

# ---------- App ----------
app = FastAPI(title="DreamFlow-AI API")

# ---------- CORS ----------
# (Your CORS setup is great, no changes)
ALLOWED_ORIGINS = [
    "http://localhost:3000",
    "http://127.0.0.1:3000",
    "http://localhost:5173",
    "http://127.0.0.1:5173",
]
ALLOWED_ORIGIN_REGEX = r"https://.*\.hf\.space$"  # matches https://<space>.hf.space

app.add_middleware(
    CORSMiddleware,
    allow_origins=ALLOWED_ORIGINS,
    allow_origin_regex=ALLOWED_ORIGIN_REGEX,
    allow_credentials=False,
    allow_methods=["GET", "POST", "OPTIONS"],
    allow_headers=["*"],
    expose_headers=["Content-Type"],
)

# ---------- Model ----------
# This is the most important part.
# This creates ONE instance when the app starts,
# which also starts the background fine-tuning thread.
# NOTE: module-level singleton — every request handler below closes over it,
# so it must be constructed before the routes are served.
logger.info("Starting UltraAdvancedHybridCounselor...")
counselor = UltraAdvancedHybridCounselor()  # may be slow: presumably loads models — TODO confirm
logger.info("Counselor instance created. Starting API.")

# ---------- Pydantic Models ----------
class CounselRequest(BaseModel):
    """Request body for POST /counsel."""

    # Free-text question to send to the counselor.
    query: str
    # Client-supplied conversation identifier; passed through to the counselor.
    session_id: str

class HealthResponse(BaseModel):
    """Response schema for GET /health."""

    # Overall status string reported by the counselor ("error" on failure).
    status: str
    # Name of the currently active model; None when unavailable.
    current_model: Optional[str]
    # True when the counselor's web-search client (tavily) is configured.
    web_search_enabled: bool

# ---------- Routes ----------

@app.get("/")
def root_health():
    """Lightweight liveness probe for the service root."""
    listen_port = int(os.getenv("PORT", 7860))
    return {"status": "ok", "service": "DreamFlow-AI", "port": listen_port}

@app.get("/health", response_model=HealthResponse)
def get_health_status():
    """
    Return the detailed health status of the counselor.

    Returns:
        A dict matching HealthResponse: the counselor's own status string,
        the currently loaded model name, and whether web search is
        configured. On any counselor failure an all-error payload is
        returned instead of letting the request 500.
    """
    try:
        status = counselor.get_health_status()
        return {
            "status": status.get("status", "error"),
            "current_model": counselor.current_model,
            "web_search_enabled": counselor.tavily is not None,
        }
    except Exception:
        # logger.exception records the full traceback; the previous
        # logger.error(f"... {e}") dropped it, hiding the failure's origin.
        logger.exception("Error in /health")
        return {"status": "error", "current_model": None, "web_search_enabled": False}

@app.get("/model-info")
def get_model_info() -> Dict[str, Any]:
    """
    Return detailed info on the model and its fine-tuning worker.

    Delegates to counselor.get_current_model_info(); on failure returns an
    {"error": ...} payload instead of raising a 500.
    """
    try:
        return counselor.get_current_model_info()
    except Exception as e:
        # logger.exception keeps the traceback (logger.error with an
        # f-string discarded it).
        logger.exception("Error in /model-info")
        return {"error": f"An unexpected error occurred: {e}"}

@app.post("/counsel")
async def counsel(request: CounselRequest):
    """
    Main endpoint for asking the counselor a question.

    Streams the answer back as UTF-8 plain-text chunks. A failure mid-stream
    cannot become an HTTP error (headers are already sent), so a short error
    message is yielded into the stream instead.
    """
    async def stream_gen():
        try:
            async for chunk in counselor.get_comprehensive_answer(request.query, request.session_id):
                yield chunk.encode("utf-8")
        except Exception:
            # logger.exception preserves the traceback; logger.error with an
            # f-string was discarding it.
            logger.exception("Streaming error")
            yield b"An error occurred during streaming."
    return StreamingResponse(stream_gen(), media_type="text/plain")

# ---------- Entrypoint ----------
if __name__ == "__main__":
    # Hugging Face Spaces (Docker) expects the app to listen on port 7860,
    # overridable via the PORT environment variable.
    uvicorn.run(app, host="0.0.0.0", port=int(os.getenv("PORT", 7860)))