"""FastAPI backend for the Physical AI Textbook RAG chatbot."""
| from contextlib import asynccontextmanager | |
| from fastapi import FastAPI, HTTPException, BackgroundTasks | |
| from pydantic import BaseModel | |
| from typing import Optional | |
| import os | |
| from app.core.database import init_db | |
| from app.services.document_processor import process_and_index_documents | |
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan: initialize the database before serving.

    FastAPI's ``lifespan=`` parameter expects an async context manager.
    ``asynccontextmanager`` was imported at the top of the file but the
    decorator was missing, so the plain async generator would not work as
    a lifespan handler — adding it is the fix.
    """
    # Startup: create tables/connections before the first request.
    init_db()
    yield
    # Shutdown: nothing to clean up currently.
app = FastAPI(title="Physical AI Textbook RAG Chatbot", version="1.0.0", lifespan=lifespan)

from fastapi.middleware.cors import CORSMiddleware

# CORS for local development frontends.
# SECURITY: the previous config also listed "*" alongside the explicit
# origins while allow_credentials=True. Starlette treats any "*" entry as
# allow-all and reflects the request Origin, which effectively lets ANY
# site make credentialed requests — so the wildcard is removed and only
# the explicit dev origins remain.
app.add_middleware(
    CORSMiddleware,
    allow_origins=[
        "http://localhost:3000",
        "http://localhost:3001",
        "http://localhost:3002",
        "http://localhost:8000",
    ],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
class AskRequest(BaseModel):
    """Request body consumed by ask_question (question-answering)."""

    # The user's question.
    query: str
    # Optional passage the user highlighted to scope the answer.
    selected_text: Optional[str] = None
    # Optional background info used to tailor the answer to the reader.
    personalization_context: Optional[str] = None
    # When True, the answer is also translated to Urdu.
    translate_urdu: bool = False
class AskResponse(BaseModel):
    """Response body returned by ask_question."""

    # Generated answer text.
    answer: str
    # Source chapter of the answer ("N/A" when not available).
    chapter: str
    # Source section of the answer ("N/A" when not available).
    section: str
    # Whether the personalization context was applied.
    personalization_applied: bool
    # Whether the answer was translated to Urdu.
    translated_urdu: bool
class TranslateRequest(BaseModel):
    """Request body consumed by translate_content."""

    # Text to translate (presumably to Urdu, per translate_urdu elsewhere).
    text: str
class TranslateResponse(BaseModel):
    """Response body returned by translate_content."""

    # The translated text.
    translated_text: str
async def root():
    """Landing payload that points clients at the interactive API docs."""
    # NOTE(review): no route decorator is visible in this file — presumably
    # this was registered as @app.get("/"); confirm against the router setup.
    welcome = "Welcome to the Physical AI RAG Chatbot API. Visit /docs for documentation."
    return {"message": welcome}
async def health_check():
    """Liveness probe: returns a static payload indicating the service is up."""
    # NOTE(review): presumably registered as @app.get("/health") — confirm.
    payload = {"status": "ok", "service": "Physical AI RAG Chatbot"}
    return payload
class PersonalizeRequest(BaseModel):
    """Request body consumed by personalize."""

    # Text to adapt to the reader's background.
    text: str
    # Reader's software/programming background description.
    software_background: str
    # Reader's hardware/robotics experience description.
    hardware_experience: str
class PersonalizeResponse(BaseModel):
    """Response body returned by personalize."""

    # The text rewritten for the reader's background.
    personalized_text: str
async def ask_question(request: AskRequest):
    """Answer a user question through the RAG pipeline and wrap the result.

    NOTE(review): no route decorator is visible — presumably @app.post("/ask",
    response_model=AskResponse); confirm against the router setup.
    """
    # Deferred import to avoid a potential circular dependency at module load.
    from app.services.chat_service import process_user_query

    outcome = await process_user_query(
        query=request.query,
        selected_text=request.selected_text,
        personalization=request.personalization_context,
        translate_urdu=request.translate_urdu,
    )
    # chapter/section fall back to "N/A" when the pipeline omits them.
    return AskResponse(
        answer=outcome["answer"],
        chapter=outcome.get("chapter", "N/A"),
        section=outcome.get("section", "N/A"),
        personalization_applied=outcome["personalization_applied"],
        translated_urdu=outcome["translated_urdu"],
    )
async def translate_content(request: TranslateRequest):
    """Translate the submitted text via the chat service.

    Fix: the handler contained leftover ``print("DEBUG: ...")`` statements;
    they are replaced with ``logging.debug`` so diagnostics can be silenced
    or redirected through the logging configuration in production.
    """
    import logging

    from app.services.chat_service import translate_text

    logger = logging.getLogger(__name__)
    logger.debug(
        "Received translation request: text length=%d, preview=%r",
        len(request.text), request.text[:100],
    )
    translated = await translate_text(request.text)
    logger.debug(
        "Translation result: length=%d, preview=%r",
        len(translated), translated[:100],
    )
    return TranslateResponse(translated_text=translated)
async def personalize(request: PersonalizeRequest):
    """Adapt text to the reader's software and hardware background."""
    # Imported lazily, matching the other handlers, to sidestep circular imports.
    from app.services.chat_service import personalize_content

    adapted = await personalize_content(
        text=request.text,
        software_bg=request.software_background,
        hardware_exp=request.hardware_experience,
    )
    return PersonalizeResponse(personalized_text=adapted)
async def reload_documents(background_tasks: BackgroundTasks):
    """Schedule background re-indexing of the textbook markdown docs.

    The docs directory is resolved relative to this file: app/main.py lives
    in backend/, and the docs sit two directory levels above it at
    book-docs/docs (backend -> rag-chatbot -> physical_ai_book).

    Raises:
        HTTPException: 404 if the resolved docs directory does not exist.
    """
    # app/main.py -> app/ -> backend/
    backend_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    # backend/../../book-docs/docs, normalized to an absolute path.
    docs_dir = os.path.abspath(os.path.join(backend_dir, "..", "..", "book-docs", "docs"))

    if not os.path.exists(docs_dir):
        raise HTTPException(status_code=404, detail=f"Docs directory not found at {docs_dir}")

    # Indexing is slow, so run it after the response is sent.
    background_tasks.add_task(process_and_index_documents, docs_dir)
    return {"status": "Indexing started in background. The chatbot will be fully ready in a few minutes."}
# Allow running the API directly (python app/main.py) for local development.
if __name__ == "__main__":
    import uvicorn
    # reload=True restarts the server on code changes — development only.
    uvicorn.run("app.main:app", host="0.0.0.0", port=8000, reload=True)