from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import List
import logging
from openai import OpenAI
from core.database import get_db
from models import db_models, schemas
from api.auth import get_current_user
from services.rag_service import rag_service
from core.config import settings
# Router for all AI-conversation endpoints; mounted under /api/chat.
router = APIRouter(prefix="/api/chat", tags=["AI Conversation"])
# Module-scoped logger named after this module (standard logging convention).
logger = logging.getLogger(__name__)
@router.get("/history", response_model=List[schemas.ChatMessageResponse])
async def get_chat_history(
    current_user: db_models.User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Return every chat message belonging to the authenticated user, oldest first."""
    history_query = (
        db.query(db_models.ChatMessage)
        .filter(db_models.ChatMessage.user_id == current_user.id)
        .order_by(db_models.ChatMessage.created_at.asc())
    )
    return history_query.all()
@router.delete("/history")
async def clear_chat_history(
    current_user: db_models.User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Delete the authenticated user's entire conversation history (fresh start)."""
    # Bulk delete scoped to this user's messages only.
    (
        db.query(db_models.ChatMessage)
        .filter(db_models.ChatMessage.user_id == current_user.id)
        .delete()
    )
    db.commit()
    return {"message": "All AI conversation history has been cleared."}
@router.post("/query", response_model=schemas.ChatMessageResponse)
async def ask_ai(
    message_in: schemas.ChatMessageCreate,
    current_user: db_models.User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """
    Unified AI Endpoint:
    - Use this for general chat.
    - Use this for PDF/Document specific questions (by providing rag_doc_id).
    It automatically manages conversation history and RAG context retrieval.

    Raises:
        HTTPException(500): if any step of the pipeline fails. Full details
            are logged server-side; the client receives a generic message.
    """
    try:
        openai_client = OpenAI(api_key=settings.OPENAI_API_KEY)

        # 1. Load the 10 most recent messages. Fetched newest-first so the
        #    LIMIT keeps the latest rows, then reversed back to chronological
        #    order for the LLM.
        history = db.query(db_models.ChatMessage).filter(
            db_models.ChatMessage.user_id == current_user.id
        ).order_by(db_models.ChatMessage.id.desc()).limit(10).all()
        history.reverse()  # chronological [oldest -> newest]

        # 2. Persist the user's query immediately so it is not lost even if
        #    the LLM call below fails.
        user_msg = db_models.ChatMessage(
            user_id=current_user.id,
            role="user",
            content=message_in.query,
            rag_doc_id=message_in.rag_doc_id
        )
        db.add(user_msg)
        db.commit()

        # 3. Context retrieval (RAG). An unknown rag_doc_id, or one owned by
        #    another user, is silently ignored and the request degrades to
        #    plain chat without document context.
        context = ""
        doc_filename = ""
        if message_in.rag_doc_id:
            rag_doc = db.query(db_models.RAGDocument).filter(
                db_models.RAGDocument.id == message_in.rag_doc_id,
                db_models.RAGDocument.user_id == current_user.id  # ownership check
            ).first()
            if rag_doc:
                doc_filename = rag_doc.filename
                results = rag_service.search_document(
                    query=message_in.query,
                    doc_id=rag_doc.azure_doc_id,
                    user_id=current_user.id,
                    top_k=5
                )
                context = "\n\n".join([r["content"] for r in results])

        # 4. Build the LLM message list: system prompt, past history,
        #    optional RAG context, then the current query.
        llm_messages = [
            {
                "role": "system",
                "content": (
                    "You are a helpful AI assistant on the CreatorStudio platform. "
                    "Use the provided conversation history and document context to answer the user. "
                    "If the user refers to 'last message' or 'previous context', look at the history provided below."
                )
            }
        ]
        # Add past messages (conversation history)
        for msg in history:
            llm_messages.append({"role": msg.role, "content": msg.content})
        # Add RAG Knowledge if available
        if context:
            llm_messages.append({
                "role": "system",
                "content": f"REFERENTIAL KNOWLEDGE FROM DOCUMENT '{doc_filename}':\n\n{context}"
            })
        # Add current user query
        llm_messages.append({"role": "user", "content": message_in.query})

        # 5. Get AI Response
        response = openai_client.chat.completions.create(
            model="gpt-4o-mini",
            messages=llm_messages,
            temperature=0.7
        )
        ai_response_text = response.choices[0].message.content

        # 6. Save assistant response to database
        assistant_msg = db_models.ChatMessage(
            user_id=current_user.id,
            role="assistant",
            content=ai_response_text,
            rag_doc_id=message_in.rag_doc_id
        )
        db.add(assistant_msg)
        db.commit()
        db.refresh(assistant_msg)
        return assistant_msg
    except HTTPException:
        # Let deliberate HTTP errors propagate unchanged instead of being
        # swallowed by the broad handler below and rewrapped as a 500.
        raise
    except Exception:
        # Discard any partially-flushed session state before responding.
        db.rollback()
        # logger.exception records the full traceback server-side; the client
        # gets a generic message so internal details are never leaked.
        logger.exception("Unified AI Query failed")
        raise HTTPException(status_code=500, detail="AI Error: the request could not be processed.")