import os

os.environ["POSTHOG_DISABLED"] = "true"  # Disable PostHog telemetry

import requests
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from dotenv import load_dotenv
from services.kb_creation import collection, ingest_documents, search_knowledge_base
from contextlib import asynccontextmanager

load_dotenv()  # Load GEMINI_API_KEY (and any other variables) from a local .env file

# --- 1. Initialize FastAPI with a lifespan handler ---
@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup: ingest documents into the knowledge base only if it is empty.
    try:
        folder_path = os.path.join(os.getcwd(), "documents")
        if collection.count() == 0:
            print("🔍 KB empty. Running ingestion...")
            ingest_documents(folder_path)
        else:
            print(f"✅ KB already populated with {collection.count()} entries. Skipping ingestion.")
    except Exception as e:
        print(f"⚠️ KB ingestion failed: {e}")
    yield  # The application serves requests after this point

app = FastAPI(lifespan=lifespan)

# --- 2. Configure CORS ---
origins = [
    "https://nova-chatbot-chatbot-frontend.hf.space",  # Frontend Space allowed to call this API
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# --- 3. Define the Request Data Structure ---
class ChatInput(BaseModel):
    user_message: str
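
# A hypothetical request body for the /chat endpoint below (field name matches ChatInput):
# {"user_message": "How do I reset my password?"}
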
# --- 4. Gemini API Setup ---
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
GEMINI_URL = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash-lite:generateContent?key={GEMINI_API_KEY}"
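# Note: the v1beta REST API also accepts the key via an "x-goog-api-key" request header,
# which keeps it out of URLs and access logs.
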
# --- 5. Endpoints ---
@app.get("/")
async def health_check():
return {"status": "ok"}
@app.post("/chat")
async def chat_with_ai(input_data: ChatInput):
"""Handle chat interactions using Google Generative AI via requests."""
try:
# Retrieve relevant documents from knowledge base
kb_results = search_knowledge_base(input_data.user_message, top_k=10)
#print(f"kb_results are: {kb_results}")
# Extract relevant context from search results
context = ""
relevant_docs=[]
if kb_results and kb_results.get('documents'):
# Limit context to avoid token limits - take top 2 most relevant
relevant_docs = kb_results['documents'][0][:2]
context = "\n\n".join(relevant_docs)
# Construct enhanced prompt with context
        if context:
            enhanced_prompt = f"""Use the following knowledge base context to answer the user's question accurately.
If the context contains relevant information, base your answer on it.
If the context doesn't help, say you're raising a ticket and provide a ticket number.
Knowledge Base Context:
{context}
User Question: {input_data.user_message}
Answer:"""
        else:
            enhanced_prompt = f"User Question: {input_data.user_message}\n\nAnswer:"

        headers = {"Content-Type": "application/json"}
        payload = {
            "contents": [
                {
                    "parts": [{"text": enhanced_prompt}]
                }
            ]
        }
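
        # The generateContent call below returns JSON whose generated text is nested under
        # candidates -> content -> parts (v1beta REST shape), e.g.:
        # {"candidates": [{"content": {"parts": [{"text": "..."}]}}], ...}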
        response = requests.post(GEMINI_URL, headers=headers, json=payload, verify=False)  # WARNING: SSL verification disabled for testing only
        print("Gemini status:", response.status_code)
        print("Gemini resp snippet:", response.text[:500])
        response.raise_for_status()  # Fail fast on HTTP errors instead of a KeyError below
        result = response.json()

        # Extract Gemini's response text
        bot_response = result["candidates"][0]["content"]["parts"][0]["text"]

        # Include debug info in the response
        debug_info = f"Context found: {'Yes' if context else 'No'}"
        if context:
            debug_info += f" (top {len(relevant_docs)} documents used)"
        return {"bot_response": bot_response, "debug": debug_info}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
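
# A minimal sketch for running the app directly (assumes uvicorn is installed; port 7860
# is the Hugging Face Spaces convention, adjust as needed):
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)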