File size: 4,989 Bytes
cd55ee8 11ff83b 4815889 b3e9a96 28b14ff b3e9a96 11ff83b cd55ee8 1d80ba8 cd55ee8 4815889 cd55ee8 4815889 40fce64 4815889 cd55ee8 11ff83b cd55ee8 28b14ff 11ff83b cd55ee8 b34ee0a cd55ee8 28b14ff b3e9a96 28b14ff b3e9a96 28b14ff b3e9a96 cd55ee8 28b14ff b3e9a96 28b14ff b3e9a96 28b14ff cd55ee8 28b14ff b3e9a96 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 |
# app.py
from fastapi import FastAPI
from pydantic import BaseModel
from fastapi.middleware.cors import CORSMiddleware
#from bot_instance import gemini_bot, llama_bot # singleton ErrorBot instance
from typing import List, Optional,Any
import os
from dotenv import load_dotenv
from util import ErrorBot
# from mongo_to_qdrant_ingestor import MongoToQdrantIngestor
# from qdrant_instance import qdrant
# from embedding_model_instance import embedding_model
# from json_to_qdrant_ingestor import JsonToQdrantIngestor
# Application instance; the title shows up in the generated OpenAPI docs.
app = FastAPI(title="ErrorBot API")

# ✅ Allow all origins (adjust in production)
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# rejected by browsers for credentialed requests — tighten origins before
# shipping to production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# ---------------- Request Models ----------------
# ---------------- Request Models ----------------
class MessageItem(BaseModel):
    """One turn of the conversation, as sent by the client."""
    role: str  # "user" or "bot"
    content: str  # the message text for this turn
class ChatRequest(BaseModel):
    """Payload accepted by the /gemini/chat and /llama/chat endpoints."""
    message: str  # the user's current message
    # Optional prior turns; pydantic deep-copies the mutable default per
    # instance, so `= []` is safe here. Clients may also send null.
    history: Optional[List[MessageItem]] = []
    # Fix: was `List[Any] = None` — a non-optional annotation with a None
    # default, which rejects an explicit JSON null and misstates the type.
    # Opaque retrieval context round-tripped from the previous reply.
    lastContext: Optional[List[Any]] = None
# ---------------- Endpoints ----------------
# ---------------- Endpoints ----------------
@app.get("/")
def root():
    """Liveness probe: report that the API is up and serving."""
    status_payload = {"status": "ok"}
    return status_payload
# @app.post("/chat")
# def chat(request: ChatRequest):
# """
# Main chat endpoint:
# - Accepts a message and optional conversation history
# - Uses ErrorBot with RAG + LLM
# """
# history_list = [
# {"role": msg.role, "content": msg.content} for msg in request.history
# ]
# # Ask bot with history
# answer = bot.ask(request.message, history=history_list)
# return {"reply": answer}
# Pull environment variables (API keys, etc.) from a local .env file into
# os.environ before any bot is constructed.
load_dotenv()
#GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY")
#GROQ_API_KEY = os.getenv("GROQ_API_KEY")
#EMBEDDING_MODEL = "BAAI/bge-large-en-v1.5"
#EMBEDDING_MODEL = "all-MiniLM-L6-v2"
#EMBEDDING_MODEL = "BAAI/bge-m3"
@app.post("/gemini/chat")
def gemini_chat(request: ChatRequest):
    """Chat endpoint backed by the Gemini model via ErrorBot.

    Accepts the user's message, optional conversation history, and the
    retrieval context returned by the previous call; returns the bot's
    reply plus the new context so the client can round-trip it.
    """
    # Fix: `history` is Optional, so an explicit JSON null would have made
    # the original `for msg in request.history` raise TypeError — guard it.
    history_list = [
        {"role": msg.role, "content": msg.content}
        for msg in (request.history or [])
    ]
    # A fresh bot per request keeps last_context request-scoped (stateless API).
    gemini_bot = ErrorBot(
        llm_model_name="gemini-2.5-flash",
        llm_provider="gemini",
        last_context=request.lastContext,
    )
    answer, last_context = gemini_bot.ask(request.message, history=history_list)
    return {"reply": answer, "last_context": last_context}
@app.post("/llama/chat")
def llama_chat(request: ChatRequest):
    """Chat endpoint backed by Llama 3.3 (served via Groq) through ErrorBot.

    Mirrors /gemini/chat: takes the message, optional history, and the
    previous retrieval context; returns the reply and the new context.
    """
    # Fix: `history` is Optional, so an explicit JSON null would have made
    # the original `for msg in request.history` raise TypeError — guard it.
    history_list = [
        {"role": msg.role, "content": msg.content}
        for msg in (request.history or [])
    ]
    # A fresh bot per request keeps last_context request-scoped (stateless API).
    llama_bot = ErrorBot(
        llm_model_name="llama-3.3-70b-versatile",
        llm_provider="groq",
        last_context=request.lastContext,
    )
    answer, last_context = llama_bot.ask(request.message, history=history_list)
    return {"reply": answer, "last_context": last_context}
# @app.post("/ingest/mongodb")
# def ingest_mongodb():
# """
# Ingest documents from MongoDB into the bot's knowledge base.
# """
# ingestor = MongoToQdrantIngestor(qdrant, embedding_model, collection_name="technical_errors")
# # def build_content(doc, entity_type):
# # """Simple example function to build textual content."""
# # return f"{entity_type}: {doc.get('title', '')} {doc.get('description', '')}"
# def build_content(doc: dict, entity_type: str) -> str:
# """Convert MongoDB document into natural text for embeddings."""
# parts = [f"{entity_type} ID: {doc.get('id', str(doc.get('_id', '')))}"]
# for k, v in doc.items():
# if k in ["_id"]: # skip ObjectId
# continue
# if isinstance(v, list):
# parts.append(f"{k}: {', '.join(map(str, v))}")
# elif isinstance(v, dict):
# nested = "; ".join([f"{nk}: {nv}" for nk, nv in v.items() if nv])
# parts.append(f"{k}: {nested}")
# else:
# if v:
# parts.append(f"{k}: {v}")
# return "\n".join(parts)
# ingestor.ingest_from_mongodb(
# build_content_fn=build_content,
# batch_size=500,
# )
# return {"status": "ingestion started"}
# @app.post("/ingest/json")
# def ingest_json():
# """
# Ingest documents from JSON into the bot's knowledge base.
# """
# json_sources = {
# "ProblemReport": "./json/problemReports.json",
# "Correction": "./json/corrections.json",
# "FaultAnalysis": "./json/faultanalysis.json",
# }
# ingestor = JsonToQdrantIngestor(qdrant, embedding_model, collection_name="technical_errors")
# def build_content(doc, entity_type):
# """Simple example function to build textual content."""
# return f"{entity_type}: {doc.get('title', '')} {doc.get('description', '')}"
# ingestor.ingest_from_json(
# json_sources,
# build_content_fn=build_content,
# batch_size=500,
# )
# return {"status": "ingestion started"} |