# app/chatbot_module.py — chatbot module for the flight-delay-chatbot-api Space.
import os
from langchain.chains import RetrievalQA
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_community.llms import HuggingFaceEndpoint
from langchain.text_splitter import CharacterTextSplitter
from langchain.document_loaders import TextLoader
# HF Spaces injects the HF auth token automatically; we only need to point
# the Hugging Face caches at a writable location (/tmp) inside the Space.
os.environ.update({
    "TRANSFORMERS_CACHE": "/tmp/hf_cache",
    "HF_HOME": "/tmp/hf_home",
})
def load_vectorstore():
    """Build an in-memory FAISS retriever over the analysis summary document.

    Loads ``analysis_summary.txt`` — resolved relative to this module's own
    directory, so the result does not depend on the process working
    directory — splits it into overlapping character chunks, embeds them
    with a MiniLM sentence-transformer, and returns a retriever over the
    resulting FAISS index.

    Returns:
        A LangChain ``VectorStoreRetriever`` backed by the FAISS index.
    """
    # Resolve the data file next to this module: the original hard-coded
    # "app/data/..." path only worked when the app was launched from the
    # repository root.
    data_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        "data",
        "analysis_summary.txt",
    )
    # Explicit encoding avoids platform-dependent default-codec surprises.
    documents = TextLoader(data_path, encoding="utf-8").load()

    # ~1000-char chunks with 100-char overlap keep context across boundaries.
    splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    chunks = splitter.split_documents(documents)

    embedder = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
    vectorstore = FAISS.from_documents(chunks, embedder)
    return vectorstore.as_retriever()
def build_qa_chain():
    """Assemble the RetrievalQA pipeline used to answer user questions.

    Wires the FAISS retriever from ``load_vectorstore`` to a hosted
    ``google/flan-t5-base`` endpoint and returns the resulting chain.
    Source documents are not included in the chain's output.
    """
    retriever = load_vectorstore()
    endpoint = HuggingFaceEndpoint(
        repo_id="google/flan-t5-base",
        # Low temperature for mostly-deterministic answers; cap output length.
        model_kwargs={"temperature": 0.3, "max_length": 512},
    )
    return RetrievalQA.from_chain_type(
        llm=endpoint,
        retriever=retriever,
        return_source_documents=False,
    )
# Built once at import time so every request reuses the same chain
# (and the FAISS index is only constructed once per process).
qa_chain = build_qa_chain()
def get_bot_answer(query: str) -> str:
    """Answer *query* via the module-level RetrievalQA chain.

    Any failure (endpoint error, retrieval error, etc.) is returned to the
    caller as an error string rather than raised, so the API layer never
    crashes on a single bad request.
    """
    try:
        answer = qa_chain.run(query)
    except Exception as exc:  # deliberate catch-all: surface errors as chat text
        return f"❌ Error: {str(exc)}"
    return answer