# ku_bot / app.py
# (Hugging Face Space file — commit 3964aec, "Update app.py" by anamjafar6)
import os
from dotenv import load_dotenv
from langchain.vectorstores import FAISS
from langchain.embeddings import OpenAIEmbeddings
from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.chains.question_answering import load_qa_chain
from langchain_groq import ChatGroq # ✅ Correct import
# Load environment variables from a local .env file, if one exists.
load_dotenv()

# API keys: Groq serves the chat model, OpenAI serves the embeddings.
groq_api_key = os.getenv("GROQ_API_KEY")
openai_api_key = os.getenv("OPENAI_API_KEY")

# Fail fast with a clear message instead of a cryptic auth error when the
# embedding or chat client is first used.
if not groq_api_key or not openai_api_key:
    raise RuntimeError(
        "Both GROQ_API_KEY and OPENAI_API_KEY must be set "
        "(in the environment or a .env file)."
    )
# Build the FAISS index over the fee-schedule PDF: load the pages, cut them
# into overlapping chunks, and embed each chunk with OpenAI embeddings.
pdf_pages = PyPDFLoader("university_karachi_semester_fees.pdf").load_and_split()
splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
chunks = splitter.split_documents(pdf_pages)
vectorstore = FAISS.from_documents(
    chunks,
    OpenAIEmbeddings(openai_api_key=openai_api_key),
)
# Groq-hosted chat model answers the questions; the "stuff" chain packs all
# retrieved chunks into a single prompt.
# NOTE(review): "mixtral-8x7b-32768" may have been retired by Groq — confirm
# against the current model list.
llm = ChatGroq(groq_api_key=groq_api_key, model_name="mixtral-8x7b-32768")
qa_chain = load_qa_chain(llm, chain_type="stuff")
# Interactive Q&A loop: retrieve the chunks most similar to the query and
# let the QA chain answer from them.  Type "exit" or "quit" to stop.
while True:
    try:
        query = input("\nAsk something about KU: ").strip()
    except (EOFError, KeyboardInterrupt):
        # stdin closed (piped input exhausted) or Ctrl-C: exit cleanly
        # instead of crashing with a traceback.
        break
    if not query:
        # Nothing typed — don't waste a similarity search / LLM call.
        continue
    if query.lower() in ("exit", "quit"):
        break
    matched_docs = vectorstore.similarity_search(query)
    response = qa_chain.run(input_documents=matched_docs, question=query)
    print("\nAnswer:", response)