# SolarConnect — app.py
# Gradio chat assistant answering questions about 7Solar's documentation
# (RAG over a PDF: LangChain + Chroma + HuggingFace embeddings + Groq LLM).
import os
import gradio as gr
from huggingface_hub import hf_hub_download
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import Chroma
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_groq import ChatGroq
from langchain_community.document_loaders import PyPDFLoader
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
# Configuration
# Hugging Face dataset repo that hosts the source documentation PDF.
HF_REPO_ID = "Shami96/7solar-documentation"
# Filename of the PDF inside that dataset repo.
HF_PDF_NAME = "7solar_documentation.pdf"
# Hub access token for the (possibly gated) dataset; None when unset.
HF_TOKEN = os.environ.get("HF_TOKEN")
def initialize_components():
    """Build and return the conversational retrieval chain.

    Downloads the documentation PDF from the Hugging Face Hub, splits and
    embeds it into an in-memory Chroma vector store, then wires a Groq LLM
    and conversation memory into a ConversationalRetrievalChain.

    Returns:
        ConversationalRetrievalChain: ready to be called with
        {"question": ...} and returning {"answer": ...}.

    Raises:
        RuntimeError: if the PDF cannot be downloaded or parsed.
    """
    print("⚙️ Initializing components...")
    documents = _load_documents()
    vectorstore = _build_vectorstore(documents)
    return _build_chain(vectorstore)


def _load_documents():
    """Download the documentation PDF from the Hub and parse it into pages."""
    try:
        pdf_path = hf_hub_download(
            repo_id=HF_REPO_ID,
            filename=HF_PDF_NAME,
            repo_type="dataset",
            token=HF_TOKEN,
        )
        loader = PyPDFLoader(pdf_path)
        return loader.load()
    except Exception as e:
        # Collapse any download/parse failure into one caller-friendly error,
        # chaining the original cause for debuggability.
        raise RuntimeError(f"Failed to load PDF: {str(e)}") from e


def _build_vectorstore(documents):
    """Split the documents into chunks and embed them into a Chroma store.

    NOTE(review): the store is rebuilt from scratch on every call (no
    persist_directory) — acceptable for a Space that initializes once.
    """
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=1000,    # characters per chunk
        chunk_overlap=200,  # overlap preserves context across chunk borders
    )
    chunks = text_splitter.split_documents(documents)
    embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
    return Chroma.from_documents(chunks, embeddings)


def _build_chain(vectorstore):
    """Wire the Groq LLM, buffer memory, and retriever into a QA chain."""
    # NOTE(review): ChatGroq reads GROQ_API_KEY from the environment —
    # confirm it is configured in the Space secrets.
    llm = ChatGroq(
        model_name="llama3-70b-8192",
        temperature=0.3,  # low temperature keeps answers close to the docs
    )
    # Memory lets follow-up questions refer back to earlier turns.
    memory = ConversationBufferMemory(
        memory_key="chat_history",
        return_messages=True,
    )
    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory,
        chain_type="stuff",  # concatenate retrieved chunks into one prompt
    )
# Chat function
def respond(message, history):
    """Gradio chat callback: answer *message* via the retrieval chain.

    Args:
        message: The user's latest message.
        history: Prior turns supplied by gr.ChatInterface (unused here;
            conversational context lives in the chain's own memory).

    Returns:
        str: The assistant's reply, or a readable error description.
    """
    global qa_chain
    try:
        # Handle greetings BEFORE touching the chain: the original checked
        # greetings after lazy initialization, so a plain "hi" triggered a
        # full PDF download + embedding build (or an error if init failed).
        if message.strip().lower() in ("hi", "hello", "hey"):
            return "Hello! I'm your 7Solar assistant. How can I help you today?"
        # Lazily build the chain on the first real question.
        if "qa_chain" not in globals():
            qa_chain = initialize_components()
        # The chain tracks chat history internally via its memory.
        result = qa_chain({"question": message})
        return result["answer"]
    except Exception as e:
        # Top-level chat boundary: report rather than crash the UI.
        return f"An error occurred: {str(e)}"
# Create Gradio interface
# ChatInterface wraps `respond` in a standard chat UI; the examples render as
# clickable prompts and are NOT pre-computed at startup (cache_examples=False),
# which avoids triggering chain initialization at build time.
demo = gr.ChatInterface(
    fn=respond,
    title="☀️ 7Solar Assistant",
    description="Ask me anything about 7Solar's services and documentation",
    examples=["What is 7Solar.pk?", "How does the registration process work?"],
    cache_examples=False
)
# Launch with error handling
if __name__ == "__main__":
    try:
        # 0.0.0.0:7860 is the standard bind address/port for HF Spaces.
        demo.launch(server_name="0.0.0.0", server_port=7860)
    except Exception as launch_err:
        # Log the failure for the Space console, then re-raise so the
        # process exits non-zero and the platform reports the crash.
        print(f"Failed to launch: {str(launch_err)}")
        raise