Spaces:
Sleeping
import os

import gradio as gr
from langchain.document_loaders import WebBaseLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import FAISS
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.chains import RetrievalQA
from langchain_together import Together

# Together AI API key: read from the environment (empty string if unset).
os.environ["TOGETHER_API_KEY"] = os.environ.get("TOGETHER_API_KEY", "")

# Per-URL caches so each website is fetched and embedded only once per session.
qa_cache = {}         # url -> RetrievalQA chain
retriever_cache = {}  # url -> FAISS retriever
| # π Load and embed the website content | |
def load_url(url):
    """Fetch *url*, embed its text, and build a RetrievalQA chain over it.

    Args:
        url: Web page address to load and index.

    Returns:
        (retriever, qa, status): the FAISS retriever, the RetrievalQA
        chain, and a human-readable status message. On any failure the
        first two elements are ``None`` and *status* carries the error.
    """
    try:
        # Download and parse the page into LangChain documents.
        loader = WebBaseLoader(url)
        docs = loader.load()

        # Split into small overlapping chunks suitable for embedding.
        splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)
        chunks = splitter.split_documents(docs)

        # Embed locally with MiniLM and index with FAISS for similarity search.
        embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
        db = FAISS.from_documents(chunks, embedding=embeddings)
        retriever = db.as_retriever()

        # Together-hosted Mistral as the answering LLM.
        llm = Together(
            model="mistralai/Mistral-7B-Instruct-v0.2",
            temperature=0.5,
            max_tokens=512,
        )
        qa = RetrievalQA.from_chain_type(llm=llm, retriever=retriever)
        # Fixed: status strings were mojibake ("β ..."); restored readable text.
        return retriever, qa, "✅ Website loaded. You can start chatting!"
    except Exception as e:
        # Best-effort boundary: surface any load/embed/LLM-setup failure to the UI.
        return None, None, f"❌ Error: {str(e)}"
| # π¬ Chat handler | |
def chat(message, history, url):
    """Gradio submit handler: answer *message* about the website at *url*.

    Loads and caches the QA chain for *url* on first use, appends the
    user/assistant turn to *history* (Gradio "messages" format), and
    returns the updated history plus an empty string to clear the input box.
    """
    if url not in qa_cache:
        retriever, qa, status = load_url(url)
        if retriever is None:
            # Loading failed: echo the question and show the error, then stop.
            history.append({"role": "user", "content": message})
            history.append({"role": "assistant", "content": status})
            return history, ""
        retriever_cache[url] = retriever
        qa_cache[url] = qa
        # Fixed: this was {"role": "system", ...}, but gr.Chatbot with
        # type="messages" accepts only "user"/"assistant" roles, so the
        # load-status message is reported as an assistant message.
        history.append({"role": "assistant", "content": status})
    else:
        qa = qa_cache[url]
    try:
        result = qa.invoke({"query": message})["result"]
    except Exception as e:
        # Fixed: error prefix was mojibake ("β ..."); restored readable text.
        result = f"❌ Error: {str(e)}"
    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": result})
    return history, ""
| # β Gradio UI | |
# --- Gradio UI ---
# Fixed throughout: user-visible strings contained mojibake ("π§", "πΌ",
# "Β© 2025"); restored to the intended emoji/© characters.
with gr.Blocks() as demo:
    gr.Markdown("## 🧠 Chat with Any Website")

    url_input = gr.Textbox(
        label="Website URL",
        placeholder="https://en.wikipedia.org/wiki/LangChain",
    )
    # type="messages" means history entries are {"role": ..., "content": ...} dicts.
    chatbot = gr.Chatbot(label="Chat", type="messages")
    msg_input = gr.Textbox(
        show_label=False,
        placeholder="Ask your question here and press Enter...",
    )
    # Conversation history carried between submits (mutated in place by chat()).
    state = gr.State([])

    # Enter in the message box runs the chat handler and clears the box.
    msg_input.submit(chat, inputs=[msg_input, state, url_input],
                     outputs=[chatbot, msg_input])

    # Footer
    gr.Markdown(
        """
        ---
        <center>
        🔗 <a href="https://github.com/vivekreddy1105" target="_blank">GitHub</a> |
        💼 <a href="https://www.linkedin.com/in/vivekreddy1105/" target="_blank">LinkedIn</a><br>
        © 2025 Vivek Reddy Eluka
        </center>
        """,
        elem_id="footer",
    )

demo.launch(share=True)