File size: 2,927 Bytes
84bc3c0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
import os
import gradio as gr
from langchain.document_loaders import WebBaseLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import FAISS
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.chains import RetrievalQA
from langchain_together import Together

# πŸ” Set your API key
os.environ["TOGETHER_API_KEY"] = os.environ.get("TOGETHER_API_KEY", "")


# πŸ” Caches
qa_cache = {}
retriever_cache = {}

# πŸ” Load and embed the website content
def load_url(url):
    """Fetch *url*, chunk and embed its text, and build a retrieval-QA chain.

    Returns a ``(retriever, qa_chain, status_message)`` tuple. On any
    failure (bad URL, fetch error, model/embedding error) the first two
    elements are ``None`` and the status message describes the error.
    """
    try:
        # Download the page and split it into overlapping ~500-char chunks.
        documents = WebBaseLoader(url).load()
        text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=500,
            chunk_overlap=50,
        )
        pieces = text_splitter.split_documents(documents)

        # Embed the chunks into a FAISS index and expose it as a retriever.
        embedder = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
        vector_store = FAISS.from_documents(pieces, embedding=embedder)
        site_retriever = vector_store.as_retriever()

        # Wire the retriever to a hosted Mistral model for question answering.
        chat_llm = Together(
            model="mistralai/Mistral-7B-Instruct-v0.2",
            temperature=0.5,
            max_tokens=512,
        )
        qa_chain = RetrievalQA.from_chain_type(llm=chat_llm, retriever=site_retriever)
    except Exception as e:
        return None, None, f"❌ Error: {str(e)}"
    return site_retriever, qa_chain, "✅ Website loaded. You can start chatting!"

# πŸ’¬ Chat handler
def chat(message, history, url):
    if url not in qa_cache:
        retriever, qa, status = load_url(url)
        if retriever is None:
            history.append({"role": "user", "content": message})
            history.append({"role": "assistant", "content": status})
            return history, ""
        retriever_cache[url] = retriever
        qa_cache[url] = qa
        history.append({"role": "system", "content": status})
    else:
        qa = qa_cache[url]

    try:
        result = qa.invoke({"query": message})["result"]
    except Exception as e:
        result = f"❌ Error: {str(e)}"

    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": result})
    return history, ""

# βœ… Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("## 🧠 Chat with Any Website")

    url_input = gr.Textbox(label="Website URL", placeholder="https://en.wikipedia.org/wiki/LangChain")
    chatbot = gr.Chatbot(label="Chat", type="messages")
    msg_input = gr.Textbox(show_label=False, placeholder="Ask your question here and press Enter...")
    state = gr.State([])

    msg_input.submit(chat, inputs=[msg_input, state, url_input], outputs=[chatbot, msg_input])

    # πŸ‘‡ Footer
    gr.Markdown(
        """
        ---
        <center>
            πŸ”— <a href="https://github.com/vivekreddy1105" target="_blank">GitHub</a> | 
            πŸ’Ό <a href="https://www.linkedin.com/in/vivekreddy1105/" target="_blank">LinkedIn</a><br>
            Β© 2025 Vivek Reddy Eluka
        </center>
        """,
        elem_id="footer"
    )

demo.launch(share=True)