File size: 2,120 Bytes
ea8cecd
 
 
53a6ab3
ea8cecd
53a6ab3
16edf39
0d2c7f0
53a6ab3
ea8cecd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53a6ab3
ea8cecd
53a6ab3
ea8cecd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b8fa546
ea8cecd
 
 
 
 
53a6ab3
ea8cecd
 
 
 
 
 
 
 
 
53a6ab3
ea8cecd
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
import os
from functools import lru_cache

import gradio as gr
from langchain.chains.question_answering import load_qa_chain
from langchain_community.document_loaders import WebBaseLoader, PyMuPDFLoader
from langchain_community.llms import HuggingFaceHub
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEmbeddings

# Hugging Face Hub API token, read from the environment (None when unset —
# e.g. the HF Spaces secret "HF_TOKEN"); used to authenticate the LLM client.
hf_token = os.environ.get("HF_TOKEN")

def load_pdf(file_path):
    """Parse the PDF at *file_path* into a list of LangChain documents.

    Delegates all extraction to ``PyMuPDFLoader``; one document per page.
    """
    return PyMuPDFLoader(file_path).load()

def load_website(url):
    """Fetch the page at *url* and return its content as LangChain documents."""
    return WebBaseLoader(url).load()

def setup_vector_store(docs):
    """Embed *docs* with a MiniLM sentence-transformer and index them in FAISS.

    Returns the in-memory FAISS vector store ready for similarity search.
    """
    embedder = HuggingFaceEmbeddings(
        model_name="sentence-transformers/all-MiniLM-L6-v2"
    )
    return FAISS.from_documents(docs, embedder)

@lru_cache(maxsize=1)
def _get_llm():
    """Build the HuggingFace Hub LLM client exactly once.

    The original code constructed a fresh ``HuggingFaceHub`` client on every
    question; the client is configuration-only, so it is safe to cache and
    reuse across calls.
    """
    return HuggingFaceHub(
        repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
        model_kwargs={"temperature": 0.7, "max_length": 512},
        huggingfacehub_api_token=hf_token,
    )


def ask_question(query, vector_store):
    """Answer *query* from documents retrieved out of *vector_store*.

    Retrieves the most relevant documents, stuffs them into a single QA
    prompt ("stuff" chain), and returns the LLM's answer string.
    """
    retriever = vector_store.as_retriever()
    docs = retriever.get_relevant_documents(query)
    chain = load_qa_chain(_get_llm(), chain_type="stuff")
    return chain.run(input_documents=docs, question=query)

def process_input(weblink, pdf_file, question):
    """Gradio callback: load the given sources, index them, answer *question*.

    Parameters: *weblink* (str or empty), *pdf_file* (gr.File upload or
    None), *question* (str). Returns the answer string, or a user-facing
    message/error string — never raises, so the UI always shows something.
    """
    if not weblink and not pdf_file:
        return "Please provide a website link or upload a PDF."
    # New guard: previously an empty question still triggered loading,
    # embedding, and an LLM round-trip only to produce a confusing result.
    if not question or not question.strip():
        return "Please ask a question."

    docs = []
    try:
        if weblink:
            docs.extend(load_website(weblink))
        if pdf_file:
            docs.extend(load_pdf(pdf_file.name))
        # New guard: FAISS.from_documents fails opaquely on an empty list.
        if not docs:
            return "No content could be extracted from the provided sources."

        vector_store = setup_vector_store(docs)
        return ask_question(question, vector_store)
    except Exception as e:  # UI boundary: surface the error instead of crashing
        return f"Error: {str(e)}"

# Wire the QA pipeline into a simple three-input / one-output Gradio UI.
# Both sources are optional, but process_input requires at least one of them.
demo = gr.Interface(
    fn=process_input,
    inputs=[
        gr.Textbox(label="Website Link (Optional)"),
        gr.File(label="Upload PDF (Optional)"),
        gr.Textbox(label="Ask a Question")
    ],
    outputs=gr.Textbox(label="Final Answer"),
    title="Web & PDF QA System"
)

# Launch the app only when run as a script (HF Spaces also executes this path).
if __name__ == "__main__":
    demo.launch()