Can Günen committed on
Commit
285167f
·
1 Parent(s): 0a659a1

added app and requirements files

Browse files
Files changed (2) hide show
  1. app.py +72 -0
  2. requirements.txt +5 -0
app.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain.chains.question_answering import load_qa_chain
2
+ from langchain.text_splitter import CharacterTextSplitter
3
+ from langchain.embeddings import HuggingFaceEmbeddings
4
+ from langchain.document_loaders import TextLoader
5
+ from langchain.vectorstores import FAISS
6
+ from langchain import HuggingFaceHub
7
+ import requests
8
+ import textwrap
9
+ import gradio as gr
10
+ import os
11
+
12
# Define a function to process the API key and text input and save them in variables
def process_inputs(api_key, text_input):
    """Store the Hugging Face API token and persist the document text.

    Args:
        api_key: Hugging Face Hub API token; exported as
            HUGGINGFACEHUB_API_TOKEN so downstream LangChain calls pick it up.
        text_input: Either a URL to a plain-text file (anything starting
            with "http") or the raw document text itself.

    Returns:
        A status string shown in the Gradio UI.

    Raises:
        requests.HTTPError: if a URL input responds with an error status.
    """
    os.environ["HUGGINGFACEHUB_API_TOKEN"] = api_key

    # Check if the text input is a URL to a text file or the text itself
    if text_input.startswith("http"):
        # Download the referenced text file; the timeout keeps the UI from
        # hanging forever on an unresponsive host.
        response = requests.get(text_input, timeout=30)
        # Fail loudly instead of silently saving an HTTP error page as the document.
        response.raise_for_status()
        text_var = response.text
    else:
        # The input is the document text itself.
        text_var = text_input

    # Persist the document so talk() can load it later; explicit encoding
    # avoids platform-dependent defaults.
    with open("document.txt", "w", encoding="utf-8") as f:
        f.write(text_var)

    return "Inputs saved successfully"
32
+
33
+
34
def talk(question):
    """Answer a question about the previously-saved document.

    Loads ./document.txt (written by process_inputs), splits it into
    chunks, embeds the chunks into a FAISS index, and runs a "stuff"
    QA chain over the most similar chunks using flan-t5-xl on the
    Hugging Face Hub.

    Args:
        question: Natural-language question about the document.

    Returns:
        The answer string produced by the QA chain.

    NOTE(review): the embeddings and FAISS index are rebuilt on every
    call, which is wasteful for repeated questions — consider caching
    the index keyed on the document contents.
    """
    loader = TextLoader('./document.txt')
    documents = loader.load()

    # Split into ~1000-character chunks with no overlap so each chunk
    # fits comfortably in the model context.
    text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
    docs = text_splitter.split_documents(documents)

    # temperature=0 keeps answers deterministic; max_length caps output size.
    llm = HuggingFaceHub(repo_id="google/flan-t5-xl", model_kwargs={"temperature": 0, "max_length": 512})
    chain = load_qa_chain(llm, chain_type="stuff")
    embeddings = HuggingFaceEmbeddings()

    # Build the vector index and retrieve the chunks most relevant to the
    # question. (Removed a leftover debug print of the chunk list.)
    db = FAISS.from_documents(docs, embeddings)
    relevant_docs = db.similarity_search(question)
    return chain.run(input_documents=relevant_docs, question=question)
52
+
53
+
54
with gr.Blocks() as demo:
    gr.Markdown("Talk with your document")
    with gr.Tab("Configs"):
        # Collect the API key and the document (raw text or URL) from the
        # user. gr.Textbox is used consistently throughout: the
        # gr.inputs.* aliases are deprecated and removed in Gradio 3.x,
        # and the outputs below already used gr.Textbox directly.
        api_key_input = gr.Textbox(label="Enter your API key")
        text_input = gr.Textbox(label="Enter text or URL to text file")
        text_output = gr.Textbox(label="Status of configuration")
        text_button = gr.Button("Save configuration")

    with gr.Tab("QA"):
        q_input = gr.Textbox(label="Please write your question")
        q_output = gr.Textbox(label="Answer from AI")
        q_button = gr.Button("Ask question")

    # Wire the buttons to their handlers.
    text_button.click(process_inputs, inputs=[api_key_input, text_input], outputs=text_output)
    q_button.click(talk, inputs=q_input, outputs=q_output)

demo.launch(debug=True)
requirements.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ langchain
2
+ huggingface_hub
3
+ sentence_transformers
4
+ gradio
5
+ faiss-cpu