OlamideKayode committed on
Commit
2b597fd
·
verified ·
1 Parent(s): 6d8546d

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +21 -7
app.py CHANGED
@@ -7,16 +7,23 @@ import gradio as gr
7
 
8
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
9
  from sentence_transformers import SentenceTransformer
10
- from langchain.document_loaders import TextLoader
11
  from langchain.text_splitter import RecursiveCharacterTextSplitter
12
- from langchain.embeddings import HuggingFaceEmbeddings
13
- from langchain.vectorstores import FAISS as LangChainFAISS
14
- from langchain.docstore import InMemoryDocstore
15
  from langchain.schema import Document
16
- from langchain.llms import HuggingFacePipeline
17
  from huggingface_hub import login
18
  from huggingface_hub import upload_file
19
 
 
 
 
 
 
 
 
20
  # Extract the Knowledge Base ZIP
21
  if os.path.exists("md_knowledge_base.zip"):
22
  with zipfile.ZipFile("md_knowledge_base.zip", "r") as zip_ref:
@@ -123,13 +130,20 @@ def answer_fn(question):
123
  # Gradio Interface
124
  def chat_fn(user_message, history):
125
  bot_response = answer_fn(user_message)
126
- history = history + [(user_message, bot_response)]
127
  return history, history
128
 
129
 
 
 
 
 
 
 
130
  with gr.Blocks() as demo:
131
  gr.Markdown("## 📘 University of Hull Assistant")
132
- chatbot = gr.Chatbot()
 
133
  state = gr.State([])
134
 
135
  user_input = gr.Textbox(placeholder="Ask a question about University of Hull...", show_label=False)
 
7
 
8
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
9
  from sentence_transformers import SentenceTransformer
10
+ #from langchain.document_loaders import TextLoader
11
  from langchain.text_splitter import RecursiveCharacterTextSplitter
12
+ #from langchain.embeddings import HuggingFaceEmbeddings
13
+ #from langchain.vectorstores import FAISS as LangChainFAISS
14
+ #from langchain.docstore import InMemoryDocstore
15
  from langchain.schema import Document
16
+ #from langchain.llms import HuggingFacePipeline
17
  from huggingface_hub import login
18
  from huggingface_hub import upload_file
19
 
20
+ from langchain_community.document_loaders import TextLoader
21
+ from langchain_community.embeddings import HuggingFaceEmbeddings
22
+ from langchain_community.vectorstores import FAISS as LangChainFAISS
23
+ from langchain_community.docstore.in_memory import InMemoryDocstore
24
+ from langchain_community.llms import HuggingFacePipeline
25
+
26
+
27
  # Extract the Knowledge Base ZIP
28
  if os.path.exists("md_knowledge_base.zip"):
29
  with zipfile.ZipFile("md_knowledge_base.zip", "r") as zip_ref:
 
130
  # Gradio Interface
131
  def chat_fn(user_message, history):
132
  bot_response = answer_fn(user_message)
133
+ history = history + [{"role": "user", "content": user_message}, {"role": "assistant", "content": bot_response}]
134
  return history, history
135
 
136
 
137
+ #def chat_fn(user_message, history):
138
+ # bot_response = answer_fn(user_message)
139
+ # history = history + [(user_message, bot_response)]
140
+ # return history, history
141
+
142
+
143
  with gr.Blocks() as demo:
144
  gr.Markdown("## 📘 University of Hull Assistant")
145
+ #chatbot = gr.Chatbot()
146
+ chatbot = gr.Chatbot(label="University of Hull Assistant", type="messages")
147
  state = gr.State([])
148
 
149
  user_input = gr.Textbox(placeholder="Ask a question about University of Hull...", show_label=False)