peterpull committed on
Commit
92448df
·
1 Parent(s): 0db1768

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -13
app.py CHANGED
@@ -1,5 +1,4 @@
1
- from gpt_index import GPTSimpleVectorIndex
2
- from langchain import OpenAI
3
  import gradio as gr
4
  from gradio import Interface, Textbox
5
  import sys
@@ -17,8 +16,7 @@ DATASET_REPO_URL = "https://huggingface.co/datasets/peterpull/MediatorBot"
17
  DATA_FILENAME = "data.txt"
18
  INDEX_FILENAME = "index2.json"
19
 
20
-
21
- # we need a write access token.
22
  HF_TOKEN = os.environ.get("HF_TOKEN")
23
  print("HF TOKEN is none?", HF_TOKEN is None)
24
  print("HF hub ver", huggingface_hub.__version__)
@@ -53,21 +51,18 @@ def store_message(chatinput: str, chatresponse: str):
53
  file.write(f"{datetime.now()},{chatinput},{chatresponse}\n")
54
  print(f"Wrote to datafile: {datetime.now()},{chatinput},{chatresponse}\n")
55
 
56
- #trial - take out if fails to write to local directory
57
- with open('backup.txt', "a") as file:
58
- file.write(f"{datetime.now()},{chatinput},{chatresponse}\n")
59
- print(f"Wrote to datafile: {datetime.now()},{chatinput},{chatresponse}\n")
60
-
61
  return generate_text()
62
 
63
  def get_index(index_file_path):
64
  if os.path.exists(index_file_path):
 
65
  print_header_json_file(index_file_path)
66
  index_size = os.path.getsize(index_file_path)
67
  print(f"Size of {index_file_path}: {index_size} bytes") #let me know how big json file is.
68
- #debug - needs to be index_file_path
69
- return GPTSimpleVectorIndex.load_from_disk('./index/indexsmall.json'
70
- )
71
  else:
72
  print(f"Error: '{index_file_path}' does not exist.")
73
  sys.exit()
@@ -83,7 +78,7 @@ index = get_index(INDEX_FILE)
83
  # passes the prompt to the chatbot
84
  def chatbot(input_text, mentioned_person='Mediator John Haynes', confidence_threshold=0.5):
85
  prompt = f"You are {mentioned_person}. Answer this: {input_text}. Reply from the contextual data or say you don't know. To finish, ask an insightful question."
86
- response = index.query(prompt, response_mode="default", verbose=True)
87
 
88
  store_message(input_text,response)
89
 
 
1
+ from llama_index import GPTSimpleVectorIndex
 
2
  import gradio as gr
3
  from gradio import Interface, Textbox
4
  import sys
 
16
  DATA_FILENAME = "data.txt"
17
  INDEX_FILENAME = "index2.json"
18
 
19
+ # we need a HF access token - read I think suffices because we are cloning the distant repo to the local Space repo.
 
20
  HF_TOKEN = os.environ.get("HF_TOKEN")
21
  print("HF TOKEN is none?", HF_TOKEN is None)
22
  print("HF hub ver", huggingface_hub.__version__)
 
51
  file.write(f"{datetime.now()},{chatinput},{chatresponse}\n")
52
  print(f"Wrote to datafile: {datetime.now()},{chatinput},{chatresponse}\n")
53
 
54
+ #need to find a way to push back to dataset repo
55
+
 
 
 
56
  return generate_text()
57
 
58
  def get_index(index_file_path):
59
  if os.path.exists(index_file_path):
60
+ #print 500 characters of json header
61
  print_header_json_file(index_file_path)
62
  index_size = os.path.getsize(index_file_path)
63
  print(f"Size of {index_file_path}: {index_size} bytes") #let me know how big json file is.
64
+ #debug - this is where an error is occurring
65
+ return GPTSimpleVectorIndex.load_from_disk(index_file_path)
 
66
  else:
67
  print(f"Error: '{index_file_path}' does not exist.")
68
  sys.exit()
 
78
  # passes the prompt to the chatbot
79
  def chatbot(input_text, mentioned_person='Mediator John Haynes', confidence_threshold=0.5):
80
  prompt = f"You are {mentioned_person}. Answer this: {input_text}. Reply from the contextual data or say you don't know. To finish, ask an insightful question."
81
+ response = index.query(prompt, response_mode="compact")
82
 
83
  store_message(input_text,response)
84