pd4consultingmyles committed on
Commit
0a24f02
·
1 Parent(s): 035a610

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -7
app.py CHANGED
@@ -8,6 +8,7 @@ from gpt_index import (
8
  GPTSimpleVectorIndex,
9
  LLMPredictor,
10
  QuestionAnswerPrompt,
 
11
  PromptHelper
12
  )
13
 
@@ -42,17 +43,19 @@ with gr.Blocks() as demo:
42
 
43
  def ask_gpt(message, messages_history):
44
  messages_history += [{"role": "user", "content": message}]
45
- query_str = "Who is the mayor of Atlanta?"
46
  QA_PROMPT_TMPL = (
47
- "You are an AI specialized in Atlanta.\n"
48
- "If a query does not involve Atlanta, say you can't answer the query and make the answer related to Atlanta.\n"
49
- "If the query is ambiguous, assume it is related to Atlanta.\n"
50
- "If you cannot relate the query to Atlanta, do not answer it.\n"
51
  "We have provided context information below. \n"
52
  "---------------------\n"
53
  "{context_str}"
54
  "\n---------------------\n"
55
- "Given this information, please answer the query: {query_str}\n"
 
 
 
 
56
  )
57
  QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL)
58
 
@@ -60,7 +63,7 @@ with gr.Blocks() as demo:
60
  # Takes in the input from the user to deliver responses
61
  index = GPTSimpleVectorIndex.load_from_disk('index_demo.json')
62
  message = ' '.join([message['content'] for message in messages_history])
63
- response = index.query(message, response_mode="compact", text_qa_template = QA_PROMPT)
64
  return response.response, messages_history
65
  #return response['choices'][0]['message']['content'], messages_history
66
 
 
8
  GPTSimpleVectorIndex,
9
  LLMPredictor,
10
  QuestionAnswerPrompt,
11
+ RefinePrompt,
12
  PromptHelper
13
  )
14
 
 
43
 
44
  def ask_gpt(message, messages_history):
45
  messages_history += [{"role": "user", "content": message}]
46
+ query_str = ''
47
  QA_PROMPT_TMPL = (
48
+ "You are an conversational AI specialized in Atlanta.\n"
49
+ "If a query does not relate to Atlanta, say you can't answer the query.\n"# and make the answer related to Atlanta.\n"
 
 
50
  "We have provided context information below. \n"
51
  "---------------------\n"
52
  "{context_str}"
53
  "\n---------------------\n"
54
+ "Given this information, please give a detailed and conversational answer to the query: {query_str} and cite the url source associated with this answer.\n"
55
+ "Use information from previous queries in your response when appropriate.\n"
56
+ "Format the answer to the query like this: Answer: .\n"
57
+ "\nSource: followed by the source in bold.\n"
58
+ "Put the Answer and Source on different lines of the response and the Source is the url source associated with the answer.\n"
59
  )
60
  QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL)
61
 
 
63
  # Takes in the input from the user to deliver responses
64
  index = GPTSimpleVectorIndex.load_from_disk('index_demo.json')
65
  message = ' '.join([message['content'] for message in messages_history])
66
+ response = index.query(message, text_qa_template = QA_PROMPT)
67
  return response.response, messages_history
68
  #return response['choices'][0]['message']['content'], messages_history
69