anna1au commited on
Commit
c2a4832
·
verified ·
1 Parent(s): f99759e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -7
app.py CHANGED
@@ -83,18 +83,13 @@ def get_top_chunks(query, chunk_embeddings, text_chunks):
83
  client = InferenceClient("Qwen/Qwen2.5-7B-Instruct-1M")
84
 
85
def respond(message, history):
    """Stream a retrieval-grounded chat reply, yielding the partial text so far.

    Args:
        message: The user's latest input string.
        history: Prior chat turns as a list of {"role", "content"} dicts
            (Gradio ``type="messages"`` format), or falsy on the first turn.

    Yields:
        The accumulated response text after each streamed token, so the
        Gradio ChatInterface can render it incrementally.
    """
    response = ""
    # Retrieve the most relevant context chunks for this query.
    information = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
    messages = [{"role": "system", "content": f"You are a friendly and informative chatbot. You answer in full sentences. You base your response on the following information: {information}"}]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})
    complete = client.chat_completion(messages, max_tokens=1000, temperature=0.2, stream=True)
    for chunk in complete:
        # BUG FIX: the original read `message.choices[0].delta.content`, but
        # `message` is the user's input string — the streamed chunk is the
        # loop variable. That raised AttributeError on the first token.
        token = chunk.choices[0].delta.content
        # The final stream chunk may carry delta.content == None; skip it
        # instead of crashing on `str += None`.
        if token:
            response += token
        yield response
98
 
99
  chatBot = gr.ChatInterface(respond, type = "messages")
100
  chatBot.launch()
 
83
  client = InferenceClient("Qwen/Qwen2.5-7B-Instruct-1M")
84
 
85
def respond(message, history):
    """Return a retrieval-grounded chat reply as a single string.

    Args:
        message: The user's latest input string.
        history: Prior chat turns as a list of {"role", "content"} dicts
            (Gradio ``type="messages"`` format), or falsy on the first turn.

    Returns:
        The model's reply text, stripped of surrounding whitespace.
    """
    # Retrieve the most relevant context chunks for this query.
    information = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
    messages = [{"role": "system", "content": f"You are a friendly and informative chatbot. You answer in full sentences. You base your response on the following information: {information}"}]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})
    # BUG FIX: the original assigned the completion to `complete` but then
    # returned from the undefined name `response`, raising NameError on
    # every call. Bind the result to the name actually used below.
    response = client.chat_completion(messages, max_tokens=1000, temperature=0.2)
    return response["choices"][0]["message"]["content"].strip()
 
 
 
 
93
 
94
  chatBot = gr.ChatInterface(respond, type = "messages")
95
  chatBot.launch()