Change output function of predict as it cannot handle langchain history

#12
by RCaz - opened
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -164,7 +164,7 @@ def predict(message, history, request: gr.Request):
164
  gpt_response = llm.invoke(
165
  messages,
166
  config={
167
- "tags": ["Testing", 'RAG-Bot', 'V1'],
168
  "metadata": {
169
  "rag_llm": "gpt-5-nano",
170
  "num_retrieved_docs": len(relevant_docs),
@@ -185,17 +185,17 @@ def predict(message, history, request: gr.Request):
185
  if line not in seen:
186
  seen.add(line)
187
  unique_source_lines.append(line)
188
- source_context = "\nSources:\n" + "\n".join(unique_source_lines)
189
 
190
  except :
191
- source_context = "\n".join([
192
  f"{doc.metadata["source"]}" for i, doc in enumerate(relevant_docs)])
193
 
194
  messages.append(AIMessage(content=source_context))
195
  print(gpt_response.content )
196
  print(source_context)
197
 
198
- return messages
199
 
200
 
201
  # setup tracking
 
164
  gpt_response = llm.invoke(
165
  messages,
166
  config={
167
+ "tags": ["Testing", 'RAG-Bot', 'V1','Host_on_HF'],
168
  "metadata": {
169
  "rag_llm": "gpt-5-nano",
170
  "num_retrieved_docs": len(relevant_docs),
 
185
  if line not in seen:
186
  seen.add(line)
187
  unique_source_lines.append(line)
188
+ source_context = "\nSources:" + "\n".join(unique_source_lines)
189
 
190
  except :
191
+ source_context = "\nSources:" + "\n".join([
192
  f"{doc.metadata["source"]}" for i, doc in enumerate(relevant_docs)])
193
 
194
  messages.append(AIMessage(content=source_context))
195
  print(gpt_response.content )
196
  print(source_context)
197
 
198
+ return f"{gpt_response.content} {source_context}"
199
 
200
 
201
  # setup tracking