pragyarama committed on
Commit
907f352
·
verified ·
1 Parent(s): 4809cd4

update respond function to call and include get_top_chunks

Browse files
Files changed (1) hide show
  1. app.py +14 -2
app.py CHANGED
@@ -65,11 +65,23 @@ def get_top_chunks(query, chunk_embeddings, text_chunks): #Finds most relevant t
65
  top_results = get_top_chunks("Why is it important to carry copies of your travel documents?", chunk_embeddings, cleaned_chunks) # Call get_top_chunks with query
66
  print(top_results)
67
 
68
- #SAMPLE HUGGING FACE PROJECT
69
  client = InferenceClient("Qwen/Qwen2.5-72B-instruct")
70
 
71
  def respond(message, history):
72
- messages = [{"role": "system", "content": "You are a friendly chatbot"}]
 
 
 
 
 
 
 
 
 
 
 
 
73
 
74
  if history:
75
  messages.extend(history)
 
65
  top_results = get_top_chunks("Why is it important to carry copies of your travel documents?", chunk_embeddings, cleaned_chunks) # Call get_top_chunks with query
66
  print(top_results)
67
 
68
+ #HUGGING FACE PROJECT
69
  client = InferenceClient("Qwen/Qwen2.5-72B-instruct")
70
 
71
  def respond(message, history):
72
+ top_chunks = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
73
+ str_top_chunks = "\n".join(top_chunks)
74
+
75
+ messages = [
76
+ {
77
+ "role": "system",
78
+ "content": f"You are a friendly travel agent. Base your response on {top_chunks}."
79
+ },
80
+ {
81
+ "role": "user",
82
+ "content": f"Question: {message}"
83
+ }
84
+ ]
85
 
86
  if history:
87
  messages.extend(history)