Cyantist8208 committed on
Commit
d1c70d1
·
1 Parent(s): 4bfdc55

don't use list as a parameter

Browse files
Files changed (1) hide show
  1. app.py +6 -8
app.py CHANGED
@@ -108,7 +108,7 @@ def store_doc(doc_text: str, user_id="demo"):
108
  except Exception as e:
109
  return f"Error during storing: {e}"
110
 
111
- def answer(system: str, context: list[str], question: str, user_id="demo", history="None"):
112
  """UI callback: retrieve, build prompt with Qwen tags, generate answer."""
113
  try:
114
  if not question.strip():
@@ -116,6 +116,7 @@ def answer(system: str, context: list[str], question: str, user_id="demo", histo
116
  if history != "None" and not kb[user_id]["texts"]:
117
  return "No reference passage yet. Add one first."
118
 
 
119
  # 1. Retrieve top-k similar passages
120
  if history == "Some":
121
  q_vec = embed(question)
@@ -123,13 +124,13 @@ def answer(system: str, context: list[str], question: str, user_id="demo", histo
123
  sims = torch.matmul(store["vecs"], q_vec) # [N]
124
  k = min(4, sims.numel())
125
  idxs = torch.topk(sims, k=k).indices.tolist()
126
- context += [store["texts"][i] for i in idxs]
127
  elif history == "All":
128
  store = kb[user_id]
129
- context += store["texts"]
130
 
131
  # 2. Build a Qwen-chat prompt (helper defined earlier)
132
- prompt = build_qwen_prompt(system, context, question)
133
 
134
  # 3. Generate and strip everything before the assistant tag
135
  load_chat()
@@ -180,10 +181,7 @@ with gr.Blocks() as demo:
180
  answer_box = gr.Textbox(lines=6, label="Assistant reply")
181
 
182
  answer_btn.click(
183
- fn=lambda sys, ctx, q, uid, h: answer(sys,
184
- [line.strip() for line in ctx.splitlines() if line.strip()],
185
- q, uid, h
186
- ),
187
  inputs=[system_box, context_box, question_box, user_id_box, history_cb],
188
  outputs=answer_box
189
  )
 
108
  except Exception as e:
109
  return f"Error during storing: {e}"
110
 
111
+ def answer(system: str, context: str, question: str, user_id="demo", history="None"):
112
  """UI callback: retrieve, build prompt with Qwen tags, generate answer."""
113
  try:
114
  if not question.strip():
 
116
  if history != "None" and not kb[user_id]["texts"]:
117
  return "No reference passage yet. Add one first."
118
 
119
+ context_list = [context]
120
  # 1. Retrieve top-k similar passages
121
  if history == "Some":
122
  q_vec = embed(question)
 
124
  sims = torch.matmul(store["vecs"], q_vec) # [N]
125
  k = min(4, sims.numel())
126
  idxs = torch.topk(sims, k=k).indices.tolist()
127
+ context_list += [store["texts"][i] for i in idxs]
128
  elif history == "All":
129
  store = kb[user_id]
130
+ context_list += store["texts"]
131
 
132
  # 2. Build a Qwen-chat prompt (helper defined earlier)
133
+ prompt = build_qwen_prompt(system, context_list, question)
134
 
135
  # 3. Generate and strip everything before the assistant tag
136
  load_chat()
 
181
  answer_box = gr.Textbox(lines=6, label="Assistant reply")
182
 
183
  answer_btn.click(
184
+ fn=answer,
 
 
 
185
  inputs=[system_box, context_box, question_box, user_id_box, history_cb],
186
  outputs=answer_box
187
  )