Cyantist8208 committed on
Commit
a97164c
·
1 Parent(s): b8a407c
Files changed (1) hide show
  1. app.py +3 -1
app.py CHANGED
@@ -116,12 +116,14 @@ def build_llm_prompt(system: str, context: list[str], user_question: str) -> str
116
  conversation.append({"role": "user", "content": user_question.strip()})
117
 
118
  # 套用 LLaMA-style prompt 格式
119
- return tokenizer.apply_chat_template(
120
  conversation,
121
  tokenize=False,
122
  add_generation_prompt=False
123
  )
124
 
 
 
125
  # ---------- 4. Gradio playground (same UI as before) --------------------------
126
  def store_doc(doc_text: str,user_id="demo",chunk_size=DEFAULT_CHUNK_SIZE,chunk_overlap=DEFAULT_CHUNK_OVERLAP):
127
  try:
 
116
  conversation.append({"role": "user", "content": user_question.strip()})
117
 
118
  # 套用 LLaMA-style prompt 格式
119
+ input_token = tokenizer.apply_chat_template(
120
  conversation,
121
  tokenize=False,
122
  add_generation_prompt=False
123
  )
124
 
125
+ return tokenizer.decode(input_token)
126
+
127
  # ---------- 4. Gradio playground (same UI as before) --------------------------
128
  def store_doc(doc_text: str,user_id="demo",chunk_size=DEFAULT_CHUNK_SIZE,chunk_overlap=DEFAULT_CHUNK_OVERLAP):
129
  try: