caobin committed on
Commit
611d2af
·
verified ·
1 Parent(s): e620cc1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -4
app.py CHANGED
@@ -12,14 +12,17 @@ model = AutoModelForCausalLM.from_pretrained(
12
  )
13
 
14
  def chat_fn(message, history):
15
- # 只保留最近 3 轮历史
16
- recent_history = history[-6:] # 3轮对话,每轮2条消息
17
  full_prompt = ""
18
  for msg in recent_history:
 
 
 
19
  if msg["role"] == "user":
20
- full_prompt += f"<|user|>{msg['content']}<|assistant|>"
21
  elif msg["role"] == "assistant":
22
- full_prompt += msg['content']
 
23
  full_prompt += f"<|user|>{message}<|assistant|>"
24
 
25
  inputs = tokenizer(full_prompt, return_tensors="pt").to(model.device)
@@ -35,6 +38,7 @@ def chat_fn(message, history):
35
  output_text = output_text.split("<|assistant|>")[-1]
36
  return output_text.strip()
37
 
 
38
  with gr.Blocks(title="caobin LLM Chatbot") as demo:
39
  gr.Markdown("# 🤖 caobin's AI assistant")
40
  chatbot = gr.Chatbot(height=450)
 
12
  )
13
 
14
  def chat_fn(message, history):
15
+ recent_history = history[-6:] # 只保留最近 3
 
16
  full_prompt = ""
17
  for msg in recent_history:
18
+ content = msg['content']
19
+ if isinstance(content, list):
20
+ content = " ".join([str(c) for c in content])
21
  if msg["role"] == "user":
22
+ full_prompt += f"<|user|>{content}<|assistant|>"
23
  elif msg["role"] == "assistant":
24
+ full_prompt += content
25
+
26
  full_prompt += f"<|user|>{message}<|assistant|>"
27
 
28
  inputs = tokenizer(full_prompt, return_tensors="pt").to(model.device)
 
38
  output_text = output_text.split("<|assistant|>")[-1]
39
  return output_text.strip()
40
 
41
+
42
  with gr.Blocks(title="caobin LLM Chatbot") as demo:
43
  gr.Markdown("# 🤖 caobin's AI assistant")
44
  chatbot = gr.Chatbot(height=450)