asemxin committed on
Commit
554cff6
·
1 Parent(s): 82dc502

feat: 添加 per-chat 对话历史(最近10轮)

Browse files
Files changed (1) hide show
  1. image_daemon.py +27 -18
image_daemon.py CHANGED
@@ -20,6 +20,10 @@ OPENCLAW_GATEWAY = "http://127.0.0.1:18789/v1"
20
  _use_gateway = False # 启动时探测决定
21
  _soul_prompt = "" # SOUL.md 内容
22
 
 
 
 
 
23
  # ---------- 日志 ----------
24
  def log(msg):
25
  ts = time.strftime("%H:%M:%S")
@@ -257,14 +261,14 @@ def check_openclaw_gateway():
257
  log(f"⚠️ Gateway 不可用 ({e}),使用外部 LLM + SOUL 人设")
258
 
259
  # ---------- LLM 对话 ----------
260
- def chat_with_llm(user_text):
261
  """优先转发给本地 OpenClaw Gateway(带人设+插件),Fallback 到外部 LLM+SOUL"""
262
  try:
263
  if _use_gateway:
264
  # 走 OpenClaw Gateway:人设和工具由它处理
265
  resp = requests.post(
266
  f"{OPENCLAW_GATEWAY}/chat/completions",
267
- json={"model": "default", "messages": [{"role": "user", "content": user_text}], "stream": False},
268
  timeout=120
269
  )
270
  if resp.status_code == 200:
@@ -272,23 +276,20 @@ def chat_with_llm(user_text):
272
  reply = data["choices"][0]["message"]["content"]
273
  log(f"🤖 Gateway 回复: {reply[:60]}...")
274
  return reply
275
- log(f"⚠️ Gateway 失败 ({resp.status_code}),Fallback 到外部 LLM")
276
 
277
  # Fallback:外部 LLM + SOUL.md 人设
278
  if not API_KEY:
279
  return "抱歉,我的大脑连接中断了 (API_KEY missing)"
280
- url = f"{API_BASE_URL}/chat/completions"
281
- headers = {"Authorization": f"Bearer {API_KEY}", "Content-Type": "application/json"}
282
- payload = {
283
- "model": MODEL_NAME,
284
- "messages": [
285
- {"role": "system", "content": _soul_prompt or "You are a helpful assistant."},
286
- {"role": "user", "content": user_text}
287
- ],
288
- "stream": False
289
- }
290
  log(f"🤖 外部 LLM ({MODEL_NAME}): {user_text[:50]}...")
291
- resp = requests.post(url, headers=headers, json=payload, timeout=60)
 
 
 
 
292
  if resp.status_code == 200:
293
  reply = resp.json()["choices"][0]["message"]["content"]
294
  log(f"🤖 LLM 回复: {reply[:60]}...")
@@ -301,15 +302,23 @@ def chat_with_llm(user_text):
301
 
302
  # ---------- 处理文本消息 ----------
303
  def handle_text_message(message_id, chat_id, text):
304
- """LLM -> 发送"""
305
  token = get_token()
306
  if not token:
307
  return
308
 
309
- # 简单防重:也许以后需要
310
- # 这里直接调用 LLM
311
- reply = chat_with_llm(text)
 
312
  if reply:
 
 
 
 
 
 
 
313
  send_text(token, chat_id, reply)
314
 
315
  # ---------- 事件处理 ----------
 
_use_gateway = False  # decided by probing the local gateway at startup
_soul_prompt = ""  # contents of SOUL.md (persona prompt)

# Conversation history, kept per chat_id
_chat_history = {}  # {chat_id: [{role, content}, ...]}
MAX_HISTORY = 10  # keep only the most recent N turns per chat
27
  # ---------- 日志 ----------
28
  def log(msg):
29
  ts = time.strftime("%H:%M:%S")
 
261
  log(f"⚠️ Gateway 不可用 ({e}),使用外部 LLM + SOUL 人设")
262
 
263
  # ---------- LLM 对话 ----------
264
+ def chat_with_llm(user_text, history=None):
265
  """优先转发给本地 OpenClaw Gateway(带人设+插件),Fallback 到外部 LLM+SOUL"""
266
  try:
267
  if _use_gateway:
268
  # 走 OpenClaw Gateway:人设和工具由它处理
269
  resp = requests.post(
270
  f"{OPENCLAW_GATEWAY}/chat/completions",
271
+ json={"model": "default", "messages": (history or []) + [{"role": "user", "content": user_text}], "stream": False},
272
  timeout=120
273
  )
274
  if resp.status_code == 200:
 
276
  reply = data["choices"][0]["message"]["content"]
277
  log(f"🤖 Gateway 回复: {reply[:60]}...")
278
  return reply
279
+ log(f"⚠️ Gateway 失败 ({resp.status_code}),Fallback到外部 LLM")
280
 
281
  # Fallback:外部 LLM + SOUL.md 人设
282
  if not API_KEY:
283
  return "抱歉,我的大脑连接中断了 (API_KEY missing)"
284
+ soul = _soul_prompt or "You are a helpful assistant."
285
+ messages = [{"role": "system", "content": soul}] + (history or []) + [{"role": "user", "content": user_text}]
286
+ payload = {"model": MODEL_NAME, "messages": messages, "stream": False}
 
 
 
 
 
 
 
287
  log(f"🤖 外部 LLM ({MODEL_NAME}): {user_text[:50]}...")
288
+ resp = requests.post(
289
+ f"{API_BASE_URL}/chat/completions",
290
+ headers={"Authorization": f"Bearer {API_KEY}", "Content-Type": "application/json"},
291
+ json=payload, timeout=60
292
+ )
293
  if resp.status_code == 200:
294
  reply = resp.json()["choices"][0]["message"]["content"]
295
  log(f"🤖 LLM 回复: {reply[:60]}...")
 
302
 
303
  # ---------- 处理文本消息 ----------
304
  def handle_text_message(message_id, chat_id, text):
305
+ """LLM (带历史) -> 发送"""
306
  token = get_token()
307
  if not token:
308
  return
309
 
310
+ # 获取该用户的历史
311
+ history = _chat_history.get(chat_id, [])
312
+
313
+ reply = chat_with_llm(text, history)
314
  if reply:
315
+ # 更新历史
316
+ history = history + [
317
+ {"role": "user", "content": text},
318
+ {"role": "assistant", "content": reply}
319
+ ]
320
+ # 只保留最近 N 轮(每轮 2 条记录)
321
+ _chat_history[chat_id] = history[-(MAX_HISTORY * 2):]
322
  send_text(token, chat_id, reply)
323
 
324
  # ---------- 事件处理 ----------