asemxin committed on
Commit ·
76163fd
1
Parent(s): 229d644
feat: Gateway 优先对话 + SOUL.md 人设 fallback
Browse files- image_daemon.py +78 -19
image_daemon.py
CHANGED
|
@@ -15,6 +15,11 @@ API_BASE_URL = os.environ.get("API_BASE_URL", "https://asem12345-cliproxyapi.hf.
|
|
| 15 |
API_KEY = os.environ.get("API_KEY", "")
|
| 16 |
MODEL_NAME = os.environ.get("MODEL_NAME", "gemini-3-flash")
|
| 17 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 18 |
# ---------- 日志 ----------
|
| 19 |
def log(msg):
|
| 20 |
ts = time.strftime("%H:%M:%S")
|
|
@@ -181,40 +186,85 @@ def handle_image_message(message_id, chat_id, image_key):
|
|
| 181 |
f.write(img_data)
|
| 182 |
log(f"⚠️ 图床全部失败,本地保存: {path}")
|
| 183 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 184 |
# ---------- LLM 对话 ----------
|
| 185 |
def chat_with_llm(user_text):
|
| 186 |
-
"""
|
| 187 |
-
if not API_KEY:
|
| 188 |
-
log("❌ API_KEY 未设置,无法对话")
|
| 189 |
-
return "抱歉,我的大脑连接中断了 (API_KEY missing)"
|
| 190 |
-
|
| 191 |
try:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 192 |
url = f"{API_BASE_URL}/chat/completions"
|
| 193 |
-
headers = {
|
| 194 |
-
"Authorization": f"Bearer {API_KEY}",
|
| 195 |
-
"Content-Type": "application/json"
|
| 196 |
-
}
|
| 197 |
payload = {
|
| 198 |
"model": MODEL_NAME,
|
| 199 |
"messages": [
|
| 200 |
-
{"role": "system", "content": "You are
|
| 201 |
{"role": "user", "content": user_text}
|
| 202 |
],
|
| 203 |
"stream": False
|
| 204 |
}
|
| 205 |
-
log(f"🤖
|
| 206 |
resp = requests.post(url, headers=headers, json=payload, timeout=60)
|
| 207 |
-
|
| 208 |
if resp.status_code == 200:
|
| 209 |
-
|
| 210 |
-
|
| 211 |
-
log(f"🤖 LLM 回复: {reply[:50]}...")
|
| 212 |
return reply
|
| 213 |
-
|
| 214 |
-
|
| 215 |
-
return f"思考时遇到错误 ({resp.status_code})"
|
| 216 |
except Exception as e:
|
| 217 |
-
log(f"❌
|
| 218 |
return f"大脑短路了: {e}"
|
| 219 |
|
| 220 |
# ---------- 处理文本消息 ----------
|
|
@@ -284,6 +334,9 @@ def main():
|
|
| 284 |
log("❌ FEISHU_APP_ID 或 FEISHU_APP_SECRET 未设置,退出")
|
| 285 |
sys.exit(1)
|
| 286 |
|
|
|
|
|
|
|
|
|
|
| 287 |
# 预热 token
|
| 288 |
token = get_token()
|
| 289 |
if token:
|
|
@@ -291,6 +344,12 @@ def main():
|
|
| 291 |
else:
|
| 292 |
log("⚠️ Token 获取失败,稍后重试")
|
| 293 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 294 |
# 初始化 lark-oapi WebSocket 客户端
|
| 295 |
try:
|
| 296 |
import lark_oapi as lark
|
|
|
|
| 15 |
API_KEY = os.environ.get("API_KEY", "")
|
| 16 |
MODEL_NAME = os.environ.get("MODEL_NAME", "gemini-3-flash")
|
| 17 |
|
| 18 |
+
# OpenClaw Gateway (本地)
|
| 19 |
+
OPENCLAW_GATEWAY = "http://127.0.0.1:18789/v1"
|
| 20 |
+
_use_gateway = False # 启动时探测决定
|
| 21 |
+
_soul_prompt = "" # SOUL.md 内容
|
| 22 |
+
|
| 23 |
# ---------- 日志 ----------
|
| 24 |
def log(msg):
|
| 25 |
ts = time.strftime("%H:%M:%S")
|
|
|
|
| 186 |
f.write(img_data)
|
| 187 |
log(f"⚠️ 图床全部失败,本地保存: {path}")
|
| 188 |
|
| 189 |
+
# ---------- SOUL.md loading ----------
def load_soul():
    """Load the persona file into the module-global ``_soul_prompt``.

    Strips everything from the "## 图片处理" (image handling) heading
    onward: the daemon handles images itself, and leaving those
    instructions in the system prompt makes the LLM hallucinate image
    actions. Falls back to a default persona when the file is missing
    or unreadable, logging which failure occurred.
    """
    global _soul_prompt
    soul_path = "/root/.openclaw/workspace/SOUL.md"
    # Single fallback persona shared by both failure paths
    # (the literal was previously duplicated in each except branch).
    default_persona = "You are MoltBot, a helpful AI assistant."
    try:
        with open(soul_path, "r", encoding="utf-8") as f:
            content = f.read()
        # Drop the image-handling section — the daemon has taken it over.
        marker = "## 图片处理"
        if marker in content:
            content = content[:content.index(marker)].rstrip()
        _soul_prompt = content
        log(f"✅ SOUL.md 已加载 ({len(_soul_prompt)} 字)")
    except FileNotFoundError:
        _soul_prompt = default_persona
        log("⚠️ SOUL.md 未找到,使用默认人设")
    except Exception as e:
        _soul_prompt = default_persona
        log(f"⚠️ SOUL.md 加载失败: {e}")
|
| 208 |
+
|
| 209 |
+
# ---------- OpenClaw Gateway probe ----------
def check_openclaw_gateway():
    """Probe the local OpenClaw Gateway once and set the routing flag.

    Sends a minimal chat-completion request; a 2xx answer flips the
    module-global ``_use_gateway`` to True so chat traffic is routed to
    the local Gateway. On any failure (non-2xx status or network error)
    the flag is left untouched and the daemon keeps using the external
    LLM with the SOUL persona.
    """
    global _use_gateway
    probe_body = {
        "model": "default",
        "messages": [{"role": "user", "content": "ping"}],
    }
    try:
        response = requests.post(
            f"{OPENCLAW_GATEWAY}/chat/completions",
            json=probe_body,
            timeout=5,
        )
    except Exception as e:
        log(f"⚠️ Gateway 不可用 ({e}),使用外部 LLM + SOUL 人设")
        return
    if response.status_code in (200, 201):
        _use_gateway = True
        log(f"✅ OpenClaw Gateway 可用 ({OPENCLAW_GATEWAY})")
    else:
        log(f"⚠️ Gateway 响应 {response.status_code},使用外部 LLM")
|
| 226 |
+
|
| 227 |
# ---------- LLM 对话 ----------
def _gateway_reply(user_text):
    """Ask the local OpenClaw Gateway for a reply.

    Returns the reply text on success, or None so the caller falls back
    to the external LLM. BUGFIX: a network error here previously bubbled
    up to chat_with_llm's outer except and returned the generic error
    string — the external-LLM fallback was never reached, even though a
    non-200 status code did fall back correctly.
    """
    try:
        resp = requests.post(
            f"{OPENCLAW_GATEWAY}/chat/completions",
            json={"model": "default", "messages": [{"role": "user", "content": user_text}], "stream": False},
            timeout=120
        )
    except Exception as e:
        log(f"⚠️ Gateway 异常 ({e}),Fallback 到外部 LLM")
        return None
    if resp.status_code == 200:
        reply = resp.json()["choices"][0]["message"]["content"]
        log(f"🤖 Gateway 回复: {reply[:60]}...")
        return reply
    log(f"⚠️ Gateway 失败 ({resp.status_code}),Fallback 到外部 LLM")
    return None


def chat_with_llm(user_text):
    """优先转发给本地 OpenClaw Gateway(带人设+插件),Fallback 到外部 LLM+SOUL"""
    try:
        # Prefer the local Gateway: persona and plugins are handled there.
        if _use_gateway:
            reply = _gateway_reply(user_text)
            if reply is not None:
                return reply

        # Fallback: external LLM with the SOUL.md persona as system prompt.
        if not API_KEY:
            return "抱歉,我的大脑连接中断了 (API_KEY missing)"
        url = f"{API_BASE_URL}/chat/completions"
        headers = {"Authorization": f"Bearer {API_KEY}", "Content-Type": "application/json"}
        payload = {
            "model": MODEL_NAME,
            "messages": [
                {"role": "system", "content": _soul_prompt or "You are a helpful assistant."},
                {"role": "user", "content": user_text}
            ],
            "stream": False
        }
        log(f"🤖 外部 LLM ({MODEL_NAME}): {user_text[:50]}...")
        resp = requests.post(url, headers=headers, json=payload, timeout=60)
        if resp.status_code == 200:
            reply = resp.json()["choices"][0]["message"]["content"]
            log(f"🤖 LLM 回复: {reply[:60]}...")
            return reply
        log(f"❌ 外部 LLM 错误 {resp.status_code}: {resp.text[:100]}")
        return f"思考时遇到错误 ({resp.status_code})"
    except Exception as e:
        log(f"❌ chat_with_llm 异常: {e}")
        return f"大脑短路了: {e}"
|
| 269 |
|
| 270 |
# ---------- 处理文本消息 ----------
|
|
|
|
| 334 |
log("❌ FEISHU_APP_ID 或 FEISHU_APP_SECRET 未设置,退出")
|
| 335 |
sys.exit(1)
|
| 336 |
|
| 337 |
+
# 加载人设
|
| 338 |
+
load_soul()
|
| 339 |
+
|
| 340 |
# 预热 token
|
| 341 |
token = get_token()
|
| 342 |
if token:
|
|
|
|
| 344 |
else:
|
| 345 |
log("⚠️ Token 获取失败,稍后重试")
|
| 346 |
|
| 347 |
+
# 延迟探测 OpenClaw Gateway(等 Gateway 先启动完成)
|
| 348 |
+
def delayed_gateway_check():
|
| 349 |
+
time.sleep(10)
|
| 350 |
+
check_openclaw_gateway()
|
| 351 |
+
threading.Thread(target=delayed_gateway_check, daemon=True).start()
|
| 352 |
+
|
| 353 |
# 初始化 lark-oapi WebSocket 客户端
|
| 354 |
try:
|
| 355 |
import lark_oapi as lark
|