"""Minimal Gradio chat UI backed by a local Ollama server."""
import json

import gradio as gr
import requests

OLLAMA_URL = "http://localhost:11434/api/chat"
MODEL_NAME = "huihui_ai/deepseek-r1-abliterated:1.5b"


def _history_to_messages(history):
    """Convert Gradio chat history into the OpenAI-style list Ollama expects.

    Handles both history formats Gradio may supply:
    - legacy "tuples" format: list of (user_text, assistant_text) pairs
    - modern ``type="messages"`` format: list of ``{"role", "content"}`` dicts
    """
    messages = []
    for turn in history:
        if isinstance(turn, dict):
            # Already role/content shaped — pass through unchanged.
            messages.append({"role": turn["role"], "content": turn["content"]})
        else:
            human, assistant = turn
            messages.append({"role": "user", "content": human})
            messages.append({"role": "assistant", "content": assistant})
    return messages


def chat(message, history):
    """Send the conversation to Ollama's chat endpoint and return the reply.

    Parameters:
        message: the user's newest utterance.
        history: prior turns as supplied by Gradio (tuple pairs or dicts).

    Returns:
        The assistant's reply text on success, or a human-readable error
        string (original contract preserved) on HTTP error or request failure.
    """
    messages = _history_to_messages(history)
    messages.append({"role": "user", "content": message})

    payload = {
        "model": MODEL_NAME,
        "messages": messages,
        "stream": False,  # set to True to enable streaming output
    }
    try:
        response = requests.post(OLLAMA_URL, json=payload, timeout=60)
        if response.status_code == 200:
            result = response.json()
            return result["message"]["content"]
        return f"Ollama 返回错误:{response.status_code} - {response.text}"
    except Exception as e:
        # Network errors, timeouts, malformed JSON, missing keys — report,
        # never crash the UI callback.
        return f"请求失败:{str(e)}"


if __name__ == "__main__":
    # Guarded entry point: importing this module no longer starts the server.
    # Binds on all interfaces so the UI is reachable from other hosts.
    gr.ChatInterface(chat).launch(server_name="0.0.0.0", server_port=7860)