| import gradio as gr |
| from huggingface_hub import InferenceClient |
| import os |
|
|
| |
# Hugging Face inference client routed through the "novita" provider.
# NOTE(review): the key is read from the env var literally named "HF_token"
# (not the conventional HF_TOKEN) — the deployment secret must match exactly.
client = InferenceClient(provider="novita", api_key=os.getenv("HF_token"))
|
|
def chat(user_input, history, cot_enabled):
    """Run one chat turn against the DeepSeek model.

    Args:
        user_input: Raw text from the textbox.
        history: Prior turns as a list of {"role", "content"} dicts
            (Gradio "messages" format), or None on the first turn.
        cot_enabled: When True, append a chain-of-thought instruction
            to the user's message before sending it to the model.

    Returns:
        (history, history): the updated conversation, duplicated so one
        copy feeds the Chatbot component and one the session State.
        API failures are reported as an assistant message instead of raising.
    """
    if history is None:
        history = []

    # Rebuild the model prompt: system instruction first, then prior turns.
    messages = [{
        "role": "system",
        "content": "你是一个有医学背景并能进行深度思维推理的 AI。请仔细回答用户的问题。"
    }]
    messages.extend({"role": m["role"], "content": m["content"]} for m in history)

    # Optionally ask the model to reason step by step (chain of thought).
    cot_prompt = "。请你一步一步地思考并解释你的推理过程。" if cot_enabled else ""
    messages.append({"role": "user", "content": user_input.strip() + cot_prompt})

    try:
        # Only the API call and response extraction can raise; keep the
        # try body narrow so history bookkeeping happens exactly once below.
        completion = client.chat.completions.create(
            model="deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
            messages=messages,
            max_tokens=512,
            temperature=0.5,
        )
        reply = completion.choices[0].message.content
    except Exception as e:
        # Surface the failure in the chat window rather than crashing the UI.
        reply = f"出错了:{str(e)}"

    # Record the turn once, for both the success and the error path
    # (the displayed user message is the raw input, without the CoT suffix).
    history.append({"role": "user", "content": user_input})
    history.append({"role": "assistant", "content": reply})
    return history, history
|
|
|
|
|
|
| |
# --- Gradio UI --------------------------------------------------------------
# Component creation order inside the Blocks context determines the layout.
with gr.Blocks(title="🌐 AI 医疗问答系统") as demo:
    gr.Markdown("## 💬 医疗问答助手\n使用 Hugging Face 上的 DeepSeek 模型进行问答。")

    # Conversation display; type="messages" renders a list of
    # {"role": ..., "content": ...} dicts — the format chat() builds.
    chatbot = gr.Chatbot(label="对话记录", type="messages")
    user_input = gr.Textbox(lines=3, label="你的问题")

    # Toggle for the chain-of-thought suffix chat() appends when enabled.
    cot_mode = gr.Checkbox(label="深度思考", value=True)

    # Per-session conversation history (list of message dicts).
    state = gr.State([])

    send = gr.Button("发送")
    # chat() returns (history, history): the first copy updates the chatbot
    # display, the second is written back into the session state.
    send.click(fn=chat, inputs=[user_input, state, cot_mode], outputs=[chatbot, state])
|
|
|
|
if __name__ == "__main__":
    # NOTE(review): share=True makes Gradio expose a publicly reachable
    # link in addition to the local server — confirm this is intended.
    demo.launch(share=True)