import os

import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# The model can be overridden at deploy time via the MODEL_ID environment variable.
MODEL_ID = os.getenv("MODEL_ID", "badanwang/teacher_basic_qwen3-0.6b")

print(f"Loading model: {MODEL_ID}")
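
# Example launch with an overridden model (hypothetical model ID, for illustration):
#   MODEL_ID=Qwen/Qwen3-0.6B python app.py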

try:
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)
    # torch_dtype="auto" keeps the checkpoint's native precision;
    # device_map="auto" places the weights on a GPU when one is available.
    model = AutoModelForCausalLM.from_pretrained(
        MODEL_ID,
        torch_dtype="auto",
        device_map="auto",
        trust_remote_code=True,
    )
    print("Model and tokenizer loaded successfully!")

    def predict(prompt: str, history: list[list[str]]):
        """
        Take the user's prompt and the chat history, and return the updated history.
        Gradio automatically creates an API endpoint for this function.
        """
        print(f"Received request: prompt='{prompt}'")

        # Rebuild the conversation in the message format expected by the chat template.
        messages = []
        for user_message, bot_message in history:
            messages.append({"role": "user", "content": user_message})
            messages.append({"role": "assistant", "content": bot_message})
        messages.append({"role": "user", "content": prompt})

        input_ids = tokenizer.apply_chat_template(
            messages,
            add_generation_prompt=True,
            tokenize=True,
            return_tensors="pt",
        ).to(model.device)

        outputs = model.generate(input_ids, max_new_tokens=1024)
        # Decode only the newly generated tokens, skipping the prompt portion.
        response_text = tokenizer.decode(
            outputs[0][input_ids.shape[-1]:], skip_special_tokens=True
        )
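
        # Optional variant (a sketch, assuming a transformers release where
        # apply_chat_template(return_dict=True) also returns an attention_mask;
        # passing it to generate() avoids the missing-attention-mask warning):
        #
        #   inputs = tokenizer.apply_chat_template(
        #       messages, add_generation_prompt=True, tokenize=True,
        #       return_dict=True, return_tensors="pt",
        #   ).to(model.device)
        #   outputs = model.generate(**inputs, max_new_tokens=1024)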

        print(f"Generated response: {response_text}")

        history.append([prompt, response_text])
        return history
except Exception as e:
    print(f"Fatal error while loading the model: {e}")
    # Capture the message now: Python clears the exception variable when the
    # except block exits, so the fallback below must not reference `e` itself.
    error_message = str(e)

    def predict(*args, **kwargs):
        raise gr.Error(
            "Model loading failed; check the Space logs to see whether it ran "
            f"out of memory. Error details: {error_message}"
        )

with gr.Blocks(theme=gr.themes.Default()) as demo:
    gr.Markdown(f"## Simple Model Chat ({MODEL_ID})")
    chatbot = gr.Chatbot(label="Conversation", height=600)
    msg = gr.Textbox(label="Enter your question")
    clear = gr.Button("Clear chat")

    msg.submit(predict, [msg, chatbot], chatbot)
    clear.click(lambda: [], None, chatbot)
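    # Optional follow-up (sketch, not in the original): chain a second event to
    # clear the textbox after each submission:
    #   msg.submit(predict, [msg, chatbot], chatbot).then(lambda: "", None, msg)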
|

print("Preparing to launch the Gradio app...")
# api_open is an argument of queue(), not launch(); it is passed there so the
# auto-generated API endpoints stay open.
demo.queue(api_open=True).launch()
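
# With the queue's API open, the /predict endpoint can also be called
# programmatically. A minimal sketch using the gradio_client package (the
# local URL is an assumption; use the Space URL when deployed):
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860/")
#   updated_history = client.predict("Hello!", [], api_name="/predict")
#   print(updated_history)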