File size: 3,256 Bytes
7d20126
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
# gradio_gemini_chat.py
# pip install -U google-genai gradio

import os
import traceback
import gradio as gr
from google import genai

# SECURITY FIX: the previous fallback literal was a hard-coded (leaked) API
# key committed to source. Credentials must come from the environment only.
API_KEY = os.getenv("GOOGLE_API_KEY")
if not API_KEY:
    raise RuntimeError(
        "GOOGLE_API_KEY environment variable is not set; "
        "export it before running this app."
    )
MODEL_NAME = "gemini-2.5-flash"  # model used for every request in this app
client = genai.Client(api_key=API_KEY)

def build_contents_from_history_messages(history_msgs, user_msg):
    """Convert chat history plus the current user message into google-genai contents.

    history_msgs: list of {"role": "user"/"assistant", "content": str}.
    Assistant turns map to role "model"; any other role is treated as the
    user. Turns whose content is empty after stripping are skipped. If
    user_msg is non-empty it is appended as the final user turn, unstripped
    (the caller strips it).
    """
    def _turn(role, text):
        # One content entry in the google-genai wire format.
        return {"role": role, "parts": [{"text": text}]}

    contents = []
    for msg in history_msgs:
        body = (msg.get("content") or "").strip()
        if not body:
            continue
        role = "model" if msg.get("role") == "assistant" else "user"
        contents.append(_turn(role, body))
    if user_msg:
        contents.append(_turn("user", user_msg))
    return contents

def chat_fn(user_msg, history_msgs, sys_prompt):
    """Handle one chat turn.

    Args:
        user_msg: raw text from the input box.
        history_msgs: messages-format history (list of {"role", "content"});
            may be None on the very first call from Gradio.
        sys_prompt: optional system prompt applied to this request.

    Returns:
        (updated_history, "") — the empty string clears the input textbox.
    """
    user_msg = (user_msg or "").strip()
    history_msgs = history_msgs or []  # guard: Gradio can pass None initially
    if not user_msg:
        return history_msgs, ""

    # Command: clear the conversation.
    if user_msg.lower() in ("/reset", "/clear"):
        return [], ""

    try:
        contents = build_contents_from_history_messages(history_msgs, user_msg)

        kwargs = {}
        if sys_prompt and sys_prompt.strip():
            # BUG FIX: generate_content has no top-level `system_instruction`
            # parameter — it belongs inside `config` (google-genai accepts a
            # plain dict for GenerateContentConfig). Passing it as a kwarg
            # raised TypeError on every turn that had a system prompt.
            kwargs["config"] = {"system_instruction": sys_prompt.strip()}

        resp = client.models.generate_content(
            model=MODEL_NAME,
            contents=contents,
            **kwargs
        )
        # resp.text can be None (e.g. blocked/empty candidates).
        bot_text = (resp.text or "").strip()
    except Exception as e:
        # Best-effort: surface the error in the chat bubble instead of crashing.
        bot_text = f"[發生錯誤]\n{e}\n\n{traceback.format_exc(limit=2)}"

    # messages format requires appending the user turn then the assistant turn.
    history_msgs = history_msgs + [
        {"role": "user", "content": user_msg},
        {"role": "assistant", "content": bot_text},
    ]
    return history_msgs, ""

# --- UI layout and event wiring (Gradio Blocks context) ---
with gr.Blocks(title="Gemini Chat (google-genai + Gradio)") as demo:
    gr.Markdown("## Gemini Chatbot(/reset 清空對話)")

    # Optional system prompt; passed into chat_fn on every turn.
    sys_prompt = gr.Textbox(
        label="System Prompt(可選)",
        lines=2,
        placeholder="例如:你是溫柔且穩重的助教,回答請精簡、有條理。"
    )

    # Use the "messages" format (list of {"role", "content"} dicts) to avoid
    # the deprecation warning for the older 'tuples' format.
    chatbot = gr.Chatbot(
        label="對話",
        height=500,
        type="messages",
    )

    user_in = gr.Textbox(label="輸入訊息", lines=2, placeholder="打字聊天吧(/reset 清空)")
    with gr.Row():
        send_btn = gr.Button("送出", variant="primary")
        clear_btn = gr.Button("清空對話")

    # Both clicking "send" and pressing Enter submit a turn; chat_fn returns
    # (new_history, "") so the input textbox is cleared after each send.
    send_btn.click(chat_fn, [user_in, chatbot, sys_prompt], [chatbot, user_in])
    user_in.submit(chat_fn, [user_in, chatbot, sys_prompt], [chatbot, user_in])
    # The clear button resets both the chat history and the input box.
    clear_btn.click(lambda: ([], ""), None, [chatbot, user_in])

if __name__ == "__main__":
    # Let Gradio pick a free port automatically (avoids the "port 7860 is in
    # use" error); don't force opening a browser in notebook/cloud environments.
    demo.queue().launch(
        server_port=None,   # 0 also works
        inbrowser=False,
        share=False         # set True when a public share link is needed
    )