| import os, requests, gradio as gr |
|
|
| |
# Hugging Face Inference API endpoint for the Mixtral-8x7B instruct model.
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1"

# API token is read from the environment so credentials stay out of the source.
HF_TOKEN = os.getenv("HF_TOKEN")

# Only attach an Authorization header when a token is actually configured.
# (Previously an unset HF_TOKEN produced the bogus header "Bearer None".)
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"} if HF_TOKEN else {}
|
|
| |
| def chat(message, history): |
| history = history or [] |
| payload = { |
| "inputs": message, |
| "parameters": {"max_new_tokens": 300, "temperature": 0.7}, |
| "options": {"wait_for_model": True} |
| } |
|
|
| try: |
| r = requests.post(API_URL, headers=HEADERS, json=payload, timeout=60) |
|
|
| try: |
| data = r.json() |
| except ValueError: |
| reply = "⚠️ Model boş yanıt döndürdü veya bağlantı kesildi." |
| history.append((message, reply)) |
| return history, history |
|
|
| |
| if isinstance(data, list) and "generated_text" in data[0]: |
| reply = data[0]["generated_text"] |
| else: |
| reply = data.get("generated_text") or data.get("error", "⚠️ Modelden yanıt alınamadı.") |
|
|
| except Exception as e: |
| reply = f"❌ Hata: {e}" |
|
|
| history.append((message, reply)) |
| return history, history |
|
|
|
|
| |
# Dark slate/blue theme: start from the Soft preset, then override the
# background and title colors.
_base_theme = gr.themes.Soft(primary_hue="blue", neutral_hue="slate")
theme = _base_theme.set(
    body_background_fill="#0f172a",
    block_background_fill="#1e293b",
    block_title_text_color="#38bdf8",
)
|
|
with gr.Blocks(theme=theme, title="ZenkaMind v11") as app:
    # Header: app title and Turkish tagline.
    gr.Markdown(
        "<h1 style='text-align:center;color:#38bdf8'>🧠 ZenkaMind v11</h1>"
        "<p style='text-align:center;color:#94a3b8'>Türkçe yapay zekâ sohbet asistanı — Mixtral 8x7B</p>"
    )

    # Conversation display, single-line input (Enter submits), and a reset button.
    chatbot = gr.Chatbot(height=460, label="ZenkaMind")
    user_input = gr.Textbox(placeholder="Mesajınızı yazın ve Enter’a basın…", show_label=False)
    clear_btn = gr.Button("🧹 Temizle")

    # chat() returns (history, history), hence the chatbot appears twice
    # in the outputs list.
    user_input.submit(chat, [user_input, chatbot], [chatbot, chatbot])
    clear_btn.click(lambda: None, None, chatbot, queue=False)

    # Footer.
    gr.Markdown("<p style='text-align:center;font-size:12px;color:#64748b'>© 2025 ZenkaMind Bilişim & Teknoloji</p>")

app.launch()
|
|