File size: 2,607 Bytes
1413a24
 
 
c7a1da2
1413a24
 
 
c7a1da2
 
1413a24
 
 
 
 
 
 
 
 
 
 
c7a1da2
1413a24
c7a1da2
1413a24
c7a1da2
1413a24
c7a1da2
1413a24
18eb1ce
1413a24
c7a1da2
1413a24
 
 
 
 
 
 
c7a1da2
 
 
 
f9ec801
1413a24
 
 
 
 
c7a1da2
 
1413a24
c7a1da2
 
1413a24
c7a1da2
1413a24
 
 
 
 
 
 
c7a1da2
 
1413a24
 
 
 
 
 
 
 
c7a1da2
1413a24
 
 
 
c7a1da2
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import os
import requests
import gradio as gr

# 🔐 Hugging Face serverless Inference API endpoint for Mixtral-8x7B-Instruct.
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1"
HF_TOKEN = os.getenv("HF_TOKEN")  # read from the environment (configured as a Space Secret)
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}  # bearer-token auth sent with every request

# 💬 Chat function
def chat(message, history):
    """Send the conversation to the HF Inference API and append the reply.

    Parameters:
        message: the user's new message (str).
        history: list of (user, bot) tuples from gr.Chatbot, or None.

    Returns:
        (history, history) — the same updated list twice, because both
        wired outputs target the Chatbot component.
    """
    history = history or []

    # The serverless Inference API's text-generation task expects a single
    # prompt STRING in `inputs`, not a chat-message list, so render the
    # conversation in Mixtral's [INST] ... [/INST] instruction format.
    system = "You are ZenkaMind, a helpful Turkish AI assistant."
    prompt = f"<s>[INST] {system}\n\n"
    for user_turn, bot_turn in history:
        prompt += f"{user_turn} [/INST] {bot_turn}</s>[INST] "
    prompt += f"{message} [/INST]"

    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 300,
            "temperature": 0.7,
            # Without this the API echoes the prompt back in generated_text.
            "return_full_text": False,
        },
        "options": {"wait_for_model": True},
    }

    try:
        r = requests.post(API_URL, headers=HEADERS, json=payload, timeout=60)
        data = r.json()

        # Guard the list for emptiness before indexing (the original did
        # data[0] unchecked, which could raise IndexError on []).
        if isinstance(data, list) and data and "generated_text" in data[0]:
            reply = data[0]["generated_text"].strip()
        elif isinstance(data, dict) and "error" in data:
            reply = f"⚠️ Hata: {data['error']}"
        else:
            reply = "⚠️ Model boş yanıt döndürdü veya bağlantı kesildi."
    except Exception as e:
        # Network/timeout/JSON failures surface as a user-visible error reply
        # instead of crashing the UI callback.
        reply = f"❌ Sunucu hatası: {str(e)}"

    history.append((message, reply))
    return history, history


# 🎨 Theme: Soft base with blue/slate hues, overridden to a dark slate
# background with sky-blue block labels.
_base_theme = gr.themes.Soft(primary_hue="blue", neutral_hue="slate")
theme = _base_theme.set(
    body_background_fill="#0f172a",
    block_background_fill="#1e293b",
    block_label_text_color="#38bdf8",
)

# UI layout: dark-themed chat screen with send and clear controls.
with gr.Blocks(theme=theme, title="ZenkaMind v12") as demo:
    gr.Markdown(
        """
        <h1 style="text-align:center;color:#38bdf8;">🧠 ZenkaMind v12</h1>
        <p style="text-align:center;color:#94a3b8;">
        Türkçe yapay zekâ sohbet asistanı — <strong>Mixtral 8x7B</strong> modeliyle çalışır.<br>
        © 2025 ZenkaMind Bilişim & Teknoloji
        </p>
        """
    )

    chatbot = gr.Chatbot(height=500, label="ZenkaMind Sohbet Ekranı")
    user_msg = gr.Textbox(
        placeholder="Mesajınızı yazın ve Enter’a basın...",
        show_label=False,
        autofocus=True,
    )
    send_btn = gr.Button("🚀 Gönder")
    clear_btn = gr.Button("🧹 Sohbeti Temizle")

    # Event wiring. After a message is sent, clear the textbox so the same
    # text cannot be accidentally re-submitted (the original left it in place).
    user_msg.submit(chat, [user_msg, chatbot], [chatbot, chatbot]).then(
        lambda: "", None, user_msg
    )
    send_btn.click(chat, [user_msg, chatbot], [chatbot, chatbot]).then(
        lambda: "", None, user_msg
    )
    clear_btn.click(lambda: None, None, chatbot, queue=False)

demo.launch()