|
|
import os |
|
|
import requests |
|
|
import gradio as gr |
|
|
|
|
|
|
|
|
# Hugging Face serverless Inference API endpoint for the Mixtral-8x7B instruct model.
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1"


# Access token read from the environment; None when the variable is unset.
HF_TOKEN = os.getenv("HF_TOKEN")


# NOTE(review): if HF_TOKEN is unset this header becomes "Bearer None" and every
# request will be rejected — confirm the deployment always sets HF_TOKEN.
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
|
|
|
|
|
def chat(message, history):
    """Send *message* to the Mixtral Inference API and append the reply to history.

    Parameters
    ----------
    message : str
        The user's input text.
    history : list[list[str]] | None
        Chat history as [user, assistant] pairs; ``None`` on the first call.

    Returns
    -------
    tuple[list, list]
        ``(history, history)`` — the same list twice, so Gradio can feed both
        the Chatbot display output and its state output.
    """
    history = history or []
    prompt = f"Kullanıcı: {message}\nAsistan:"
    try:
        payload = {
            "inputs": prompt,
            "parameters": {"max_new_tokens": 250, "temperature": 0.7},
            # Block server-side until the model is loaded instead of
            # immediately receiving a 503.
            "options": {"wait_for_model": True},
        }
        response = requests.post(API_URL, headers=HEADERS, json=payload, timeout=90)

        if response.status_code == 200:
            data = response.json()
            # A successful generation is a non-empty list of dicts; anything
            # else (e.g. an error dict) must not reach data[0].get below.
            if isinstance(data, list) and data:
                raw = data[0].get("generated_text", "")
                # The API echoes the prompt at the start of generated_text;
                # strip it so only the assistant's continuation is shown.
                if raw.startswith(prompt):
                    raw = raw[len(prompt):]
                reply = raw.strip() or "⚠️ Model yanıt vermedi."
            else:
                reply = "⚠️ Model yanıt vermedi."
        elif response.status_code == 503:
            reply = "🕓 Model yükleniyor, lütfen birkaç saniye bekleyin..."
        elif response.status_code == 404:
            reply = "⚠️ Model bulunamadı (404). Lütfen Mixtral model adını kontrol edin."
        else:
            reply = f"⚠️ Hata kodu: {response.status_code}"
    except Exception as e:
        # Broad catch is deliberate: any network/JSON failure becomes a chat
        # message instead of crashing the UI.
        reply = f"❌ Bağlantı hatası: {str(e)}"

    history.append([message, reply])
    return history, history
|
|
|
|
|
|
|
|
# Visual style: soft component set with cyan accents over slate neutrals.
ui_theme = gr.themes.Soft(primary_hue="cyan", neutral_hue="slate")


with gr.Blocks(theme=ui_theme, title="ZenkaMind v19") as demo:
    # Page header: title plus Turkish subtitle, rendered as inline-styled HTML.
    gr.Markdown(
        "<h1 style='text-align:center;color:#06b6d4'>🧠 ZenkaMind v19</h1>"
        "<p style='text-align:center;color:#94a3b8'>Mixtral 8x7B modeliyle Türkçe yapay zekâ</p>"
    )

    chat_window = gr.Chatbot(label="ZenkaMind Sohbet", height=500)
    user_input = gr.Textbox(placeholder="Mesajınızı yazın...", show_label=False)
    clear_btn = gr.Button("🧹 Sohbeti Temizle")

    # Enter in the textbox routes (message, history) through chat(); the
    # returned pair updates the Chatbot display and its state in one go.
    user_input.submit(chat, [user_input, chat_window], [chat_window, chat_window])
    # Clearing just replaces the Chatbot value with an empty list.
    clear_btn.click(lambda: [], None, chat_window, queue=False)

    # Footer / copyright line.
    gr.Markdown("<p style='text-align:center;font-size:12px;color:#64748b'>© 2025 ZenkaMind Bilişim & Teknoloji — Manisa</p>")


demo.launch()
|
|
|