Zenkad committed on
Commit
18eb1ce
·
verified ·
1 Parent(s): c7a1da2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -27
app.py CHANGED
@@ -1,53 +1,48 @@
1
  import os, requests, gradio as gr
2
 
3
# 🔗 Hugging Face Inference API endpoint (Mixtral 8x7B Instruct).
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1"
# Token comes from the environment; if unset, os.getenv returns None and the
# header below literally becomes "Bearer None" (unauthenticated request).
HF_TOKEN = os.getenv("HF_TOKEN")
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
7
 
8
# 💬 Main chat function
def chat(message, history):
    """Gradio callback: send *message* to the Mixtral inference endpoint and
    append the (message, reply) pair to *history*.

    Returns the updated history twice — once for the Chatbot component and
    once for the state component.
    """
    history = history or []
    payload = {
        "inputs": message,
        "parameters": {"max_new_tokens": 300, "temperature": 0.7},
        # Ask the API to block until the model is loaded rather than 503 immediately.
        "options": {"wait_for_model": True}
    }

    try:
        r = requests.post(API_URL, headers=HEADERS, json=payload, timeout=60)

        try:
            data = r.json()
        except ValueError:
            # Body was not valid JSON (empty response / dropped connection):
            # report it to the user and return early.
            reply = "⚠️ Model boş yanıt döndürdü veya bağlantı kesildi."
            history.append((message, reply))
            return history, history

        # 🔍 Response check: the API returns either [{"generated_text": ...}]
        # or a bare dict (possibly carrying an "error" field).
        if isinstance(data, list) and "generated_text" in data[0]:
            reply = data[0]["generated_text"]
        else:
            reply = data.get("generated_text") or data.get("error", "⚠️ Modelden yanıt alınamadı.")

    except Exception as e:
        # Catch-all boundary: any network/parse failure becomes a chat reply
        # instead of crashing the UI callback.
        reply = f"❌ Hata: {e}"

    history.append((message, reply))
    return history, history
38
 
39
-
40
# 🎨 Theme and UI settings: dark slate background with light-blue accents.
theme = gr.themes.Soft(primary_hue="blue", neutral_hue="slate").set(
    body_background_fill="#0f172a",
    block_background_fill="#1e293b",
    block_title_text_color="#38bdf8"
)
46
 
47
- with gr.Blocks(theme=theme, title="ZenkaMind v11") as demo:
48
  gr.Markdown(
49
- "<h1 style='text-align:center;color:#38bdf8'>🧠 ZenkaMind v11</h1>"
50
- "<p style='text-align:center;color:#94a3b8'>Türkçe yapay zekâ sohbet asistanı — Mixtral 8x7B</p>"
51
  )
52
 
53
  chat_ui = gr.Chatbot(height=460, label="ZenkaMind")
 
1
  import os, requests, gradio as gr
2
 
3
# 🔗 Primary and fallback model endpoints (Hugging Face Inference API).
PRIMARY_URL = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1"
FALLBACK_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
# Token comes from the environment; if unset, os.getenv returns None and the
# header below literally becomes "Bearer None" (unauthenticated request).
HF_TOKEN = os.getenv("HF_TOKEN")
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
8
 
9
def ask_model(url, message):
    """Query one Hugging Face Inference API endpoint and return generated text.

    Parameters
    ----------
    url : str
        Full inference endpoint URL for the model.
    message : str
        User prompt, forwarded verbatim as the model input.

    Returns
    -------
    str | None
        The model's generated text, or None on any network/parse failure or
        unexpected payload shape — callers treat None as "try the fallback".
    """
    payload = {
        "inputs": message,
        "parameters": {"max_new_tokens": 300, "temperature": 0.7},
        # Ask the API to block until the model is loaded rather than 503 immediately.
        "options": {"wait_for_model": True}
    }
    try:
        r = requests.post(url, headers=HEADERS, json=payload, timeout=60)
        data = r.json()
    # Narrowed from a blanket `except Exception`: only network errors/timeouts
    # (RequestException) and non-JSON bodies (ValueError) mean "endpoint failed";
    # programming errors should surface, not be silently turned into None.
    except (requests.RequestException, ValueError):
        return None

    # Successful generations arrive either as [{"generated_text": ...}] or as a
    # bare {"generated_text": ...} dict. Guard shapes explicitly instead of
    # letting IndexError/TypeError on data[0] drive control flow.
    if isinstance(data, list) and data and isinstance(data[0], dict):
        return data[0].get("generated_text")
    if isinstance(data, dict) and "generated_text" in data:
        return data["generated_text"]
    return None
25
 
26
def chat(message, history):
    """Gradio callback: answer *message*, preferring the primary model.

    Tries the primary endpoint first, then the fallback endpoint, and finally
    a static Turkish apology so the UI always receives a reply. Returns the
    updated history twice — once for the Chatbot, once for the state.
    """
    history = history or []
    reply = None
    # Stop at the first endpoint that yields a non-empty answer.
    for endpoint in (PRIMARY_URL, FALLBACK_URL):
        reply = ask_model(endpoint, message)
        if reply:
            break
    if not reply:
        reply = "⚠️ Şu anda modellerden yanıt alınamadı. Lütfen birkaç saniye sonra tekrar deneyin."
    history.append((message, reply))
    return history, history
35
 
 
 
36
# 🎨 Theme and UI settings: dark slate background with light-blue accents.
theme = gr.themes.Soft(primary_hue="blue", neutral_hue="slate").set(
    body_background_fill="#0f172a",
    block_background_fill="#1e293b",
    block_title_text_color="#38bdf8"
)
41
 
42
+ with gr.Blocks(theme=theme, title="ZenkaMind v12") as demo:
43
  gr.Markdown(
44
+ "<h1 style='text-align:center;color:#38bdf8'>🧠 ZenkaMind v12</h1>"
45
+ "<p style='text-align:center;color:#94a3b8'>Türkçe yapay zekâ sohbet asistanı — Mixtral + Llama 3</p>"
46
  )
47
 
48
  chat_ui = gr.Chatbot(height=460, label="ZenkaMind")