Teotonix commited on
Commit
32a4a3b
·
verified ·
1 Parent(s): d47f38a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +64 -94
app.py CHANGED
@@ -1,7 +1,11 @@
1
  import os
2
  import gradio as gr
 
3
  from openai import OpenAI
4
 
 
 
 
5
  HF_TOKEN = os.getenv("HF_TOKEN")
6
 
7
  client = OpenAI(
@@ -9,12 +13,12 @@ client = OpenAI(
9
  api_key=HF_TOKEN,
10
  )
11
 
12
- MODEL_ID = "HuggingFaceTB/SmolLM3-3B:hf-inference"
13
  SYSTEM = "Sen MAIND AI'sin. Kısa, net ve yardımcı cevap ver."
14
 
15
  def llm_reply(user_text: str) -> str:
16
  resp = client.chat.completions.create(
17
- model=MODEL_ID,
18
  messages=[
19
  {"role": "system", "content": SYSTEM},
20
  {"role": "user", "content": user_text},
@@ -30,111 +34,77 @@ def chat_fn(message, history):
30
  try:
31
  reply = llm_reply(message)
32
  except Exception as e:
33
- reply = f"⚠️ Hata: {e}"
34
  history.append({"role": "assistant", "content": reply})
35
  return history, ""
36
 
37
- CSS = """
38
- :root{
39
- --bg:#050b1a;
40
- --panel:#0a1636;
41
- --panel2:#0c1d46;
42
- --stroke:rgba(56,189,248,.25);
43
- --glow:rgba(56,189,248,.25);
44
- --txt:#e6f1ff;
45
- --muted:#93c5fd;
46
- --blue:#38bdf8;
47
- }
48
- body, .gradio-container{
49
- background: radial-gradient(1200px 800px at 20% 0%, rgba(56,189,248,.18), transparent 55%),
50
- radial-gradient(900px 700px at 90% 20%, rgba(99,102,241,.18), transparent 55%),
51
- var(--bg) !important;
52
- color: var(--txt) !important;
53
- }
54
- #wrap{
55
- max-width: 980px;
56
- margin: 0 auto;
57
- }
58
- #topbar{
59
- display:flex; align-items:center; gap:14px;
60
- padding: 14px 16px;
61
- background: linear-gradient(180deg, rgba(10,22,54,.9), rgba(10,22,54,.55));
62
- border: 1px solid var(--stroke);
63
- border-radius: 16px;
64
- box-shadow: 0 0 0 1px rgba(56,189,248,.06), 0 20px 60px rgba(0,0,0,.35);
65
- }
66
- #brand h1{ margin:0; font-size: 20px; letter-spacing:.4px; }
67
- #brand p{ margin:2px 0 0; color: var(--muted); font-size: 12px; }
68
 
69
- #card{
70
- margin-top: 14px;
71
- padding: 14px;
72
- background: linear-gradient(180deg, rgba(12,29,70,.85), rgba(10,22,54,.6));
73
- border: 1px solid var(--stroke);
74
- border-radius: 18px;
75
- box-shadow: 0 0 0 1px rgba(56,189,248,.05), 0 30px 90px rgba(0,0,0,.45);
76
- }
 
 
 
 
77
 
78
- /* Chat area */
79
- #chat .wrap{
80
- border-radius: 14px;
81
- }
82
- .gr-chatbot{
83
- background: transparent !important;
84
- }
85
- .gr-chatbot .message{
86
- border-radius: 14px !important;
87
- border: 1px solid rgba(56,189,248,.18) !important;
88
- box-shadow: 0 0 20px rgba(56,189,248,.08);
89
- }
90
- .gr-chatbot .message.user{
91
- background: rgba(56,189,248,.08) !important;
92
- }
93
- .gr-chatbot .message.bot{
94
- background: rgba(99,102,241,.08) !important;
95
- }
96
 
97
- /* Input row */
98
- #inputrow{
99
- margin-top: 10px;
100
- gap: 10px;
101
- }
102
- #sendbtn button{
103
- background: linear-gradient(90deg, rgba(56,189,248,.95), rgba(99,102,241,.95)) !important;
104
- border: 1px solid rgba(56,189,248,.35) !important;
105
- border-radius: 14px !important;
106
- box-shadow: 0 0 30px rgba(56,189,248,.18);
107
- }
108
- #sendbtn button:hover{
109
- filter: brightness(1.05);
110
- }
111
- textarea, input, .gr-text-input textarea{
112
- background: rgba(5,11,26,.55) !important;
113
- border: 1px solid rgba(56,189,248,.22) !important;
114
- border-radius: 14px !important;
115
- color: var(--txt) !important;
116
- }
117
  """
118
 
119
  with gr.Blocks(css=CSS, title="MaindAI") as demo:
120
  with gr.Column(elem_id="wrap"):
121
  with gr.Row(elem_id="topbar"):
122
  gr.Image("logo.png", show_label=False, height=58, width=58, container=False)
123
- with gr.Column(elem_id="brand"):
124
- gr.Markdown("# MAIND AI")
125
- gr.Markdown("<p>Neon mavi tema • HF Router • SmolLM3-3B</p>")
126
 
127
- with gr.Column(elem_id="card"):
128
- chatbot = gr.Chatbot(value=[], elem_id="chat", height=420)
129
- with gr.Row(elem_id="inputrow"):
130
- msg = gr.Textbox(
131
- placeholder="Bir şey sor…",
132
- show_label=False,
133
- scale=8
134
- )
135
- send = gr.Button("Gönder", elem_id="sendbtn", scale=2)
136
 
137
- send.click(chat_fn, [msg, chatbot], [chatbot, msg])
138
- msg.submit(chat_fn, [msg, chatbot], [chatbot, msg])
 
 
 
 
 
 
 
 
139
 
140
  demo.launch()
 
1
  import os
2
  import gradio as gr
3
+ import torch
4
  from openai import OpenAI
5
 
6
+ from diffusers import StableDiffusionPipeline, EulerAncestralDiscreteScheduler
7
+
8
+ # ----------------- CHAT (HF Router) -----------------
9
  HF_TOKEN = os.getenv("HF_TOKEN")
10
 
11
  client = OpenAI(
 
13
  api_key=HF_TOKEN,
14
  )
15
 
16
# Chat model routed through HF Inference (":hf-inference" selects the provider).
CHAT_MODEL = "HuggingFaceTB/SmolLM3-3B:hf-inference"
# System prompt (Turkish): "You are MAIND AI. Answer briefly, clearly and helpfully."
SYSTEM = "Sen MAIND AI'sin. Kısa, net ve yardımcı cevap ver."
18
 
19
  def llm_reply(user_text: str) -> str:
20
  resp = client.chat.completions.create(
21
+ model=CHAT_MODEL,
22
  messages=[
23
  {"role": "system", "content": SYSTEM},
24
  {"role": "user", "content": user_text},
 
34
  try:
35
  reply = llm_reply(message)
36
  except Exception as e:
37
+ reply = f"⚠️ Chat hata: {e}"
38
  history.append({"role": "assistant", "content": reply})
39
  return history, ""
40
 
41
# ----------------- IMAGE (LOCAL in Space, CPU) -----------------
IMG_MODEL = "segmind/tiny-sd"  # small SD checkpoint, suitable for a CPU Space
img_pipe = None  # lazily-initialized module-level singleton; see get_pipe()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
44
 
45
def get_pipe():
    """Return the shared Stable Diffusion pipeline, building it on first use.

    Loading is deferred so the Space boots quickly; every later call
    reuses the cached module-level ``img_pipe`` instance.
    """
    global img_pipe
    if img_pipe is not None:
        return img_pipe
    new_pipe = StableDiffusionPipeline.from_pretrained(
        IMG_MODEL,
        torch_dtype=torch.float32,
    )
    # Euler-Ancestral produces acceptable images at very low step counts on CPU.
    new_pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(new_pipe.scheduler.config)
    new_pipe.enable_attention_slicing()  # reduces peak RAM usage
    img_pipe = new_pipe.to("cpu")
    return img_pipe
57
 
58
def gen_image(prompt: str, steps: int, size: int):
    """Generate one image from *prompt* with the local SD pipeline.

    Parameters:
        prompt: text description; must be non-blank.
        steps:  number of denoising steps (coerced to int).
        size:   square edge length in pixels; floored to a multiple of 8
                because the SD UNet rejects other dimensions. All current
                slider values (256-512, step 64) pass through unchanged.

    Returns:
        The first generated PIL image.

    Raises:
        gr.Error: if *prompt* is empty or whitespace-only.
    """
    if not prompt or not prompt.strip():
        raise gr.Error("Prompt boş.")
    # SD requires height/width divisible by 8; clamp to at least one latent block.
    side = max(8, (int(size) // 8) * 8)
    pipe = get_pipe()
    with torch.inference_mode():  # no autograd bookkeeping during sampling
        out = pipe(
            prompt=prompt,
            num_inference_steps=int(steps),
            guidance_scale=6.5,
            height=side,
            width=side,
        )
    return out.images[0]
 
 
 
 
 
71
 
72
# ----------------- UI (neon blue) -----------------
# Global stylesheet: dark navy background with a cyan radial glow,
# applied to the whole app via gr.Blocks(css=CSS).
CSS = """
:root{--bg:#050b1a;--panel:#0a1636;--stroke:rgba(56,189,248,.25);--txt:#e6f1ff;--muted:#93c5fd;}
body,.gradio-container{background:radial-gradient(1200px 800px at 20% 0%, rgba(56,189,248,.18), transparent 55%),var(--bg)!important;color:var(--txt)!important;}
#wrap{max-width:1100px;margin:0 auto;}
#topbar{display:flex;align-items:center;gap:14px;padding:14px 16px;background:rgba(10,22,54,.75);border:1px solid var(--stroke);border-radius:16px;}
#card{margin-top:14px;padding:14px;background:rgba(10,22,54,.55);border:1px solid var(--stroke);border-radius:18px;}
button{border-radius:14px!important;}
textarea,input{background:rgba(5,11,26,.55)!important;border:1px solid rgba(56,189,248,.22)!important;color:var(--txt)!important;}
"""
82
 
83
# App layout: top bar with logo + title, then a two-column card —
# chat on the left, image generation on the right.
with gr.Blocks(css=CSS, title="MaindAI") as demo:
    with gr.Column(elem_id="wrap"):
        with gr.Row(elem_id="topbar"):
            gr.Image("logo.png", show_label=False, height=58, width=58, container=False)
            gr.Markdown("## 💙 MAIND AI\n<small style='color:#93c5fd'>Chat + Görsel (HF Space CPU)</small>")

        with gr.Row(elem_id="card"):
            # LEFT: chat panel wired to chat_fn (returns updated history + cleared box)
            with gr.Column(scale=6):
                gr.Markdown("### 💬 Chat")
                chatbot = gr.Chatbot(value=[], height=420)
                msg = gr.Textbox(placeholder="Bir şey sor…", show_label=False)
                send = gr.Button("Gönder")
                send.click(chat_fn, [msg, chatbot], [chatbot, msg])
                msg.submit(chat_fn, [msg, chatbot], [chatbot, msg])

            # RIGHT: local image generation via gen_image
            with gr.Column(scale=5):
                gr.Markdown("### 🎨 Görsel Üret")
                img_prompt = gr.Textbox(placeholder="Örn: neon mavi cyberpunk şehir, yağmur, gece", show_label=False)
                with gr.Row():
                    steps = gr.Slider(2, 12, value=6, step=1, label="Steps (hız)")
                    size = gr.Slider(256, 512, value=384, step=64, label="Boyut")
                # NOTE(review): original indentation lost in the paste — button and
                # output assumed to sit at column level, below the slider row; confirm.
                img_btn = gr.Button("Görsel Oluştur")
                img_out = gr.Image(height=420)
                img_btn.click(gen_image, [img_prompt, steps, size], img_out)

demo.launch()