# app.py — Gradio chat UI backed by a local Ollama server (Hugging Face Spaces, free CPU tier).
import json
import os

import gradio as gr
import requests

# Base URL of the Ollama server; override via env when the backend runs elsewhere.
OLLAMA_URL = os.environ.get("OLLAMA_URL", "http://127.0.0.1:11434")
# Small default model tag so the free CPU tier can pull and run it quickly.
MODEL = os.environ.get("MODEL_TAG", "qwen2:0.5b")
def chat_fn(message, history):
    """Stream a chat completion from the Ollama backend.

    Signature expected by gr.ChatInterface:
      - message: str (the user's latest message)
      - history: list[list[str, str]] -> [[user, assistant], ...]

    Yields the accumulated assistant reply as it streams in
    (gr.ChatInterface renders each yielded string as the current reply).
    """
    # Convert history plus the new message into Ollama's chat message format.
    msgs = []
    for u, a in history:
        if u:
            msgs.append({"role": "user", "content": u})
        if a:
            msgs.append({"role": "assistant", "content": a})
    msgs.append({"role": "user", "content": message})

    try:
        r = requests.post(
            f"{OLLAMA_URL}/api/chat",
            json={"model": MODEL, "messages": msgs, "stream": True},
            stream=True,
            timeout=300,
        )
        r.raise_for_status()
    except Exception as e:
        # BUG FIX: this function is a generator (it contains `yield`), so a
        # plain `return value` only sets StopIteration.value -- Gradio never
        # shows it and the user sees an empty reply. Yield the error text,
        # then stop the generator.
        yield f"[backend error] {e}"
        return

    partial = ""
    for line in r.iter_lines():
        if not line:
            continue
        try:
            obj = json.loads(line.decode("utf-8"))
        except Exception as e:
            # Same generator caveat as above: yield so the UI can display it.
            yield f"[stream parse error] {e} | line={line[:200]}"
            return
        msg = obj.get("message", {})
        if "content" in msg:
            partial += msg["content"]
            yield partial
        # Ollama marks the final streamed chunk with "done": true.
        if obj.get("done"):
            break
def ping_backend():
    """Quick optional health check: query the Ollama tags endpoint.

    Returns a short human-readable status string for the Health textbox.
    """
    try:
        resp = requests.get(f"{OLLAMA_URL}/api/tags", timeout=10)
        return f"OK {resp.status_code}: {resp.text[:120]}..."
    except Exception as e:
        return f"FAIL: {e}"
# Assemble the UI: a health-check row above the chat interface.
with gr.Blocks(title="Ollama Chat (HF Spaces, Free CPU)") as demo:
    gr.Markdown("# Ollama Chat (HF Spaces, Free CPU)")
    gr.Markdown("Küçük bir LLM ile basit sohbet arayüzü. İlk açılışta model indirilebilir.")

    # Optional one-click backend health check.
    health_button = gr.Button("Backend Health Check")
    health_output = gr.Textbox(label="Health", interactive=False)
    health_button.click(fn=ping_backend, outputs=health_output)

    # Streaming chat widget; title/description handled by the Markdown above.
    chat = gr.ChatInterface(
        fn=chat_fn,
        textbox=gr.Textbox(placeholder="Mesajını yaz..."),
        title=None,
        description=None,
    )
if __name__ == "__main__":
    # Bind to 0.0.0.0:8080 — the address/port the Spaces Docker runtime expects;
    # queue() enables streaming generator responses.
    demo.queue().launch(server_name="0.0.0.0", server_port=8080)