Yermek68 committed on
Commit
1f1ce4a
·
verified ·
1 Parent(s): bb9dd00

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +66 -5
app.py CHANGED
@@ -340,16 +340,77 @@ from gradio.routes import mount_gradio_app
340
  # HF Spaces detection
341
  IS_HF_SPACES = os.getenv("SPACE_ID") is not None
342
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
343
  if __name__ == "__main__":
344
  if IS_HF_SPACES:
345
- # HF Spaces: mount FastAPI Gradio на порт 7860
346
- import uvicorn
347
- app = gr.mount_gradio_app(app, demo, path="/")
348
  uvicorn.run(app, host="0.0.0.0", port=7860)
349
  else:
350
- # ✅ Локально: Gradio и FastAPI запускаются отдельно
351
  import threading
352
- import uvicorn
 
353
 
354
  def run_gradio():
355
  demo.queue().launch(server_port=7861, share=False)
 
340
  # HF Spaces detection
341
  IS_HF_SPACES = os.getenv("SPACE_ID") is not None
342
 
343
+ import os
344
+ import gradio as gr
345
+ import uvicorn
346
+ from fastapi import FastAPI
347
+ from gradio.routes import mount_gradio_app
348
+
349
+ # ✅ Безопасный импорт alert_core (чтобы не было ошибки)
350
+ try:
351
+ from alert_core import log_alert
352
+ except ModuleNotFoundError:
353
+ def log_alert(message):
354
+ print(f"[⚠️ ALERT] {message} (alert_core not found, using fallback)")
355
+
356
+ # =====================================================================
357
+ # 🚀 Твой основной интерфейс Gradio
358
+ # =====================================================================
359
+ demo = gr.Blocks(title="Eroha AgentAPI v5.9.2 — Enterprise Edition")
360
+
361
+ with demo:
362
+ gr.Markdown("# 🤖 Eroha AgentAPI v5.9.2 — Enterprise Edition")
363
+
364
+ with gr.Tab("💬 Chat"):
365
+ inp = gr.Textbox(label="Введите запрос")
366
+ model = gr.Dropdown(
367
+ ["microsoft/phi-3-mini-4k-instruct",
368
+ "google/gemma-2-2b-it",
369
+ "meta-llama/Meta-Llama-3-8B-Instruct"],
370
+ value="microsoft/phi-3-mini-4k-instruct", label="Модель"
371
+ )
372
+ out = gr.Textbox(label="Ответ")
373
+ btn = gr.Button("🚀 Отправить")
374
+ btn.click(fn=lambda x, m: f"Обработка запроса для {m}: {x}",
375
+ inputs=[inp, model],
376
+ outputs=out)
377
+
378
+ with gr.Tab("📊 Dashboard"):
379
+ dash = gr.Markdown("📈 Здесь будет аналитика или метрики")
380
+ refresh = gr.Button("🔄 Обновить")
381
+ refresh.click(fn=lambda: "Метрики обновлены ✅", outputs=dash)
382
+
383
+ # =====================================================================
384
+ # ✅ Определяем окружение и логирование
385
+ # =====================================================================
386
+ IS_HF_SPACES = os.getenv("SPACE_ID") is not None
387
+ RUN_ENV = "Hugging Face Spaces" if IS_HF_SPACES else "Localhost"
388
+
389
+ import logging
390
+ logging.basicConfig(
391
+ level=logging.INFO,
392
+ format="%(asctime)s [%(levelname)s] %(message)s",
393
+ handlers=[logging.StreamHandler()]
394
+ )
395
+
396
+ logging.info(f"🚀 Starting Eroha Agent Environment: {RUN_ENV}")
397
+ log_alert(f"System boot: {RUN_ENV}")
398
+
399
+ # Создаём FastAPI приложение
400
+ app = FastAPI()
401
+
402
+ # =====================================================================
403
+ # ✅ Финальный блок запуска
404
+ # =====================================================================
405
  if __name__ == "__main__":
406
  if IS_HF_SPACES:
407
+ logging.info("Running on Hugging Face Spaces (port 7860)")
408
+ app = mount_gradio_app(app, demo, path="/")
 
409
  uvicorn.run(app, host="0.0.0.0", port=7860)
410
  else:
 
411
  import threading
412
+
413
+ logging.info("✅ Running locally (FastAPI → 7860 | Gradio → 7861)")
414
 
415
  def run_gradio():
416
  demo.queue().launch(server_port=7861, share=False)