Yermek68 committed on
Commit
b758f58
·
verified ·
1 Parent(s): 80ea55c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +44 -86
app.py CHANGED
@@ -1,87 +1,45 @@
 
 
1
  import gradio as gr
2
- from transformers import pipeline, AutoTokenizer, logging
3
- from langdetect import detect
4
- import threading, warnings, os, torch
5
-
6
- # --- Silent mode ---
7
- os.environ["TOKENIZERS_PARALLELISM"] = "false"
8
- os.environ["TRANSFORMERS_NO_ADVISORY_WARNINGS"] = "1"
9
- warnings.filterwarnings("ignore")
10
- logging.set_verbosity_error()
11
-
12
- # --- Global vars ---
13
- models = {}
14
- models_ready = False
15
- status_message = "⏳ Инициализация моделей (1–2 мин)..."
16
-
17
- # --- Model loader ---
18
- def load_model(model_name, task="summarization"):
19
- try:
20
- print(f"🔄 Загружается модель: {model_name}")
21
- tokenizer = AutoTokenizer.from_pretrained(
22
- model_name,
23
- use_fast=False,
24
- legacy=True,
25
- trust_remote_code=True
26
- )
27
- model_pipe = pipeline(task, model=model_name, tokenizer=tokenizer, device=-1)
28
- # Torch optimizations
29
- if hasattr(model_pipe.model, "half"):
30
- model_pipe.model = model_pipe.model.half()
31
- if hasattr(torch, "compile"):
32
- try:
33
- model_pipe.model = torch.compile(model_pipe.model, mode="reduce-overhead")
34
- except Exception:
35
- pass
36
- return model_pipe
37
- except Exception as e:
38
- print(f"⚠️ Ошибка при загрузке {model_name}: {e}")
39
- return None
40
-
41
- # --- Preload all models asynchronously ---
42
- def preload_models():
43
- global models, models_ready, status_message
44
- try:
45
- models["en"] = load_model("facebook/bart-large-cnn")
46
- models["ru"] = load_model("IlyaGusev/mbart_ru_sum_gazeta")
47
- models["multi"] = load_model("csebuetnlp/mT5_multilingual_XLSum")
48
- models_ready = True
49
- status_message = "✅ Все модели успешно загружены и оптимизированы!"
50
- print(status_message)
51
- except Exception as e:
52
- status_message = f"❌ Ошибка инициализации моделей: {e}"
53
- print(status_message)
54
-
55
- threading.Thread(target=preload_models, daemon=True).start()
56
-
57
- # --- Summarization logic ---
58
- def summarize_text(text):
59
- if not text.strip():
60
- return "⚠️ Введите текст для суммаризации."
61
- if not models_ready:
62
- return status_message
63
- try:
64
- lang = detect(text)
65
- model = (
66
- models["ru"] if lang == "ru" else
67
- models["en"] if lang in ("en", "fr", "de", "es") else
68
- models["multi"]
69
- )
70
- summary = model(text, max_length=250, min_length=40, do_sample=False)
71
- return summary[0]["summary_text"]
72
- except Exception as e:
73
- return f"⚠️ Ошибка суммаризации: {e}"
74
-
75
- # --- UI ---
76
- with gr.Blocks(title="Eroha Summarizer PRO — Multilingual (BART / MBART / mT5)") as app:
77
- gr.Markdown("""
78
- ## 🧠 **Eroha Summarizer PRO**
79
- <div style='color:gray'>Многоязычный суммаризатор (EN/RU/Multilingual)</div>
80
- """)
81
- status = gr.Markdown(status_message)
82
- inp = gr.Textbox(label="Введите текст", lines=12, placeholder="Вставьте текст сюда...")
83
- out = gr.Textbox(label="Результат суммаризации")
84
- btn = gr.Button("🚀 Суммаризировать")
85
- btn.click(fn=summarize_text, inputs=inp, outputs=out)
86
-
87
- app.launch(server_name="0.0.0.0", server_port=7860)
 
1
from typing import List

import gradio as gr
from fastapi import FastAPI, File, HTTPException, UploadFile
from transformers import pipeline
5
+
6
# FastAPI application object; the REST endpoints below hang off this.
app = FastAPI(title="Eroha Summarizer PRO", version="3.6")

# Summarization pipeline, loaded lazily on first use (None until then).
summarizer = None
10
+
11
@app.get("/")
def home():
    """Health-check endpoint confirming the service is up."""
    return {"message": "✅ Eroha Summarizer PRO работает!"}
14
+
15
@app.post("/summarize")
async def summarize(files: List[UploadFile] = File(...)):
    """Summarize the concatenated text of the uploaded files.

    Reads every uploaded file, decodes it as UTF-8 (replacing invalid
    bytes instead of crashing on binary uploads), joins the texts with
    newlines, and runs the BART summarization pipeline over the result.

    Returns:
        {"summary": <summary text>}

    Raises:
        HTTPException(400) when the uploaded files contain no text.
    """
    global summarizer
    if summarizer is None:
        # Lazy first-use load keeps process startup fast; loaded once per process.
        summarizer = pipeline("summarization", model="facebook/bart-large-cnn")

    # errors="replace": a malformed or binary upload must not 500 with a
    # UnicodeDecodeError deep inside the handler.
    texts = [(await f.read()).decode("utf-8", errors="replace") for f in files]
    full_text = "\n".join(texts)
    if not full_text.strip():
        raise HTTPException(status_code=400, detail="No text found in the uploaded files.")

    # truncation=True: BART's input window is limited (~1024 tokens); without it,
    # long concatenated documents make the pipeline raise instead of summarizing.
    summary = summarizer(
        full_text, max_length=180, min_length=50, do_sample=False, truncation=True
    )
    return {"summary": summary[0]["summary_text"]}
25
+
26
# ============ Gradio UI ============
def gradio_summary(text):
    """Summarize *text* for the Gradio UI and return the summary string.

    Fixes vs. original: an empty/whitespace-only input no longer reaches the
    pipeline (which errors on empty text) — it returns "" instead; and
    truncation=True prevents failures on inputs longer than the model's
    input window.
    """
    global summarizer
    if text is None or not text.strip():
        # Empty textbox: nothing to summarize.
        return ""
    if summarizer is None:
        # Same lazy single load as the REST endpoint.
        summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
    result = summarizer(
        text, max_length=180, min_length=50, do_sample=False, truncation=True
    )
    return result[0]["summary_text"]
33
+
34
# Gradio front-end wired to the same summarization function.
_text_input = gr.Textbox(lines=10, label="Введите текст для суммаризации")

iface = gr.Interface(
    fn=gradio_summary,
    inputs=_text_input,
    outputs="text",
    title="Eroha Summarizer PRO",
    description="AI-система для анализа и суммаризации текстов.",
)
41
+
42
@app.get("/gradio")
def launch_gradio():
    """Start the Gradio UI server and return a confirmation message.

    Defect fixed: iface.launch() without prevent_thread_lock=True blocks
    until the Gradio server shuts down, so the original handler never
    returned its response and froze the worker. With
    prevent_thread_lock=True, launch() returns immediately. A function
    attribute guards against re-launching (and a port-in-use error) on
    repeated requests.
    """
    if not getattr(launch_gradio, "_started", False):
        # NOTE(review): port 7860 may clash with the main server if it is
        # bound there too — confirm the deployment's port layout.
        iface.launch(
            share=False,
            server_name="0.0.0.0",
            server_port=7860,
            prevent_thread_lock=True,
        )
        launch_gradio._started = True
    return {"message": "Gradio interface запущен!"}