Diabvell committed on
Commit
a6f23ff
·
verified ·
1 Parent(s): 07585c3

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -82
app.py DELETED
@@ -1,82 +0,0 @@
1
- from pathlib import Path
2
- import gradio as gr
3
- import torch
4
- from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
5
-
6
# Directory containing this file; model paths are resolved relative to it
# so the app works regardless of the working directory it is launched from.
ROOT = Path(__file__).parent

# Your repo has: t5_enzh/checkpoint-50000/...
MODEL_ROOT = ROOT / "t5_enzh"
10
-
11
- def latest_checkpoint(root: Path) -> Path | None:
12
- if not root.exists():
13
- return None
14
- ckpts = [p for p in root.iterdir() if p.is_dir() and p.name.startswith("checkpoint-")]
15
- if not ckpts:
16
- # maybe you uploaded final model directly into t5_enzh
17
- if (root / "config.json").exists():
18
- return root
19
- return None
20
- # pick biggest step number
21
- ckpts.sort(key=lambda p: int(p.name.split("-")[-1]))
22
- return ckpts[-1]
23
# Resolved once at import time; None means no usable checkpoint was found.
CKPT = latest_checkpoint(MODEL_ROOT)

# Lazily populated cache holding the tokenizer and model singletons.
_pipe = {"tok": None, "model": None}
27
-
28
def model_ready():
    """Report whether a usable checkpoint was located at import time."""
    return CKPT is not None
30
-
31
def get_model():
    """Return the (tokenizer, model) pair, loading it on first use.

    The heavyweight Hugging Face objects are constructed only once and
    cached in ``_pipe``; subsequent calls return the cached pair. The
    model is moved to CUDA when available and switched to eval mode.
    """
    if _pipe["model"] is None:
        tokenizer = AutoTokenizer.from_pretrained(CKPT, use_fast=False)
        net = AutoModelForSeq2SeqLM.from_pretrained(CKPT)
        if torch.cuda.is_available():
            net = net.to("cuda")
        net.eval()
        _pipe["tok"], _pipe["model"] = tokenizer, net
    return _pipe["tok"], _pipe["model"]
41
-
42
def translate_text(text: str):
    """Translate an English sentence into Chinese with the cached T5 model.

    Returns "" for blank input, and a bracketed status string when the
    checkpoint is missing or generation produced no output.
    """
    cleaned = (text or "").strip()
    if not cleaned:
        return ""
    if not model_ready():
        return "[Model not ready yet — upload/checkpoint missing.]"

    tok, model = get_model()

    # T5 needs a task prefix; without it the model output is meaningless.
    prompt = "translate English to Chinese: " + cleaned  # ✅ key fix

    encoded = tok(prompt, return_tensors="pt", truncation=True, max_length=256)
    if torch.cuda.is_available():
        encoded = {name: tensor.to("cuda") for name, tensor in encoded.items()}

    with torch.no_grad():
        generated = model.generate(
            **encoded,
            max_new_tokens=80,
            num_beams=4,
            early_stopping=True,
        )

    decoded = tok.decode(generated[0], skip_special_tokens=True).strip()
    if decoded:
        return decoded
    return "[No output — check prompt prefix / checkpoint quality]"
67
-
68
-
69
title = "EN → ZH Translator"
status = "✅ Model ready" if model_ready() else "⏳ Model not ready"

# Minimal Gradio UI: one input box, one output box, one translate button.
with gr.Blocks() as demo:
    gr.Markdown(f"# {title}")
    gr.Markdown(f"**Status:** {status}")
    gr.Markdown(f"**Loaded path:** `{str(CKPT) if CKPT else 'None'}`")

    source_box = gr.Textbox(label="English", lines=4, placeholder="Type English here...")
    target_box = gr.Textbox(label="Chinese", lines=4)
    translate_btn = gr.Button("Translate")
    translate_btn.click(translate_text, source_box, target_box)

demo.launch()