# Spaces: Sleeping — Hugging Face Space status banner captured together with
# the page by the scraper; not part of the application code.
# app.py — Gradio Blocks + built-in REST API (api_name), lazy-loaded model
import os

import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline

# ===== Configuration =====
# Model repo and maximum generation length, overridable via environment.
MODEL_ID = os.getenv("MODEL_ID", "hasmar03/mt5_id2md")
MAX_LEN = int(os.getenv("MAX_LEN", "128"))

# ===== Lazy loader =====
# The pipeline is created on first use (see get_pipe) so the Space starts fast.
pipe = None
def get_pipe():
    """Return the shared text2text-generation pipeline, creating it on first call.

    Lazily loads the tokenizer and seq2seq model identified by MODEL_ID and
    caches the resulting pipeline in the module-level ``pipe`` global, so the
    (slow) model download/instantiation happens only once per process.
    """
    global pipe
    if pipe is None:
        tok = AutoTokenizer.from_pretrained(MODEL_ID)
        mdl = AutoModelForSeq2SeqLM.from_pretrained(MODEL_ID)
        pipe = pipeline(
            "text2text-generation",
            model=mdl,
            tokenizer=tok,
            max_length=MAX_LEN,
        )
    return pipe
| def _build_prompt(text: str, direction: str): | |
| # Sesuaikan dengan skema training Anda | |
| if direction == "id2md" or direction == "Indonesia β Mandar": | |
| return f"translate Indonesian to Mandar: {text}" | |
| elif direction == "md2id" or direction == "Mandar β Indonesia": | |
| return f"translate Mandar to Indonesian: {text}" | |
| return text | |
def translate_fn(text: str, arah: str) -> str:
    """Translate ``text`` in the direction ``arah`` (UI label or short code).

    Builds the task prompt, runs the lazily-loaded pipeline, and returns the
    generated translation string.
    """
    p = get_pipe()
    prompt = _build_prompt(text, arah)
    return p(prompt)[0]["generated_text"]
# UI labels below contain "β" (apparently a mojibake'd arrow); they are kept
# byte-identical because _build_prompt compares against these exact strings.
with gr.Blocks(title="Mandar β Indonesia Translator") as demo:
    gr.Markdown("### Mandar β Indonesia Translator")
    arah = gr.Radio(
        ["Indonesia β Mandar", "Mandar β Indonesia"],
        value="Indonesia β Mandar",
        label="Arah",
    )
    # "β¦" was unambiguous mojibake for the ellipsis character; repaired here
    # (nothing compares against this placeholder string).
    src = gr.Textbox(label="Teks sumber", lines=3, placeholder="Ketik teks…")
    btn = gr.Button("Terjemahkan")
    out = gr.Textbox(label="Hasil", lines=3)
    # api_name exposes a REST endpoint: /api/predict/translate
    btn.click(translate_fn, inputs=[src, arah], outputs=out, api_name="translate")

# Enable request queueing (safe default for a Space)
demo.queue()

# Optional: only launched when run locally as a script
if __name__ == "__main__":
    demo.launch()