# BASHA BRIDGE — Hugging Face Space: English -> Indic translation demo.
# NOTE: the hosted Space was showing a "Runtime error" status when captured.
# On ZeroGPU Spaces, `spaces` must be imported before torch (or any library
# that initializes CUDA) so its GPU-allocation patching can take effect.
import spaces
import gradio as gr
import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# IndicTrans2 English->Indic 1B checkpoint. trust_remote_code is required:
# the checkpoint ships its own custom tokenizer/model implementation.
model_name = "ai4bharat/indictrans2-en-indic-1B"
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name, trust_remote_code=True)

# Prefer GPU when available; fall back to CPU otherwise.
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)
# UI language name -> FLORES-200 language/script tag used by IndicTrans2.
# Extended with additional en-indic targets the 1B checkpoint supports.
LANG_MAP = {
    "Hindi": "hin_Deva",
    "Bengali": "ben_Beng",
    "Marathi": "mar_Deva",
    "Telugu": "tel_Telu",
    "Tamil": "tam_Taml",
    "Gujarati": "guj_Gujr",
    "Kannada": "kan_Knda",
    "Malayalam": "mal_Mlym",
    "Punjabi": "pan_Guru",
    "Odia": "ory_Orya",
    "Assamese": "asm_Beng",
    "Urdu": "urd_Arab",
    "Nepali": "npi_Deva",
    "Sanskrit": "san_Deva",
    "Maithili": "mai_Deva",
}
@spaces.GPU  # required on ZeroGPU hardware; its absence causes a Space runtime error
def translate(target_lang, text):
    """Translate English *text* into the Indic language named *target_lang*.

    Args:
        target_lang: A key of ``LANG_MAP`` (e.g. ``"Hindi"``).
        text: English source text.

    Returns:
        The translated string, or ``""`` for empty/blank input.

    Raises:
        KeyError: If *target_lang* is not present in ``LANG_MAP``.
    """
    if not text or not text.strip():
        return ""
    lang_code = LANG_MAP[target_lang]

    # Per the IndicTrans2 model card, inputs are preprocessed into the form
    # "<src_tag> <tgt_tag> <sentence>" (e.g. "eng_Latn hin_Deva Hello"), and
    # generation uses no forced BOS token (that is the NLLB/mBART pattern).
    # NOTE(review): the official pipeline also normalizes text via
    # IndicProcessor; tag-prepending alone approximates it — confirm quality.
    tagged_text = f"eng_Latn {lang_code} {text.strip()}"
    inputs = tokenizer(
        tagged_text,
        return_tensors="pt",
        padding=True,
        truncation=True,
    ).to(device)

    with torch.inference_mode():  # inference only — skip autograd bookkeeping
        output = model.generate(
            **inputs,
            use_cache=True,
            num_beams=5,  # beam search, per the model card's recommended settings
            max_length=200,
        )
    return tokenizer.batch_decode(output, skip_special_tokens=True)[0]
# Build the two-input / one-output Gradio UI around the translator.
target_language_input = gr.Dropdown(
    list(LANG_MAP.keys()),
    label="Select Target Language",
    value="Hindi",
)
english_text_input = gr.Textbox(label="Enter Text in English")
translation_output = gr.Textbox(label="Translated Output")

iface = gr.Interface(
    fn=translate,
    inputs=[target_language_input, english_text_input],
    outputs=translation_output,
    title="BASHA BRIDGE 🌍",
    description="AI Powered Multilingual Translation System",
)

iface.launch()