import gradio as gr
import torch
from transformers import MarianMTModel, MarianTokenizer

# Model configuration
MODEL_NAME = "cihanunlu/medical-nmt-tr-en"

# Load model and tokenizer
print(f"Loading model: {MODEL_NAME}")
tokenizer = MarianTokenizer.from_pretrained(MODEL_NAME)
model = MarianMTModel.from_pretrained(MODEL_NAME)
model.eval()

# Move to GPU if available
device = "cuda" if torch.cuda.is_available() else "cpu"
model = model.to(device)
print(f"Model loaded on {device}")

def translate_turkish_to_english(text, max_length=256, num_beams=5):
    if not text or not text.strip():
        return "Please enter Turkish text to translate."
    try:
        inputs = tokenizer(
            text,
            return_tensors="pt",
            padding=True,
            truncation=True,
            max_length=max_length
        )
        inputs = {k: v.to(device) for k, v in inputs.items()}
        with torch.no_grad():
            outputs = model.generate(
                **inputs,
                max_length=max_length,
                num_beams=num_beams,
                early_stopping=True
            )
        return tokenizer.decode(outputs[0], skip_special_tokens=True)
    except Exception as e:
        return f"Error: {str(e)}"

# Example inputs for the demo (English glosses in the comments)
examples = [
    "Hastanın kan basıncı normal sınırlarda.",  # The patient's blood pressure is within normal limits.
    "Ameliyat başarılı bir şekilde tamamlandı.",  # The surgery was completed successfully.
    "Hasta taburcu edilmeye hazır.",  # The patient is ready for discharge.
    "Laboratuvar sonuçları bekleniyor.",  # Laboratory results are pending.
    # At the upper pole of the spleen, two 6 mm formations suggestive of an accessory spleen are present.
    "Dalak üst pol lokalizasyonunda iki adet 6 mm boyutunda aksesuar dalağı düşündüren oluşum mevcuttur.",
]

demo = gr.Interface(
    fn=translate_turkish_to_english,
    inputs=gr.Textbox(
        label="🇹🇷 Turkish Text",
        placeholder="Enter Turkish medical text here...",
        lines=5
    ),
    outputs=gr.Textbox(
        label="🇬🇧 English Translation",
        lines=5
    ),
    title="🏥 Medical Translator: Turkish → English",
    description="""
    **Features:**
    - Domain-specific medical terminology
    - High-quality translations for healthcare contexts
    - Fast inference with beam search
    """,
    examples=examples,
    theme=gr.themes.Citrus()  # the theme is an Interface argument, not a launch() argument
)

if __name__ == "__main__":
    demo.launch()
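
# For a Hugging Face Space, a requirements.txt along these lines would be
# needed (a sketch; exact pins are not taken from the original repo):
#   gradio         # gr.themes.Citrus() requires a recent Gradio release
#   torch
#   transformers
#   sentencepiece  # MarianTokenizer depends on sentencepiece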