from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Pretrained English -> Telugu translation checkpoint on the Hugging Face Hub.
model_name = "hima06varshini/english-to-telugu-translation"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
def translate(text: str, max_new_tokens=None) -> str:
    """Translate English *text* to Telugu with the module-level model.

    Args:
        text: Source English sentence.
        max_new_tokens: Optional cap on the number of generated tokens.
            When ``None`` (the default) the model's built-in generation
            length is used, exactly as before — note that the default may
            truncate long translations; pass a value to override.

    Returns:
        The decoded Telugu translation with special tokens stripped.
    """
    inputs = tokenizer(text, return_tensors="pt")
    # Only forward the kwarg when explicitly requested so the default
    # call is byte-identical to the original behavior.
    gen_kwargs = {} if max_new_tokens is None else {"max_new_tokens": max_new_tokens}
    outputs = model.generate(**inputs, **gen_kwargs)
    # outputs[0] is the first (and only) sequence in the batch.
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Example usage — guarded so that importing this module does not trigger
# a translation run as a side effect.
if __name__ == "__main__":
    text = "Hello, how are you?"
    translation = translate(text)
    print("Translated text:", translation)