# app.py — Hugging Face Space by CoderHassan (commit e54889e, "Update app.py").
# NOTE: the lines above originally contained file-viewer page residue
# ("raw / history / blame", size 1.11 kB), not program content.
import streamlit as st
from transformers import T5Tokenizer, T5ForConditionalGeneration
# Model/tokenizer loading, cached so the download happens once per session.
@st.cache_resource
def load_model():
    """Download (or load from cache) the t5-small checkpoint.

    Returns:
        tuple: ``(model, tokenizer)`` — the ``T5ForConditionalGeneration``
        model followed by its ``T5Tokenizer``.
    """
    checkpoint = "t5-small"
    t5_tokenizer = T5Tokenizer.from_pretrained(checkpoint)
    t5_model = T5ForConditionalGeneration.from_pretrained(checkpoint)
    return t5_model, t5_tokenizer


model, tokenizer = load_model()
def translate_text(text, model, tokenizer):
    """Run *text* through the T5 translation prompt and decode the result.

    Args:
        text: English source string supplied by the user.
        model: A ``T5ForConditionalGeneration`` instance.
        tokenizer: The matching ``T5Tokenizer``.

    Returns:
        str: The model's decoded output with special tokens removed.
    """
    # NOTE(review): t5-small's supervised task prefixes cover
    # German/French/Romanian; an "English to Urdu" prefix is not one of
    # them, so output quality is unverified — confirm model choice.
    prompt = f"translate English to Urdu: {text}"
    token_ids = tokenizer.encode(prompt, return_tensors="pt", truncation=True)
    generated = model.generate(
        token_ids, max_length=512, num_beams=5, early_stopping=True
    )
    return tokenizer.decode(generated[0], skip_special_tokens=True)
# ---------------------------- Streamlit UI ----------------------------
st.title("English to Urdu Translation with T5")

# Free-form text box for the source sentence(s).
text_to_translate = st.text_area("Enter English text to translate:")

# Only translate when the box holds something besides whitespace.
if text_to_translate.strip():
    with st.spinner("Translating..."):
        translated_text = translate_text(text_to_translate, model, tokenizer)
    st.markdown(f"### Translated Text:\n{translated_text}")