Spaces:
Sleeping
Sleeping
File size: 825 Bytes
f34cda4 256e579 bae4895 256e579 bae4895 b430ba4 ad9e506 b430ba4 f34cda4 b430ba4 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 |
import gradio as gr
import torch
from transformers import AutoTokenizer, T5ForConditionalGeneration
# Hugging Face model id: byte-level T5, small checkpoint.
MODEL_NAME = "google/byt5-small"
# Load tokenizer and seq2seq model once at import time (downloads weights on first run).
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = T5ForConditionalGeneration.from_pretrained(MODEL_NAME)
# Inference only: disables dropout and other training-mode behavior.
model.eval()
def text_to_ipa(text):
    """Generate a model transcription for *text* via a "Text: ... IPA:" prompt.

    Tokenizes the prompt (truncated to 512 tokens), generates up to 64 new
    tokens with gradients disabled, and returns the decoded string.

    NOTE(review): google/byt5-small is a pretrained base checkpoint; whether
    this prompt format actually yields IPA depends on fine-tuning — confirm.
    """
    encoded = tokenizer(
        f"Text: {text}\nIPA:",
        return_tensors="pt",
        truncation=True,
        max_length=512,
    )
    # Inference only: no autograd bookkeeping needed during generation.
    with torch.no_grad():
        generated = model.generate(**encoded, max_new_tokens=64)
    decoded = tokenizer.decode(generated[0], skip_special_tokens=True)
    return decoded
# Minimal Gradio UI: one input box, one output box, a button that runs the model.
with gr.Blocks() as demo:
    box_in = gr.Textbox()
    box_out = gr.Textbox()
    generate_btn = gr.Button("Generate")
    # Expose the handler under the stable endpoint name "predict".
    generate_btn.click(text_to_ipa, inputs=box_in, outputs=box_out, api_name="predict")
demo.launch()
|