# Hugging Face Space (the hosted page reported a build error at scrape time).
from transformers import AutoTokenizer, TFAutoModelForSeq2SeqLM
import gradio as gr

# Hub id of the base tokenizer checkpoint (the fine-tuned model weights
# themselves are loaded from the local "tf_model/" directory below).
checkpoint = "google-t5/t5-small"
def language_translator(text):
    """Translate an English sentence to French with a fine-tuned T5 model.

    Args:
        text: The English source text to translate.

    Returns:
        The decoded French translation as a plain string
        (special tokens stripped).
    """
    # BUG FIX: the original re-loaded the tokenizer and the full TF model
    # from disk on EVERY call, paying the model-load cost per request.
    # Cache both on the function object so they are loaded exactly once.
    if not hasattr(language_translator, "_cache"):
        tokenizer = AutoTokenizer.from_pretrained(checkpoint)
        model = TFAutoModelForSeq2SeqLM.from_pretrained("tf_model/")
        language_translator._cache = (tokenizer, model)
    tokenizer, model = language_translator._cache

    # Tokenize as a single-item batch; numpy tensors feed the TF model fine.
    tokenized = tokenizer([text], return_tensors='np')
    out = model.generate(**tokenized, max_length=128)
    # NOTE(review): as_target_tokenizer() is deprecated in recent
    # transformers releases; kept for behavior compatibility here.
    with tokenizer.as_target_tokenizer():
        return tokenizer.decode(out[0], skip_special_tokens=True)
# Sample English inputs surfaced as one-click examples in the Gradio UI.
# Each example is a single-element list: one value per Interface input.
examples = [
    [sentence]
    for sentence in (
        "Hello, how are you today?",
        "Translate this sentence into another language.",
        "Can you help me with this text?",
    )
]
# Wire the translator into a minimal text-in / text-out Gradio interface
# and serve it (debug logging on, public share link enabled).
demo = gr.Interface(
    fn=language_translator,
    inputs='text',
    outputs='text',
    title='Language Translator ENGLISH TO FRENCH',
    examples=examples,
)
demo.launch(debug=True, share=True)