import gradio as gr

from functools import lru_cache

from transformers import AutoTokenizer

#from pythainlp.tokenize import word_tokenize
#from transformers import pipeline
@lru_cache(maxsize=1)
def _load_tokenizer():
    """Load the Thai SentencePiece tokenizer once and cache it.

    The original code called ``from_pretrained`` on every request, which
    re-resolves (and on first run, downloads) the model files each time.
    """
    return AutoTokenizer.from_pretrained("KoichiYasuoka/roberta-base-thai-spm")


def greet(name):
    """Greet *name* and show its subword segmentation.

    Args:
        name: Input text (presumably Thai — the tokenizer is a Thai SPM
            model) to segment.

    Returns:
        A greeting string with every subword token prefixed by "/",
        e.g. ``"Hello ครับ คุณ/tok1/tok2! "``. With no tokens the
        segment part is empty, matching the original behavior.
    """
    tokens = _load_tokenizer().tokenize(name)
    # join instead of repeated "+=" concatenation (linear, not quadratic);
    # "".join of "/"-prefixed tokens reproduces the original "/t1/t2" shape.
    segmented = "".join("/" + tok for tok in tokens)
    return "Hello ครับ คุณ" + segmented + "! "
# Text-in/text-out demo UI wired to greet().
iface = gr.Interface(fn=greet, inputs="text", outputs="text")

# Guard the blocking server launch so importing this module (e.g. from a
# test or another app) does not start the web server; running the file as
# a script behaves exactly as before.
if __name__ == "__main__":
    iface.launch()