Spaces:
Sleeping
Sleeping
Create tokenizer.json
Browse files- tokenizer.json +4 -0
tokenizer.json
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from transformers import T5Tokenizer


def save_t5_tokenizer(model_name='t5-small', output_dir='./t5_urdu_translation'):
    """Download a T5 tokenizer and persist a local copy.

    Args:
        model_name: Hub id or local path of the tokenizer to load
            (default ``'t5-small'``; pass your own trained model instead).
        output_dir: Directory where the tokenizer files are written.

    Returns:
        The loaded ``T5Tokenizer`` instance.
    """
    # from_pretrained fetches (and caches) the tokenizer files from the Hub,
    # or loads them from a local path if one is given.
    tokenizer = T5Tokenizer.from_pretrained(model_name)
    # Writes tokenizer.json / spiece.model / config files into output_dir.
    tokenizer.save_pretrained(output_dir)
    return tokenizer


if __name__ == "__main__":
    # Preserve the original script behavior: download t5-small and save it
    # to ./t5_urdu_translation when run directly.
    save_t5_tokenizer()
|