"""Sanity-check a tokenizer saved in the current directory.

Loads the tokenizer artifacts from ``.`` and prints its vocabulary size
and special-token IDs (pad/eos) for quick manual inspection.
"""

from transformers import AutoTokenizer


def main() -> None:
    """Load the local tokenizer and report its basic properties."""
    # "." means the tokenizer files (tokenizer.json / vocab files, config)
    # are expected in the current working directory.
    tok = AutoTokenizer.from_pretrained(".")
    # len(tok) counts the full vocabulary including any added tokens.
    print("Vocab size:", len(tok))
    # These may be None if the tokenizer defines no pad/eos token.
    print("pad_token_id:", tok.pad_token_id)
    print("eos_token_id:", tok.eos_token_id)


if __name__ == "__main__":
    main()