markod0925 committed (verified) · Commit 45e07da · 1 Parent(s): b9a1e8d

Upload tokenizer

special_tokens_map.json CHANGED
@@ -1,5 +1,4 @@
 {
   "bos_token": "<|endoftext|>",
-  "eos_token": "<|endoftext|>",
-  "unk_token": "<|endoftext|>"
+  "eos_token": "<|endoftext|>"
 }
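Net effect: the special-tokens map no longer declares an unk_token, and eos_token loses its trailing comma as it becomes the last entry. A minimal sketch for verifying the new map after this commit; the repo id below is a placeholder, not the actual repository name:

from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual Hub repository.
repo_id = "markod0925/<repo-name>"

# Pin to this commit so the updated special_tokens_map.json is used.
tokenizer = AutoTokenizer.from_pretrained(repo_id, revision="45e07da")

print(tokenizer.special_tokens_map)
# expected: {'bos_token': '<|endoftext|>', 'eos_token': '<|endoftext|>'}
print(tokenizer.unk_token)  # expected: None, since unk_token was removed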
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
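Since the Hub cannot render a diff this large, the two revisions can be compared locally instead. A minimal sketch using huggingface_hub, again with a placeholder repo id:

import json
from huggingface_hub import hf_hub_download

repo_id = "markod0925/<repo-name>"  # placeholder; substitute the actual repo

# Fetch tokenizer.json at the parent commit and at this commit.
old_path = hf_hub_download(repo_id, "tokenizer.json", revision="b9a1e8d")
new_path = hf_hub_download(repo_id, "tokenizer.json", revision="45e07da")

with open(old_path) as f:
    old = json.load(f)
with open(new_path) as f:
    new = json.load(f)

# Report which top-level sections of the file actually changed.
for key in sorted(set(old) | set(new)):
    if old.get(key) != new.get(key):
        print("differs:", key)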
 
tokenizer_config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "add_prefix_space": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<|endoftext|>",
@@ -15,6 +14,5 @@
   "eos_token": "<|endoftext|>",
   "extra_special_tokens": {},
   "model_max_length": 1000000000000000019884624838656,
-  "tokenizer_class": "GPT2Tokenizer",
-  "unk_token": "<|endoftext|>"
+  "tokenizer_class": "PreTrainedTokenizerFast"
 }
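The tokenizer_class change is the significant one: with "tokenizer_class": "PreTrainedTokenizerFast", AutoTokenizer loads the generic fast tokenizer backed by tokenizer.json instead of instantiating GPT2Tokenizer, which is plausibly also why the GPT2-specific add_prefix_space option was dropped. A quick check, with the same placeholder repo id:

from transformers import AutoTokenizer, PreTrainedTokenizerFast

repo_id = "markod0925/<repo-name>"  # placeholder; substitute the actual repo

tokenizer = AutoTokenizer.from_pretrained(repo_id, revision="45e07da")

# The generic fast tokenizer is used rather than the slow GPT-2 class.
assert isinstance(tokenizer, PreTrainedTokenizerFast)
print(type(tokenizer).__name__)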