Samba committed
Commit 21dd35f · Parent: d6be459

Upload tokenizer

special_tokens_map.json CHANGED
@@ -6,6 +6,8 @@
     "<|tel|>",
     "<|rrn|>"
   ],
+  "bos_token": "<|endoftext|>",
   "eos_token": "<|endoftext|>",
-  "pad_token": "<|endoftext|>"
+  "pad_token": "<|endoftext|>",
+  "unk_token": "<|endoftext|>"
 }
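With this change, special_tokens_map.json declares bos_token, pad_token, and unk_token explicitly, all mapped to "<|endoftext|>", alongside the existing eos_token. A minimal sketch of how the updated map surfaces when the files are loaded with transformers' AutoTokenizer; the repo id below is a placeholder, not the actual repository name:

```python
from transformers import AutoTokenizer

# "your-org/your-model" is a placeholder for the repository this commit belongs to.
tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

# After this commit, bos/eos/pad/unk all resolve to "<|endoftext|>",
# alongside the additional special tokens such as "<|tel|>" and "<|rrn|>".
print(tokenizer.special_tokens_map)
print(tokenizer.bos_token, tokenizer.eos_token,
      tokenizer.pad_token, tokenizer.unk_token)
```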
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 2048,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
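tokenizer.json now stores a truncation block directly in the fast-tokenizer file: right-side truncation to 2048 tokens with the LongestFirst strategy and no stride. A rough sketch of the equivalent behaviour using the tokenizers library, assuming the file is loaded standalone:

```python
from tokenizers import Tokenizer

# Load the fast tokenizer file from this commit; the truncation settings
# are applied automatically because they are now stored in the file itself.
tok = Tokenizer.from_file("tokenizer.json")

# Roughly equivalent to what the new "truncation" block encodes:
# tok.enable_truncation(max_length=2048, stride=0,
#                       strategy="longest_first", direction="right")

encoding = tok.encode("some long document " * 5000)
print(len(encoding.ids))  # no more than 2048 token ids
```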
tokenizer_config.json CHANGED
@@ -1,7 +1,10 @@
 {
+  "add_prefix_space": false,
+  "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
-  "model_max_length": 2048,
+  "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<|endoftext|>",
-  "tokenizer_class": "PreTrainedTokenizerFast"
+  "tokenizer_class": "GPTNeoXTokenizer",
+  "unk_token": "<|endoftext|>"
 }
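tokenizer_config.json swaps the fixed model_max_length of 2048 for transformers' "very large integer" sentinel, effectively removing the tokenizer-side length cap (the 2048 limit now lives in tokenizer.json's truncation block), and pins the tokenizer class while adding explicit bos/unk tokens. A short sketch of the effect when loading through transformers, again with a placeholder repo id:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")  # placeholder repo id

# The sentinel value means transformers imposes no length cap of its own.
print(tokenizer.model_max_length)  # 1000000000000000019884624838656

# Because pad_token is set, batch encoding with padding works out of the box.
batch = tokenizer(["short text", "a somewhat longer piece of text"], padding=True)
print([len(ids) for ids in batch["input_ids"]])  # equal lengths after padding
```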