Zeb committed on
Commit cd48ea4 · Parent: ee7c4e8

Remove old tokenizers

fw57M_Surprisal_bytespanP1-0_8064/special_tokens_map.json DELETED
@@ -1,6 +0,0 @@
-{
-  "bos_token": "<|endoftext|>",
-  "eos_token": "<|endoftext|>",
-  "pad_token": "<|padding|>",
-  "unk_token": "<|unk|>"
-}
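
The four entries above are the whole special-token surface of the removed tokenizer: one token doubling as BOS and EOS, plus dedicated padding and unknown tokens. A minimal sketch of what that mapping implied at load time, assuming a checkout that still contains the old files (the local path here is hypothetical, since this commit deletes them; AutoTokenizer is the standard transformers loader):

```python
# Sketch only: shows the special-token mapping declared by the
# deleted special_tokens_map.json. Assumes the files still exist
# locally (hypothetical path; this commit removes them).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("fw57M_Surprisal_bytespanP1-0_8064")

# BOS and EOS were the same token, <|endoftext|>.
assert tok.bos_token == tok.eos_token == "<|endoftext|>"
assert tok.pad_token == "<|padding|>"
assert tok.unk_token == "<|unk|>"
```
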
fw57M_Surprisal_bytespanP1-0_8064/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
fw57M_Surprisal_bytespanP1-0_8064/tokenizer_config.json DELETED
@@ -1,37 +0,0 @@
-{
-  "add_prefix_space": true,
-  "added_tokens_decoder": {
-    "0": {
-      "content": "<|padding|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<|endoftext|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "258": {
-      "content": "<|unk|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "bos_token": "<|endoftext|>",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "<|endoftext|>",
-  "extra_special_tokens": {},
-  "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<|padding|>",
-  "tokenizer_class": "PreTrainedTokenizer",
-  "unk_token": "<|unk|>"
-}
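
Beyond the token strings, added_tokens_decoder pinned the special tokens to fixed IDs (0 for padding, 1 for end-of-text, 258 for unknown), and model_max_length is the int(1e30) sentinel that transformers writes when a tokenizer declares no real length limit. A short sketch, with the values copied from the diff above rather than re-derived:

```python
# Sketch of the ID layout described by the deleted tokenizer_config.json;
# the id-to-token pairs are copied verbatim from the diff above.
special_ids = {0: "<|padding|>", 1: "<|endoftext|>", 258: "<|unk|>"}

for idx, content in special_ids.items():
    # <|endoftext|> (id 1) served as both bos_token and eos_token.
    print(f"id {idx:>3} -> {content}")

# model_max_length was the "effectively unlimited" sentinel, int(1e30):
assert 1000000000000000019884624838656 == int(1e30)
```
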
fw57M_Surprisal_bytespanP1-0_8064/vocab.json DELETED
The diff for this file is too large to render. See raw diff