jacob-valdez committed on
Commit
02390f4
·
verified ·
1 Parent(s): db5c88b

Upload model/tokenizer/tokenizer_config.json with huggingface_hub

Browse files
model/tokenizer/tokenizer_config.json CHANGED
@@ -1,10 +1,23 @@
1
  {
2
- "tokenizer_class": "GPT2Tokenizer",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3
  "model_max_length": 1024,
4
- "padding_side": "right",
5
- "truncation_side": "right",
6
  "pad_token": "<|endoftext|>",
7
- "unk_token": "<|endoftext|>",
8
- "eos_token": "<|endoftext|>",
9
- "bos_token": "<|endoftext|>"
10
- }
 
1
  {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "50256": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": true,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ }
13
+ },
14
+ "bos_token": "<|endoftext|>",
15
+ "clean_up_tokenization_spaces": false,
16
+ "eos_token": "<|endoftext|>",
17
+ "errors": "replace",
18
+ "extra_special_tokens": {},
19
  "model_max_length": 1024,
 
 
20
  "pad_token": "<|endoftext|>",
21
+ "tokenizer_class": "GPT2Tokenizer",
22
+ "unk_token": "<|endoftext|>"
23
+ }