{
  "tokenizer_class": "CharTokenizer",
  "model_type": "VerySmollGPT",
  "vocab_size": 104,
  "clean_up_tokenization_spaces": true,
  "bos_token": "<BOS>",
  "eos_token": "<EOS>",
  "unk_token": "<UNK>",
  "pad_token": "<PAD>",
  "add_prefix_space": false,
  "add_bos_token": false,
  "add_eos_token": false
}