zohaibterminator committed on
Commit
4b6cb47
·
verified ·
1 Parent(s): 3d09d67

Upload tokenizer

Browse files
Files changed (1) hide show
  1. tokenizer_config.json +6 -0
tokenizer_config.json CHANGED
@@ -2062,14 +2062,20 @@
2062
  "clean_up_tokenization_spaces": true,
2063
  "eos_token": "<|eot_id|>",
2064
  "extra_special_tokens": {},
 
2065
  "model_input_names": [
2066
  "input_ids",
2067
  "attention_mask"
2068
  ],
2069
  "model_max_length": 131072,
 
2070
  "pad_token": "<|finetune_right_pad_id|>",
 
2071
  "padding_side": "right",
2072
  "processor_class": "MllamaProcessor",
 
2073
  "tokenizer_class": "PreTrainedTokenizer",
 
 
2074
  "unk_token": null
2075
  }
 
2062
  "clean_up_tokenization_spaces": true,
2063
  "eos_token": "<|eot_id|>",
2064
  "extra_special_tokens": {},
2065
+ "max_length": 2048,
2066
  "model_input_names": [
2067
  "input_ids",
2068
  "attention_mask"
2069
  ],
2070
  "model_max_length": 131072,
2071
+ "pad_to_multiple_of": null,
2072
  "pad_token": "<|finetune_right_pad_id|>",
2073
+ "pad_token_type_id": 0,
2074
  "padding_side": "right",
2075
  "processor_class": "MllamaProcessor",
2076
+ "stride": 0,
2077
  "tokenizer_class": "PreTrainedTokenizer",
2078
+ "truncation_side": "right",
2079
+ "truncation_strategy": "longest_first",
2080
  "unk_token": null
2081
  }