saunak14 committed on
Commit
d2b6497
·
verified ·
1 Parent(s): 875ddcc

Upload model trained with Unsloth

Browse files

Upload model trained with Unsloth 2x faster

special_tokens_map.json CHANGED
@@ -17,7 +17,7 @@
17
  "content": "<|placeholder6|>",
18
  "lstrip": false,
19
  "normalized": false,
20
- "rstrip": true,
21
  "single_word": false
22
  },
23
  "unk_token": {
 
17
  "content": "<|placeholder6|>",
18
  "lstrip": false,
19
  "normalized": false,
20
+ "rstrip": false,
21
  "single_word": false
22
  },
23
  "unk_token": {
tokenizer.json CHANGED
@@ -116,7 +116,7 @@
116
  "content": "<|placeholder6|>",
117
  "single_word": false,
118
  "lstrip": false,
119
- "rstrip": true,
120
  "normalized": false,
121
  "special": true
122
  },
 
116
  "content": "<|placeholder6|>",
117
  "single_word": false,
118
  "lstrip": false,
119
+ "rstrip": false,
120
  "normalized": false,
121
  "special": true
122
  },
tokenizer_config.json CHANGED
@@ -103,7 +103,7 @@
103
  "content": "<|placeholder6|>",
104
  "lstrip": false,
105
  "normalized": false,
106
- "rstrip": true,
107
  "single_word": false,
108
  "special": true
109
  },
@@ -123,7 +123,7 @@
123
  "legacy": false,
124
  "model_max_length": 4096,
125
  "pad_token": "<|placeholder6|>",
126
- "padding_side": "left",
127
  "sp_model_kwargs": {},
128
  "tokenizer_class": "LlamaTokenizer",
129
  "unk_token": "<unk>",
 
103
  "content": "<|placeholder6|>",
104
  "lstrip": false,
105
  "normalized": false,
106
+ "rstrip": false,
107
  "single_word": false,
108
  "special": true
109
  },
 
123
  "legacy": false,
124
  "model_max_length": 4096,
125
  "pad_token": "<|placeholder6|>",
126
+ "padding_side": "right",
127
  "sp_model_kwargs": {},
128
  "tokenizer_class": "LlamaTokenizer",
129
  "unk_token": "<unk>",