VaibhavHD committed on
Commit
88f9e07
·
verified ·
1 Parent(s): 04c580b

Upload folder using huggingface_hub

Browse files
adapter_config.json CHANGED
@@ -26,11 +26,11 @@
26
  "revision": null,
27
  "target_modules": [
28
  "up_proj",
 
 
29
  "gate_proj",
30
  "o_proj",
31
- "q_proj",
32
  "k_proj",
33
- "down_proj",
34
  "v_proj"
35
  ],
36
  "target_parameters": null,
 
26
  "revision": null,
27
  "target_modules": [
28
  "up_proj",
29
+ "down_proj",
30
+ "q_proj",
31
  "gate_proj",
32
  "o_proj",
 
33
  "k_proj",
 
34
  "v_proj"
35
  ],
36
  "target_parameters": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:22009c4837b33dfad709f9f63417a13c2fb793f714017978151f8ee65c360747
3
  size 30026872
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:84fe3613fec091064e2ad21a09b3889b3636806d0e236796674e2645bcfb8dbd
3
  size 30026872
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
1
  {
2
  "version": "1.0",
3
- "truncation": null,
4
- "padding": null,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  "added_tokens": [
6
  {
7
  "id": 32000,
 
1
  {
2
  "version": "1.0",
3
+ "truncation": {
4
+ "direction": "Right",
5
+ "max_length": 512,
6
+ "strategy": "LongestFirst",
7
+ "stride": 0
8
+ },
9
+ "padding": {
10
+ "strategy": {
11
+ "Fixed": 512
12
+ },
13
+ "direction": "Left",
14
+ "pad_to_multiple_of": null,
15
+ "pad_id": 32014,
16
+ "pad_type_id": 0,
17
+ "pad_token": "<|end▁of▁sentence|>"
18
+ },
19
  "added_tokens": [
20
  {
21
  "id": 32000,
tokenizer_config.json CHANGED
@@ -183,11 +183,12 @@
183
  "bos_token": "<|begin▁of▁sentence|>",
184
  "clean_up_tokenization_spaces": false,
185
  "eos_token": "<|end▁of▁sentence|>",
 
186
  "legacy": true,
187
  "model_max_length": 16384,
188
  "pad_token": "<|end▁of▁sentence|>",
189
  "sp_model_kwargs": {},
190
- "tokenizer_class": "LlamaTokenizer",
191
  "unk_token": null,
192
  "use_default_system_prompt": false
193
  }
 
183
  "bos_token": "<|begin▁of▁sentence|>",
184
  "clean_up_tokenization_spaces": false,
185
  "eos_token": "<|end▁of▁sentence|>",
186
+ "extra_special_tokens": {},
187
  "legacy": true,
188
  "model_max_length": 16384,
189
  "pad_token": "<|end▁of▁sentence|>",
190
  "sp_model_kwargs": {},
191
+ "tokenizer_class": "LlamaTokenizerFast",
192
  "unk_token": null,
193
  "use_default_system_prompt": false
194
  }