iproskurina committed · Commit 4341452 · verified · 1 Parent(s): dcd4869

Add files using upload-large-folder tool
config.json ADDED
@@ -0,0 +1,51 @@
+ {
+   "_remove_final_layer_norm": false,
+   "activation_dropout": 0.0,
+   "activation_function": "relu",
+   "architectures": [
+     "OPTForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 2,
+   "do_layer_norm_before": true,
+   "dropout": 0.1,
+   "enable_bias": true,
+   "eos_token_id": 2,
+   "ffn_dim": 8192,
+   "hidden_size": 2048,
+   "init_std": 0.02,
+   "layer_norm_elementwise_affine": true,
+   "layerdrop": 0.0,
+   "max_position_embeddings": 2048,
+   "model_type": "opt",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 24,
+   "pad_token_id": 1,
+   "prefix": "</s>",
+   "quantization_config": {
+     "bits": 4,
+     "checkpoint_format": "gptq",
+     "desc_act": false,
+     "group_size": 128,
+     "lm_head": false,
+     "meta": {
+       "damp_auto_increment": 0.0025,
+       "damp_percent": 0.01,
+       "mse": 0.0,
+       "quantizer": [
+         "gptqmodel:2.2.0"
+       ],
+       "static_groups": false,
+       "true_sequential": true,
+       "uri": "https://github.com/modelcloud/gptqmodel"
+     },
+     "pack_dtype": "int32",
+     "quant_method": "gptq",
+     "sym": true
+   },
+   "torch_dtype": "float16",
+   "transformers_version": "4.51.3",
+   "use_cache": true,
+   "vocab_size": 50272,
+   "word_embed_proj_dim": 2048
+ }
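The dimensions above (24 layers, hidden size 2048, FFN dim 8192, 32 heads) match OPT-1.3B, packed here as a 4-bit GPTQ checkpoint. A minimal loading sketch, assuming a recent transformers with GPTQ kernels available (e.g. via gptqmodel or optimum); the repo id is a placeholder:

```python
# Sketch: load the 4-bit GPTQ checkpoint described by config.json.
# Assumes GPTQ support is installed (e.g. pip install gptqmodel or optimum).
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "iproskurina/opt-1.3b-gptq4"  # hypothetical repo id
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, device_map="auto")

inputs = tokenizer("Quantization reduces", return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```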
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 2,
+   "eos_token_id": 2,
+   "pad_token_id": 1,
+   "transformers_version": "4.51.3"
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:251a68caf4367ed7a6038a812d0066f7e7d88e9fa1a1cb33c713b6c55b858b8e
+ size 845033800
quant_log.csv ADDED
@@ -0,0 +1,145 @@
+ layer,module,loss,samples,damp,time
+ 0,self_attn.k_proj,0.00409379,0.01000,1.022
+ 0,self_attn.v_proj,0.00072114,0.01000,1.006
+ 0,self_attn.q_proj,0.00414090,0.01000,0.969
+ 0,self_attn.out_proj,0.00000870,0.01000,0.986
+ 0,fc1,0.00970921,0.01000,1.046
+ 0,fc2,0.00010505,0.01000,4.252
+ 1,self_attn.k_proj,0.00327041,0.01000,1.012
+ 1,self_attn.v_proj,0.00041319,0.01000,1.001
+ 1,self_attn.q_proj,0.00355976,0.01000,0.986
+ 1,self_attn.out_proj,0.00000244,0.01000,1.018
+ 1,fc1,0.01151395,0.01000,1.005
+ 1,fc2,0.00014460,0.01000,4.327
+ 2,self_attn.k_proj,0.00454492,0.01000,0.993
+ 2,self_attn.v_proj,0.00071424,0.01000,0.980
+ 2,self_attn.q_proj,0.00453008,0.01000,0.991
+ 2,self_attn.out_proj,0.00000383,0.01000,1.002
+ 2,fc1,0.01163073,0.01000,0.999
+ 2,fc2,0.00006701,0.01000,4.270
+ 3,self_attn.k_proj,0.00453221,0.01000,0.995
+ 3,self_attn.v_proj,0.00088928,0.01000,1.014
+ 3,self_attn.q_proj,0.00502093,0.01000,1.022
+ 3,self_attn.out_proj,0.00000183,0.01000,1.003
+ 3,fc1,0.01080185,0.01000,1.026
+ 3,fc2,0.00006996,0.01000,4.321
+ 4,self_attn.k_proj,0.00675831,0.01000,1.039
+ 4,self_attn.v_proj,0.00108673,0.01000,0.992
+ 4,self_attn.q_proj,0.00723823,0.01000,1.073
+ 4,self_attn.out_proj,0.00000383,0.01000,1.058
+ 4,fc1,0.01171546,0.01000,0.999
+ 4,fc2,0.00007058,0.01000,4.253
+ 5,self_attn.k_proj,0.00815600,0.01000,0.989
+ 5,self_attn.v_proj,0.00138022,0.01000,1.066
+ 5,self_attn.q_proj,0.00834911,0.01000,0.994
+ 5,self_attn.out_proj,0.00000388,0.01000,1.050
+ 5,fc1,0.01271201,0.01000,1.005
+ 5,fc2,0.00009196,0.01000,4.272
+ 6,self_attn.k_proj,0.01265277,0.01000,0.989
+ 6,self_attn.v_proj,0.00174527,0.01000,0.975
+ 6,self_attn.q_proj,0.01124555,0.01000,1.008
+ 6,self_attn.out_proj,0.00000933,0.01000,1.001
+ 6,fc1,0.01568377,0.01000,1.001
+ 6,fc2,0.00009791,0.01000,4.231
+ 7,self_attn.k_proj,0.01346123,0.01000,0.994
+ 7,self_attn.v_proj,0.00204103,0.01000,0.989
+ 7,self_attn.q_proj,0.01190130,0.01000,0.990
+ 7,self_attn.out_proj,0.00001416,0.01000,1.009
+ 7,fc1,0.01938798,0.01000,1.004
+ 7,fc2,0.00014014,0.01000,4.244
+ 8,self_attn.k_proj,0.01525222,0.01000,1.020
+ 8,self_attn.v_proj,0.00235129,0.01000,0.988
+ 8,self_attn.q_proj,0.01232455,0.01000,0.991
+ 8,self_attn.out_proj,0.00001867,0.01000,1.001
+ 8,fc1,0.02317689,0.01000,1.018
+ 8,fc2,0.00017385,0.01000,4.215
+ 9,self_attn.k_proj,0.01641444,0.01000,0.995
+ 9,self_attn.v_proj,0.00270767,0.01000,1.023
+ 9,self_attn.q_proj,0.01364542,0.01000,1.007
+ 9,self_attn.out_proj,0.00002797,0.01000,0.987
+ 9,fc1,0.02664278,0.01000,0.999
+ 9,fc2,2.56742547,0.01250,4.445
+ 10,self_attn.k_proj,0.01577057,0.01000,0.994
+ 10,self_attn.v_proj,0.00330441,0.01000,0.999
+ 10,self_attn.q_proj,0.01304028,0.01000,0.992
+ 10,self_attn.out_proj,0.00005218,0.01000,0.996
+ 10,fc1,0.02900651,0.01000,0.998
+ 10,fc2,0.00040414,0.01000,4.289
+ 11,self_attn.k_proj,0.01669788,0.01000,1.005
+ 11,self_attn.v_proj,0.00428681,0.01000,0.985
+ 11,self_attn.q_proj,0.01389177,0.01000,0.979
+ 11,self_attn.out_proj,0.00003902,0.01000,0.997
+ 11,fc1,0.03293292,0.01000,0.999
+ 11,fc2,0.00036001,0.01000,4.255
+ 12,self_attn.k_proj,0.01809878,0.01000,0.997
+ 12,self_attn.v_proj,0.00461741,0.01000,0.969
+ 12,self_attn.q_proj,0.01426543,0.01000,0.993
+ 12,self_attn.out_proj,0.00008923,0.01000,1.003
+ 12,fc1,0.03490859,0.01000,0.999
+ 12,fc2,0.00053366,0.01000,4.238
+ 13,self_attn.k_proj,0.02024559,0.01000,1.000
+ 13,self_attn.v_proj,0.00529331,0.01000,1.002
+ 13,self_attn.q_proj,0.01510829,0.01000,0.994
+ 13,self_attn.out_proj,0.00011232,0.01000,1.021
+ 13,fc1,0.04002735,0.01000,1.002
+ 13,fc2,0.00079521,0.01000,4.285
+ 14,self_attn.k_proj,0.02019605,0.01000,0.993
+ 14,self_attn.v_proj,0.00715139,0.01000,0.986
+ 14,self_attn.q_proj,0.01556215,0.01000,0.982
+ 14,self_attn.out_proj,0.00013239,0.01000,0.988
+ 14,fc1,0.04567505,0.01000,1.026
+ 14,fc2,0.00109855,0.01000,4.247
+ 15,self_attn.k_proj,0.01876904,0.01000,1.011
+ 15,self_attn.v_proj,0.00927897,0.01000,1.025
+ 15,self_attn.q_proj,0.01618721,0.01000,1.082
+ 15,self_attn.out_proj,0.00013910,0.01000,1.037
+ 15,fc1,0.04992504,0.01000,0.998
+ 15,fc2,0.00144680,0.01000,4.283
+ 16,self_attn.k_proj,0.01909367,0.01000,0.997
+ 16,self_attn.v_proj,0.01037889,0.01000,0.986
+ 16,self_attn.q_proj,0.01454423,0.01000,0.998
+ 16,self_attn.out_proj,0.00020494,0.01000,0.990
+ 16,fc1,0.05550109,0.01000,0.997
+ 16,fc2,0.00181646,0.01000,4.268
+ 17,self_attn.k_proj,0.01868965,0.01000,1.021
+ 17,self_attn.v_proj,0.01156883,0.01000,0.980
+ 17,self_attn.q_proj,0.01493619,0.01000,0.997
+ 17,self_attn.out_proj,0.00022462,0.01000,0.999
+ 17,fc1,0.06165383,0.01000,1.022
+ 17,fc2,0.00235090,0.01000,4.320
+ 18,self_attn.k_proj,0.01697141,0.01000,0.998
+ 18,self_attn.v_proj,0.01306967,0.01000,0.993
+ 18,self_attn.q_proj,0.01436241,0.01000,1.007
+ 18,self_attn.out_proj,0.00025583,0.01000,0.994
+ 18,fc1,0.06307104,0.01000,1.001
+ 18,fc2,0.00259235,0.01000,4.204
+ 19,self_attn.k_proj,0.01683915,0.01000,1.004
+ 19,self_attn.v_proj,0.01634647,0.01000,0.989
+ 19,self_attn.q_proj,0.01405389,0.01000,0.988
+ 19,self_attn.out_proj,0.00030797,0.01000,1.010
+ 19,fc1,0.06668582,0.01000,1.014
+ 19,fc2,0.00294858,0.01000,4.217
+ 20,self_attn.k_proj,0.01660586,0.01000,0.997
+ 20,self_attn.v_proj,0.01790955,0.01000,1.003
+ 20,self_attn.q_proj,0.01307197,0.01000,0.983
+ 20,self_attn.out_proj,0.00048068,0.01000,0.998
+ 20,fc1,0.06591332,0.01000,1.005
+ 20,fc2,0.00350037,0.01000,4.249
+ 21,self_attn.k_proj,0.01451043,0.01000,0.992
+ 21,self_attn.v_proj,0.01692051,0.01000,0.982
+ 21,self_attn.q_proj,0.01337012,0.01000,1.004
+ 21,self_attn.out_proj,0.00033322,0.01000,1.010
+ 21,fc1,0.06099212,0.01000,1.001
+ 21,fc2,0.00390600,0.01000,4.357
+ 22,self_attn.k_proj,0.01366263,0.01000,0.984
+ 22,self_attn.v_proj,0.01748320,0.01000,0.997
+ 22,self_attn.q_proj,0.01474320,0.01000,0.996
+ 22,self_attn.out_proj,0.00038908,0.01000,1.004
+ 22,fc1,0.05703627,0.01000,1.007
+ 22,fc2,0.00426069,0.01000,4.242
+ 23,self_attn.k_proj,0.01776943,0.01000,0.998
+ 23,self_attn.v_proj,0.01178338,0.01000,1.004
+ 23,self_attn.q_proj,0.03046774,0.01000,1.023
+ 23,self_attn.out_proj,0.00090270,0.01000,1.002
+ 23,fc1,0.05197454,0.01000,1.012
+ 23,fc2,0.00358283,0.01000,4.266
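Each row above records one module's quantization error (loss), the damping actually used, and wall time. Two things stand out: the header names six columns while the data rows carry only five values (layer, module, loss, damp, time; no samples field), and layer 9's fc2 spiked to loss 2.567 with damp auto-incremented to 0.0125. A short pandas sketch for inspecting the log under that five-column reading:

```python
# Sketch: parse quant_log.csv, working around the header/row mismatch
# (the header lists 'samples' but each data row has only five fields).
import pandas as pd

cols = ["layer", "module", "loss", "damp", "time"]
df = pd.read_csv("quant_log.csv", skiprows=1, names=cols)

print(df.groupby("module")["loss"].mean())  # average error per module type
print(df.nlargest(3, "loss"))               # surfaces the layer-9 fc2 outlier
print(df[df["damp"] > 0.01])                # rows where damp was auto-incremented
```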
quantize_config.json ADDED
@@ -0,0 +1,21 @@
+ {
+   "bits": 4,
+   "group_size": 128,
+   "desc_act": false,
+   "sym": true,
+   "lm_head": false,
+   "quant_method": "gptq",
+   "checkpoint_format": "gptq",
+   "pack_dtype": "int32",
+   "meta": {
+     "quantizer": [
+       "gptqmodel:2.2.0"
+     ],
+     "uri": "https://github.com/modelcloud/gptqmodel",
+     "damp_percent": 0.01,
+     "damp_auto_increment": 0.0025,
+     "static_groups": false,
+     "true_sequential": true,
+     "mse": 0.0
+   }
+ }
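quantize_config.json duplicates the quantization_config block from config.json and records the tool that produced the checkpoint (gptqmodel 2.2.0). A hedged sketch of the kind of call that writes such a file, following the gptqmodel project's documented v2.x-style API; exact signatures may differ across versions, and the calibration texts are placeholders:

```python
# Sketch: producing a config like this with gptqmodel (v2.x-style API).
from gptqmodel import GPTQModel, QuantizeConfig

quant_config = QuantizeConfig(bits=4, group_size=128, desc_act=False, sym=True)
model = GPTQModel.load("facebook/opt-1.3b", quant_config)

calibration = ["Placeholder calibration text."]  # real runs use a few hundred samples
model.quantize(calibration)
model.save("opt-1.3b-gptq4")  # writes quantize_config.json next to the weights
```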
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<pad>",
+   "unk_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,32 @@
+ {
+   "add_bos_token": true,
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "1": {
+       "content": "<pad>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "</s>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "errors": "replace",
+   "extra_special_tokens": {},
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "<pad>",
+   "tokenizer_class": "GPT2TokenizerFast",
+   "unk_token": "</s>",
+   "_commit_hash": null
+ }
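The tokenizer config reflects a known OPT quirk: bos, eos, and unk all map to "</s>" (id 2), and since add_bos_token is true every encoding starts with that id, while "<pad>" sits at id 1. A quick sanity check, reusing the hypothetical repo id from the loading sketch above:

```python
# Sketch: verify the special-token wiring from tokenizer_config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("iproskurina/opt-1.3b-gptq4")  # hypothetical id
ids = tok("hello")["input_ids"]
assert ids[0] == tok.bos_token_id == 2           # "</s>" is prepended as BOS
assert tok.pad_token_id == 1                     # "<pad>"
assert tok.unk_token == tok.eos_token == "</s>"  # all three share one token
```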
vocab.json ADDED
The diff for this file is too large to render. See raw diff