Azrail committed on
Commit 034688c · verified · 1 Parent(s): a13a28f

Model save

README.md ADDED
@@ -0,0 +1,199 @@
+ ---
+ library_name: transformers
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: smallm_70
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # smallm_70
+
+ This model was trained on an unspecified dataset; no base model was recorded by the Trainer.
+ It achieves the following results on the evaluation set:
+ - Loss: 2.4118
+ - Num Input Tokens Seen: 18350075456 (≈18.35 billion)
+
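For reference, an evaluation cross-entropy of 2.4118 nats per token corresponds to a perplexity of roughly exp(2.4118) ≈ 11.15; a minimal sketch of the conversion:

```python
import math

eval_loss = 2.4118  # final validation loss from the results table below (nats per token)
print(f"perplexity ≈ {math.exp(eval_loss):.2f}")  # ≈ 11.15
```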
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training (a rough `TrainingArguments` sketch follows this list):
+ - learning_rate: 0.001
+ - train_batch_size: 64
+ - eval_batch_size: 4
+ - seed: 42
+ - gradient_accumulation_steps: 4
+ - total_train_batch_size: 256
+ - optimizer: AdamW (Apex fused, `OptimizerNames.ADAMW_APEX_FUSED`) with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
+ - lr_scheduler_type: warmup_stable_decay
+ - lr_scheduler_warmup_steps: 500
+ - training_steps: 70000
+
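As a rough illustration (not the training script used here), these settings map onto Hugging Face `TrainingArguments` approximately as follows; the output directory is a placeholder, the fused optimizer requires NVIDIA Apex, and the decay-phase length of the warmup-stable-decay schedule is not recorded in this card:

```python
from transformers import TrainingArguments

# Hypothetical reconstruction of the configuration listed above.
args = TrainingArguments(
    output_dir="smallm_70",              # placeholder
    learning_rate=1e-3,
    per_device_train_batch_size=64,
    per_device_eval_batch_size=4,
    gradient_accumulation_steps=4,       # 64 x 4 (x data-parallel ranks) -> total batch of 256
    seed=42,
    optim="adamw_apex_fused",            # AdamW with Apex fused kernel; betas/epsilon below are the defaults
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="warmup_stable_decay",
    # lr_scheduler_kwargs would normally carry the stable/decay step counts; they are not in the card
    warmup_steps=500,
    max_steps=70_000,
)
```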
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Input Tokens Seen |
+ |:-------------:|:------:|:-----:|:---------------:|:-----------------:|
+ | 4.5284 | 0.0024 | 500 | 4.3092 | 131072000 |
+ | 3.6005 | 0.0048 | 1000 | 3.4853 | 262144000 |
+ | 3.3639 | 0.0072 | 1500 | 3.2486 | 393216000 |
+ | 3.2218 | 0.0095 | 2000 | 3.1193 | 524288000 |
+ | 3.1433 | 0.0119 | 2500 | 3.0400 | 655360000 |
+ | 3.0977 | 0.0143 | 3000 | 2.9805 | 786432000 |
+ | 3.0253 | 0.0167 | 3500 | 2.9370 | 917504000 |
+ | 3.0106 | 0.0191 | 4000 | 2.8984 | 1048576000 |
+ | 2.9664 | 0.0215 | 4500 | 2.8711 | 1179648000 |
+ | 2.947 | 0.0239 | 5000 | 2.8471 | 1310720000 |
+ | 2.9221 | 0.0262 | 5500 | 2.8247 | 1441792000 |
+ | 2.9096 | 0.0286 | 6000 | 2.8036 | 1572864000 |
+ | 2.8965 | 0.0310 | 6500 | 2.7873 | 1703936000 |
+ | 2.8642 | 0.0334 | 7000 | 2.7708 | 1835008000 |
+ | 2.8692 | 0.0358 | 7500 | 2.7582 | 1966080000 |
+ | 2.8494 | 0.0382 | 8000 | 2.7443 | 2097152000 |
+ | 2.844 | 0.0405 | 8500 | 2.7307 | 2228224000 |
+ | 2.8044 | 0.0429 | 9000 | 2.7220 | 2359296000 |
+ | 2.8106 | 0.0453 | 9500 | 2.7105 | 2490368000 |
+ | 2.8051 | 0.0477 | 10000 | 2.7000 | 2621440000 |
+ | 2.7979 | 0.0501 | 10500 | 2.6894 | 2752512000 |
+ | 2.7976 | 0.0525 | 11000 | 2.6826 | 2883584000 |
+ | 2.783 | 0.0549 | 11500 | 2.6739 | 3014656000 |
+ | 2.7781 | 0.0572 | 12000 | 2.6683 | 3145728000 |
+ | 2.7687 | 0.0596 | 12500 | 2.6606 | 3276800000 |
+ | 2.7676 | 0.0620 | 13000 | 2.6534 | 3407872000 |
+ | 2.7593 | 0.0644 | 13500 | 2.6474 | 3538944000 |
+ | 2.7516 | 0.0668 | 14000 | 2.6439 | 3670016000 |
+ | 2.7475 | 0.0692 | 14500 | 2.6359 | 3801088000 |
+ | 2.7471 | 0.0716 | 15000 | 2.6311 | 3932160000 |
+ | 2.7442 | 0.0739 | 15500 | 2.6253 | 4063232000 |
+ | 2.7271 | 0.0763 | 16000 | 2.6222 | 4194304000 |
+ | 2.7237 | 0.0787 | 16500 | 2.6179 | 4325376000 |
+ | 2.7151 | 0.0811 | 17000 | 2.6127 | 4456448000 |
+ | 2.7164 | 0.0835 | 17500 | 2.6086 | 4587520000 |
+ | 2.7163 | 0.0859 | 18000 | 2.6037 | 4718592000 |
+ | 2.7064 | 0.0882 | 18500 | 2.6001 | 4849664000 |
+ | 2.6996 | 0.0906 | 19000 | 2.5984 | 4980736000 |
+ | 2.7012 | 0.0930 | 19500 | 2.5943 | 5111808000 |
+ | 2.6975 | 0.0954 | 20000 | 2.5900 | 5242880000 |
+ | 2.7002 | 0.0978 | 20500 | 2.5875 | 5373952000 |
+ | 2.6935 | 0.1002 | 21000 | 2.5839 | 5505024000 |
+ | 2.7079 | 0.1026 | 21500 | 2.5816 | 5636096000 |
+ | 2.6803 | 0.1049 | 22000 | 2.5773 | 5767168000 |
+ | 2.6797 | 0.1073 | 22500 | 2.5754 | 5898240000 |
+ | 2.6836 | 0.1097 | 23000 | 2.5706 | 6029312000 |
+ | 2.6798 | 0.1121 | 23500 | 2.5710 | 6160384000 |
+ | 2.6917 | 0.1145 | 24000 | 2.5665 | 6291456000 |
+ | 2.6657 | 0.1169 | 24500 | 2.5641 | 6422528000 |
+ | 2.6582 | 0.1193 | 25000 | 2.5630 | 6553600000 |
+ | 2.6818 | 0.1216 | 25500 | 2.5603 | 6684672000 |
+ | 2.6682 | 0.1240 | 26000 | 2.5588 | 6815744000 |
+ | 2.6665 | 0.1264 | 26500 | 2.5561 | 6946816000 |
+ | 2.6547 | 0.1288 | 27000 | 2.5519 | 7077888000 |
+ | 2.6651 | 0.1312 | 27500 | 2.5501 | 7208960000 |
+ | 2.6623 | 0.1336 | 28000 | 2.5487 | 7340032000 |
+ | 2.6741 | 0.1359 | 28500 | 2.5456 | 7471104000 |
+ | 2.6453 | 0.1383 | 29000 | 2.5441 | 7602176000 |
+ | 2.6635 | 0.1407 | 29500 | 2.5434 | 7733248000 |
+ | 2.6502 | 0.1431 | 30000 | 2.5401 | 7864320000 |
+ | 2.654 | 0.1455 | 30500 | 2.5374 | 7995392000 |
+ | 2.6658 | 0.1479 | 31000 | 2.5451 | 8126464000 |
+ | 2.6675 | 0.1503 | 31500 | 2.5402 | 8257536000 |
+ | 2.6405 | 0.1526 | 32000 | 2.5325 | 8388608000 |
+ | 2.6436 | 0.1550 | 32500 | 2.5313 | 8519680000 |
+ | 2.6495 | 0.1574 | 33000 | 2.5319 | 8650752000 |
+ | 2.6525 | 0.1598 | 33500 | 2.5301 | 8781824000 |
+ | 2.6499 | 0.1622 | 34000 | 2.5331 | 8912896000 |
+ | 2.6619 | 0.1646 | 34500 | 2.5325 | 9043963456 |
+ | 2.652 | 0.1670 | 35000 | 2.5248 | 9175035456 |
+ | 2.6436 | 0.1693 | 35500 | 2.5231 | 9306107456 |
+ | 2.6377 | 0.1717 | 36000 | 2.5250 | 9437179456 |
+ | 2.6372 | 0.1741 | 36500 | 2.5252 | 9568251456 |
+ | 2.6521 | 0.1765 | 37000 | 2.5249 | 9699323456 |
+ | 2.6406 | 0.1789 | 37500 | 2.5189 | 9830395456 |
+ | 2.6369 | 0.1813 | 38000 | 2.5171 | 9961467456 |
+ | 2.6382 | 0.1836 | 38500 | 2.5153 | 10092539456 |
+ | 2.6284 | 0.1860 | 39000 | 2.5149 | 10223611456 |
+ | 2.642 | 0.1884 | 39500 | 2.5141 | 10354683456 |
+ | 2.6422 | 0.1908 | 40000 | 2.5145 | 10485755456 |
+ | 2.6149 | 0.1932 | 40500 | 2.5119 | 10616827456 |
+ | 2.626 | 0.1956 | 41000 | 2.5085 | 10747899456 |
+ | 2.6449 | 0.1980 | 41500 | 2.5157 | 10878971456 |
+ | 2.6016 | 0.2003 | 42000 | 2.5062 | 11010043456 |
+ | 2.6271 | 0.2027 | 42500 | 2.5126 | 11141115456 |
+ | 2.618 | 0.2051 | 43000 | 2.5047 | 11272187456 |
+ | 2.6271 | 0.2075 | 43500 | 2.5098 | 11403259456 |
+ | 2.6364 | 0.2099 | 44000 | 2.5009 | 11534331456 |
+ | 2.6051 | 0.2123 | 44500 | 2.4998 | 11665403456 |
+ | 2.6156 | 0.2147 | 45000 | 2.5038 | 11796475456 |
+ | 2.6099 | 0.2170 | 45500 | 2.5025 | 11927547456 |
+ | 2.6132 | 0.2194 | 46000 | 2.4990 | 12058619456 |
+ | 2.6259 | 0.2218 | 46500 | 2.4975 | 12189691456 |
+ | 2.6045 | 0.2242 | 47000 | 2.4973 | 12320763456 |
+ | 2.5983 | 0.2266 | 47500 | 2.4959 | 12451835456 |
+ | 2.6101 | 0.2290 | 48000 | 2.4971 | 12582907456 |
+ | 2.6242 | 0.2313 | 48500 | 2.4955 | 12713979456 |
+ | 2.6288 | 0.2337 | 49000 | 2.4981 | 12845051456 |
+ | 2.6231 | 0.2361 | 49500 | 2.5008 | 12976123456 |
+ | 2.617 | 0.2385 | 50000 | 2.4974 | 13107195456 |
+ | 2.6013 | 0.2409 | 50500 | 2.5022 | 13238267456 |
+ | 2.6063 | 0.2433 | 51000 | 2.4981 | 13369339456 |
+ | 2.5922 | 0.2457 | 51500 | 2.4883 | 13500411456 |
+ | 2.6139 | 0.2480 | 52000 | 2.4932 | 13631483456 |
+ | 2.595 | 0.2504 | 52500 | 2.4901 | 13762555456 |
+ | 2.608 | 0.2528 | 53000 | 2.4855 | 13893627456 |
+ | 2.5976 | 0.2552 | 53500 | 2.4863 | 14024699456 |
+ | 2.5916 | 0.2576 | 54000 | 2.4871 | 14155771456 |
+ | 2.5991 | 0.2600 | 54500 | 2.4862 | 14286843456 |
+ | 2.5946 | 0.2624 | 55000 | 2.4892 | 14417915456 |
+ | 2.6713 | 0.2647 | 55500 | 2.5105 | 14548987456 |
+ | 2.6172 | 0.2671 | 56000 | 2.4925 | 14680059456 |
+ | 2.5905 | 0.2695 | 56500 | 2.4836 | 14811131456 |
+ | 2.6021 | 0.2719 | 57000 | 2.4829 | 14942203456 |
+ | 2.5965 | 0.2743 | 57500 | 2.4838 | 15073275456 |
+ | 2.582 | 0.2767 | 58000 | 2.4821 | 15204347456 |
+ | 2.5927 | 0.2790 | 58500 | 2.4754 | 15335419456 |
+ | 2.6026 | 0.2814 | 59000 | 2.4757 | 15466491456 |
+ | 2.5939 | 0.2838 | 59500 | 2.4700 | 15597563456 |
+ | 2.5853 | 0.2862 | 60000 | 2.4705 | 15728635456 |
+ | 2.5888 | 0.2886 | 60500 | 2.4637 | 15859707456 |
+ | 2.5864 | 0.2910 | 61000 | 2.4598 | 15990779456 |
+ | 2.5716 | 0.2934 | 61500 | 2.4552 | 16121851456 |
+ | 2.5587 | 0.2957 | 62000 | 2.4521 | 16252923456 |
+ | 2.56 | 0.2981 | 62500 | 2.4480 | 16383995456 |
+ | 2.5567 | 0.3005 | 63000 | 2.4442 | 16515067456 |
+ | 2.5554 | 0.3029 | 63500 | 2.4396 | 16646139456 |
+ | 2.5599 | 0.3053 | 64000 | 2.4374 | 16777211456 |
+ | 2.5382 | 0.3077 | 64500 | 2.4323 | 16908283456 |
+ | 2.5286 | 0.3101 | 65000 | 2.4292 | 17039355456 |
+ | 2.5439 | 0.3124 | 65500 | 2.4263 | 17170427456 |
+ | 2.5491 | 0.3148 | 66000 | 2.4224 | 17301499456 |
+ | 2.5125 | 0.3172 | 66500 | 2.4198 | 17432571456 |
+ | 2.5394 | 0.3196 | 67000 | 2.4178 | 17563643456 |
+ | 2.5189 | 0.3220 | 67500 | 2.4156 | 17694715456 |
+ | 2.5213 | 0.3244 | 68000 | 2.4140 | 17825787456 |
+ | 2.5439 | 0.3267 | 68500 | 2.4129 | 17956859456 |
+ | 2.5361 | 0.3291 | 69000 | 2.4120 | 18087931456 |
+ | 2.5176 | 0.3315 | 69500 | 2.4120 | 18219003456 |
+ | 2.5303 | 0.3339 | 70000 | 2.4118 | 18350075456 |
+
+
+ ### Framework versions
+
+ - Transformers 4.50.3
+ - PyTorch 2.6.0+cu126
+ - Datasets 3.5.0
+ - Tokenizers 0.21.1
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 0,
+   "pad_token_id": 0,
+   "transformers_version": "4.50.3"
+ }
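These defaults are what `model.generate()` falls back to when no explicit values are passed (BOS id 1, EOS and PAD id 0). A minimal usage sketch, assuming the checkpoint is published under the hypothetical repo id `Azrail/smallm_70`:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "Azrail/smallm_70"  # hypothetical repo id; substitute the real one
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)

inputs = tokenizer("Once upon a time", return_tensors="pt")
# eos_token_id=0 / pad_token_id=0 are picked up from generation_config.json
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```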
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<|beginoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,115 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<|beginoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "<|reserved_token_1|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<|reserved_token_2|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "4": {
+       "content": "<|reserved_token_3|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "5": {
+       "content": "<|reserved_token_4|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "6": {
+       "content": "<|reserved_token_5|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "7": {
+       "content": "<|reserved_token_6|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "8": {
+       "content": "<|reserved_token_7|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "9": {
+       "content": "<|reserved_token_8|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "10": {
+       "content": "<|reserved_token_9|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "11": {
+       "content": "<|reserved_token_10|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "12": {
+       "content": "<|reserved_token_11|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|beginoftext|>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|endoftext|>",
+   "extra_special_tokens": {},
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "PreTrainedTokenizer"
+ }
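Together with special_tokens_map.json above, this wires in `<|beginoftext|>` (id 1) as BOS and `<|endoftext|>` (id 0) as both EOS and PAD, with ids 2–12 reserved for future special tokens; the very large `model_max_length` is the library's sentinel for "no maximum length recorded". A quick check after loading, again using the hypothetical repo id:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Azrail/smallm_70")  # hypothetical repo id
print(tok.bos_token, tok.bos_token_id)   # <|beginoftext|> 1
print(tok.eos_token, tok.eos_token_id)   # <|endoftext|> 0
print(tok.pad_token_id)                  # 0 (shared with the EOS token)
```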
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff