GakkiLi commited on
Commit
7575216
·
verified ·
1 Parent(s): a2da3af

Upload count.json

Browse files
Files changed (1) hide show
  1. data0805/count.json +589 -0
data0805/count.json ADDED
@@ -0,0 +1,589 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "EleutherAI/pythia-160m-deduped": {
3
+ "model_name": "EleutherAI/pythia-160m-deduped",
4
+ "total_params": 123689472,
5
+ "embedding_params": 38633472,
6
+ "non_embedding_params": 85056000,
7
+ "model_size": 0.16,
8
+ "model_label": "Pythia",
9
+ "vocab_size": 50304
10
+ },
11
+ "EleutherAI/pythia-410m-deduped": {
12
+ "model_name": "EleutherAI/pythia-410m-deduped",
13
+ "total_params": 353822720,
14
+ "embedding_params": 51511296,
15
+ "non_embedding_params": 302311424,
16
+ "model_size": 0.41,
17
+ "model_label": "Pythia",
18
+ "vocab_size": 50304
19
+ },
20
+ "facebook/opt-125m": {
21
+ "model_name": "facebook/opt-125m",
22
+ "total_params": 125239296,
23
+ "embedding_params": 40183296,
24
+ "non_embedding_params": 85056000,
25
+ "model_size": 0.125,
26
+ "model_label": "OPT",
27
+ "vocab_size": 50272
28
+ },
29
+ "facebook/opt-350m": {
30
+ "model_name": "facebook/opt-350m",
31
+ "total_params": 331196416,
32
+ "embedding_params": 27838464,
33
+ "non_embedding_params": 303357952,
34
+ "model_size": 0.35,
35
+ "model_label": "OPT",
36
+ "vocab_size": 50272
37
+ },
38
+ "facebook/opt-1.3b": {
39
+ "model_name": "facebook/opt-1.3b",
40
+ "total_params": 1315758080,
41
+ "embedding_params": 107155456,
42
+ "non_embedding_params": 1208602624,
43
+ "model_size": 1.3,
44
+ "model_label": "OPT",
45
+ "vocab_size": 50272
46
+ },
47
+ "facebook/opt-2.7b": {
48
+ "model_name": "facebook/opt-2.7b",
49
+ "total_params": 2651596800,
50
+ "embedding_params": 133944320,
51
+ "non_embedding_params": 2517652480,
52
+ "model_size": 2.7,
53
+ "model_label": "OPT",
54
+ "vocab_size": 50272
55
+ },
56
+ "facebook/opt-6.7b": {
57
+ "model_name": "facebook/opt-6.7b",
58
+ "total_params": 6658473984,
59
+ "embedding_params": 214310912,
60
+ "non_embedding_params": 6444163072,
61
+ "model_size": 6.7,
62
+ "model_label": "OPT",
63
+ "vocab_size": 50272
64
+ },
65
+ "EleutherAI/gpt-neo-125m": {
66
+ "model_name": "EleutherAI/gpt-neo-125m",
67
+ "total_params": 125198592,
68
+ "embedding_params": 40170240,
69
+ "non_embedding_params": 85028352,
70
+ "model_size": 0.125,
71
+ "model_label": "GPT-Neo",
72
+ "vocab_size": 50257
73
+ },
74
+ "distilbert/distilgpt2": {
75
+ "model_name": "distilbert/distilgpt2",
76
+ "total_params": 81912576,
77
+ "embedding_params": 39383808,
78
+ "non_embedding_params": 42528768,
79
+ "model_size": 0.082,
80
+ "model_label": "DistilGPT2",
81
+ "vocab_size": 50257
82
+ },
83
+ "Qwen/Qwen2.5-0.5B": {
84
+ "model_name": "Qwen/Qwen2.5-0.5B",
85
+ "total_params": 494032768,
86
+ "embedding_params": 136134656,
87
+ "non_embedding_params": 357898112,
88
+ "model_size": 0.5,
89
+ "model_label": "QWen2.5",
90
+ "vocab_size": 151936
91
+ },
92
+ "Qwen/Qwen2.5-1.5B": {
93
+ "model_name": "Qwen/Qwen2.5-1.5B",
94
+ "total_params": 1543714304,
95
+ "embedding_params": 233373696,
96
+ "non_embedding_params": 1310340608,
97
+ "model_size": 1.5,
98
+ "model_label": "QWen2.5",
99
+ "vocab_size": 151936
100
+ },
101
+ "Qwen/Qwen2.5-3B": {
102
+ "model_name": "Qwen/Qwen2.5-3B",
103
+ "total_params": 3085938688,
104
+ "embedding_params": 311164928,
105
+ "non_embedding_params": 2774773760,
106
+ "model_size": 3,
107
+ "model_label": "QWen2.5",
108
+ "vocab_size": 151936
109
+ },
110
+ "Qwen/Qwen2.5-7B": {
111
+ "model_name": "Qwen/Qwen2.5-7B",
112
+ "total_params": 7070619136,
113
+ "embedding_params": 544997376,
114
+ "non_embedding_params": 6525621760,
115
+ "model_size": 7,
116
+ "model_label": "QWen2.5",
117
+ "vocab_size": 152064
118
+ },
119
+ "Qwen/Qwen2.5-14B": {
120
+ "model_name": "Qwen/Qwen2.5-14B",
121
+ "total_params": 13991465984,
122
+ "embedding_params": 778567680,
123
+ "non_embedding_params": 13212898304,
124
+ "model_size": 14,
125
+ "model_label": "QWen2.5",
126
+ "vocab_size": 152064
127
+ },
128
+ "Qwen/Qwen2.5-32B": {
129
+ "model_name": "Qwen/Qwen2.5-32B",
130
+ "total_params": 31985308672,
131
+ "embedding_params": 778567680,
132
+ "non_embedding_params": 31206740992,
133
+ "model_size": 32,
134
+ "model_label": "QWen2.5",
135
+ "vocab_size": 152064
136
+ },
137
+ "Qwen/Qwen2.5-72B": {
138
+ "model_name": "Qwen/Qwen2.5-72B",
139
+ "total_params": 71460495360,
140
+ "embedding_params": 1245708288,
141
+ "non_embedding_params": 70214787072,
142
+ "model_size": 72,
143
+ "model_label": "QWen2.5",
144
+ "vocab_size": 152064
145
+ },
146
+ "Qwen/Qwen3-0.6B":{
147
+ "model_name": "Qwen/Qwen3-0.6B",
148
+ "total_params": 596049920,
149
+ "embedding_params": 155582464,
150
+ "non_embedding_params": 440467456,
151
+ "model_size": 0.6,
152
+ "model_label": "QWen3",
153
+ "vocab_size": 151936
154
+ },
155
+ "Qwen/Qwen3-1.7B":{
156
+ "model_name": "Qwen/Qwen3-1.7B",
157
+ "total_params": 1720574976,
158
+ "embedding_params": 311164928,
159
+ "non_embedding_params": 1409410048,
160
+ "model_size": 1.7,
161
+ "model_label": "QWen3",
162
+ "vocab_size": 151936
163
+ },
164
+ "Qwen/Qwen3-4B":{
165
+ "model_name": "Qwen/Qwen3-4B",
166
+ "total_params": 4022468096,
167
+ "embedding_params": 388956160,
168
+ "non_embedding_params": 3633511936,
169
+ "model_size": 4,
170
+ "model_label": "QWen3",
171
+ "vocab_size": 151936
172
+ },
173
+ "Qwen/Qwen3-8B":{
174
+ "model_name": "Qwen/Qwen3-8B",
175
+ "total_params": 7568405504,
176
+ "embedding_params": 622329856,
177
+ "non_embedding_params": 6946075648,
178
+ "model_size": 8,
179
+ "model_label": "QWen3",
180
+ "vocab_size": 151936
181
+ },
182
+ "Qwen/Qwen3-14B":{
183
+ "model_name": "Qwen/Qwen3-14B",
184
+ "total_params": 13990394880,
185
+ "embedding_params": 777912320,
186
+ "non_embedding_params": 13212482560,
187
+ "model_size": 14,
188
+ "model_label": "QWen3",
189
+ "vocab_size": 151936
190
+ },
191
+ "Qwen/Qwen3-32B": {
192
+ "model_name": "Qwen/Qwen3-32B",
193
+ "total_params": 31984210944,
194
+ "embedding_params": 777912320,
195
+ "non_embedding_params": 31206298624,
196
+ "model_size": 32,
197
+ "model_label": "QWen3",
198
+ "vocab_size": 151936
199
+ },
200
+ "Qwen/Qwen3-0.6B-Base": {
201
+ "model_name": "Qwen/Qwen3-0.6B-Base",
202
+ "total_params": 596049920,
203
+ "embedding_params": 155582464,
204
+ "non_embedding_params": 440467456,
205
+ "model_size": 0.6,
206
+ "model_label": "Qwen3",
207
+ "vocab_size": 151936
208
+ },
209
+ "Qwen/Qwen3-1.7B-Base": {
210
+ "model_name": "Qwen/Qwen3-1.7B-Base",
211
+ "total_params": 1720574976,
212
+ "embedding_params": 311164928,
213
+ "non_embedding_params": 1409410048,
214
+ "model_size": 1.7,
215
+ "model_label": "Qwen3",
216
+ "vocab_size": 151936
217
+ },
218
+ "Qwen/Qwen3-4B-Base": {
219
+ "model_name": "Qwen/Qwen3-4B-Base",
220
+ "total_params": 4022468096,
221
+ "embedding_params": 388956160,
222
+ "non_embedding_params": 3633511936,
223
+ "model_size": 4,
224
+ "model_label": "Qwen3",
225
+ "vocab_size": 151936
226
+ },
227
+ "Qwen/Qwen3-8B-Base": {
228
+ "model_name": "Qwen/Qwen3-8B-Base",
229
+ "total_params": 7568405504,
230
+ "embedding_params": 622329856,
231
+ "non_embedding_params": 6946075648,
232
+ "model_size": 8,
233
+ "model_label": "Qwen3",
234
+ "vocab_size": 151936
235
+ },
236
+ "Qwen/Qwen3-14B-Base": {
237
+ "model_name": "Qwen/Qwen3-14B-Base",
238
+ "total_params": 13990394880,
239
+ "embedding_params": 777912320,
240
+ "non_embedding_params": 13212482560,
241
+ "model_size": 14,
242
+ "model_label": "Qwen3",
243
+ "vocab_size": 151936
244
+ },
245
+ "meta-llama/Meta-Llama-3-8B": {
246
+ "model_name": "meta-llama/Meta-Llama-3-8B",
247
+ "total_params": 7504924672,
248
+ "embedding_params": 525336576,
249
+ "non_embedding_params": 6979588096,
250
+ "model_size": 8,
251
+ "model_label": "Llama3",
252
+ "vocab_size": 128256
253
+ },
254
+ "meta-llama/Meta-Llama-3-70B": {
255
+ "model_name": "meta-llama/Meta-Llama-3-70B",
256
+ "total_params": 69503033344,
257
+ "embedding_params": 1050673152,
258
+ "non_embedding_params": 68452360192,
259
+ "model_size": 70,
260
+ "model_label": "Llama3",
261
+ "vocab_size": 128256
262
+ },
263
+ "meta-llama/Llama-2-13b-hf": {
264
+ "model_name": "meta-llama/Llama-2-13b-hf",
265
+ "total_params": 12852024320,
266
+ "embedding_params": 163840000,
267
+ "non_embedding_params": 12688184320,
268
+ "model_size": 13,
269
+ "model_label": "Llama2",
270
+ "vocab_size": 32000
271
+ },
272
+ "meta-llama/Llama-2-7b-hf": {
273
+ "model_name": "meta-llama/Llama-2-7b-hf",
274
+ "total_params": 6607343616,
275
+ "embedding_params": 131072000,
276
+ "non_embedding_params": 6476271616,
277
+ "model_size": 7,
278
+ "model_label": "Llama2",
279
+ "vocab_size": 32000
280
+ },
281
+ "meta-llama/Llama-2-70b-hf": {
282
+ "model_name": "meta-llama/Llama-2-70b-hf",
283
+ "total_params": 68714504192,
284
+ "embedding_params": 262144000,
285
+ "non_embedding_params": 68452360192,
286
+ "model_size": 70,
287
+ "model_label": "Llama2",
288
+ "vocab_size": 32000
289
+ },
290
+ "openai-community/gpt2": {
291
+ "model_name": "openai-community/gpt2",
292
+ "total_params": 124439808,
293
+ "embedding_params": 39383808,
294
+ "non_embedding_params": 85056000,
295
+ "model_size": 0.124,
296
+ "model_label": "GPT2",
297
+ "vocab_size": 50257
298
+ },
299
+ "openai-community/gpt2-medium": {
300
+ "model_name": "openai-community/gpt2-medium",
301
+ "total_params": 354823168,
302
+ "embedding_params": 52511744,
303
+ "non_embedding_params": 302311424,
304
+ "model_size": 0.355,
305
+ "model_label": "GPT2",
306
+ "vocab_size": 50257
307
+ },
308
+ "openai-community/gpt2-large": {
309
+ "model_name": "openai-community/gpt2-large",
310
+ "total_params": 774030080,
311
+ "embedding_params": 65639680,
312
+ "non_embedding_params": 708390400,
313
+ "model_size": 0.774,
314
+ "model_label": "GPT2",
315
+ "vocab_size": 50257
316
+ },
317
+ "openai-community/gpt2-xl": {
318
+ "model_name": "openai-community/gpt2-xl",
319
+ "total_params": 1557611200,
320
+ "embedding_params": 82049600,
321
+ "non_embedding_params": 1475561600,
322
+ "model_size": 1.5,
323
+ "model_label": "GPT2",
324
+ "vocab_size": 50257
325
+ },
326
+ "huggyllama/llama-7b": {
327
+ "model_name": "huggyllama/llama-7b",
328
+ "total_params": 6607343616,
329
+ "embedding_params": 131072000,
330
+ "non_embedding_params": 6476271616,
331
+ "model_size": 7,
332
+ "model_label": "Llama1",
333
+ "vocab_size": 32000
334
+ },
335
+ "huggyllama/llama-65b": {
336
+ "model_name": "huggyllama/llama-65b",
337
+ "total_params": 65023516672,
338
+ "embedding_params": 262144000,
339
+ "non_embedding_params": 64761372672,
340
+ "model_size": 65,
341
+ "model_label": "Llama1",
342
+ "vocab_size": 32000
343
+ },
344
+ "microsoft/phi-2": {
345
+ "model_name": "microsoft/phi-2",
346
+ "total_params": 2648560640,
347
+ "embedding_params": 131072000,
348
+ "non_embedding_params": 2517488640,
349
+ "model_size": 2.7,
350
+ "model_label": "Phi-2",
351
+ "vocab_size": 51200
352
+ },
353
+ "meta-llama/Llama-3.2-1B": {
354
+ "model_name": "meta-llama/Llama-3.2-1B",
355
+ "total_params": 1235814400,
356
+ "embedding_params": 262668288,
357
+ "non_embedding_params": 973146112,
358
+ "model_size": 1,
359
+ "model_label": "Llama3.2",
360
+ "vocab_size": 128256
361
+ },
362
+ "meta-llama/Llama-3.2-3B": {
363
+ "model_name": "meta-llama/Llama-3.2-3B",
364
+ "total_params": 3212749824,
365
+ "embedding_params": 394002432,
366
+ "non_embedding_params": 2818747392,
367
+ "model_size": 3,
368
+ "model_label": "Llama3.2",
369
+ "vocab_size": 128256
370
+ },
371
+ "mistralai/Mistral-7B-v0.1": {
372
+ "model_name": "mistralai/Mistral-7B-v0.1",
373
+ "total_params": 7110660096,
374
+ "embedding_params": 131072000,
375
+ "non_embedding_params": 6979588096,
376
+ "model_size": 7,
377
+ "model_label": "Mistral",
378
+ "vocab_size": 32000
379
+ },
380
+ "baichuan-inc/Baichuan-M1-14B-Base": {
381
+ "model_name": "baichuan-inc/Baichuan-M1-14B-Base",
382
+ "total_params": 13789189920,
383
+ "embedding_params": 681574400,
384
+ "non_embedding_params": 13107615520,
385
+ "model_size": 14,
386
+ "model_label": "BaichuanM1",
387
+ "vocab_size": 133120
388
+ },
389
+ "THUDM/glm-4-9b": {
390
+ "model_name": "THUDM/glm-4-9b",
391
+ "total_params": 9399951360,
392
+ "embedding_params": 620756992,
393
+ "non_embedding_params": 8779194368,
394
+ "model_size": 9,
395
+ "model_label": "GLM4",
396
+ "vocab_size": 151552
397
+ },
398
+ "deepseek-ai/DeepSeek-V2-Lite": {
399
+ "model_name": "deepseek-ai/DeepSeek-V2-Lite",
400
+ "total_params": 15496769024,
401
+ "embedding_params": 209715200,
402
+ "non_embedding_params": 15287053824,
403
+ "model_size": 16,
404
+ "model_label": "DeepSeek-V2",
405
+ "vocab_size": 102400
406
+ },
407
+ "roneneldan/TinyStories-1M": {
408
+ "model_name": "roneneldan/TinyStories-1M",
409
+ "total_params": 3745984,
410
+ "embedding_params": 3347520,
411
+ "non_embedding_params": 398464,
412
+ "model_size": 0.001,
413
+ "model_label": "TinyStories",
414
+ "vocab_size": 50257
415
+ },
416
+ "roneneldan/TinyStories-3M": {
417
+ "model_name": "roneneldan/TinyStories-3M",
418
+ "total_params": 8278400,
419
+ "embedding_params": 6695040,
420
+ "non_embedding_params": 1583360,
421
+ "model_size": 0.003,
422
+ "model_label": "TinyStories",
423
+ "vocab_size": 50257
424
+ },
425
+ "roneneldan/TinyStories-8M": {
426
+ "model_name": "roneneldan/TinyStories-8M",
427
+ "total_params": 19702528,
428
+ "embedding_params": 13390080,
429
+ "non_embedding_params": 6312448,
430
+ "model_size": 0.008,
431
+ "model_label": "TinyStories",
432
+ "vocab_size": 50257
433
+ },
434
+ "EleutherAI/pythia-14m": {
435
+ "model_name": "EleutherAI/pythia-14m",
436
+ "total_params": 7628800,
437
+ "embedding_params": 6438912,
438
+ "non_embedding_params": 1189888,
439
+ "model_size": 0.014,
440
+ "model_label": "Pythia",
441
+ "vocab_size": 50304
442
+ },
443
+ "EleutherAI/pythia-70m": {
444
+ "model_name": "EleutherAI/pythia-70m",
445
+ "total_params": 44670976,
446
+ "embedding_params": 25755648,
447
+ "non_embedding_params": 18915328,
448
+ "model_size": 0.07,
449
+ "model_label": "Pythia",
450
+ "vocab_size": 50304
451
+ },
452
+ "EleutherAI/pythia-160m": {
453
+ "model_name": "EleutherAI/pythia-160m",
454
+ "total_params": 123689472,
455
+ "embedding_params": 38633472,
456
+ "non_embedding_params": 85056000,
457
+ "model_size": 0.16,
458
+ "model_label": "Pythia",
459
+ "vocab_size": 50304
460
+ },
461
+ "EleutherAI/pythia-410m": {
462
+ "model_name": "EleutherAI/pythia-410m",
463
+ "total_params": 353822720,
464
+ "embedding_params": 51511296,
465
+ "non_embedding_params": 302311424,
466
+ "model_size": 0.41,
467
+ "model_label": "Pythia",
468
+ "vocab_size": 50304
469
+ },
470
+
471
+ "EleutherAI/pythia-31m": {
472
+ "model_name": "EleutherAI/pythia-31m",
473
+ "total_params": 17616896,
474
+ "embedding_params": 12877824,
475
+ "non_embedding_params": 4739072,
476
+ "model_size": 0.031,
477
+ "model_label": "Pythia",
478
+ "vocab_size": 50304
479
+ },
480
+ "EleutherAI/pythia-1b": {
481
+ "model_name": "EleutherAI/pythia-1b",
482
+ "total_params": 908759040,
483
+ "embedding_params": 103022592,
484
+ "non_embedding_params": 805736448,
485
+ "model_size": 1,
486
+ "model_label": "Pythia",
487
+ "vocab_size": 50304
488
+ },
489
+ "EleutherAI/pythia-1.4b": {
490
+ "model_name": "EleutherAI/pythia-1.4b",
491
+ "total_params": 1311625216,
492
+ "embedding_params": 103022592,
493
+ "non_embedding_params": 1208602624,
494
+ "model_size": 1.4,
495
+ "model_label": "Pythia",
496
+ "vocab_size": 50304
497
+ },
498
+ "EleutherAI/pythia-2.8b": {
499
+ "model_name": "EleutherAI/pythia-2.8b",
500
+ "total_params": 2646430720,
501
+ "embedding_params": 128778240,
502
+ "non_embedding_params": 2517652480,
503
+ "model_size": 2.8,
504
+ "model_label": "Pythia",
505
+ "vocab_size": 50304
506
+ },
507
+ "EleutherAI/pythia-6.9b": {
508
+ "model_name": "EleutherAI/pythia-6.9b",
509
+ "total_params": 6650732544,
510
+ "embedding_params": 206569472,
511
+ "non_embedding_params": 6444163072,
512
+ "model_size": 6.9,
513
+ "model_label": "Pythia",
514
+ "vocab_size": 50432
515
+ },
516
+ "EleutherAI/pythia-12b": {
517
+ "model_name": "EleutherAI/pythia-12b",
518
+ "total_params": 11586549760,
519
+ "embedding_params": 259522560,
520
+ "non_embedding_params": 11327027200,
521
+ "model_size": 12,
522
+ "model_label": "Pythia",
523
+ "vocab_size": 50688
524
+ },
525
+ "openai-community/openai-gpt": {
526
+ "model_name": "openai-community/openai-gpt",
527
+ "total_params": 116534784,
528
+ "embedding_params": 31480320,
529
+ "non_embedding_params": 85054464,
530
+ "model_size": 0.12,
531
+ "model_label": "GPT1",
532
+ "vocab_size": 40478
533
+ },
534
+ "DeepSeek-V3-Base": {
535
+ "model_name": "DeepSeek-V3-Base",
536
+ "total_params": 671000000000,
537
+ "embedding_params": 900000000,
538
+ "non_embedding_params": 670100000000,
539
+ "model_size": 671,
540
+ "model_label": "DeepSeek-V3-Base",
541
+ "vocab_size": 129280
542
+ },
543
+ "deepseek-ai/DeepSeek-V3-Base": {
544
+ "model_name": "deepseek-ai/DeepSeek-V3-Base",
545
+ "total_params": 671000000000,
546
+ "embedding_params": 900000000,
547
+ "non_embedding_params": 670100000000,
548
+ "model_size": 671,
549
+ "model_label": "DeepSeek-V3-Base",
550
+ "vocab_size": 129280
551
+ },
552
+ "meta-llama/Llama-3.1-8B": {
553
+ "model_name": "meta-llama/Llama-3.1-8B",
+ "total_params": 7504924672,
554
+ "embedding_params": 525336576,
555
+ "non_embedding_params": 6979588096,
556
+ "vocab_size": 128256,
557
+ "model_size": 8,
558
+ "model_label": "Llama3.1"
559
+ },
560
+ "meta-llama/Llama-3.1-70B": {
561
+ "model_name": "meta-llama/Llama-3.1-70B",
562
+ "total_params": 69503033344,
563
+ "embedding_params": 1050673152,
564
+ "non_embedding_params": 68452360192,
565
+ "model_size": 70,
566
+ "model_label": "Llama3.1",
567
+ "vocab_size": 128256
568
+ },
569
+ "meta-llama/Llama-3.1-405B": {
570
+ "model_name": "meta-llama/Llama-3.1-405B",
+ "total_params": 403752042496,
571
+ "embedding_params": 2101346304,
572
+ "non_embedding_params": 401650696192,
573
+ "vocab_size": 128256,
574
+ "model_size": 405,
575
+ "model_label": "Llama3.1"
576
+ },
577
+ "meta-llama/Llama-4-Maverick-17B-128E": {
578
+ "model_name": "meta-llama/Llama-4-Maverick-17B-128E",
+ "total_params": 399677363200,
579
+ "embedding_params": 1034485760,
580
+ "non_embedding_params": 398642877440,
581
+ "vocab_size": 202048
582
+ },
583
+ "meta-llama/Llama-4-Scout-17B-16E": {
584
+ "model_name": "meta-llama/Llama-4-Scout-17B-16E",
+ "total_params": 106735375360,
585
+ "embedding_params": 1034485760,
586
+ "non_embedding_params": 105700889600,
587
+ "vocab_size": 202048
588
+ }
589
+ }