{
  "EleutherAI/pythia-160m-deduped": {
    "model_name": "EleutherAI/pythia-160m-deduped",
    "total_params": 123689472,
    "embedding_params": 38633472,
    "non_embedding_params": 85056000,
    "model_size": 0.16,
    "model_label": "Pythia",
    "vocab_size": 50304
  },
  "EleutherAI/pythia-410m-deduped": {
    "model_name": "EleutherAI/pythia-410m-deduped",
    "total_params": 353822720,
    "embedding_params": 51511296,
    "non_embedding_params": 302311424,
    "model_size": 0.41,
    "model_label": "Pythia",
    "vocab_size": 50304
  },
  "facebook/opt-125m": {
    "model_name": "facebook/opt-125m",
    "total_params": 125239296,
    "embedding_params": 40183296,
    "non_embedding_params": 85056000,
    "model_size": 0.125,
    "model_label": "OPT",
    "vocab_size": 50272
  },
  "facebook/opt-350m": {
    "model_name": "facebook/opt-350m",
    "total_params": 331196416,
    "embedding_params": 27838464,
    "non_embedding_params": 303357952,
    "model_size": 0.35,
    "model_label": "OPT",
    "vocab_size": 50272
  },
  "facebook/opt-1.3b": {
    "model_name": "facebook/opt-1.3b",
    "total_params": 1315758080,
    "embedding_params": 107155456,
    "non_embedding_params": 1208602624,
    "model_size": 1.3,
    "model_label": "OPT",
    "vocab_size": 50272
  },
  "facebook/opt-2.7b": {
    "model_name": "facebook/opt-2.7b",
    "total_params": 2651596800,
    "embedding_params": 133944320,
    "non_embedding_params": 2517652480,
    "model_size": 2.7,
    "model_label": "OPT",
    "vocab_size": 50272
  },
  "facebook/opt-6.7b": {
    "model_name": "facebook/opt-6.7b",
    "total_params": 6658473984,
    "embedding_params": 214310912,
    "non_embedding_params": 6444163072,
    "model_size": 6.7,
    "model_label": "OPT",
    "vocab_size": 50272
  },
  "EleutherAI/gpt-neo-125m": {
    "model_name": "EleutherAI/gpt-neo-125m",
    "total_params": 125198592,
    "embedding_params": 40170240,
    "non_embedding_params": 85028352,
    "model_size": 0.125,
    "model_label": "GPT-Neo",
    "vocab_size": 50257
  },
  "distilbert/distilgpt2": {
    "model_name": "distilbert/distilgpt2",
    "total_params": 81912576,
    "embedding_params": 39383808,
    "non_embedding_params": 42528768,
    "model_size": 0.082,
    "model_label": "DistilGPT2",
    "vocab_size": 50257
  },
  "Qwen/Qwen2.5-0.5B": {
    "model_name": "Qwen/Qwen2.5-0.5B",
    "total_params": 494032768,
    "embedding_params": 136134656,
    "non_embedding_params": 357898112,
    "model_size": 0.5,
    "model_label": "QWen2.5",
    "vocab_size": 151936
  },
  "Qwen/Qwen2.5-1.5B": {
    "model_name": "Qwen/Qwen2.5-1.5B",
    "total_params": 1543714304,
    "embedding_params": 233373696,
    "non_embedding_params": 1310340608,
    "model_size": 1.5,
    "model_label": "QWen2.5",
    "vocab_size": 151936
  },
  "Qwen/Qwen2.5-3B": {
    "model_name": "Qwen/Qwen2.5-3B",
    "total_params": 3085938688,
    "embedding_params": 311164928,
    "non_embedding_params": 2774773760,
    "model_size": 3,
    "model_label": "QWen2.5",
    "vocab_size": 151936
  },
  "Qwen/Qwen2.5-7B": {
    "model_name": "Qwen/Qwen2.5-7B",
    "total_params": 7070619136,
    "embedding_params": 544997376,
    "non_embedding_params": 6525621760,
    "model_size": 7,
    "model_label": "QWen2.5",
    "vocab_size": 152064
  },
  "Qwen/Qwen2.5-14B": {
    "model_name": "Qwen/Qwen2.5-14B",
    "total_params": 13991465984,
    "embedding_params": 778567680,
    "non_embedding_params": 13212898304,
    "model_size": 14,
    "model_label": "QWen2.5",
    "vocab_size": 152064
  },
  "Qwen/Qwen2.5-32B": {
    "model_name": "Qwen/Qwen2.5-32B",
    "total_params": 31985308672,
    "embedding_params": 778567680,
    "non_embedding_params": 31206740992,
    "model_size": 32,
    "model_label": "QWen2.5",
    "vocab_size": 152064
  },
  "Qwen/Qwen2.5-72B": {
    "model_name": "Qwen/Qwen2.5-72B",
    "total_params": 71460495360,
    "embedding_params": 1245708288,
    "non_embedding_params": 70214787072,
    "model_size": 72,
    "model_label": "QWen2.5",
    "vocab_size": 152064
  },
  "Qwen/Qwen3-0.6B":{
    "model_name": "Qwen/Qwen3-0.6B",
    "total_params": 596049920,
    "embedding_params": 155582464,
    "non_embedding_params": 440467456,
    "model_size": 0.6,
    "model_label": "QWen3",
    "vocab_size": 151936
  },
  "Qwen/Qwen3-1.7B":{
    "model_name": "Qwen/Qwen3-1.7B",
    "total_params": 1720574976,
    "embedding_params": 311164928,
    "non_embedding_params": 1409410048,
    "model_size": 1.7,
    "model_label": "QWen3",
    "vocab_size": 151936
  },
  "Qwen/Qwen3-4B":{
    "model_name": "Qwen/Qwen3-4B",
    "total_params": 4022468096,
    "embedding_params": 388956160,
    "non_embedding_params": 3633511936,
    "model_size": 4,
    "model_label": "QWen3",
    "vocab_size": 151936
  },
  "Qwen/Qwen3-8B":{
    "model_name": "Qwen/Qwen3-8B",
    "total_params": 7568405504,
    "embedding_params": 622329856,
    "non_embedding_params": 6946075648,
    "model_size": 8,
    "model_label": "QWen3",
    "vocab_size": 151936
  },
  "Qwen/Qwen3-14B":{
    "model_name": "Qwen/Qwen3-14B",
    "total_params": 13990394880,
    "embedding_params": 777912320,
    "non_embedding_params": 13212482560,
    "model_size": 14,
    "model_label": "QWen3",
    "vocab_size": 151936
  },
  "Qwen/Qwen3-32B": {
    "model_name": "Qwen/Qwen3-32B",
    "total_params": 31984210944,
    "embedding_params": 777912320,
    "non_embedding_params": 31206298624,
    "model_size": 32,
    "model_label": "Qwen3",
    "vocab_size": 151
  },
  "Qwen/Qwen3-0.6B-Base": {
    "model_name": "Qwen/Qwen3-0.6B-Base",
    "total_params": 596049920,
    "embedding_params": 155582464,
    "non_embedding_params": 440467456,
    "model_size": 32,
    "model_label": "Qwen3",
    "vocab_size": 151936
  },
  "Qwen/Qwen3-1.7B-Base": {
    "model_name": "Qwen/Qwen3-1.7B-Base",
    "total_params": 1720574976,
    "embedding_params": 311164928,
    "non_embedding_params": 1409410048,
    "model_size": 32,
    "model_label": "Qwen3",
    "vocab_size": 151936
  },
  "Qwen/Qwen3-4B-Base": {
    "model_name": "Qwen/Qwen3-4B-Base",
    "total_params": 4022468096,
    "embedding_params": 388956160,
    "non_embedding_params": 3633511936,
    "model_size": 32,
    "model_label": "Qwen3",
    "vocab_size": 151936
  },
  "Qwen/Qwen3-8B-Base": {
    "model_name": "Qwen/Qwen3-8B-Base",
    "total_params": 7568405504,
    "embedding_params": 622329856,
    "non_embedding_params": 6946075648,
    "model_size": 32,
    "model_label": "Qwen3",
    "vocab_size": 151936
  },
  "Qwen/Qwen3-14B-Base": {
    "model_name": "Qwen/Qwen3-14B-Base",
    "total_params": 13990394880,
    "embedding_params": 777912320,
    "non_embedding_params": 13212482560,
    "model_size": 32,
    "model_label": "Qwen3",
    "vocab_size": 151936
  },
  "meta-llama/Meta-Llama-3-8B": {
    "model_name": "meta-llama/Meta-Llama-3-8B",
    "total_params": 7504924672,
    "embedding_params": 525336576,
    "non_embedding_params": 6979588096,
    "model_size": 8,
    "model_label": "Llama3",
    "vocab_size": 128256
  },
  "meta-llama/Meta-Llama-3-70B": {
    "model_name": "meta-llama/Meta-Llama-3-70B",
    "total_params": 69503033344,
    "embedding_params": 1050673152,
    "non_embedding_params": 68452360192,
    "model_size": 70,
    "model_label": "Llama3",
    "vocab_size": 128256
  },
  "meta-llama/Llama-2-13b-hf": {
    "model_name": "meta-llama/Llama-2-13b-hf",
    "total_params": 12852024320,
    "embedding_params": 163840000,
    "non_embedding_params": 12688184320,
    "model_size": 13,
    "model_label": "Llama2",
    "vocab_size": 32000
  },
  "meta-llama/Llama-2-7b-hf": {
    "model_name": "meta-llama/Llama-2-7b-hf",
    "total_params": 6607343616,
    "embedding_params": 131072000,
    "non_embedding_params": 6476271616,
    "model_size": 7,
    "model_label": "Llama2",
    "vocab_size": 32000
  },
  "meta-llama/Llama-2-70b-hf": {
    "model_name": "meta-llama/Llama-2-70b-hf",
    "total_params": 68714504192,
    "embedding_params": 262144000,
    "non_embedding_params": 68452360192,
    "model_size": 70,
    "model_label": "Llama2",
    "vocab_size": 32000
  },
  "openai-community/gpt2": {
    "model_name": "openai-community/gpt2",
    "total_params": 124439808,
    "embedding_params": 39383808,
    "non_embedding_params": 85056000,
    "model_size": 0.124,
    "model_label": "GPT2",
    "vocab_size": 50257
  },
  "openai-community/gpt2-medium": {
    "model_name": "openai-community/gpt2-medium",
    "total_params": 354823168,
    "embedding_params": 52511744,
    "non_embedding_params": 302311424,
    "model_size": 0.355,
    "model_label": "GPT2",
    "vocab_size": 50257
  },
  "openai-community/gpt2-large": {
    "model_name": "openai-community/gpt2-large",
    "total_params": 774030080,
    "embedding_params": 65639680,
    "non_embedding_params": 708390400,
    "model_size": 0.774,
    "model_label": "GPT2",
    "vocab_size": 50257
  },
  "openai-community/gpt2-xl": {
    "model_name": "openai-community/gpt2-xl",
    "total_params": 1557611200,
    "embedding_params": 82049600,
    "non_embedding_params": 1475561600,
    "model_size": 1.5,
    "model_label": "GPT2",
    "vocab_size": 50257
  },
  "huggyllama/llama-7b": {
    "model_name": "huggyllama/llama-7b",
    "total_params": 6607343616,
    "embedding_params": 131072000,
    "non_embedding_params": 6476271616,
    "model_size": 7,
    "model_label": "Llama1",
    "vocab_size": 32000
  },
  "huggyllama/llama-65b": {
    "model_name": "huggyllama/llama-65b",
    "total_params": 65023516672,
    "embedding_params": 262144000,
    "non_embedding_params": 64761372672,
    "model_size": 65,
    "model_label": "Llama1",
    "vocab_size": 32000
  },
  "microsoft/phi-2": {
    "model_name": "microsoft/phi-2",
    "total_params": 2648560640,
    "embedding_params": 131072000,
    "non_embedding_params": 2517488640,
    "model_size": 2,
    "model_label": "Phi-2",
    "vocab_size": 51200
  },
  "meta-llama/Llama-3.2-1B": {
    "model_name": "meta-llama/Llama-3.2-1B",
    "total_params": 1235814400,
    "embedding_params": 262668288,
    "non_embedding_params": 973146112,
    "model_size": 1,
    "model_label": "Llama3.2",
    "vocab_size": 128256
  },
  "meta-llama/Llama-3.2-3B": {
    "model_name": "meta-llama/Llama-3.2-3B",
    "total_params": 3212749824,
    "embedding_params": 394002432,
    "non_embedding_params": 2818747392,
    "model_size": 3,
    "model_label": "Llama3.2",
    "vocab_size": 128256
  },
  "mistralai/Mistral-7B-v0.1": {
    "model_name": "mistralai/Mistral-7B-v0.1",
    "total_params": 7110660096,
    "embedding_params": 131072000,
    "non_embedding_params": 6979588096,
    "model_size": 7,
    "model_label": "Mistral",
    "vocab_size": 32000
  },
  "baichuan-inc/Baichuan-M1-14B-Base": {
    "model_name": "baichuan-inc/Baichuan-M1-14B-Base",
    "total_params": 13789189920,
    "embedding_params": 681574400,
    "non_embedding_params": 13107615520,
    "model_size": 14,
    "model_label": "BaichuanM1",
    "vocab_size": 133120
  },
  "THUDM/glm-4-9b": {
    "model_name": "THUDM/glm-4-9b",
    "total_params": 9399951360,
    "embedding_params": 620756992,
    "non_embedding_params": 8779194368,
    "model_size": 9,
    "model_label": "GLM4",
    "vocab_size": 151552
  },
  "deepseek-ai/DeepSeek-V2-Lite": {
    "model_name": "deepseek-ai/DeepSeek-V2-Lite",
    "total_params": 15496769024,
    "embedding_params": 209715200,
    "non_embedding_params": 15287053824,
    "model_size": 16,
    "model_label": "DeepSeek-V2",
    "vocab_size": 102400
  },
  "roneneldan/TinyStories-1M": {
    "model_name": "roneneldan/TinyStories-1M",
    "total_params": 3745984,
    "embedding_params": 3347520,
    "non_embedding_params": 398464,
    "model_size": 0.001,
    "model_label": "TinyStories",
    "vocab_size": 50257
  },
  "roneneldan/TinyStories-3M": {
    "model_name": "roneneldan/TinyStories-3M",
    "total_params": 8278400,
    "embedding_params": 6695040,
    "non_embedding_params": 1583360,
    "model_size": 0.003,
    "model_label": "TinyStories",
    "vocab_size": 50257
  },
  "roneneldan/TinyStories-8M": {
    "model_name": "roneneldan/TinyStories-8M",
    "total_params": 19702528,
    "embedding_params": 13390080,
    "non_embedding_params": 6312448,
    "model_size": 0.008,
    "model_label": "TinyStories",
    "vocab_size": 50257
  },
  "EleutherAI/pythia-14m": {
    "model_name": "EleutherAI/pythia-14m",
    "total_params": 7628800,
    "embedding_params": 6438912,
    "non_embedding_params": 1189888,
    "model_size": 0.014,
    "model_label": "Pythia",
    "vocab_size": 50304
  },
  "EleutherAI/pythia-70m": {
    "model_name": "EleutherAI/pythia-70m",
    "total_params": 44670976,
    "embedding_params": 25755648,
    "non_embedding_params": 18915328,
    "model_size": 0.07,
    "model_label": "Pythia",
    "vocab_size": 50304
  },
  "EleutherAI/pythia-160m": {
    "model_name": "EleutherAI/pythia-160m",
    "total_params": 123689472,
    "embedding_params": 38633472,
    "non_embedding_params": 85056000,
    "model_size": 0.16,
    "model_label": "Pythia",
    "vocab_size": 50304
  },
  "EleutherAI/pythia-410m": {
    "model_name": "EleutherAI/pythia-410m",
    "total_params": 353822720,
    "embedding_params": 51511296,
    "non_embedding_params": 302311424,
    "model_size": 0.41,
    "model_label": "Pythia",
    "vocab_size": 50304
  },
  "EleutherAI/pythia-31m": {
    "model_name": "EleutherAI/pythia-31m",
    "total_params": 17616896,
    "embedding_params": 12877824,
    "non_embedding_params": 4739072,
    "model_size": "31m",
    "model_label": "Pythia",
    "vocab_size": 50304
  },
  "EleutherAI/pythia-1b": {
    "model_name": "EleutherAI/pythia-1b",
    "total_params": 908759040,
    "embedding_params": 103022592,
    "non_embedding_params": 805736448,
    "model_size": "1b",
    "model_label": "Pythia",
    "vocab_size": 50304
  },
  "EleutherAI/pythia-1.4b": {
    "model_name": "EleutherAI/pythia-1.4b",
    "total_params": 1311625216,
    "embedding_params": 103022592,
    "non_embedding_params": 1208602624,
    "model_size": "1.4b",
    "model_label": "Pythia",
    "vocab_size": 50304
  },
  "EleutherAI/pythia-2.8b": {
    "model_name": "EleutherAI/pythia-2.8b",
    "total_params": 2646430720,
    "embedding_params": 128778240,
    "non_embedding_params": 2517652480,
    "model_size": "2.8b",
    "model_label": "Pythia",
    "vocab_size": 50304
  },
  "EleutherAI/pythia-6.9b": {
    "model_name": "EleutherAI/pythia-6.9b",
    "total_params": 6650732544,
    "embedding_params": 206569472,
    "non_embedding_params": 6444163072,
    "model_size": "6.9b",
    "model_label": "Pythia",
    "vocab_size": 50432
  },
  "EleutherAI/pythia-12b": {
    "model_name": "EleutherAI/pythia-12b",
    "total_params": 11586549760,
    "embedding_params": 259522560,
    "non_embedding_params": 11327027200,
    "model_size": "12b",
    "model_label": "Pythia",
    "vocab_size": 50688
  },
  "openai-community/openai-gpt": {
    "model_name": "openai-community/openai-gpt",
    "total_params": 116534784,
    "embedding_params": 31480320,
    "non_embedding_params": 85054464,
    "model_size": 0.12,
    "model_label": "GPT1",
    "vocab_size": 40478
  },
  "DeepSeek-V3-Base": {
    "model_name": "DeepSeek-V3-Base",
    "total_params": 671000000000,
    "embedding_params": 900000000,
    "non_embedding_params": 669200000000,
    "model_size": 671,
    "model_label": "DeepSeek-V3-Base",
    "vocab_size": 129280
  },
  "deepseek-ai/DeepSeek-V3-Base": {
    "model_name": "DeepSeek-V3-Base",
    "total_params": 671000000000,
    "embedding_params": 900000000,
    "non_embedding_params": 669200000000,
    "model_size": 671,
    "model_label": "DeepSeek-V3-Base",
    "vocab_size": 129280
  },
  "meta-llama/Llama-3.1-8B": {
    "total_params": 7504924672,
    "embedding_params": 525336576,
    "non_embedding_params": 6979588096,
    "vocab_size": 128256,
    "model_size": 8,
    "model_label": "Llama-3.1"
  },
  "meta-llama/Llama-3.1-70B": {
    "model_name": "meta-llama/Llama-3.1-70B",
    "total_params": 69503033344,
    "embedding_params": 1050673152,
    "non_embedding_params": 68452360192,
    "model_size": 70,
    "model_label": "Llama3.1",
    "vocab_size": 128256
  },
  "meta-llama/Llama-3.1-405B": {
    "total_params": 403752042496,
    "embedding_params": 2101346304,
    "non_embedding_params": 401650696192,
    "vocab_size": 128256,
    "model_size": 405,
    "model_label": "Llama-3.1"
  },
  "meta-llama/Llama-4-Maverick-17B-128E": {
    "total_params": 399677363200,
    "embedding_params": 1034485760,
    "non_embedding_params": 398642877440,
    "vocab_size": 202048
  },
  "meta-llama/Llama-4-Scout-17B-16E": {
    "total_params": 106735375360,
    "embedding_params": 1034485760,
    "non_embedding_params": 105700889600,
    "vocab_size": 202048
  }
}