chuanmew committed
Commit 67f5890 · verified · 1 parent: 23f825a

Delete testcheckpoint-8000

testcheckpoint-8000/config.json DELETED
@@ -1,125 +0,0 @@
- {
-   "base_vlm": "Qwen/Qwen2.5-VL-3B-Instruct",
-   "batch_size": 64,
-   "buffer_size": 64,
-   "frozen_keys": [
-     "*vlm.model*",
-     "*vlm.visual*",
-     "*vlm.lm_head*",
-     "*vlm.should_update_latent_head*",
-     "*module.octo_transformer.transformer*",
-     "*module.octo_transformer.observation_tokenizers*",
-     "*module.octo_transformer.obs_projections.obs_primary_projection*",
-     "*module.octo_transformer.readout_embeddings*",
-     "*module.octo_transformer.obs_pos_embeddings.obs_primary_pos_embedding*",
-     "*module.heads*"
-   ],
-   "model": {
-     "heads": {
-       "action": {
-         "args": [],
-         "kwargs": {
-           "action_dim": 7,
-           "action_horizon": 4,
-           "dropout_rate": 0.0,
-           "n_diffusion_samples": 1,
-           "readout_key": "readout_action",
-           "token_embedding_size": 768,
-           "use_map": false
-         },
-         "module": "octo.model.components.action_heads",
-         "name": "DiffusionActionHead"
-       }
-     },
-     "max_horizon": 10,
-     "observation_tokenizers": {
-       "primary": {
-         "args": [],
-         "kwargs": {
-           "encoder": {
-             "args": [],
-             "kwargs": {
-               "in_features": 3
-             },
-             "module": "octo.model.components.vit_encoders",
-             "name": "SmallStem16"
-           },
-           "obs_stack_keys": [
-             "image_primary"
-           ],
-           "task_stack_keys": []
-         },
-         "module": "octo.model.components.tokenizers",
-         "name": "ImageTokenizer"
-       },
-       "proprio": {
-         "args": [],
-         "kwargs": {
-           "bin_type": "normal",
-           "high": 2.0,
-           "low": -2.0,
-           "n_bins": 256,
-           "obs_keys": [
-             "proprio"
-           ]
-         },
-         "module": "octo.model.components.tokenizers",
-         "name": "LowdimObsTokenizer"
-       }
-     },
-     "readouts": {
-       "action": 1
-     },
-     "repeat_task_tokens": true,
-     "task_tokenizers": {
-       "latent": {
-         "args": [],
-         "kwargs": {
-           "embed_dim": 2048
-         },
-         "module": "octo.model.components.tokenizers",
-         "name": "HelixTaskTokenizer"
-       }
-     },
-     "token_embedding_size": 768,
-     "transformer_kwargs": {
-       "add_position_embedding": false,
-       "attention_dropout_rate": 0.0,
-       "dropout_rate": 0.0,
-       "mlp_dim": 3072,
-       "num_attention_heads": 12,
-       "num_layers": 12
-     },
-     "use_correct_attention": true
-   },
-   "obs_token_nums": {
-     "primary": 256,
-     "proprio": 7,
-     "wrist": 64
-   },
-   "sampler_num_samples": 2560000,
-   "seed": 42,
-   "subsample_length": 100,
-   "training_arguments": {
-     "bf16": true,
-     "dataloader_pin_memory": true,
-     "ddp_find_unused_parameters": false,
-     "eval_steps": 2000,
-     "gradient_accumulation_steps": 2,
-     "learning_rate": 0.001,
-     "logging_nan_inf_filter": false,
-     "logging_steps": 100,
-     "lr_scheduler_type": "linear",
-     "max_grad_norm": 1.0,
-     "max_steps": 300000,
-     "optim": "adamw_torch_fused",
-     "per_device_eval_batch_size": 1,
-     "per_device_train_batch_size": 1,
-     "report_to": "wandb",
-     "save_safetensors": true,
-     "save_steps": 4000,
-     "torch_compile": true,
-     "warmup_steps": 1000,
-     "weight_decay": 0.1
-   }
- }
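Each component in this config is stored declaratively as a {"module", "name", "args", "kwargs"} spec, so a model can be rebuilt by importing `name` from `module` and calling it with the saved arguments. A minimal sketch of that pattern (the `instantiate_spec` helper is hypothetical, not part of the Octo API, and assumes the `octo` package is importable):

    # Hypothetical helper illustrating the spec pattern used in config.json.
    import importlib
    import json

    def instantiate_spec(spec):
        # Rebuild any {"module", "name", "args", "kwargs"} dict into an object,
        # recursing first so nested specs (e.g. the encoder) are built too.
        if isinstance(spec, dict) and {"module", "name"} <= spec.keys():
            cls = getattr(importlib.import_module(spec["module"]), spec["name"])
            args = [instantiate_spec(a) for a in spec.get("args", [])]
            kwargs = {k: instantiate_spec(v) for k, v in spec.get("kwargs", {}).items()}
            return cls(*args, **kwargs)
        if isinstance(spec, dict):
            return {k: instantiate_spec(v) for k, v in spec.items()}
        if isinstance(spec, list):
            return [instantiate_spec(v) for v in spec]
        return spec

    with open("testcheckpoint-8000/config.json") as f:
        config = json.load(f)

    # e.g. builds octo.model.components.action_heads.DiffusionActionHead(action_dim=7, ...)
    action_head = instantiate_spec(config["model"]["heads"]["action"])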
testcheckpoint-8000/dataset_statistics.json DELETED
@@ -1,1442 +0,0 @@
- [
-   {
-     "action": {
-       "mean": [0.006003643851727247, 0.005378914996981621, -0.01152078527957201, 0.03845681995153427, -0.005092295352369547, 4.705651463154936e-06, 0.5016621351242065],
-       "std": [0.06735217571258545, 0.0580400712788105, 0.07147737592458725, 0.14923600852489471, 0.12775161862373352, 0.14116433262825012, 0.4989379346370697],
-       "min": [-2.0204520225524902, -5.497899532318115, -2.031663417816162, -1.569917917251587, -1.569892168045044, -1.570419430732727, 0.0],
-       "max": [2.9984593391418457, 22.09052848815918, 2.7507524490356445, 1.570636510848999, 1.5321086645126343, 1.5691522359848022, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.5665628910064697, -0.08489959686994553, 0.7671157121658325, -1.5243796110153198, 0.6186873316764832, -1.6081069707870483, 0.4268847405910492],
-       "std": [0.12401141226291656, 0.11508350074291229, 0.24391832947731018, 2.357435941696167, 0.4143419563770294, 0.7966252565383911, 0.44836491346359253],
-       "min": [-0.4436439275741577, -0.9970501065254211, -0.006579156965017319, -3.141592025756836, -1.5693315267562866, -3.1415860652923584, 0.0],
-       "max": [1.0534898042678833, 0.48018959164619446, 1.6896663904190063, 3.141592264175415, 1.570796251296997, 3.1415884494781494, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [-0.0004243608273100108, 0.00032724777702242136, 0.0032227751798927784, 0.0, 0.0, -0.03106882981956005, 0.3495003879070282],
-       "std": [0.018492985516786575, 0.025810159742832184, 0.05933956056833267, 0.0, 0.0, 0.12920430302619934, 0.46033594012260437],
-       "min": [-0.159867063164711, -0.2892282009124756, -0.2795473635196686, 0.0, 0.0, -1.9875637292861938, 0.0],
-       "max": [0.1697135865688324, 0.2777623236179352, 0.43710532784461975, 0.0, 0.0, 1.9684287309646606, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.5530868768692017, 0.047566935420036316, 0.1365358680486679, 0.7709221243858337, 0.004769620019942522, -0.9132861495018005, 0.4574867784976959],
-       "std": [0.04509514942765236, 0.1025579646229744, 0.06630735844373703, 3.0413753986358643, 0.010649651288986206, 2.727771520614624, 0.4964221119880676],
-       "min": [0.40573424100875854, -0.2028520256280899, 0.018512273207306862, -3.1415927410125732, -0.25521254539489746, -3.1415927410125732, 0.0],
-       "max": [0.7243871092796326, 0.31309840083122253, 0.8312229514122009, 3.141592264175415, 0.23549413681030273, 3.141592264175415, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [0.00017057154036592692, 0.000148340841406025, 0.00013469347322825342, -4.7412850108230487e-05, -0.00056671560741961, 0.00011704787902999669, 0.5864841938018799],
-       "std": [0.009621696546673775, 0.013471683487296104, 0.012634573504328728, 0.02847793698310852, 0.030329007655382156, 0.06173764914274216, 0.5004628300666809],
-       "min": [-0.4007510244846344, -0.13874775171279907, -0.22553899884223938, -3.1120171546936035, -1.8618112802505493, -0.7410628795623779, 0.0],
-       "max": [0.41691166162490845, 0.25864794850349426, 0.21218234300613403, 3.122202157974243, 1.8618113994598389, 0.758469820022583, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.30944472551345825, 0.030668215826153755, 0.06456991285085678, 0.006582529284060001, -0.07698487490415573, 0.10747171938419342, 0.7057961225509644],
-       "std": [0.06057106330990791, 0.09182365238666534, 0.05156636983156204, 0.13059234619140625, 0.16860555112361908, 0.5758708119392395, 0.35565808415412903],
-       "min": [-0.04167502000927925, -0.3563207685947418, -0.15537554025650024, -3.141592502593994, -1.4992541074752808, -3.14153790473938, 0.04637829214334488],
-       "max": [0.5862360596656799, 0.4034728705883026, 0.3568263053894043, 1.3517687320709229, 1.570796251296997, 3.1412041187286377, 1.1121242046356201],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [-0.0027445757295936346, 0.00709549430757761, 0.013522458262741566, -0.004958729259669781, -0.009980662725865841, -0.006119652651250362, 0.43063056468963623],
-       "std": [0.23144568502902985, 0.36525610089302063, 0.28786516189575195, 0.26068246364593506, 0.24356642365455627, 0.5206449627876282, 0.4965480864048004],
-       "min": [-4.242457866668701, -3.192805051803589, -1.3371467590332031, -3.1237380504608154, -2.6722638607025146, -3.130864381790161, 0.0],
-       "max": [1.4915844202041626, 2.1842432022094727, 2.6836395263671875, 3.0924112796783447, 2.665865182876587, 3.127634286880493, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.37217476963996887, 0.13205504417419434, 0.3828185200691223, -0.06818609684705734, -0.17516356706619263, 0.4611636996269226, 0.15133924782276154],
-       "std": [0.11450899392366409, 0.2691265642642975, 0.1126580685377121, 3.0389301776885986, 0.18606479465961456, 0.686427891254425, 0.28193965554237366],
-       "min": [0.07693906873464584, -0.4944135844707489, 0.20030911266803741, -3.1415886878967285, -0.8683896064758301, -1.789320707321167, -0.6495265960693359],
-       "max": [0.7124971151351929, 0.6118948459625244, 0.617118775844574, 3.141589403152466, 0.3071756362915039, 2.4624788761138916, 0.9704633951187134],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [0.0012445214670151472, -0.005251587834209204, -0.0031454148702323437, 0.0, 0.0, 0.0, 0.3504074811935425],
-       "std": [0.12222036719322205, 0.09560250490903854, 0.11195981502532959, 0.0, 0.0, 0.0, 0.47727230191230774],
-       "min": [-0.20000000298023224, -0.20000000298023224, -0.20000000298023224, 0.0, 0.0, 0.0, 0.0],
-       "max": [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.0, 0.0, 0.0, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [-0.07360073924064636, -0.4282155930995941, 0.275579571723938, 0.004527503624558449, -0.01690048724412918, 0.9914278388023376, 0.12712202966213226],
-       "std": [0.14040811359882355, 0.0908941999077797, 0.05241652950644493, 0.008698763325810432, 0.015913205221295357, 0.0024853625800460577, 0.01771053671836853],
-       "min": [-0.4429473876953125, -0.6635459661483765, 0.1568669229745865, -0.07075214385986328, -0.09496450424194336, 0.9703185558319092, 0.04738260433077812],
-       "max": [0.2252020239830017, -0.19358234107494354, 0.4066188633441925, 0.03788280487060547, 0.07292509078979492, 0.9981606006622314, 0.2313518077135086],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [0.0005774280871264637, 0.0011819612700492144, -0.0005258310702629387, 0.00021356214710976928, 0.00013796966231893748, 0.001266916748136282, 0.6296713948249817],
-       "std": [0.011499631218612194, 0.008035642094910145, 0.009562249295413494, 0.009380568750202656, 0.016513563692569733, 0.01090747956186533, 0.4827438294887543],
-       "min": [-0.019999999552965164, -0.019999999552965164, -0.019999999552965164, -0.06666667014360428, -0.06666667014360428, -0.06666667014360428, 0.0],
-       "max": [0.019999999552965164, 0.019999999552965164, 0.019999999552965164, 0.06666667014360428, 0.06666667014360428, 0.06666667014360428, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.4526209235191345, 0.05664529278874397, -0.03501615673303604, 0.17179693281650543, 0.0025787020567804575, 1.589087724685669, 0.3703971207141876],
-       "std": [0.07940495759248734, 0.12453591078519821, 0.08049638569355011, 3.0333428382873535, 0.09067592769861221, 0.17449992895126343, 0.48298484086990356],
-       "min": [0.1970997005701065, -0.27643972635269165, -0.20529526472091675, -3.1415910720825195, -0.5112340450286865, 0.778240442276001, 0.0],
-       "max": [0.6610942482948303, 0.38513994216918945, 0.2049914449453354, 3.141587495803833, 0.38567113876342773, 2.6045620441436768, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [8.754087320994586e-05, -0.0005884904530830681, 0.0, 0.0, 0.0, 0.0, 1.0],
-       "std": [0.02815437689423561, 0.03940409794449806, 0.0, 0.0, 0.0, 0.0, 0.0],
-       "min": [-0.21989956498146057, -0.23736150562763214, 0.0, 0.0, 0.0, 0.0, 1.0],
-       "max": [0.23357294499874115, 0.24496802687644958, 0.0, 0.0, 0.0, 0.0, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.3996198773384094, 0.004858094733208418, 0.0, 0.0, 0.0, 0.0, 0.0],
-       "std": [0.10492434352636337, 0.14027132093906403, 0.0, 0.0, 0.0, 0.0, 0.0],
-       "min": [0.18907572329044342, -0.3051564395427704, 0.0, 0.0, 0.0, 0.0, 0.0],
-       "max": [0.6191085577011108, 0.345907062292099, 0.0, 0.0, 0.0, 0.0, 0.0],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [0.000777362089138478, 0.00013811276585329324, -0.00025248670135624707, 0.0012776820221915841, -0.004746967926621437, 0.0026623313315212727, 0.4880663752555847],
-       "std": [0.00801623985171318, 0.00912316795438528, 0.00956878811120987, 0.04118994623422623, 0.03837846964597702, 0.04597204923629761, 0.49996426701545715],
-       "min": [-0.024999044835567474, -0.024999700486660004, -0.02499929815530777, -0.24993225932121277, -0.2499666064977646, -0.2499932497739792, 0.0],
-       "max": [0.02499854564666748, 0.02499903365969658, 0.024999922141432762, 0.24974457919597626, 0.24997030198574066, 0.24999946355819702, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.42382633686065674, -0.01392359845340252, 0.3769100308418274, -1.199476718902588, -0.04784892499446869, -0.3599538803100586, 0.049042750149965286],
-       "std": [0.10459557175636292, 0.1273813247680664, 0.11172926425933838, 2.4611380100250244, 0.38851362466812134, 1.2116936445236206, 0.031794678419828415],
-       "min": [0.16438177227973938, -0.3419400751590729, 0.085594043135643, -3.1415905952453613, -1.5607775449752808, -3.1414384841918945, -0.0005391233135014772],
-       "max": [0.7794343829154968, 0.3587854206562042, 0.638763964176178, 3.141591787338257, 1.5084254741668701, 3.1415464878082275, 0.0811397060751915],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [0.0011816318146884441, -0.00025564825045876205, 0.0003137202584184706, 0.002336798934265971, -0.00242138491012156, -0.001972040394321084, 0.7230969667434692],
-       "std": [0.013190814293920994, 0.012909645214676857, 0.012628739699721336, 0.040437646210193634, 0.055802080780267715, 0.03839560225605965, 0.44745731353759766],
-       "min": [-0.052125245332717896, -0.07232445478439331, -0.06730806827545166, -0.20499730110168457, -0.21479034423828125, -0.2243671417236328, 0.0],
-       "max": [0.06424188613891602, 0.07027634978294373, 0.06129661202430725, 0.20181679725646973, 0.19077539443969727, 0.26377415657043457, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.30728915333747864, 0.41396260261535645, 0.4691331088542938, 1.7814242839813232, 0.044897496700286865, 1.305274486541748, 0.0],
-       "std": [0.11810726672410965, 0.11085426807403564, 0.1647123098373413, 0.6698192358016968, 0.5180392861366272, 0.3852464556694031, 0.0],
-       "min": [-0.029897235333919525, 0.003107964526861906, 0.1199457123875618, -3.14158296585083, -1.0961432456970215, -0.41072607040405273, 0.0],
-       "max": [0.5901635885238647, 0.781833291053772, 0.8468776941299438, 3.1414129734039307, 1.373811960220337, 2.342768907546997, 0.0],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [0.00014109841140452772, 0.001067000557668507, 0.00057840789668262, -0.0032693298999220133, -0.0025746545288711786, 0.017619194462895393, 0.4864794611930847],
-       "std": [0.015914559364318848, 0.014729844406247139, 0.013863170519471169, 0.05770312249660492, 0.11294151842594147, 0.33116403222084045, 0.4992203116416931],
-       "min": [-0.10495579987764359, -0.10939455777406693, -0.10000000149011612, -0.971906840801239, -1.0475432872772217, -3.06000018119812, 0.0],
-       "max": [0.10000000149011612, 0.10000000149011612, 0.10000000149011612, 0.8651833534240723, 1.0909736156463623, 2.863185405731201, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.5481934547424316, 0.049859024584293365, 0.08234382420778275, -1.6028425693511963, 0.028501030057668686, -0.033880893141031265, 0.04552179202437401],
-       "std": [0.07731735706329346, 0.07867485284805298, 0.04057478532195091, 2.6797101497650146, 0.3000299036502838, 0.9212139844894409, 0.021556446328759193],
-       "min": [0.2878606617450714, -0.3141690492630005, -0.00460465531796217, -3.1415927410125732, -1.1365289688110352, -3.141582727432251, -3.4803331800503656e-05],
-       "max": [0.75081467628479, 0.29695066809654236, 0.35806331038475037, 3.141592264175415, 1.5458743572235107, 3.1415674686431885, 0.07995442301034927],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [0.011745305731892586, 0.006730083376169205, 0.06043865531682968, 0.0, 0.0, 0.0016618669033050537, 0.5274017453193665],
-       "std": [0.4632987678050995, 0.4121692478656769, 0.4110864996910095, 0.0, 0.0, 0.05783988535404205, 0.4985195994377136],
-       "min": [-1.0, -1.0, -1.0, 0.0, 0.0, -0.375, 0.0],
-       "max": [1.0, 1.0, 1.0, 0.0, 0.0, 0.375, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.5367307662963867, -0.08597735315561295, 0.11143582314252853, 0.03930690512061119, 0.013144118711352348, -0.044802285730838776, 0.051319584250450134],
-       "std": [0.07999736070632935, 0.13983988761901855, 0.05644551292061806, 3.100438117980957, 0.08134326338768005, 0.6309059262275696, 0.025284448638558388],
-       "min": [0.3208981454372406, -0.3730051815509796, 0.020952222868800163, -3.141592025756836, -0.21639776229858398, -1.995558738708496, -8.864999836077914e-05],
-       "max": [0.7488242387771606, 0.2829112708568573, 0.3541720509529114, 3.1415913105010986, 0.26857471466064453, 2.4386942386627197, 0.07783995568752289],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [0.07817424833774567, 0.03106578253209591, 0.043012287467718124, 0.0, 0.0, -0.015927663072943687, 0.4326333999633789],
-       "std": [0.3897651433944702, 0.29878485202789307, 0.27838900685310364, 0.0, 0.0, 0.08109692484140396, 0.49516406655311584],
-       "min": [-1.0183025598526, -0.9800000190734863, -0.9774575233459473, 0.0, 0.0, -0.34607142210006714, 0.0],
-       "max": [1.0002285242080688, 0.960608720779419, 1.105179786682129, 0.0, 0.0, 0.341785728931427, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.22182214260101318, 0.037475429475307465, -0.012922837398946285, -1.211675763130188, 0.9867777228355408, 1.2979546785354614, 0.05917484313249588],
-       "std": [0.13009746372699738, 0.13202384114265442, 0.08945119380950928, 0.2226451337337494, 0.24374930560588837, 0.26084408164024353, 0.016847841441631317],
-       "min": [-0.23144784569740295, -0.41377919912338257, -0.3536752760410309, -1.7786533832550049, 0.0, 0.0, 0.00016088332631625235],
-       "max": [0.5567107200622559, 0.3814372420310974, 0.3687466084957123, 0.0, 1.2598693370819092, 1.707510232925415, 0.07965432107448578],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [-0.009976202622056007, 0.0009141670889221132, 0.004997506737709045, 0.00026673608226701617, -0.008862413465976715, -0.029926998540759087, 0.6457992792129517],
-       "std": [0.029649879783391953, 0.02263188548386097, 0.020144397392868996, 0.04050878435373306, 0.045230600982904434, 0.07449887692928314, 0.3936954438686371],
-       "min": [-0.1677047461271286, -0.14630407094955444, -0.10066790133714676, -0.29421567916870117, -0.32101404666900635, -0.4635624885559082, 0.0],
-       "max": [0.2165454924106598, 0.1251407265663147, 0.09988310933113098, 0.33544227480888367, 0.28117990493774414, 0.40614867210388184, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.018192559480667114, 0.10970789194107056, 0.7886075973510742, -0.5168830156326294, -0.22489114105701447, -0.17720149457454681, 0.5501070618629456],
-       "std": [0.1826269030570984, 0.09544865041971207, 0.08628752827644348, 0.243532195687294, 0.4256756901741028, 0.9913674592971802, 0.35924333333969116],
-       "min": [-0.7190948724746704, -0.3756217360496521, -0.281008243560791, -2.400146484375, -2.500656843185425, -3.1274476051330566, 0.0],
-       "max": [0.6597589254379272, 0.7259413599967957, 1.1217665672302246, 2.2803165912628174, 1.815157175064087, 3.1237576007843018, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [0.0066413660533726215, -0.0008098935359157622, 0.006867636926472187, 0.0011580738937482238, -0.0064012822695076466, -0.011905902065336704, 0.6870529055595398],
-       "std": [0.021368511021137238, 0.018089566379785538, 0.03348425030708313, 0.01740916259586811, 0.033924732357263565, 0.046416450291872025, 0.4637002944946289],
-       "min": [-0.10054297000169754, -0.08427435159683228, -0.13533438742160797, -0.17556548118591309, -0.18485672771930695, -0.2680685818195343, 0.0],
-       "max": [0.18991442024707794, 0.0739002525806427, 0.18064819276332855, 0.0866486132144928, 0.13464981317520142, 0.16910280287265778, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [-0.5662466287612915, 0.49160608649253845, 0.11201082915067673, 0.8307151198387146, -0.9549650549888611, 0.02047070488333702, 0.0],
-       "std": [0.1710057258605957, 0.1466914266347885, 0.3270176351070404, 1.158150315284729, 0.5659270286560059, 2.674586772918701, 0.0],
-       "min": [-0.9590276479721069, -0.042818181216716766, -0.40131720900535583, -3.1390082836151123, -1.5646799802780151, -3.1407129764556885, 0.0],
-       "max": [0.044179707765579224, 0.7948326468467712, 0.9278888702392578, 3.133342981338501, 0.13346624374389648, 3.1415083408355713, 0.0],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   },
-   {
-     "action": {
-       "mean": [0.00036335503682494164, 0.0, 0.0016447657253593206, 0.0, 0.0, 0.0, 0.3933013081550598],
-       "std": [0.004081646911799908, 0.0, 0.003774851793423295, 0.0, 0.0, 0.0, 0.48849305510520935],
-       "min": [-0.019353797659277916, 0.0, -0.02019215188920498, 0.0, 0.0, 0.0, 0.0],
-       "max": [0.02338407188653946, 0.0, 0.023404927924275398, 0.0, 0.0, 0.0, 1.0],
-       "mask": [true, true, true, true, true, true, false]
-     },
-     "state": {
-       "mean": [0.1457524597644806, 0.0, 0.7565534710884094, 0.0, 0.0, 0.0, -0.41083183884620667],
-       "std": [0.09961383789777756, 0.0, 0.15851713716983795, 0.0, 0.0, 0.0, 2.737846612930298],
-       "min": [0.005320895928889513, 0.0, 0.45218077301979065, 0.0, 0.0, 0.0, -3.1155149936676025],
-       "max": [0.3401322066783905, 0.0, 1.1013386249542236, 0.0, 0.0, 0.0, 3.110913038253784],
-       "mask": [true, true, true, true, true, true, false]
-     }
-   }
- ]
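Each entry above records per-dataset statistics for the 7-dimensional action and state vectors, with "mask" marking which dimensions are normalized (the final, gripper dimension is masked out everywhere). A minimal sketch of how such statistics are typically consumed for z-scoring; the `normalize` helper is illustrative, an assumption about usage rather than this repo's actual code:

    # Illustrative normalization using the statistics above.
    import json
    import numpy as np

    with open("testcheckpoint-8000/dataset_statistics.json") as f:
        stats = json.load(f)

    def normalize(x, s):
        # z-score only where mask is true; pass the gripper dim through raw.
        x = np.asarray(x, dtype=np.float32)
        mask = np.asarray(s["mask"])
        mean = np.asarray(s["mean"], dtype=np.float32)
        std = np.asarray(s["std"], dtype=np.float32)
        return np.where(mask, (x - mean) / np.maximum(std, 1e-8), x)

    raw_action = np.array([0.01, 0.0, -0.02, 0.0, 0.0, 0.0, 1.0])
    norm_action = normalize(raw_action, stats[0]["action"])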
testcheckpoint-8000/model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:e76c429fb04f3e051661110381a1a25c44b47c29e4305a557cb65ffcd25b9c19
- size 4997808936
testcheckpoint-8000/model-00002-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a3b407875b994bdd2511467b7c6f214e8fc0da8f9e29bf8c6ab0357bf3f63072
- size 2695456842
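The two files above are Git LFS pointer stubs: the weight shards themselves live in LFS storage, and each stub records only the shard's sha256 digest and byte size. A quick sketch for checking a downloaded shard against those recorded values (the local path is an assumption):

    # Verify a downloaded shard against the pointer's recorded digest/size.
    import hashlib
    import os

    path = "model-00001-of-00002.safetensors"  # assumed local download path
    expected_oid = "e76c429fb04f3e051661110381a1a25c44b47c29e4305a557cb65ffcd25b9c19"
    expected_size = 4997808936

    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)

    assert os.path.getsize(path) == expected_size
    assert digest.hexdigest() == expected_oid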
testcheckpoint-8000/model.safetensors.index.json DELETED
@@ -1,1043 +0,0 @@
- {
-   "metadata": {
-     "total_size": 7693138642
-   },
-   "weight_map": {
-     "module.heads.action.alpha_hats": "model-00002-of-00002.safetensors",
-     "module.heads.action.alphas": "model-00002-of-00002.safetensors",
-     "module.heads.action.betas": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.cond_encoder.layers.0.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.cond_encoder.layers.0.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.cond_encoder.layers.1.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.cond_encoder.layers.1.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.0.fc1.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.0.fc1.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.0.fc2.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.0.fc2.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.0.layer_norm.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.0.layer_norm.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.1.fc1.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.1.fc1.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.1.fc2.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.1.fc2.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.1.layer_norm.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.1.layer_norm.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.2.fc1.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.2.fc1.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.2.fc2.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.2.fc2.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.2.layer_norm.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.blocks.2.layer_norm.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.input_layer.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.input_layer.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.output_layer.bias": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.reverse_network.output_layer.weight": "model-00002-of-00002.safetensors",
-     "module.heads.action.diffusion_model.time_preprocess.kernel": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.obs_pos_embeddings.obs_primary_pos_embedding": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.obs_pos_embeddings.obs_proprio_pos_embedding": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.obs_projections.obs_primary_projection.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.obs_projections.obs_primary_projection.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.obs_projections.obs_proprio_projection.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.obs_projections.obs_proprio_projection.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.0.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.0.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.0.1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.0.1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.1.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.1.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.1.1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.1.1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.2.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.2.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.2.1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.2.1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.3.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.3.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.3.1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.conv_layers.3.1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.embedding.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.observation_tokenizers.primary.encoder.embedding.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.readout_embeddings.action": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.task_pos_embeddings.task_latent_pos_embedding": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.task_projections.task_latent_projection.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.task_projections.task_latent_projection.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.0.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.1.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.10.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.11.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.2.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.3.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.4.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.5.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.6.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.7.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.8.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.attention.in_proj_bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.attention.in_proj_weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.attention.out_proj.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.attention.out_proj.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.layer_norm1.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.layer_norm1.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.layer_norm2.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.layer_norm2.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.mlp_block.mlp.0.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.mlp_block.mlp.0.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.mlp_block.mlp.3.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.encoder_layers.9.mlp_block.mlp.3.weight": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.layer_norm.bias": "model-00002-of-00002.safetensors",
-     "module.octo_transformer.transformer.transformer.layer_norm.weight": "model-00002-of-00002.safetensors",
210
- "vlm.latent_token_embedding": "model-00001-of-00002.safetensors",
211
- "vlm.model.embed_tokens.weight": "model-00001-of-00002.safetensors",
212
- "vlm.model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
213
- "vlm.model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
214
- "vlm.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
215
- "vlm.model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
216
- "vlm.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
217
- "vlm.model.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
218
- "vlm.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
219
- "vlm.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
220
- "vlm.model.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
221
- "vlm.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
222
- "vlm.model.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
223
- "vlm.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
224
- "vlm.model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
225
- "vlm.model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
226
- "vlm.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
227
- "vlm.model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
228
- "vlm.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
229
- "vlm.model.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
230
- "vlm.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
231
- "vlm.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
232
- "vlm.model.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
233
- "vlm.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
234
- "vlm.model.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
235
- "vlm.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
236
- "vlm.model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
237
- "vlm.model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
238
- "vlm.model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
239
- "vlm.model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
240
- "vlm.model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
241
- "vlm.model.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
242
- "vlm.model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
243
- "vlm.model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
244
- "vlm.model.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
245
- "vlm.model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
246
- "vlm.model.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
247
- "vlm.model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
248
- "vlm.model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
249
- "vlm.model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
250
- "vlm.model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
251
- "vlm.model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
252
- "vlm.model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
253
- "vlm.model.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
254
- "vlm.model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
255
- "vlm.model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
256
- "vlm.model.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
257
- "vlm.model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
258
- "vlm.model.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
259
- "vlm.model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
260
- "vlm.model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
261
- "vlm.model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
262
- "vlm.model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
263
- "vlm.model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
264
- "vlm.model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
265
- "vlm.model.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
266
- "vlm.model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
267
- "vlm.model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
268
- "vlm.model.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
269
- "vlm.model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
270
- "vlm.model.layers.12.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
271
- "vlm.model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
272
- "vlm.model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
273
- "vlm.model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
274
- "vlm.model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
275
- "vlm.model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
276
- "vlm.model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
277
- "vlm.model.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
278
- "vlm.model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
279
- "vlm.model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
280
- "vlm.model.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
281
- "vlm.model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
282
- "vlm.model.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
283
- "vlm.model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
284
- "vlm.model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
285
- "vlm.model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
286
- "vlm.model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
287
- "vlm.model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
288
- "vlm.model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
289
- "vlm.model.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
290
- "vlm.model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
291
- "vlm.model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
292
- "vlm.model.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
293
- "vlm.model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
294
- "vlm.model.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
295
- "vlm.model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
296
- "vlm.model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
297
- "vlm.model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
298
- "vlm.model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
299
- "vlm.model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
300
- "vlm.model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
301
- "vlm.model.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
302
- "vlm.model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
303
- "vlm.model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
304
- "vlm.model.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
305
- "vlm.model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
306
- "vlm.model.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
307
- "vlm.model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
308
- "vlm.model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
309
- "vlm.model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
310
- "vlm.model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
311
- "vlm.model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
312
- "vlm.model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
313
- "vlm.model.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
314
- "vlm.model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
315
- "vlm.model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
316
- "vlm.model.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
317
- "vlm.model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
318
- "vlm.model.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
319
- "vlm.model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
320
- "vlm.model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
321
- "vlm.model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
322
- "vlm.model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
323
- "vlm.model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
324
- "vlm.model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
325
- "vlm.model.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
326
- "vlm.model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
327
- "vlm.model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
328
- "vlm.model.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
329
- "vlm.model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
330
- "vlm.model.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
331
- "vlm.model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
332
- "vlm.model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
333
- "vlm.model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
334
- "vlm.model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
335
- "vlm.model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
336
- "vlm.model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
337
- "vlm.model.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
338
- "vlm.model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
339
- "vlm.model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
340
- "vlm.model.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
341
- "vlm.model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
342
- "vlm.model.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
343
- "vlm.model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
344
- "vlm.model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
345
- "vlm.model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
346
- "vlm.model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
347
- "vlm.model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
348
- "vlm.model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
349
- "vlm.model.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
350
- "vlm.model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
351
- "vlm.model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
352
- "vlm.model.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
353
- "vlm.model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
354
- "vlm.model.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
355
- "vlm.model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
356
- "vlm.model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
357
- "vlm.model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
358
- "vlm.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
359
- "vlm.model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
360
- "vlm.model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
361
- "vlm.model.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
362
- "vlm.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
363
- "vlm.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
364
- "vlm.model.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
365
- "vlm.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
366
- "vlm.model.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
367
- "vlm.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
368
- "vlm.model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
369
- "vlm.model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
370
- "vlm.model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
371
- "vlm.model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
372
- "vlm.model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
373
- "vlm.model.layers.20.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
374
- "vlm.model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
375
- "vlm.model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
376
- "vlm.model.layers.20.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
377
- "vlm.model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
378
- "vlm.model.layers.20.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
379
- "vlm.model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
380
- "vlm.model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
381
- "vlm.model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
382
- "vlm.model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
383
- "vlm.model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
384
- "vlm.model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
385
- "vlm.model.layers.21.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
386
- "vlm.model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
387
- "vlm.model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
388
- "vlm.model.layers.21.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
389
- "vlm.model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
390
- "vlm.model.layers.21.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
391
- "vlm.model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
392
- "vlm.model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
393
- "vlm.model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
394
- "vlm.model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
395
- "vlm.model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
396
- "vlm.model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
397
- "vlm.model.layers.22.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
398
- "vlm.model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
399
- "vlm.model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
400
- "vlm.model.layers.22.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
401
- "vlm.model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
402
- "vlm.model.layers.22.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
403
- "vlm.model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
404
- "vlm.model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
405
- "vlm.model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
406
- "vlm.model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
407
- "vlm.model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
408
- "vlm.model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
409
- "vlm.model.layers.23.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
410
- "vlm.model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
411
- "vlm.model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
412
- "vlm.model.layers.23.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
413
- "vlm.model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
414
- "vlm.model.layers.23.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
415
- "vlm.model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
416
- "vlm.model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
417
- "vlm.model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
418
- "vlm.model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
419
- "vlm.model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
420
- "vlm.model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
421
- "vlm.model.layers.24.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
422
- "vlm.model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
423
- "vlm.model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
424
- "vlm.model.layers.24.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
425
- "vlm.model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
426
- "vlm.model.layers.24.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
427
- "vlm.model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
428
- "vlm.model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
429
- "vlm.model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
430
- "vlm.model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
431
- "vlm.model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
432
- "vlm.model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
433
- "vlm.model.layers.25.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
434
- "vlm.model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
435
- "vlm.model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
436
- "vlm.model.layers.25.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
437
- "vlm.model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
438
- "vlm.model.layers.25.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
439
- "vlm.model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
440
- "vlm.model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
441
- "vlm.model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
442
- "vlm.model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
443
- "vlm.model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
444
- "vlm.model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
445
- "vlm.model.layers.26.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
446
- "vlm.model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
447
- "vlm.model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
448
- "vlm.model.layers.26.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
449
- "vlm.model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
450
- "vlm.model.layers.26.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
451
- "vlm.model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
452
- "vlm.model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
453
- "vlm.model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
454
- "vlm.model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
455
- "vlm.model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
456
- "vlm.model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
457
- "vlm.model.layers.27.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
458
- "vlm.model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
459
- "vlm.model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
460
- "vlm.model.layers.27.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
461
- "vlm.model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
462
- "vlm.model.layers.27.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
463
- "vlm.model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
464
- "vlm.model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
465
- "vlm.model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
466
- "vlm.model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
467
- "vlm.model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
468
- "vlm.model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
469
- "vlm.model.layers.28.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
470
- "vlm.model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
471
- "vlm.model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
472
- "vlm.model.layers.28.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
473
- "vlm.model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
474
- "vlm.model.layers.28.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
475
- "vlm.model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
476
- "vlm.model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
477
- "vlm.model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
478
- "vlm.model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
479
- "vlm.model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
480
- "vlm.model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
481
- "vlm.model.layers.29.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
482
- "vlm.model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
483
- "vlm.model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
484
- "vlm.model.layers.29.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
485
- "vlm.model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
486
- "vlm.model.layers.29.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
487
- "vlm.model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
488
- "vlm.model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
489
- "vlm.model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
490
- "vlm.model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
491
- "vlm.model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
492
- "vlm.model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
493
- "vlm.model.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
494
- "vlm.model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
495
- "vlm.model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
496
- "vlm.model.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
497
- "vlm.model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
498
- "vlm.model.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
499
- "vlm.model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
500
- "vlm.model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
501
- "vlm.model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
502
- "vlm.model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
503
- "vlm.model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
504
- "vlm.model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
505
- "vlm.model.layers.30.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
506
- "vlm.model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
507
- "vlm.model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
508
- "vlm.model.layers.30.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
509
- "vlm.model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
510
- "vlm.model.layers.30.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
511
- "vlm.model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
512
- "vlm.model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
513
- "vlm.model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
514
- "vlm.model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
515
- "vlm.model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
516
- "vlm.model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
517
- "vlm.model.layers.31.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
518
- "vlm.model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
519
- "vlm.model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
520
- "vlm.model.layers.31.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
521
- "vlm.model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
522
- "vlm.model.layers.31.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
523
- "vlm.model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
524
- "vlm.model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
525
- "vlm.model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
526
- "vlm.model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
527
- "vlm.model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
528
- "vlm.model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
529
- "vlm.model.layers.32.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
530
- "vlm.model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
531
- "vlm.model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
532
- "vlm.model.layers.32.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
533
- "vlm.model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
534
- "vlm.model.layers.32.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
535
- "vlm.model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
536
- "vlm.model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
537
- "vlm.model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
538
- "vlm.model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
539
- "vlm.model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
540
- "vlm.model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
541
- "vlm.model.layers.33.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
542
- "vlm.model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
543
- "vlm.model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
544
- "vlm.model.layers.33.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
545
- "vlm.model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
546
- "vlm.model.layers.33.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
547
- "vlm.model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
548
- "vlm.model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
549
- "vlm.model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
550
- "vlm.model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
551
- "vlm.model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
552
- "vlm.model.layers.34.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
553
- "vlm.model.layers.34.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
554
- "vlm.model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
555
- "vlm.model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
556
- "vlm.model.layers.34.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
557
- "vlm.model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
558
- "vlm.model.layers.34.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
559
- "vlm.model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
560
- "vlm.model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
561
- "vlm.model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
562
- "vlm.model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
563
- "vlm.model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
564
- "vlm.model.layers.35.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
565
- "vlm.model.layers.35.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
566
- "vlm.model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
567
- "vlm.model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
568
- "vlm.model.layers.35.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
569
- "vlm.model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
570
- "vlm.model.layers.35.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
571
- "vlm.model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
572
- "vlm.model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
573
- "vlm.model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
574
- "vlm.model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
575
- "vlm.model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
576
- "vlm.model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
577
- "vlm.model.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
578
- "vlm.model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
579
- "vlm.model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
580
- "vlm.model.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
581
- "vlm.model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
582
- "vlm.model.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
583
- "vlm.model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
584
- "vlm.model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
585
- "vlm.model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
586
- "vlm.model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
587
- "vlm.model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
588
- "vlm.model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
589
- "vlm.model.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
590
- "vlm.model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
591
- "vlm.model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
592
- "vlm.model.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
593
- "vlm.model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
594
- "vlm.model.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
595
- "vlm.model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
596
- "vlm.model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
597
- "vlm.model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
598
- "vlm.model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
599
- "vlm.model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
600
- "vlm.model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
601
- "vlm.model.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
602
- "vlm.model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
603
- "vlm.model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
604
- "vlm.model.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
605
- "vlm.model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
606
- "vlm.model.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
607
- "vlm.model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
608
- "vlm.model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
609
- "vlm.model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
610
- "vlm.model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
611
- "vlm.model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
612
- "vlm.model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
613
- "vlm.model.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
614
- "vlm.model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
615
- "vlm.model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
616
- "vlm.model.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
617
- "vlm.model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
618
- "vlm.model.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
619
- "vlm.model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
620
- "vlm.model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
621
- "vlm.model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
622
- "vlm.model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
623
- "vlm.model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
624
- "vlm.model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
625
- "vlm.model.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
626
- "vlm.model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
627
- "vlm.model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
628
- "vlm.model.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
629
- "vlm.model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
630
- "vlm.model.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
631
- "vlm.model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
632
- "vlm.model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
633
- "vlm.model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
634
- "vlm.model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
635
- "vlm.model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
636
- "vlm.model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
637
- "vlm.model.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
638
- "vlm.model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
639
- "vlm.model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
640
- "vlm.model.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
641
- "vlm.model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
642
- "vlm.model.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
643
- "vlm.model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
644
- "vlm.model.norm.weight": "model-00002-of-00002.safetensors",
645
- "vlm.new_tensor": "model-00001-of-00002.safetensors",
646
- "vlm.padding_for_inserted_tokens": "model-00001-of-00002.safetensors",
647
- "vlm.should_update_latent_head.bias": "model-00002-of-00002.safetensors",
648
- "vlm.should_update_latent_head.weight": "model-00002-of-00002.safetensors",
649
- "vlm.state_pos_embedding": "model-00001-of-00002.safetensors",
650
- "vlm.state_projection.bias": "model-00002-of-00002.safetensors",
651
- "vlm.state_projection.weight": "model-00002-of-00002.safetensors",
652
- "vlm.visual.blocks.0.attn.proj.bias": "model-00001-of-00002.safetensors",
653
- "vlm.visual.blocks.0.attn.proj.weight": "model-00001-of-00002.safetensors",
654
- "vlm.visual.blocks.0.attn.qkv.bias": "model-00001-of-00002.safetensors",
655
- "vlm.visual.blocks.0.attn.qkv.weight": "model-00001-of-00002.safetensors",
656
- "vlm.visual.blocks.0.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
657
- "vlm.visual.blocks.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
658
- "vlm.visual.blocks.0.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
659
- "vlm.visual.blocks.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
660
- "vlm.visual.blocks.0.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
661
- "vlm.visual.blocks.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
662
- "vlm.visual.blocks.0.norm1.weight": "model-00001-of-00002.safetensors",
663
- "vlm.visual.blocks.0.norm2.weight": "model-00001-of-00002.safetensors",
664
- "vlm.visual.blocks.1.attn.proj.bias": "model-00001-of-00002.safetensors",
665
- "vlm.visual.blocks.1.attn.proj.weight": "model-00001-of-00002.safetensors",
666
- "vlm.visual.blocks.1.attn.qkv.bias": "model-00001-of-00002.safetensors",
667
- "vlm.visual.blocks.1.attn.qkv.weight": "model-00001-of-00002.safetensors",
668
- "vlm.visual.blocks.1.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
669
- "vlm.visual.blocks.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
670
- "vlm.visual.blocks.1.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
671
- "vlm.visual.blocks.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
672
- "vlm.visual.blocks.1.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
673
- "vlm.visual.blocks.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
674
- "vlm.visual.blocks.1.norm1.weight": "model-00001-of-00002.safetensors",
675
- "vlm.visual.blocks.1.norm2.weight": "model-00001-of-00002.safetensors",
676
- "vlm.visual.blocks.10.attn.proj.bias": "model-00001-of-00002.safetensors",
677
- "vlm.visual.blocks.10.attn.proj.weight": "model-00001-of-00002.safetensors",
678
- "vlm.visual.blocks.10.attn.qkv.bias": "model-00001-of-00002.safetensors",
679
- "vlm.visual.blocks.10.attn.qkv.weight": "model-00001-of-00002.safetensors",
680
- "vlm.visual.blocks.10.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
681
- "vlm.visual.blocks.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
682
- "vlm.visual.blocks.10.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
683
- "vlm.visual.blocks.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
684
- "vlm.visual.blocks.10.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
685
- "vlm.visual.blocks.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
686
- "vlm.visual.blocks.10.norm1.weight": "model-00001-of-00002.safetensors",
687
- "vlm.visual.blocks.10.norm2.weight": "model-00001-of-00002.safetensors",
688
- "vlm.visual.blocks.11.attn.proj.bias": "model-00001-of-00002.safetensors",
689
- "vlm.visual.blocks.11.attn.proj.weight": "model-00001-of-00002.safetensors",
690
- "vlm.visual.blocks.11.attn.qkv.bias": "model-00001-of-00002.safetensors",
691
- "vlm.visual.blocks.11.attn.qkv.weight": "model-00001-of-00002.safetensors",
692
- "vlm.visual.blocks.11.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
693
- "vlm.visual.blocks.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
694
- "vlm.visual.blocks.11.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
695
- "vlm.visual.blocks.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
696
- "vlm.visual.blocks.11.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
697
- "vlm.visual.blocks.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
698
- "vlm.visual.blocks.11.norm1.weight": "model-00001-of-00002.safetensors",
699
- "vlm.visual.blocks.11.norm2.weight": "model-00001-of-00002.safetensors",
700
- "vlm.visual.blocks.12.attn.proj.bias": "model-00001-of-00002.safetensors",
701
- "vlm.visual.blocks.12.attn.proj.weight": "model-00001-of-00002.safetensors",
702
- "vlm.visual.blocks.12.attn.qkv.bias": "model-00001-of-00002.safetensors",
703
- "vlm.visual.blocks.12.attn.qkv.weight": "model-00001-of-00002.safetensors",
704
- "vlm.visual.blocks.12.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
705
- "vlm.visual.blocks.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
706
- "vlm.visual.blocks.12.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
707
- "vlm.visual.blocks.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
708
- "vlm.visual.blocks.12.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
709
- "vlm.visual.blocks.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
710
- "vlm.visual.blocks.12.norm1.weight": "model-00001-of-00002.safetensors",
711
- "vlm.visual.blocks.12.norm2.weight": "model-00001-of-00002.safetensors",
712
- "vlm.visual.blocks.13.attn.proj.bias": "model-00001-of-00002.safetensors",
713
- "vlm.visual.blocks.13.attn.proj.weight": "model-00001-of-00002.safetensors",
714
- "vlm.visual.blocks.13.attn.qkv.bias": "model-00001-of-00002.safetensors",
715
- "vlm.visual.blocks.13.attn.qkv.weight": "model-00001-of-00002.safetensors",
716
- "vlm.visual.blocks.13.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
717
- "vlm.visual.blocks.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
718
- "vlm.visual.blocks.13.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
719
- "vlm.visual.blocks.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
720
- "vlm.visual.blocks.13.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
721
- "vlm.visual.blocks.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
722
- "vlm.visual.blocks.13.norm1.weight": "model-00001-of-00002.safetensors",
723
- "vlm.visual.blocks.13.norm2.weight": "model-00001-of-00002.safetensors",
724
- "vlm.visual.blocks.14.attn.proj.bias": "model-00001-of-00002.safetensors",
725
- "vlm.visual.blocks.14.attn.proj.weight": "model-00001-of-00002.safetensors",
726
- "vlm.visual.blocks.14.attn.qkv.bias": "model-00001-of-00002.safetensors",
727
- "vlm.visual.blocks.14.attn.qkv.weight": "model-00001-of-00002.safetensors",
728
- "vlm.visual.blocks.14.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
729
- "vlm.visual.blocks.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
730
- "vlm.visual.blocks.14.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
731
- "vlm.visual.blocks.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
732
- "vlm.visual.blocks.14.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
733
- "vlm.visual.blocks.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
734
- "vlm.visual.blocks.14.norm1.weight": "model-00001-of-00002.safetensors",
735
- "vlm.visual.blocks.14.norm2.weight": "model-00001-of-00002.safetensors",
736
- "vlm.visual.blocks.15.attn.proj.bias": "model-00001-of-00002.safetensors",
737
- "vlm.visual.blocks.15.attn.proj.weight": "model-00001-of-00002.safetensors",
738
- "vlm.visual.blocks.15.attn.qkv.bias": "model-00001-of-00002.safetensors",
739
- "vlm.visual.blocks.15.attn.qkv.weight": "model-00001-of-00002.safetensors",
740
- "vlm.visual.blocks.15.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
741
- "vlm.visual.blocks.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
742
- "vlm.visual.blocks.15.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
743
- "vlm.visual.blocks.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
744
- "vlm.visual.blocks.15.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
745
- "vlm.visual.blocks.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
746
- "vlm.visual.blocks.15.norm1.weight": "model-00001-of-00002.safetensors",
747
- "vlm.visual.blocks.15.norm2.weight": "model-00001-of-00002.safetensors",
748
- "vlm.visual.blocks.16.attn.proj.bias": "model-00001-of-00002.safetensors",
749
- "vlm.visual.blocks.16.attn.proj.weight": "model-00001-of-00002.safetensors",
750
- "vlm.visual.blocks.16.attn.qkv.bias": "model-00001-of-00002.safetensors",
751
- "vlm.visual.blocks.16.attn.qkv.weight": "model-00001-of-00002.safetensors",
752
- "vlm.visual.blocks.16.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
753
- "vlm.visual.blocks.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
754
- "vlm.visual.blocks.16.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
755
- "vlm.visual.blocks.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
756
- "vlm.visual.blocks.16.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
757
- "vlm.visual.blocks.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
758
- "vlm.visual.blocks.16.norm1.weight": "model-00001-of-00002.safetensors",
759
- "vlm.visual.blocks.16.norm2.weight": "model-00001-of-00002.safetensors",
760
- "vlm.visual.blocks.17.attn.proj.bias": "model-00001-of-00002.safetensors",
761
- "vlm.visual.blocks.17.attn.proj.weight": "model-00001-of-00002.safetensors",
762
- "vlm.visual.blocks.17.attn.qkv.bias": "model-00001-of-00002.safetensors",
763
- "vlm.visual.blocks.17.attn.qkv.weight": "model-00001-of-00002.safetensors",
764
- "vlm.visual.blocks.17.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
765
- "vlm.visual.blocks.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
766
- "vlm.visual.blocks.17.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
767
- "vlm.visual.blocks.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
768
- "vlm.visual.blocks.17.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
769
- "vlm.visual.blocks.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
770
- "vlm.visual.blocks.17.norm1.weight": "model-00001-of-00002.safetensors",
771
- "vlm.visual.blocks.17.norm2.weight": "model-00001-of-00002.safetensors",
772
- "vlm.visual.blocks.18.attn.proj.bias": "model-00001-of-00002.safetensors",
773
- "vlm.visual.blocks.18.attn.proj.weight": "model-00001-of-00002.safetensors",
774
- "vlm.visual.blocks.18.attn.qkv.bias": "model-00001-of-00002.safetensors",
775
- "vlm.visual.blocks.18.attn.qkv.weight": "model-00001-of-00002.safetensors",
776
- "vlm.visual.blocks.18.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
777
- "vlm.visual.blocks.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
778
- "vlm.visual.blocks.18.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
779
- "vlm.visual.blocks.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
780
- "vlm.visual.blocks.18.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
781
- "vlm.visual.blocks.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
782
- "vlm.visual.blocks.18.norm1.weight": "model-00001-of-00002.safetensors",
783
- "vlm.visual.blocks.18.norm2.weight": "model-00001-of-00002.safetensors",
784
- "vlm.visual.blocks.19.attn.proj.bias": "model-00001-of-00002.safetensors",
785
- "vlm.visual.blocks.19.attn.proj.weight": "model-00001-of-00002.safetensors",
786
- "vlm.visual.blocks.19.attn.qkv.bias": "model-00001-of-00002.safetensors",
787
- "vlm.visual.blocks.19.attn.qkv.weight": "model-00001-of-00002.safetensors",
788
- "vlm.visual.blocks.19.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
789
- "vlm.visual.blocks.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
790
- "vlm.visual.blocks.19.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
791
- "vlm.visual.blocks.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
792
- "vlm.visual.blocks.19.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
793
- "vlm.visual.blocks.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
794
- "vlm.visual.blocks.19.norm1.weight": "model-00001-of-00002.safetensors",
795
- "vlm.visual.blocks.19.norm2.weight": "model-00001-of-00002.safetensors",
796
- "vlm.visual.blocks.2.attn.proj.bias": "model-00001-of-00002.safetensors",
797
- "vlm.visual.blocks.2.attn.proj.weight": "model-00001-of-00002.safetensors",
798
- "vlm.visual.blocks.2.attn.qkv.bias": "model-00001-of-00002.safetensors",
799
- "vlm.visual.blocks.2.attn.qkv.weight": "model-00001-of-00002.safetensors",
800
- "vlm.visual.blocks.2.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
801
- "vlm.visual.blocks.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
802
- "vlm.visual.blocks.2.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
803
- "vlm.visual.blocks.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
804
- "vlm.visual.blocks.2.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
805
- "vlm.visual.blocks.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
806
- "vlm.visual.blocks.2.norm1.weight": "model-00001-of-00002.safetensors",
807
- "vlm.visual.blocks.2.norm2.weight": "model-00001-of-00002.safetensors",
808
- "vlm.visual.blocks.20.attn.proj.bias": "model-00001-of-00002.safetensors",
809
- "vlm.visual.blocks.20.attn.proj.weight": "model-00001-of-00002.safetensors",
810
- "vlm.visual.blocks.20.attn.qkv.bias": "model-00001-of-00002.safetensors",
811
- "vlm.visual.blocks.20.attn.qkv.weight": "model-00001-of-00002.safetensors",
812
- "vlm.visual.blocks.20.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
813
- "vlm.visual.blocks.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
814
- "vlm.visual.blocks.20.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
815
- "vlm.visual.blocks.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
816
- "vlm.visual.blocks.20.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
817
- "vlm.visual.blocks.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
818
- "vlm.visual.blocks.20.norm1.weight": "model-00001-of-00002.safetensors",
819
- "vlm.visual.blocks.20.norm2.weight": "model-00001-of-00002.safetensors",
820
- "vlm.visual.blocks.21.attn.proj.bias": "model-00001-of-00002.safetensors",
821
- "vlm.visual.blocks.21.attn.proj.weight": "model-00001-of-00002.safetensors",
822
- "vlm.visual.blocks.21.attn.qkv.bias": "model-00001-of-00002.safetensors",
823
- "vlm.visual.blocks.21.attn.qkv.weight": "model-00001-of-00002.safetensors",
824
- "vlm.visual.blocks.21.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
825
- "vlm.visual.blocks.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
826
- "vlm.visual.blocks.21.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
827
- "vlm.visual.blocks.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
828
- "vlm.visual.blocks.21.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
829
- "vlm.visual.blocks.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
830
- "vlm.visual.blocks.21.norm1.weight": "model-00001-of-00002.safetensors",
831
- "vlm.visual.blocks.21.norm2.weight": "model-00001-of-00002.safetensors",
832
- "vlm.visual.blocks.22.attn.proj.bias": "model-00001-of-00002.safetensors",
833
- "vlm.visual.blocks.22.attn.proj.weight": "model-00001-of-00002.safetensors",
834
- "vlm.visual.blocks.22.attn.qkv.bias": "model-00001-of-00002.safetensors",
835
- "vlm.visual.blocks.22.attn.qkv.weight": "model-00001-of-00002.safetensors",
836
- "vlm.visual.blocks.22.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
837
- "vlm.visual.blocks.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
838
- "vlm.visual.blocks.22.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
839
- "vlm.visual.blocks.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
840
- "vlm.visual.blocks.22.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
841
- "vlm.visual.blocks.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
842
- "vlm.visual.blocks.22.norm1.weight": "model-00001-of-00002.safetensors",
843
- "vlm.visual.blocks.22.norm2.weight": "model-00001-of-00002.safetensors",
844
- "vlm.visual.blocks.23.attn.proj.bias": "model-00001-of-00002.safetensors",
845
- "vlm.visual.blocks.23.attn.proj.weight": "model-00001-of-00002.safetensors",
846
- "vlm.visual.blocks.23.attn.qkv.bias": "model-00001-of-00002.safetensors",
847
- "vlm.visual.blocks.23.attn.qkv.weight": "model-00001-of-00002.safetensors",
848
- "vlm.visual.blocks.23.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
849
- "vlm.visual.blocks.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
850
- "vlm.visual.blocks.23.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
851
- "vlm.visual.blocks.23.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
852
- "vlm.visual.blocks.23.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
853
- "vlm.visual.blocks.23.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
854
- "vlm.visual.blocks.23.norm1.weight": "model-00001-of-00002.safetensors",
855
- "vlm.visual.blocks.23.norm2.weight": "model-00001-of-00002.safetensors",
856
- "vlm.visual.blocks.24.attn.proj.bias": "model-00001-of-00002.safetensors",
857
- "vlm.visual.blocks.24.attn.proj.weight": "model-00001-of-00002.safetensors",
858
- "vlm.visual.blocks.24.attn.qkv.bias": "model-00001-of-00002.safetensors",
859
- "vlm.visual.blocks.24.attn.qkv.weight": "model-00001-of-00002.safetensors",
860
- "vlm.visual.blocks.24.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
861
- "vlm.visual.blocks.24.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
862
- "vlm.visual.blocks.24.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
863
- "vlm.visual.blocks.24.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
864
- "vlm.visual.blocks.24.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
865
- "vlm.visual.blocks.24.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
866
- "vlm.visual.blocks.24.norm1.weight": "model-00001-of-00002.safetensors",
867
- "vlm.visual.blocks.24.norm2.weight": "model-00001-of-00002.safetensors",
868
- "vlm.visual.blocks.25.attn.proj.bias": "model-00001-of-00002.safetensors",
869
- "vlm.visual.blocks.25.attn.proj.weight": "model-00001-of-00002.safetensors",
870
- "vlm.visual.blocks.25.attn.qkv.bias": "model-00001-of-00002.safetensors",
871
- "vlm.visual.blocks.25.attn.qkv.weight": "model-00001-of-00002.safetensors",
872
- "vlm.visual.blocks.25.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
873
- "vlm.visual.blocks.25.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
874
- "vlm.visual.blocks.25.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
875
- "vlm.visual.blocks.25.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
876
- "vlm.visual.blocks.25.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
877
- "vlm.visual.blocks.25.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
878
- "vlm.visual.blocks.25.norm1.weight": "model-00001-of-00002.safetensors",
879
- "vlm.visual.blocks.25.norm2.weight": "model-00001-of-00002.safetensors",
880
- "vlm.visual.blocks.26.attn.proj.bias": "model-00001-of-00002.safetensors",
881
- "vlm.visual.blocks.26.attn.proj.weight": "model-00001-of-00002.safetensors",
882
- "vlm.visual.blocks.26.attn.qkv.bias": "model-00001-of-00002.safetensors",
883
- "vlm.visual.blocks.26.attn.qkv.weight": "model-00001-of-00002.safetensors",
884
- "vlm.visual.blocks.26.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
885
- "vlm.visual.blocks.26.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
886
- "vlm.visual.blocks.26.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
887
- "vlm.visual.blocks.26.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
888
- "vlm.visual.blocks.26.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
889
- "vlm.visual.blocks.26.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
890
- "vlm.visual.blocks.26.norm1.weight": "model-00001-of-00002.safetensors",
891
- "vlm.visual.blocks.26.norm2.weight": "model-00001-of-00002.safetensors",
892
- "vlm.visual.blocks.27.attn.proj.bias": "model-00001-of-00002.safetensors",
893
- "vlm.visual.blocks.27.attn.proj.weight": "model-00001-of-00002.safetensors",
894
- "vlm.visual.blocks.27.attn.qkv.bias": "model-00001-of-00002.safetensors",
895
- "vlm.visual.blocks.27.attn.qkv.weight": "model-00001-of-00002.safetensors",
896
- "vlm.visual.blocks.27.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
897
- "vlm.visual.blocks.27.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
898
- "vlm.visual.blocks.27.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
899
- "vlm.visual.blocks.27.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
900
- "vlm.visual.blocks.27.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
901
- "vlm.visual.blocks.27.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
902
- "vlm.visual.blocks.27.norm1.weight": "model-00001-of-00002.safetensors",
903
- "vlm.visual.blocks.27.norm2.weight": "model-00001-of-00002.safetensors",
904
- "vlm.visual.blocks.28.attn.proj.bias": "model-00001-of-00002.safetensors",
905
- "vlm.visual.blocks.28.attn.proj.weight": "model-00001-of-00002.safetensors",
906
- "vlm.visual.blocks.28.attn.qkv.bias": "model-00001-of-00002.safetensors",
907
- "vlm.visual.blocks.28.attn.qkv.weight": "model-00001-of-00002.safetensors",
908
- "vlm.visual.blocks.28.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
909
- "vlm.visual.blocks.28.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
910
- "vlm.visual.blocks.28.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
911
- "vlm.visual.blocks.28.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
912
- "vlm.visual.blocks.28.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
913
- "vlm.visual.blocks.28.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
914
- "vlm.visual.blocks.28.norm1.weight": "model-00001-of-00002.safetensors",
915
- "vlm.visual.blocks.28.norm2.weight": "model-00001-of-00002.safetensors",
916
- "vlm.visual.blocks.29.attn.proj.bias": "model-00001-of-00002.safetensors",
917
- "vlm.visual.blocks.29.attn.proj.weight": "model-00001-of-00002.safetensors",
918
- "vlm.visual.blocks.29.attn.qkv.bias": "model-00001-of-00002.safetensors",
919
- "vlm.visual.blocks.29.attn.qkv.weight": "model-00001-of-00002.safetensors",
920
- "vlm.visual.blocks.29.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
921
- "vlm.visual.blocks.29.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
922
- "vlm.visual.blocks.29.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
923
- "vlm.visual.blocks.29.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
924
- "vlm.visual.blocks.29.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
925
- "vlm.visual.blocks.29.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
926
- "vlm.visual.blocks.29.norm1.weight": "model-00001-of-00002.safetensors",
927
- "vlm.visual.blocks.29.norm2.weight": "model-00001-of-00002.safetensors",
928
- "vlm.visual.blocks.3.attn.proj.bias": "model-00001-of-00002.safetensors",
929
- "vlm.visual.blocks.3.attn.proj.weight": "model-00001-of-00002.safetensors",
930
- "vlm.visual.blocks.3.attn.qkv.bias": "model-00001-of-00002.safetensors",
931
- "vlm.visual.blocks.3.attn.qkv.weight": "model-00001-of-00002.safetensors",
932
- "vlm.visual.blocks.3.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
933
- "vlm.visual.blocks.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
934
- "vlm.visual.blocks.3.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
935
- "vlm.visual.blocks.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
936
- "vlm.visual.blocks.3.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
937
- "vlm.visual.blocks.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
938
- "vlm.visual.blocks.3.norm1.weight": "model-00001-of-00002.safetensors",
939
- "vlm.visual.blocks.3.norm2.weight": "model-00001-of-00002.safetensors",
940
- "vlm.visual.blocks.30.attn.proj.bias": "model-00001-of-00002.safetensors",
941
- "vlm.visual.blocks.30.attn.proj.weight": "model-00001-of-00002.safetensors",
942
- "vlm.visual.blocks.30.attn.qkv.bias": "model-00001-of-00002.safetensors",
943
- "vlm.visual.blocks.30.attn.qkv.weight": "model-00001-of-00002.safetensors",
944
- "vlm.visual.blocks.30.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
945
- "vlm.visual.blocks.30.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
946
- "vlm.visual.blocks.30.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
947
- "vlm.visual.blocks.30.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
948
- "vlm.visual.blocks.30.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
949
- "vlm.visual.blocks.30.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
950
- "vlm.visual.blocks.30.norm1.weight": "model-00001-of-00002.safetensors",
951
- "vlm.visual.blocks.30.norm2.weight": "model-00001-of-00002.safetensors",
952
- "vlm.visual.blocks.31.attn.proj.bias": "model-00001-of-00002.safetensors",
953
- "vlm.visual.blocks.31.attn.proj.weight": "model-00001-of-00002.safetensors",
954
- "vlm.visual.blocks.31.attn.qkv.bias": "model-00001-of-00002.safetensors",
955
- "vlm.visual.blocks.31.attn.qkv.weight": "model-00001-of-00002.safetensors",
956
- "vlm.visual.blocks.31.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
957
- "vlm.visual.blocks.31.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
958
- "vlm.visual.blocks.31.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
959
- "vlm.visual.blocks.31.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
960
- "vlm.visual.blocks.31.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
961
- "vlm.visual.blocks.31.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
962
- "vlm.visual.blocks.31.norm1.weight": "model-00001-of-00002.safetensors",
963
- "vlm.visual.blocks.31.norm2.weight": "model-00001-of-00002.safetensors",
964
- "vlm.visual.blocks.4.attn.proj.bias": "model-00001-of-00002.safetensors",
965
- "vlm.visual.blocks.4.attn.proj.weight": "model-00001-of-00002.safetensors",
966
- "vlm.visual.blocks.4.attn.qkv.bias": "model-00001-of-00002.safetensors",
967
- "vlm.visual.blocks.4.attn.qkv.weight": "model-00001-of-00002.safetensors",
968
- "vlm.visual.blocks.4.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
969
- "vlm.visual.blocks.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
970
- "vlm.visual.blocks.4.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
971
- "vlm.visual.blocks.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
972
- "vlm.visual.blocks.4.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
973
- "vlm.visual.blocks.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
974
- "vlm.visual.blocks.4.norm1.weight": "model-00001-of-00002.safetensors",
975
- "vlm.visual.blocks.4.norm2.weight": "model-00001-of-00002.safetensors",
976
- "vlm.visual.blocks.5.attn.proj.bias": "model-00001-of-00002.safetensors",
977
- "vlm.visual.blocks.5.attn.proj.weight": "model-00001-of-00002.safetensors",
978
- "vlm.visual.blocks.5.attn.qkv.bias": "model-00001-of-00002.safetensors",
979
- "vlm.visual.blocks.5.attn.qkv.weight": "model-00001-of-00002.safetensors",
980
- "vlm.visual.blocks.5.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
981
- "vlm.visual.blocks.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
982
- "vlm.visual.blocks.5.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
983
- "vlm.visual.blocks.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
984
- "vlm.visual.blocks.5.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
985
- "vlm.visual.blocks.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
986
- "vlm.visual.blocks.5.norm1.weight": "model-00001-of-00002.safetensors",
987
- "vlm.visual.blocks.5.norm2.weight": "model-00001-of-00002.safetensors",
988
- "vlm.visual.blocks.6.attn.proj.bias": "model-00001-of-00002.safetensors",
989
- "vlm.visual.blocks.6.attn.proj.weight": "model-00001-of-00002.safetensors",
990
- "vlm.visual.blocks.6.attn.qkv.bias": "model-00001-of-00002.safetensors",
991
- "vlm.visual.blocks.6.attn.qkv.weight": "model-00001-of-00002.safetensors",
992
- "vlm.visual.blocks.6.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
993
- "vlm.visual.blocks.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
994
- "vlm.visual.blocks.6.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
995
- "vlm.visual.blocks.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
996
- "vlm.visual.blocks.6.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
997
- "vlm.visual.blocks.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
998
- "vlm.visual.blocks.6.norm1.weight": "model-00001-of-00002.safetensors",
999
- "vlm.visual.blocks.6.norm2.weight": "model-00001-of-00002.safetensors",
1000
- "vlm.visual.blocks.7.attn.proj.bias": "model-00001-of-00002.safetensors",
1001
- "vlm.visual.blocks.7.attn.proj.weight": "model-00001-of-00002.safetensors",
1002
- "vlm.visual.blocks.7.attn.qkv.bias": "model-00001-of-00002.safetensors",
1003
- "vlm.visual.blocks.7.attn.qkv.weight": "model-00001-of-00002.safetensors",
1004
- "vlm.visual.blocks.7.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
1005
- "vlm.visual.blocks.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
1006
- "vlm.visual.blocks.7.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
1007
- "vlm.visual.blocks.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
1008
- "vlm.visual.blocks.7.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
1009
- "vlm.visual.blocks.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
1010
- "vlm.visual.blocks.7.norm1.weight": "model-00001-of-00002.safetensors",
1011
- "vlm.visual.blocks.7.norm2.weight": "model-00001-of-00002.safetensors",
1012
- "vlm.visual.blocks.8.attn.proj.bias": "model-00001-of-00002.safetensors",
1013
- "vlm.visual.blocks.8.attn.proj.weight": "model-00001-of-00002.safetensors",
1014
- "vlm.visual.blocks.8.attn.qkv.bias": "model-00001-of-00002.safetensors",
1015
- "vlm.visual.blocks.8.attn.qkv.weight": "model-00001-of-00002.safetensors",
1016
- "vlm.visual.blocks.8.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
1017
- "vlm.visual.blocks.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
1018
- "vlm.visual.blocks.8.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
1019
- "vlm.visual.blocks.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
1020
- "vlm.visual.blocks.8.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
1021
- "vlm.visual.blocks.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
1022
- "vlm.visual.blocks.8.norm1.weight": "model-00001-of-00002.safetensors",
1023
- "vlm.visual.blocks.8.norm2.weight": "model-00001-of-00002.safetensors",
1024
- "vlm.visual.blocks.9.attn.proj.bias": "model-00001-of-00002.safetensors",
1025
- "vlm.visual.blocks.9.attn.proj.weight": "model-00001-of-00002.safetensors",
1026
- "vlm.visual.blocks.9.attn.qkv.bias": "model-00001-of-00002.safetensors",
1027
- "vlm.visual.blocks.9.attn.qkv.weight": "model-00001-of-00002.safetensors",
1028
- "vlm.visual.blocks.9.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
1029
- "vlm.visual.blocks.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
1030
- "vlm.visual.blocks.9.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
1031
- "vlm.visual.blocks.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
1032
- "vlm.visual.blocks.9.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
1033
- "vlm.visual.blocks.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
1034
- "vlm.visual.blocks.9.norm1.weight": "model-00001-of-00002.safetensors",
1035
- "vlm.visual.blocks.9.norm2.weight": "model-00001-of-00002.safetensors",
1036
- "vlm.visual.merger.ln_q.weight": "model-00001-of-00002.safetensors",
1037
- "vlm.visual.merger.mlp.0.bias": "model-00001-of-00002.safetensors",
1038
- "vlm.visual.merger.mlp.0.weight": "model-00001-of-00002.safetensors",
1039
- "vlm.visual.merger.mlp.2.bias": "model-00001-of-00002.safetensors",
1040
- "vlm.visual.merger.mlp.2.weight": "model-00001-of-00002.safetensors",
1041
- "vlm.visual.patch_embed.proj.weight": "model-00001-of-00002.safetensors"
1042
- }
1043
- }
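The entries above complete the `weight_map` of the deleted `model.safetensors.index.json`: every `vlm.visual.*` parameter (the ViT blocks, the patch-merger MLP, and the patch embedding) resolves to the first of the two shards. Such an index is simply a JSON lookup from parameter name to shard file; a minimal sketch of how one is typically consumed to read a single tensor without opening the other shard (hypothetical local paths, not code from this repo):

```python
import json

from safetensors import safe_open

# model.safetensors.index.json maps parameter names to shard files, e.g.
#   "vlm.visual.patch_embed.proj.weight" -> "model-00001-of-00002.safetensors"
with open("model.safetensors.index.json") as f:
    weight_map = json.load(f)["weight_map"]

def load_tensor(name: str):
    """Open only the shard that stores `name` and read that one tensor."""
    with safe_open(weight_map[name], framework="pt") as shard:
        return shard.get_tensor(name)

w = load_tensor("vlm.visual.patch_embed.proj.weight")
print(w.shape)
```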
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
testcheckpoint-8000/optimizer.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:7b36131084cf7c2f987b0958e2e5258e021b742c9aa294eb02352326a20e73e2
- size 6618048
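This and the remaining deleted files are stored through Git LFS, so the diff shows only the three-line pointer (spec version, SHA-256 object id, byte size) rather than the ~6.6 MB binary itself. The pointer format is plain `key value` text; a small parser sketch:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

# e.g. for the optimizer.pt pointer above:
ptr = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:7b36131084cf7c2f987b0958e2e5258e021b742c9aa294eb02352326a20e73e2\n"
    "size 6618048\n"
)
algo, digest = ptr["oid"].split(":", 1)  # ("sha256", "7b36...")
assert int(ptr["size"]) == 6618048       # size of the real blob in bytes
```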
 
testcheckpoint-8000/rng_state_0.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:ed728bcf3aa29f209014b99f610006e7fd3810970632e95649d43c53f704c38e
- size 15984
 
testcheckpoint-8000/rng_state_1.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:128921bc97133b94b5ca9ac04caf0419adb071b2d64b17722f1aa2d2a9bb22e7
- size 15984
 
testcheckpoint-8000/rng_state_2.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:d8c4ece65a58c5b645d3ed09c338548c6e52b018973772a911f5573ecd1fe4c9
- size 15984
 
testcheckpoint-8000/rng_state_3.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:9a86d6b2d1d7796ad239f7a7cd930b2edb6b7ebaeed806d5c51e976866128f99
- size 15984
 
testcheckpoint-8000/rng_state_4.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:0d483a13677cdec6f256dc336297bba21e9e9ba0d8857343a8fbdaee343258f7
- size 15984
 
testcheckpoint-8000/rng_state_5.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3cbdc9b456c0b6d28caf9c0e06727fa5754f7e2dbffb2fc1dcacf67cef1b5ab5
- size 15984
 
testcheckpoint-8000/rng_state_6.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:d521db882adca3bd5d5b25b4e42bd40d7317f05ec20b832c2fa30370d5d51ce2
- size 15984
 
testcheckpoint-8000/rng_state_7.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:499066793e27298e57cfe6de78563ed9efb838fb5f104f6f2b4c0e159e0cf771
- size 15984
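The eight `rng_state_{0..7}.pth` files above, one per data-parallel rank and all 15 984 bytes, snapshot the Python, NumPy, CPU, and CUDA RNG states so that a resumed run reproduces the same shuffling and dropout draws. A hedged sketch of restoring one rank's snapshot, assuming the key layout that transformers' `Trainer` writes:

```python
import random

import numpy as np
import torch

# Key names ("python"/"numpy"/"cpu"/"cuda") follow the layout the HF
# Trainer uses for rng_state_<rank>.pth; treat them as an assumption.
state = torch.load("rng_state_0.pth", weights_only=False)
random.setstate(state["python"])
np.random.set_state(state["numpy"])
torch.random.set_rng_state(state["cpu"])
if torch.cuda.is_available() and "cuda" in state:
    torch.cuda.set_rng_state(state["cuda"])  # may be per-device in some versions
```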
 
testcheckpoint-8000/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f4f7c32615bfb89becef0f957947f0383a390362b67d56aba970b215a41d98af
- size 1064
 
testcheckpoint-8000/trainer_state.json DELETED
@@ -1,626 +0,0 @@
- {
- "best_global_step": null,
- "best_metric": null,
- "best_model_checkpoint": null,
- "epoch": 3.2,
- "eval_steps": 2000,
- "global_step": 8000,
- "is_hyper_param_search": false,
- "is_local_process_zero": true,
- "is_world_process_zero": true,
- "log_history": [
- {
- "epoch": 0.04,
- "grad_norm": 1.3359375,
- "learning_rate": 9.900000000000001e-05,
- "loss": 1.3398,
- "step": 100
- },
- {
- "epoch": 0.08,
- "grad_norm": 1.9140625,
- "learning_rate": 0.000199,
- "loss": 1.2705,
- "step": 200
- },
- {
- "epoch": 0.12,
- "grad_norm": 3.96875,
- "learning_rate": 0.000299,
- "loss": 1.2406,
- "step": 300
- },
- {
- "epoch": 0.16,
- "grad_norm": 0.91015625,
- "learning_rate": 0.00039900000000000005,
- "loss": 1.2299,
- "step": 400
- },
- {
- "epoch": 0.2,
- "grad_norm": 0.734375,
- "learning_rate": 0.000499,
- "loss": 1.2155,
- "step": 500
- },
- {
- "epoch": 0.24,
- "grad_norm": 0.48046875,
- "learning_rate": 0.000599,
- "loss": 1.2151,
- "step": 600
- },
- {
- "epoch": 0.28,
- "grad_norm": 0.7265625,
- "learning_rate": 0.000699,
- "loss": 1.2115,
- "step": 700
- },
- {
- "epoch": 0.32,
- "grad_norm": 0.77734375,
- "learning_rate": 0.000799,
- "loss": 1.1924,
- "step": 800
- },
- {
- "epoch": 0.36,
- "grad_norm": 0.216796875,
- "learning_rate": 0.0008990000000000001,
- "loss": 1.1841,
- "step": 900
- },
- {
- "epoch": 0.4,
- "grad_norm": 0.60546875,
- "learning_rate": 0.000999,
- "loss": 1.179,
- "step": 1000
- },
- {
- "epoch": 0.44,
- "grad_norm": 0.4765625,
- "learning_rate": 0.0009996688963210703,
- "loss": 1.1748,
- "step": 1100
- },
- {
- "epoch": 0.48,
- "grad_norm": 0.2734375,
- "learning_rate": 0.000999334448160535,
- "loss": 1.1673,
- "step": 1200
- },
- {
- "epoch": 0.52,
- "grad_norm": 0.412109375,
- "learning_rate": 0.000999,
- "loss": 1.1671,
- "step": 1300
- },
- {
- "epoch": 0.56,
- "grad_norm": 0.408203125,
- "learning_rate": 0.000998665551839465,
- "loss": 1.1546,
- "step": 1400
- },
- {
- "epoch": 0.6,
- "grad_norm": 0.421875,
- "learning_rate": 0.0009983311036789297,
- "loss": 1.1581,
- "step": 1500
- },
- {
- "epoch": 0.64,
- "grad_norm": 0.62109375,
- "learning_rate": 0.0009979966555183947,
- "loss": 1.1597,
- "step": 1600
- },
- {
- "epoch": 0.68,
- "grad_norm": 0.2265625,
- "learning_rate": 0.0009976622073578595,
- "loss": 1.1539,
- "step": 1700
- },
- {
- "epoch": 0.72,
- "grad_norm": 1.9296875,
- "learning_rate": 0.0009973277591973245,
- "loss": 1.1685,
- "step": 1800
- },
- {
- "epoch": 0.76,
- "grad_norm": 0.24609375,
- "learning_rate": 0.0009969933110367893,
- "loss": 1.1583,
- "step": 1900
- },
- {
- "epoch": 0.8,
- "grad_norm": 0.16015625,
- "learning_rate": 0.0009966588628762543,
- "loss": 1.1506,
- "step": 2000
- },
- {
- "epoch": 0.8,
- "eval_loss": 1.5790327787399292,
- "eval_runtime": 3616.378,
- "eval_samples_per_second": 67.039,
- "eval_steps_per_second": 8.38,
- "step": 2000
- },
- {
- "epoch": 0.84,
- "grad_norm": 1.8984375,
- "learning_rate": 0.000996324414715719,
- "loss": 1.1529,
- "step": 2100
- },
- {
- "epoch": 0.88,
- "grad_norm": 0.75,
- "learning_rate": 0.0009959899665551838,
- "loss": 1.1467,
- "step": 2200
- },
- {
- "epoch": 0.92,
- "grad_norm": 0.2314453125,
- "learning_rate": 0.0009956555183946488,
- "loss": 1.1445,
- "step": 2300
- },
- {
- "epoch": 0.96,
- "grad_norm": 0.306640625,
- "learning_rate": 0.0009953210702341138,
- "loss": 1.1501,
- "step": 2400
- },
- {
- "epoch": 1.0,
- "grad_norm": 0.361328125,
- "learning_rate": 0.0009949866220735786,
- "loss": 1.135,
- "step": 2500
- },
- {
- "epoch": 1.04,
- "grad_norm": 1.2109375,
- "learning_rate": 0.0009946521739130434,
- "loss": 1.142,
- "step": 2600
- },
- {
- "epoch": 1.08,
- "grad_norm": 0.66796875,
- "learning_rate": 0.0009943177257525084,
- "loss": 1.1444,
- "step": 2700
- },
- {
- "epoch": 1.12,
- "grad_norm": 0.376953125,
- "learning_rate": 0.0009939832775919732,
- "loss": 1.1556,
- "step": 2800
- },
- {
- "epoch": 1.16,
- "grad_norm": 0.81640625,
- "learning_rate": 0.0009936488294314382,
- "loss": 1.1585,
- "step": 2900
- },
- {
- "epoch": 1.2,
- "grad_norm": 0.15625,
- "learning_rate": 0.000993314381270903,
- "loss": 1.1393,
- "step": 3000
- },
- {
- "epoch": 1.24,
- "grad_norm": 0.65234375,
- "learning_rate": 0.000992979933110368,
- "loss": 1.1448,
- "step": 3100
- },
- {
- "epoch": 1.28,
- "grad_norm": 0.9375,
- "learning_rate": 0.0009926454849498328,
- "loss": 1.1528,
- "step": 3200
- },
- {
- "epoch": 1.32,
- "grad_norm": 0.2373046875,
- "learning_rate": 0.0009923110367892976,
- "loss": 1.141,
- "step": 3300
- },
- {
- "epoch": 1.3599999999999999,
- "grad_norm": 0.1572265625,
- "learning_rate": 0.0009919765886287626,
- "loss": 1.1352,
- "step": 3400
- },
- {
- "epoch": 1.4,
- "grad_norm": 0.6875,
- "learning_rate": 0.0009916421404682274,
- "loss": 1.1477,
- "step": 3500
- },
- {
- "epoch": 1.44,
- "grad_norm": 0.5546875,
- "learning_rate": 0.0009913076923076924,
- "loss": 1.139,
- "step": 3600
- },
- {
- "epoch": 1.48,
- "grad_norm": 0.5234375,
- "learning_rate": 0.0009909732441471572,
- "loss": 1.1384,
- "step": 3700
- },
- {
- "epoch": 1.52,
- "grad_norm": 0.478515625,
- "learning_rate": 0.0009906387959866222,
- "loss": 1.1441,
- "step": 3800
- },
- {
- "epoch": 1.56,
- "grad_norm": 1.03125,
- "learning_rate": 0.000990304347826087,
- "loss": 1.1345,
- "step": 3900
- },
- {
- "epoch": 1.6,
- "grad_norm": 0.1708984375,
- "learning_rate": 0.0009899698996655517,
- "loss": 1.1391,
- "step": 4000
- },
- {
- "epoch": 1.6,
- "eval_loss": 1.5626392364501953,
- "eval_runtime": 3634.9176,
- "eval_samples_per_second": 66.697,
- "eval_steps_per_second": 8.337,
- "step": 4000
- },
- {
- "epoch": 1.6400000000000001,
- "grad_norm": 0.3359375,
- "learning_rate": 0.0009896354515050167,
- "loss": 1.1387,
- "step": 4100
- },
- {
- "epoch": 1.6800000000000002,
- "grad_norm": 0.2099609375,
- "learning_rate": 0.0009893010033444815,
- "loss": 1.1335,
- "step": 4200
- },
- {
- "epoch": 1.72,
- "grad_norm": 0.45703125,
- "learning_rate": 0.0009889665551839465,
- "loss": 1.1381,
- "step": 4300
- },
- {
- "epoch": 1.76,
- "grad_norm": 1.1171875,
- "learning_rate": 0.0009886321070234115,
- "loss": 1.1415,
- "step": 4400
- },
- {
- "epoch": 1.8,
- "grad_norm": 0.447265625,
- "learning_rate": 0.0009882976588628763,
- "loss": 1.1297,
- "step": 4500
- },
- {
- "epoch": 1.8399999999999999,
- "grad_norm": 1.046875,
- "learning_rate": 0.0009879632107023411,
- "loss": 1.133,
- "step": 4600
- },
- {
- "epoch": 1.88,
- "grad_norm": 0.79296875,
- "learning_rate": 0.000987628762541806,
- "loss": 1.1379,
- "step": 4700
- },
- {
- "epoch": 1.92,
- "grad_norm": 0.4765625,
- "learning_rate": 0.000987294314381271,
- "loss": 1.1355,
- "step": 4800
- },
- {
- "epoch": 1.96,
- "grad_norm": 0.2080078125,
- "learning_rate": 0.000986959866220736,
- "loss": 1.1349,
- "step": 4900
- },
- {
- "epoch": 2.0,
- "grad_norm": 0.458984375,
- "learning_rate": 0.0009866254180602007,
- "loss": 1.1308,
- "step": 5000
- },
- {
- "epoch": 2.04,
- "grad_norm": 0.287109375,
- "learning_rate": 0.0009862909698996657,
- "loss": 1.1252,
- "step": 5100
- },
- {
- "epoch": 2.08,
- "grad_norm": 0.2451171875,
- "learning_rate": 0.0009859565217391305,
- "loss": 1.1394,
- "step": 5200
- },
- {
- "epoch": 2.12,
- "grad_norm": 0.318359375,
- "learning_rate": 0.0009856220735785953,
- "loss": 1.1342,
- "step": 5300
- },
- {
- "epoch": 2.16,
- "grad_norm": 0.84375,
- "learning_rate": 0.0009852876254180603,
- "loss": 1.1296,
- "step": 5400
- },
- {
- "epoch": 2.2,
- "grad_norm": 0.240234375,
- "learning_rate": 0.000984953177257525,
- "loss": 1.1254,
- "step": 5500
- },
- {
- "epoch": 2.24,
- "grad_norm": 1.1640625,
- "learning_rate": 0.00098461872909699,
- "loss": 1.1371,
- "step": 5600
- },
- {
- "epoch": 2.2800000000000002,
- "grad_norm": 0.66015625,
- "learning_rate": 0.0009842842809364549,
- "loss": 1.1241,
- "step": 5700
- },
- {
- "epoch": 2.32,
- "grad_norm": 0.17578125,
- "learning_rate": 0.0009839498327759199,
- "loss": 1.1197,
- "step": 5800
- },
- {
- "epoch": 2.36,
- "grad_norm": 0.302734375,
- "learning_rate": 0.0009836153846153846,
- "loss": 1.1355,
- "step": 5900
- },
- {
- "epoch": 2.4,
- "grad_norm": 0.2216796875,
- "learning_rate": 0.0009832809364548494,
- "loss": 1.132,
- "step": 6000
- },
- {
- "epoch": 2.4,
- "eval_loss": 1.5653446912765503,
- "eval_runtime": 3610.856,
- "eval_samples_per_second": 67.141,
- "eval_steps_per_second": 8.393,
- "step": 6000
- },
- {
- "epoch": 2.44,
- "grad_norm": 1.265625,
- "learning_rate": 0.0009829464882943144,
- "loss": 1.1403,
- "step": 6100
- },
- {
- "epoch": 2.48,
- "grad_norm": 1.203125,
- "learning_rate": 0.0009826120401337792,
- "loss": 1.1431,
- "step": 6200
- },
- {
- "epoch": 2.52,
- "grad_norm": 0.390625,
- "learning_rate": 0.0009822775919732442,
- "loss": 1.125,
- "step": 6300
- },
- {
- "epoch": 2.56,
- "grad_norm": 0.220703125,
- "learning_rate": 0.000981943143812709,
- "loss": 1.1299,
- "step": 6400
- },
- {
- "epoch": 2.6,
- "grad_norm": 0.2451171875,
- "learning_rate": 0.000981608695652174,
- "loss": 1.1431,
- "step": 6500
- },
- {
- "epoch": 2.64,
- "grad_norm": 0.78515625,
- "learning_rate": 0.0009812742474916388,
- "loss": 1.1417,
- "step": 6600
- },
- {
- "epoch": 2.68,
- "grad_norm": 0.1865234375,
- "learning_rate": 0.0009809397993311036,
- "loss": 1.1387,
- "step": 6700
- },
- {
- "epoch": 2.7199999999999998,
- "grad_norm": 0.46484375,
- "learning_rate": 0.0009806053511705686,
- "loss": 1.1428,
- "step": 6800
- },
- {
- "epoch": 2.76,
- "grad_norm": 0.515625,
- "learning_rate": 0.0009802709030100334,
- "loss": 1.1237,
- "step": 6900
- },
- {
- "epoch": 2.8,
- "grad_norm": 0.451171875,
- "learning_rate": 0.0009799364548494984,
- "loss": 1.1284,
- "step": 7000
- },
- {
- "epoch": 2.84,
- "grad_norm": 0.318359375,
- "learning_rate": 0.0009796020066889632,
- "loss": 1.1207,
- "step": 7100
- },
- {
- "epoch": 2.88,
- "grad_norm": 0.1708984375,
- "learning_rate": 0.0009792675585284282,
- "loss": 1.1291,
- "step": 7200
- },
- {
- "epoch": 2.92,
- "grad_norm": 0.373046875,
- "learning_rate": 0.000978933110367893,
- "loss": 1.1235,
- "step": 7300
- },
- {
- "epoch": 2.96,
- "grad_norm": 0.8671875,
- "learning_rate": 0.0009785986622073578,
- "loss": 1.141,
- "step": 7400
- },
- {
- "epoch": 3.0,
- "grad_norm": 1.015625,
- "learning_rate": 0.0009782642140468228,
- "loss": 1.1493,
- "step": 7500
- },
- {
- "epoch": 3.04,
- "grad_norm": 0.173828125,
- "learning_rate": 0.0009779297658862878,
- "loss": 1.1353,
- "step": 7600
- },
- {
- "epoch": 3.08,
- "grad_norm": 0.33203125,
- "learning_rate": 0.0009775953177257525,
- "loss": 1.1284,
- "step": 7700
- },
- {
- "epoch": 3.12,
- "grad_norm": 0.30078125,
- "learning_rate": 0.0009772608695652173,
- "loss": 1.1386,
- "step": 7800
- },
- {
- "epoch": 3.16,
- "grad_norm": 0.35546875,
- "learning_rate": 0.0009769264214046823,
- "loss": 1.1248,
- "step": 7900
- },
- {
- "epoch": 3.2,
- "grad_norm": 0.482421875,
- "learning_rate": 0.0009765919732441471,
- "loss": 1.1136,
- "step": 8000
- },
- {
- "epoch": 3.2,
- "eval_loss": 1.564854621887207,
- "eval_runtime": 3599.684,
- "eval_samples_per_second": 67.35,
- "eval_steps_per_second": 8.419,
- "step": 8000
- }
- ],
- "logging_steps": 100,
- "max_steps": 300000,
- "num_input_tokens_seen": 0,
- "num_train_epochs": 120,
- "save_steps": 4000,
- "stateful_callbacks": {
- "TrainerControl": {
- "args": {
- "should_epoch_stop": false,
- "should_evaluate": false,
- "should_log": false,
- "should_save": true,
- "should_training_stop": false
- },
- "attributes": {}
- }
- },
- "total_flos": 0.0,
- "train_batch_size": 1,
- "trial_name": null,
- "trial_params": null
- }
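`trainer_state.json` records the run up to the saved step: 8 000 of 300 000 optimizer steps (epoch 3.2 of 120), evaluating every 2 000 steps. Training loss falls from 1.34 to about 1.11, while `eval_loss` stays essentially flat near 1.56 from step 4 000 onward. The logged learning rates match a linear warmup to 1e-3 over 1 000 steps followed by linear decay toward zero at step 300 000, evaluated one step behind the logged global step; a minimal sketch of that schedule, with the off-by-one made explicit:

```python
def linear_lr(step: int, base_lr: float = 1e-3,
              warmup: int = 1_000, total: int = 300_000) -> float:
    """Linear warmup then linear decay (HF lr_scheduler_type="linear")."""
    if step < warmup:
        return base_lr * step / warmup
    return base_lr * max(0.0, (total - step) / (total - warmup))

# The scheduler's internal counter lags the logged global step by one:
assert abs(linear_lr(99) - 9.900000000000001e-05) < 1e-15    # logged at step 100
assert abs(linear_lr(1999) - 0.0009966588628762543) < 1e-15  # logged at step 2000
```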
 
testcheckpoint-8000/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:1406932c7e1746fa2d9c0e1e777539c704d2df2b0d45c6176d86613f49417916
- size 5368
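Finally, `training_args.bin` is not a tensor file but a pickled `transformers.TrainingArguments` object (hence only 5 368 bytes). Reading it back requires full unpickling in an environment where `transformers` is importable; a sketch, assuming a trusted checkpoint:

```python
import torch

# training_args.bin is a pickled TrainingArguments object, so it needs
# full (trusted) unpickling rather than the safer weights_only path.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.warmup_steps, args.max_steps)
```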