| { |
| "best_metric": 3.837082862854004, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/full/transformer/4/checkpoints/checkpoint-992150", |
| "epoch": 1.0250006060157382, |
| "eval_steps": 10, |
| "global_step": 1831670, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 11.0625, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 6.8438, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 6.19, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 5.9707, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 5.8179, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 5.6964, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 5.5965, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 5.5336, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 5.4804, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 5.4012, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 5.3622, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 5.3225, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989938500867749e-05, |
| "loss": 5.2804, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989099906116697e-05, |
| "loss": 5.2196, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988261311365645e-05, |
| "loss": 5.1926, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.987422716614593e-05, |
| "loss": 5.1604, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986584121863541e-05, |
| "loss": 5.1211, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985745527112489e-05, |
| "loss": 5.0868, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984906932361437e-05, |
| "loss": 5.0669, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984068337610385e-05, |
| "loss": 5.039, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 5.0085, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9823927859886547e-05, |
| "loss": 4.9812, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9815541912376026e-05, |
| "loss": 4.9682, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9807155964865506e-05, |
| "loss": 4.9393, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 4.929, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790400448648195e-05, |
| "loss": 4.9023, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9782014501137675e-05, |
| "loss": 4.8901, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773628553627155e-05, |
| "loss": 4.8712, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 4.8371, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756873037409844e-05, |
| "loss": 4.8388, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748487089899324e-05, |
| "loss": 4.8094, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9740101142388804e-05, |
| "loss": 4.7989, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731715194878284e-05, |
| "loss": 4.7874, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.97233456261715e-05, |
| "loss": 4.7796, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.971497605746471e-05, |
| "loss": 4.7549, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.970659010995419e-05, |
| "loss": 4.7462, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.969820416244367e-05, |
| "loss": 4.7438, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968981821493315e-05, |
| "loss": 4.7264, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.968143226742263e-05, |
| "loss": 4.7184, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967306269871584e-05, |
| "loss": 4.6998, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966467675120532e-05, |
| "loss": 4.6839, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96562908036948e-05, |
| "loss": 4.6761, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964790485618428e-05, |
| "loss": 4.6541, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963951890867376e-05, |
| "loss": 4.6712, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963113296116324e-05, |
| "loss": 4.6455, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962274701365272e-05, |
| "loss": 4.6454, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96143610661422e-05, |
| "loss": 4.6347, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9605991497435414e-05, |
| "loss": 4.6319, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9597605549924894e-05, |
| "loss": 4.622, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9589219602414374e-05, |
| "loss": 4.5979, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958083365490385e-05, |
| "loss": 4.5904, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.957248046500079e-05, |
| "loss": 4.583, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956409451749027e-05, |
| "loss": 4.5902, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.955570856997975e-05, |
| "loss": 4.5683, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.954732262246923e-05, |
| "loss": 4.579, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953893667495871e-05, |
| "loss": 4.5542, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.953055072744819e-05, |
| "loss": 4.5466, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.952216477993767e-05, |
| "loss": 4.5431, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951377883242715e-05, |
| "loss": 4.5379, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.950539288491663e-05, |
| "loss": 4.5345, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949702331620985e-05, |
| "loss": 4.5171, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948863736869932e-05, |
| "loss": 4.5127, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94802514211888e-05, |
| "loss": 4.5203, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947186547367828e-05, |
| "loss": 4.5014, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946349590497149e-05, |
| "loss": 4.4957, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945510995746097e-05, |
| "loss": 4.4955, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944672400995045e-05, |
| "loss": 4.4943, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.943833806243993e-05, |
| "loss": 4.4729, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9429968493733145e-05, |
| "loss": 4.4844, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9421582546222625e-05, |
| "loss": 4.4737, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9413196598712105e-05, |
| "loss": 4.4645, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9404810651201585e-05, |
| "loss": 4.4718, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396441082494794e-05, |
| "loss": 4.4555, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.938807151378801e-05, |
| "loss": 4.4685, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.937968556627749e-05, |
| "loss": 4.469, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.937129961876696e-05, |
| "loss": 4.4589, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.936291367125644e-05, |
| "loss": 4.4418, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.935452772374592e-05, |
| "loss": 4.4312, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.934615815503914e-05, |
| "loss": 4.4218, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.933777220752861e-05, |
| "loss": 4.4346, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.93293862600181e-05, |
| "loss": 4.4345, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.932100031250758e-05, |
| "loss": 4.4327, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.931261436499706e-05, |
| "loss": 4.405, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.930422841748654e-05, |
| "loss": 4.405, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929584246997602e-05, |
| "loss": 4.4099, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.928747290126923e-05, |
| "loss": 4.3979, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927908695375871e-05, |
| "loss": 4.4033, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927070100624819e-05, |
| "loss": 4.3995, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.926231505873767e-05, |
| "loss": 4.4, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925394549003088e-05, |
| "loss": 4.3893, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924555954252036e-05, |
| "loss": 4.3855, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923717359500984e-05, |
| "loss": 4.3747, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922878764749932e-05, |
| "loss": 4.3895, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922041807879253e-05, |
| "loss": 4.3722, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921203213128201e-05, |
| "loss": 4.3577, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920364618377149e-05, |
| "loss": 4.3631, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919526023626097e-05, |
| "loss": 4.3764, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918687428875045e-05, |
| "loss": 4.3688, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917850472004366e-05, |
| "loss": 4.3671, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917011877253314e-05, |
| "loss": 4.3471, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.916173282502262e-05, |
| "loss": 4.3629, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.91533468775121e-05, |
| "loss": 4.3361, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.914496093000158e-05, |
| "loss": 4.3532, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.913657498249106e-05, |
| "loss": 4.3554, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.912820541378427e-05, |
| "loss": 4.3411, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.911981946627375e-05, |
| "loss": 4.3353, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.911143351876323e-05, |
| "loss": 4.3109, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.910304757125272e-05, |
| "loss": 4.3252, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90946616237422e-05, |
| "loss": 4.3236, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908627567623168e-05, |
| "loss": 4.3224, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9077906107524886e-05, |
| "loss": 4.3276, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9069520160014366e-05, |
| "loss": 4.3088, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9061134212503846e-05, |
| "loss": 4.3118, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9052748264993326e-05, |
| "loss": 4.3183, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90443623174828e-05, |
| "loss": 4.3196, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903597636997228e-05, |
| "loss": 4.3069, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9027606801265495e-05, |
| "loss": 4.3064, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9019220853754975e-05, |
| "loss": 4.3005, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9010834906244455e-05, |
| "loss": 4.3018, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9002448958733935e-05, |
| "loss": 4.305, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.899407939002715e-05, |
| "loss": 4.2918, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8985693442516624e-05, |
| "loss": 4.2879, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8977307495006104e-05, |
| "loss": 4.2838, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968921547495584e-05, |
| "loss": 4.2811, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8960535599985064e-05, |
| "loss": 4.2874, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8952149652474544e-05, |
| "loss": 4.2843, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8943763704964024e-05, |
| "loss": 4.2902, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8935377757453504e-05, |
| "loss": 4.2668, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.892700818874671e-05, |
| "loss": 4.2798, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891862224123619e-05, |
| "loss": 4.2814, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.891023629372567e-05, |
| "loss": 4.2691, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.890185034621515e-05, |
| "loss": 4.2865, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.889348077750837e-05, |
| "loss": 4.268, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888509482999785e-05, |
| "loss": 4.2792, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.887670888248733e-05, |
| "loss": 4.2694, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886832293497681e-05, |
| "loss": 4.2477, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885995336627002e-05, |
| "loss": 4.2505, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88515674187595e-05, |
| "loss": 4.2539, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8843197850052706e-05, |
| "loss": 4.2607, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8834811902542186e-05, |
| "loss": 4.2614, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8826425955031666e-05, |
| "loss": 4.2615, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8818040007521146e-05, |
| "loss": 4.2509, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8809654060010626e-05, |
| "loss": 4.2453, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8801268112500106e-05, |
| "loss": 4.2399, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8792882164989586e-05, |
| "loss": 4.2349, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.878449621747907e-05, |
| "loss": 4.2444, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877612664877228e-05, |
| "loss": 4.2537, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.876775708006549e-05, |
| "loss": 4.2458, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875937113255497e-05, |
| "loss": 4.2263, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875098518504445e-05, |
| "loss": 4.2293, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.1957807540893555, |
| "eval_runtime": 622.6197, |
| "eval_samples_per_second": 612.88, |
| "eval_steps_per_second": 19.153, |
| "step": 76319 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.874259923753393e-05, |
| "loss": 4.232, |
| "step": 76800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.873421329002341e-05, |
| "loss": 4.2268, |
| "step": 77312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.872582734251289e-05, |
| "loss": 4.2213, |
| "step": 77824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.871744139500237e-05, |
| "loss": 4.2219, |
| "step": 78336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.870905544749185e-05, |
| "loss": 4.2131, |
| "step": 78848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.870066949998133e-05, |
| "loss": 4.1968, |
| "step": 79360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.869228355247081e-05, |
| "loss": 4.2042, |
| "step": 79872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.868389760496029e-05, |
| "loss": 4.2168, |
| "step": 80384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.867551165744977e-05, |
| "loss": 4.2047, |
| "step": 80896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8667142088742986e-05, |
| "loss": 4.2081, |
| "step": 81408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.865875614123246e-05, |
| "loss": 4.2105, |
| "step": 81920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.865037019372194e-05, |
| "loss": 4.2093, |
| "step": 82432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.864198424621142e-05, |
| "loss": 4.1997, |
| "step": 82944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.86335982987009e-05, |
| "loss": 4.2022, |
| "step": 83456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.862521235119038e-05, |
| "loss": 4.1959, |
| "step": 83968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.861682640367986e-05, |
| "loss": 4.1815, |
| "step": 84480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.860844045616934e-05, |
| "loss": 4.1899, |
| "step": 84992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.860005450865882e-05, |
| "loss": 4.194, |
| "step": 85504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.85916685611483e-05, |
| "loss": 4.1926, |
| "step": 86016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.858329899244151e-05, |
| "loss": 4.1942, |
| "step": 86528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8574913044930995e-05, |
| "loss": 4.1794, |
| "step": 87040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8566527097420475e-05, |
| "loss": 4.191, |
| "step": 87552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8558141149909955e-05, |
| "loss": 4.1757, |
| "step": 88064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8549771581203164e-05, |
| "loss": 4.1871, |
| "step": 88576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8541385633692644e-05, |
| "loss": 4.1779, |
| "step": 89088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8532999686182124e-05, |
| "loss": 4.1756, |
| "step": 89600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8524613738671604e-05, |
| "loss": 4.1801, |
| "step": 90112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.8516227791161084e-05, |
| "loss": 4.1611, |
| "step": 90624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.850785822245429e-05, |
| "loss": 4.1765, |
| "step": 91136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.849947227494377e-05, |
| "loss": 4.1609, |
| "step": 91648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.849108632743325e-05, |
| "loss": 4.1653, |
| "step": 92160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.848270037992273e-05, |
| "loss": 4.1668, |
| "step": 92672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.847433081121595e-05, |
| "loss": 4.1709, |
| "step": 93184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.846594486370543e-05, |
| "loss": 4.1598, |
| "step": 93696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.845755891619491e-05, |
| "loss": 4.1543, |
| "step": 94208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.844917296868439e-05, |
| "loss": 4.1683, |
| "step": 94720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.84408033999776e-05, |
| "loss": 4.1573, |
| "step": 95232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.843241745246708e-05, |
| "loss": 4.1646, |
| "step": 95744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.842403150495656e-05, |
| "loss": 4.149, |
| "step": 96256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.841564555744604e-05, |
| "loss": 4.1394, |
| "step": 96768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.840725960993552e-05, |
| "loss": 4.1427, |
| "step": 97280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.839889004122873e-05, |
| "loss": 4.1387, |
| "step": 97792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.839050409371821e-05, |
| "loss": 4.1534, |
| "step": 98304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8382118146207687e-05, |
| "loss": 4.1413, |
| "step": 98816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8373732198697167e-05, |
| "loss": 4.1469, |
| "step": 99328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8365346251186646e-05, |
| "loss": 4.1412, |
| "step": 99840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.835697668247986e-05, |
| "loss": 4.1466, |
| "step": 100352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.834859073496934e-05, |
| "loss": 4.1437, |
| "step": 100864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.834020478745882e-05, |
| "loss": 4.1199, |
| "step": 101376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8331818839948295e-05, |
| "loss": 4.1272, |
| "step": 101888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.832344927124151e-05, |
| "loss": 4.1237, |
| "step": 102400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.831506332373099e-05, |
| "loss": 4.1395, |
| "step": 102912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.830667737622047e-05, |
| "loss": 4.1172, |
| "step": 103424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8298291428709944e-05, |
| "loss": 4.1395, |
| "step": 103936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.828992186000316e-05, |
| "loss": 4.1154, |
| "step": 104448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.828153591249264e-05, |
| "loss": 4.1135, |
| "step": 104960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.827314996498212e-05, |
| "loss": 4.1231, |
| "step": 105472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.82647640174716e-05, |
| "loss": 4.1178, |
| "step": 105984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.825637806996108e-05, |
| "loss": 4.1172, |
| "step": 106496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8248008501254296e-05, |
| "loss": 4.1104, |
| "step": 107008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.823962255374377e-05, |
| "loss": 4.1057, |
| "step": 107520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.823123660623325e-05, |
| "loss": 4.1201, |
| "step": 108032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.822285065872273e-05, |
| "loss": 4.1074, |
| "step": 108544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8214481090015945e-05, |
| "loss": 4.1061, |
| "step": 109056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.820609514250542e-05, |
| "loss": 4.105, |
| "step": 109568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.81977091949949e-05, |
| "loss": 4.1106, |
| "step": 110080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.818932324748438e-05, |
| "loss": 4.0907, |
| "step": 110592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8180953678777594e-05, |
| "loss": 4.1112, |
| "step": 111104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8172567731267074e-05, |
| "loss": 4.0985, |
| "step": 111616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8164181783756554e-05, |
| "loss": 4.0997, |
| "step": 112128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8155795836246034e-05, |
| "loss": 4.1061, |
| "step": 112640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.814742626753924e-05, |
| "loss": 4.0959, |
| "step": 113152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.813904032002872e-05, |
| "loss": 4.1145, |
| "step": 113664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.81306543725182e-05, |
| "loss": 4.1171, |
| "step": 114176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.812226842500768e-05, |
| "loss": 4.1107, |
| "step": 114688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.811389885630089e-05, |
| "loss": 4.0947, |
| "step": 115200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.810551290879037e-05, |
| "loss": 4.0886, |
| "step": 115712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.809714334008359e-05, |
| "loss": 4.0839, |
| "step": 116224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.808875739257306e-05, |
| "loss": 4.0951, |
| "step": 116736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.808037144506255e-05, |
| "loss": 4.109, |
| "step": 117248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.807198549755203e-05, |
| "loss": 4.0976, |
| "step": 117760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.806359955004151e-05, |
| "loss": 4.0828, |
| "step": 118272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.805521360253099e-05, |
| "loss": 4.0772, |
| "step": 118784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.804682765502047e-05, |
| "loss": 4.0903, |
| "step": 119296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.803844170750995e-05, |
| "loss": 4.0786, |
| "step": 119808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8030072138803156e-05, |
| "loss": 4.0844, |
| "step": 120320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8021686191292636e-05, |
| "loss": 4.0859, |
| "step": 120832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8013300243782116e-05, |
| "loss": 4.0904, |
| "step": 121344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.8004914296271596e-05, |
| "loss": 4.081, |
| "step": 121856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7996561106368535e-05, |
| "loss": 4.0798, |
| "step": 122368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7988175158858015e-05, |
| "loss": 4.0729, |
| "step": 122880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7979789211347494e-05, |
| "loss": 4.0835, |
| "step": 123392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.797140326383698e-05, |
| "loss": 4.0806, |
| "step": 123904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.796301731632646e-05, |
| "loss": 4.0614, |
| "step": 124416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.795463136881594e-05, |
| "loss": 4.0723, |
| "step": 124928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.794624542130542e-05, |
| "loss": 4.0813, |
| "step": 125440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.79378594737949e-05, |
| "loss": 4.0875, |
| "step": 125952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.792948990508811e-05, |
| "loss": 4.076, |
| "step": 126464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.792110395757759e-05, |
| "loss": 4.0674, |
| "step": 126976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.791271801006707e-05, |
| "loss": 4.0799, |
| "step": 127488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.790433206255655e-05, |
| "loss": 4.0567, |
| "step": 128000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.789596249384976e-05, |
| "loss": 4.0724, |
| "step": 128512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.788757654633924e-05, |
| "loss": 4.0801, |
| "step": 129024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.787920697763245e-05, |
| "loss": 4.0702, |
| "step": 129536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7870821030121935e-05, |
| "loss": 4.0703, |
| "step": 130048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7862435082611415e-05, |
| "loss": 4.0482, |
| "step": 130560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7854049135100895e-05, |
| "loss": 4.0531, |
| "step": 131072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7845663187590375e-05, |
| "loss": 4.0572, |
| "step": 131584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7837277240079855e-05, |
| "loss": 4.0579, |
| "step": 132096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7828891292569335e-05, |
| "loss": 4.0657, |
| "step": 132608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7820505345058815e-05, |
| "loss": 4.0537, |
| "step": 133120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7812135776352024e-05, |
| "loss": 4.0492, |
| "step": 133632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7803749828841504e-05, |
| "loss": 4.0607, |
| "step": 134144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7795363881330984e-05, |
| "loss": 4.0674, |
| "step": 134656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7786977933820464e-05, |
| "loss": 4.0529, |
| "step": 135168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.777860836511367e-05, |
| "loss": 4.0605, |
| "step": 135680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.777022241760315e-05, |
| "loss": 4.0505, |
| "step": 136192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.776183647009263e-05, |
| "loss": 4.0533, |
| "step": 136704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.775345052258212e-05, |
| "loss": 4.0583, |
| "step": 137216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.774509733267906e-05, |
| "loss": 4.046, |
| "step": 137728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.773671138516854e-05, |
| "loss": 4.0456, |
| "step": 138240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.772832543765802e-05, |
| "loss": 4.0442, |
| "step": 138752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.77199394901475e-05, |
| "loss": 4.0423, |
| "step": 139264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.771155354263698e-05, |
| "loss": 4.0476, |
| "step": 139776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.770316759512646e-05, |
| "loss": 4.05, |
| "step": 140288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.769478164761594e-05, |
| "loss": 4.0542, |
| "step": 140800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.768639570010542e-05, |
| "loss": 4.0354, |
| "step": 141312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7678026131398626e-05, |
| "loss": 4.0421, |
| "step": 141824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7669640183888106e-05, |
| "loss": 4.0555, |
| "step": 142336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7661254236377586e-05, |
| "loss": 4.0397, |
| "step": 142848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.765286828886707e-05, |
| "loss": 4.0539, |
| "step": 143360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.764449872016028e-05, |
| "loss": 4.0402, |
| "step": 143872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.763611277264976e-05, |
| "loss": 4.0558, |
| "step": 144384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.762772682513924e-05, |
| "loss": 4.0446, |
| "step": 144896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.761934087762872e-05, |
| "loss": 4.0286, |
| "step": 145408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.761097130892193e-05, |
| "loss": 4.0282, |
| "step": 145920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.760258536141141e-05, |
| "loss": 4.032, |
| "step": 146432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.759419941390089e-05, |
| "loss": 4.0449, |
| "step": 146944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.758581346639037e-05, |
| "loss": 4.0412, |
| "step": 147456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.757744389768358e-05, |
| "loss": 4.0451, |
| "step": 147968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7569074328976796e-05, |
| "loss": 4.0366, |
| "step": 148480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7560688381466276e-05, |
| "loss": 4.0289, |
| "step": 148992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7552302433955756e-05, |
| "loss": 4.0287, |
| "step": 149504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7543916486445236e-05, |
| "loss": 4.0237, |
| "step": 150016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7535530538934716e-05, |
| "loss": 4.0329, |
| "step": 150528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7527144591424196e-05, |
| "loss": 4.0465, |
| "step": 151040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.751875864391367e-05, |
| "loss": 4.0318, |
| "step": 151552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.751037269640315e-05, |
| "loss": 4.0258, |
| "step": 152064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.7502003127696365e-05, |
| "loss": 4.0241, |
| "step": 152576 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.025460720062256, |
| "eval_runtime": 588.8804, |
| "eval_samples_per_second": 647.994, |
| "eval_steps_per_second": 20.25, |
| "step": 152638 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7493633558989574e-05, |
| "loss": 4.0321, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7485247611479054e-05, |
| "loss": 4.0269, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7476861663968534e-05, |
| "loss": 4.0179, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7468475716458014e-05, |
| "loss": 4.0223, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7460089768947494e-05, |
| "loss": 4.0194, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7451703821436974e-05, |
| "loss": 3.9993, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7443317873926453e-05, |
| "loss": 4.0086, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7434931926415933e-05, |
| "loss": 4.0242, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7426545978905413e-05, |
| "loss": 4.0067, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.741817641019862e-05, |
| "loss": 4.0143, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.74097904626881e-05, |
| "loss": 4.0201, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.740140451517758e-05, |
| "loss": 4.0178, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.739303494647079e-05, |
| "loss": 4.0068, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.738464899896027e-05, |
| "loss": 4.0138, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.737626305144975e-05, |
| "loss": 4.0061, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.736787710393923e-05, |
| "loss": 3.9991, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735949115642871e-05, |
| "loss": 4.0034, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73511052089182e-05, |
| "loss": 4.0082, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.734271926140768e-05, |
| "loss": 4.0109, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.733433331389716e-05, |
| "loss": 4.0134, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732594736638664e-05, |
| "loss": 3.9979, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.731756141887612e-05, |
| "loss": 4.0119, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73091754713656e-05, |
| "loss": 3.9996, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730080590265881e-05, |
| "loss": 4.0055, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.729241995514829e-05, |
| "loss": 4.0014, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.728403400763777e-05, |
| "loss": 3.9995, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.727564806012725e-05, |
| "loss": 4.0071, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.726726211261673e-05, |
| "loss": 3.9887, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.725887616510621e-05, |
| "loss": 4.0015, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.725049021759568e-05, |
| "loss": 3.9891, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.724210427008517e-05, |
| "loss": 3.9933, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.723373470137838e-05, |
| "loss": 3.996, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.722536513267159e-05, |
| "loss": 3.9996, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.721697918516107e-05, |
| "loss": 3.9942, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720859323765055e-05, |
| "loss": 3.9859, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.720020729014003e-05, |
| "loss": 4.0009, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7191821342629505e-05, |
| "loss": 3.9919, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7183435395118985e-05, |
| "loss": 4.0014, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7175049447608465e-05, |
| "loss": 3.9866, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7166663500097945e-05, |
| "loss": 3.9749, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7158293931391154e-05, |
| "loss": 3.9776, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714992436268437e-05, |
| "loss": 3.9817, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714153841517385e-05, |
| "loss": 3.9964, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.713315246766333e-05, |
| "loss": 3.9758, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.712476652015281e-05, |
| "loss": 3.9894, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.711638057264229e-05, |
| "loss": 3.9841, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7108011003935505e-05, |
| "loss": 3.9848, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709962505642498e-05, |
| "loss": 3.9902, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709123910891446e-05, |
| "loss": 3.9649, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.708285316140394e-05, |
| "loss": 3.9707, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7074483592697154e-05, |
| "loss": 3.966, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.706609764518663e-05, |
| "loss": 3.9875, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.705771169767611e-05, |
| "loss": 3.9669, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704932575016559e-05, |
| "loss": 3.9867, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.70409561814588e-05, |
| "loss": 3.9631, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.703257023394828e-05, |
| "loss": 3.9636, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.702418428643776e-05, |
| "loss": 3.9698, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.701579833892724e-05, |
| "loss": 3.964, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.700742877022045e-05, |
| "loss": 3.9708, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699904282270993e-05, |
| "loss": 3.9663, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.699065687519941e-05, |
| "loss": 3.9542, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.698227092768889e-05, |
| "loss": 3.971, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.697388498017837e-05, |
| "loss": 3.9607, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.696549903266785e-05, |
| "loss": 3.9651, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.695711308515733e-05, |
| "loss": 3.9583, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694872713764681e-05, |
| "loss": 3.9677, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694035756894003e-05, |
| "loss": 3.9448, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.693198800023324e-05, |
| "loss": 3.9681, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.692360205272272e-05, |
| "loss": 3.9535, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.69152161052122e-05, |
| "loss": 3.962, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6906830157701677e-05, |
| "loss": 3.9618, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6898444210191157e-05, |
| "loss": 3.9554, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6890074641484366e-05, |
| "loss": 3.9716, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6881688693973846e-05, |
| "loss": 3.9778, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6873302746463326e-05, |
| "loss": 3.9727, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6864916798952806e-05, |
| "loss": 3.9615, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6856547230246015e-05, |
| "loss": 3.9505, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6848161282735495e-05, |
| "loss": 3.9429, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683977533522498e-05, |
| "loss": 3.9613, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683138938771446e-05, |
| "loss": 3.9694, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682301981900767e-05, |
| "loss": 3.9632, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681463387149715e-05, |
| "loss": 3.9478, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.680624792398663e-05, |
| "loss": 3.9426, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679786197647611e-05, |
| "loss": 3.9527, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678947602896559e-05, |
| "loss": 3.9415, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.67811064602588e-05, |
| "loss": 3.9484, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.677272051274828e-05, |
| "loss": 3.9545, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.676433456523776e-05, |
| "loss": 3.9556, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.675594861772724e-05, |
| "loss": 3.951, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.674757904902045e-05, |
| "loss": 3.9533, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6739193101509935e-05, |
| "loss": 3.9419, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6730807153999415e-05, |
| "loss": 3.9539, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6722421206488895e-05, |
| "loss": 3.9478, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6714035258978375e-05, |
| "loss": 3.9376, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6705665690271584e-05, |
| "loss": 3.9417, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6697279742761064e-05, |
| "loss": 3.952, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6688893795250544e-05, |
| "loss": 3.9605, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6680507847740024e-05, |
| "loss": 3.9463, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6672121900229504e-05, |
| "loss": 3.9441, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.666375233152271e-05, |
| "loss": 3.9504, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.665536638401219e-05, |
| "loss": 3.9319, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.664698043650167e-05, |
| "loss": 3.9498, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663859448899115e-05, |
| "loss": 3.9527, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663022492028437e-05, |
| "loss": 3.9429, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.662183897277385e-05, |
| "loss": 3.9509, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.661345302526333e-05, |
| "loss": 3.9271, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.660506707775281e-05, |
| "loss": 3.9255, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.659669750904602e-05, |
| "loss": 3.9342, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65883115615355e-05, |
| "loss": 3.9357, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657992561402498e-05, |
| "loss": 3.9467, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657153966651446e-05, |
| "loss": 3.9312, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6563170097807667e-05, |
| "loss": 3.9274, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6554784150297146e-05, |
| "loss": 3.9351, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6546398202786626e-05, |
| "loss": 3.947, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6538012255276106e-05, |
| "loss": 3.9371, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6529626307765586e-05, |
| "loss": 3.9456, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.65212567390588e-05, |
| "loss": 3.9301, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.651287079154828e-05, |
| "loss": 3.9306, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.650448484403776e-05, |
| "loss": 3.9443, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6496098896527235e-05, |
| "loss": 3.9284, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.648772932782045e-05, |
| "loss": 3.928, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.647934338030993e-05, |
| "loss": 3.928, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6470957432799404e-05, |
| "loss": 3.9244, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6462571485288884e-05, |
| "loss": 3.9327, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.64542019165821e-05, |
| "loss": 3.9315, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.644581596907158e-05, |
| "loss": 3.9404, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.643743002156106e-05, |
| "loss": 3.9211, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642904407405054e-05, |
| "loss": 3.9244, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642065812654002e-05, |
| "loss": 3.9456, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6412288557833236e-05, |
| "loss": 3.9236, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.640390261032271e-05, |
| "loss": 3.9404, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.639551666281219e-05, |
| "loss": 3.928, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638713071530167e-05, |
| "loss": 3.9409, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637876114659488e-05, |
| "loss": 3.9318, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637037519908436e-05, |
| "loss": 3.9184, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.636198925157384e-05, |
| "loss": 3.9172, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.635360330406332e-05, |
| "loss": 3.9191, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.634523373535653e-05, |
| "loss": 3.9308, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6336847787846014e-05, |
| "loss": 3.9364, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6328461840335494e-05, |
| "loss": 3.932, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6320075892824974e-05, |
| "loss": 3.9266, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631170632411818e-05, |
| "loss": 3.9182, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.630332037660766e-05, |
| "loss": 3.9199, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.629493442909714e-05, |
| "loss": 3.9134, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.628654848158662e-05, |
| "loss": 3.9251, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.62781625340761e-05, |
| "loss": 3.933, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626979296536931e-05, |
| "loss": 3.9224, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626140701785879e-05, |
| "loss": 3.9198, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.625302107034827e-05, |
| "loss": 3.9167, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.950981855392456, |
| "eval_runtime": 570.4475, |
| "eval_samples_per_second": 668.933, |
| "eval_steps_per_second": 20.905, |
| "step": 228957 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.624463512283775e-05, |
| "loss": 3.922, |
| "step": 229376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.623624917532723e-05, |
| "loss": 3.9261, |
| "step": 229888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.622786322781671e-05, |
| "loss": 3.9129, |
| "step": 230400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.62194772803062e-05, |
| "loss": 3.9125, |
| "step": 230912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.621109133279568e-05, |
| "loss": 3.9154, |
| "step": 231424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.620270538528516e-05, |
| "loss": 3.8958, |
| "step": 231936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.619431943777464e-05, |
| "loss": 3.9039, |
| "step": 232448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.618593349026412e-05, |
| "loss": 3.917, |
| "step": 232960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.617756392155733e-05, |
| "loss": 3.9061, |
| "step": 233472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6169194352850536e-05, |
| "loss": 3.913, |
| "step": 233984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6160808405340016e-05, |
| "loss": 3.9137, |
| "step": 234496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6152422457829496e-05, |
| "loss": 3.9117, |
| "step": 235008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6144036510318976e-05, |
| "loss": 3.9054, |
| "step": 235520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6135650562808456e-05, |
| "loss": 3.9151, |
| "step": 236032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6127264615297936e-05, |
| "loss": 3.9028, |
| "step": 236544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6118878667787416e-05, |
| "loss": 3.9001, |
| "step": 237056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6110492720276896e-05, |
| "loss": 3.8998, |
| "step": 237568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.610212315157011e-05, |
| "loss": 3.9074, |
| "step": 238080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.609373720405959e-05, |
| "loss": 3.9084, |
| "step": 238592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6085351256549065e-05, |
| "loss": 3.9138, |
| "step": 239104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6076965309038545e-05, |
| "loss": 3.8993, |
| "step": 239616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.606859574033176e-05, |
| "loss": 3.9119, |
| "step": 240128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.606020979282124e-05, |
| "loss": 3.9023, |
| "step": 240640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.6051823845310714e-05, |
| "loss": 3.9022, |
| "step": 241152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.604345427660393e-05, |
| "loss": 3.9045, |
| "step": 241664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.603506832909341e-05, |
| "loss": 3.9008, |
| "step": 242176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.602668238158289e-05, |
| "loss": 3.9079, |
| "step": 242688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.601829643407237e-05, |
| "loss": 3.8921, |
| "step": 243200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.600991048656185e-05, |
| "loss": 3.9049, |
| "step": 243712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.600152453905133e-05, |
| "loss": 3.8927, |
| "step": 244224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.599313859154081e-05, |
| "loss": 3.8942, |
| "step": 244736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.598475264403029e-05, |
| "loss": 3.8996, |
| "step": 245248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.59763830753235e-05, |
| "loss": 3.9062, |
| "step": 245760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.596799712781298e-05, |
| "loss": 3.8943, |
| "step": 246272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.595961118030246e-05, |
| "loss": 3.8957, |
| "step": 246784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.595122523279194e-05, |
| "loss": 3.8999, |
| "step": 247296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.594285566408515e-05, |
| "loss": 3.8962, |
| "step": 247808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.593446971657463e-05, |
| "loss": 3.9104, |
| "step": 248320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.592608376906411e-05, |
| "loss": 3.8925, |
| "step": 248832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.591769782155359e-05, |
| "loss": 3.88, |
| "step": 249344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.59093282528468e-05, |
| "loss": 3.8868, |
| "step": 249856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.590094230533628e-05, |
| "loss": 3.8825, |
| "step": 250368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.589255635782576e-05, |
| "loss": 3.9028, |
| "step": 250880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.588417041031524e-05, |
| "loss": 3.886, |
| "step": 251392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.587580084160845e-05, |
| "loss": 3.8951, |
| "step": 251904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.586741489409793e-05, |
| "loss": 3.8893, |
| "step": 252416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.585902894658741e-05, |
| "loss": 3.8932, |
| "step": 252928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.585064299907689e-05, |
| "loss": 3.8969, |
| "step": 253440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.58422734303701e-05, |
| "loss": 3.8766, |
| "step": 253952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.583388748285958e-05, |
| "loss": 3.8738, |
| "step": 254464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.582550153534906e-05, |
| "loss": 3.8813, |
| "step": 254976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.581711558783854e-05, |
| "loss": 3.8953, |
| "step": 255488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.580874601913176e-05, |
| "loss": 3.876, |
| "step": 256000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.580036007162124e-05, |
| "loss": 3.8933, |
| "step": 256512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.579197412411072e-05, |
| "loss": 3.8734, |
| "step": 257024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5783604555403926e-05, |
| "loss": 3.8709, |
| "step": 257536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5775218607893406e-05, |
| "loss": 3.8834, |
| "step": 258048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5766832660382886e-05, |
| "loss": 3.8716, |
| "step": 258560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5758446712872366e-05, |
| "loss": 3.8882, |
| "step": 259072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5750060765361846e-05, |
| "loss": 3.8801, |
| "step": 259584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5741674817851326e-05, |
| "loss": 3.8652, |
| "step": 260096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5733288870340806e-05, |
| "loss": 3.88, |
| "step": 260608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5724919301634015e-05, |
| "loss": 3.8717, |
| "step": 261120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5716533354123495e-05, |
| "loss": 3.8797, |
| "step": 261632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5708147406612975e-05, |
| "loss": 3.8679, |
| "step": 262144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.569976145910246e-05, |
| "loss": 3.8817, |
| "step": 262656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.569139189039567e-05, |
| "loss": 3.8612, |
| "step": 263168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.568300594288515e-05, |
| "loss": 3.8789, |
| "step": 263680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.567461999537463e-05, |
| "loss": 3.8655, |
| "step": 264192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.566623404786411e-05, |
| "loss": 3.8725, |
| "step": 264704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.565784810035359e-05, |
| "loss": 3.8767, |
| "step": 265216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.56494785316468e-05, |
| "loss": 3.8737, |
| "step": 265728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.564109258413628e-05, |
| "loss": 3.8791, |
| "step": 266240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.563270663662576e-05, |
| "loss": 3.8915, |
| "step": 266752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.562433706791897e-05, |
| "loss": 3.8858, |
| "step": 267264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.561595112040845e-05, |
| "loss": 3.875, |
| "step": 267776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.560756517289793e-05, |
| "loss": 3.868, |
| "step": 268288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5599179225387415e-05, |
| "loss": 3.8605, |
| "step": 268800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5590809656680624e-05, |
| "loss": 3.8755, |
| "step": 269312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5582423709170104e-05, |
| "loss": 3.8855, |
| "step": 269824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5574037761659584e-05, |
| "loss": 3.8809, |
| "step": 270336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.5565651814149064e-05, |
| "loss": 3.8619, |
| "step": 270848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.555728224544227e-05, |
| "loss": 3.8596, |
| "step": 271360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.554889629793175e-05, |
| "loss": 3.8742, |
| "step": 271872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.554051035042123e-05, |
| "loss": 3.8539, |
| "step": 272384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.553212440291071e-05, |
| "loss": 3.8681, |
| "step": 272896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.552375483420392e-05, |
| "loss": 3.8691, |
| "step": 273408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.55153688866934e-05, |
| "loss": 3.8716, |
| "step": 273920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.550698293918288e-05, |
| "loss": 3.8701, |
| "step": 274432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.549859699167237e-05, |
| "loss": 3.8729, |
| "step": 274944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.549022742296558e-05, |
| "loss": 3.8573, |
| "step": 275456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.548184147545506e-05, |
| "loss": 3.8682, |
| "step": 275968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.547345552794454e-05, |
| "loss": 3.8656, |
| "step": 276480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.546506958043402e-05, |
| "loss": 3.8587, |
| "step": 276992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.545670001172723e-05, |
| "loss": 3.8582, |
| "step": 277504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.544831406421671e-05, |
| "loss": 3.8747, |
| "step": 278016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.543992811670619e-05, |
| "loss": 3.8734, |
| "step": 278528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5431542169195667e-05, |
| "loss": 3.8702, |
| "step": 279040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5423172600488876e-05, |
| "loss": 3.8617, |
| "step": 279552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5414786652978356e-05, |
| "loss": 3.8708, |
| "step": 280064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5406400705467836e-05, |
| "loss": 3.855, |
| "step": 280576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.539801475795732e-05, |
| "loss": 3.8669, |
| "step": 281088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.538964518925053e-05, |
| "loss": 3.8742, |
| "step": 281600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.538125924174001e-05, |
| "loss": 3.8618, |
| "step": 282112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.537287329422949e-05, |
| "loss": 3.8762, |
| "step": 282624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.536448734671897e-05, |
| "loss": 3.8425, |
| "step": 283136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.535611777801218e-05, |
| "loss": 3.8488, |
| "step": 283648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.534773183050166e-05, |
| "loss": 3.8573, |
| "step": 284160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.533934588299114e-05, |
| "loss": 3.8581, |
| "step": 284672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5330959935480613e-05, |
| "loss": 3.8695, |
| "step": 285184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.532259036677383e-05, |
| "loss": 3.851, |
| "step": 285696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.531420441926331e-05, |
| "loss": 3.8488, |
| "step": 286208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5305834850556525e-05, |
| "loss": 3.8574, |
| "step": 286720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5297448903046005e-05, |
| "loss": 3.8687, |
| "step": 287232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5289062955535485e-05, |
| "loss": 3.8616, |
| "step": 287744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5280677008024965e-05, |
| "loss": 3.867, |
| "step": 288256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5272291060514445e-05, |
| "loss": 3.854, |
| "step": 288768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.526390511300392e-05, |
| "loss": 3.8524, |
| "step": 289280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.52555191654934e-05, |
| "loss": 3.8693, |
| "step": 289792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.524713321798288e-05, |
| "loss": 3.8515, |
| "step": 290304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.523876364927609e-05, |
| "loss": 3.8488, |
| "step": 290816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.523037770176557e-05, |
| "loss": 3.8538, |
| "step": 291328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.522199175425505e-05, |
| "loss": 3.846, |
| "step": 291840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.521360580674453e-05, |
| "loss": 3.8587, |
| "step": 292352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.520523623803774e-05, |
| "loss": 3.8549, |
| "step": 292864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.519685029052722e-05, |
| "loss": 3.8669, |
| "step": 293376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.51884643430167e-05, |
| "loss": 3.8458, |
| "step": 293888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.518009477430992e-05, |
| "loss": 3.8466, |
| "step": 294400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.517170882679939e-05, |
| "loss": 3.8728, |
| "step": 294912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.516332287928887e-05, |
| "loss": 3.8497, |
| "step": 295424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.515493693177835e-05, |
| "loss": 3.8625, |
| "step": 295936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.514656736307156e-05, |
| "loss": 3.8575, |
| "step": 296448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.513818141556104e-05, |
| "loss": 3.8642, |
| "step": 296960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.512979546805052e-05, |
| "loss": 3.8561, |
| "step": 297472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.512140952054e-05, |
| "loss": 3.8488, |
| "step": 297984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.511303995183322e-05, |
| "loss": 3.8418, |
| "step": 298496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5104654004322697e-05, |
| "loss": 3.8454, |
| "step": 299008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5096268056812177e-05, |
| "loss": 3.8546, |
| "step": 299520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5087882109301657e-05, |
| "loss": 3.8637, |
| "step": 300032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5079512540594866e-05, |
| "loss": 3.8556, |
| "step": 300544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5071126593084346e-05, |
| "loss": 3.8583, |
| "step": 301056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5062740645573826e-05, |
| "loss": 3.8444, |
| "step": 301568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5054354698063305e-05, |
| "loss": 3.8486, |
| "step": 302080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5045985129356515e-05, |
| "loss": 3.838, |
| "step": 302592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5037599181845995e-05, |
| "loss": 3.8521, |
| "step": 303104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5029213234335474e-05, |
| "loss": 3.8555, |
| "step": 303616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.5020827286824954e-05, |
| "loss": 3.8576, |
| "step": 304128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.501245771811817e-05, |
| "loss": 3.8456, |
| "step": 304640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.500407177060765e-05, |
| "loss": 3.8423, |
| "step": 305152 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.9093804359436035, |
| "eval_runtime": 570.3829, |
| "eval_samples_per_second": 669.008, |
| "eval_steps_per_second": 20.907, |
| "step": 305276 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.499570220190086e-05, |
| "loss": 3.8518, |
| "step": 305664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.498731625439034e-05, |
| "loss": 3.85, |
| "step": 306176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.497893030687982e-05, |
| "loss": 3.843, |
| "step": 306688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.49705443593693e-05, |
| "loss": 3.842, |
| "step": 307200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.496215841185878e-05, |
| "loss": 3.8437, |
| "step": 307712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.495378884315199e-05, |
| "loss": 3.8304, |
| "step": 308224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.494540289564147e-05, |
| "loss": 3.8344, |
| "step": 308736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.493701694813095e-05, |
| "loss": 3.8411, |
| "step": 309248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492863100062043e-05, |
| "loss": 3.8398, |
| "step": 309760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.492026143191364e-05, |
| "loss": 3.8443, |
| "step": 310272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4911875484403124e-05, |
| "loss": 3.8425, |
| "step": 310784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4903489536892604e-05, |
| "loss": 3.8374, |
| "step": 311296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4895103589382084e-05, |
| "loss": 3.8373, |
| "step": 311808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4886717641871564e-05, |
| "loss": 3.8445, |
| "step": 312320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4878331694361044e-05, |
| "loss": 3.8353, |
| "step": 312832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.4869945746850524e-05, |
| "loss": 3.8293, |
| "step": 313344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.486157617814373e-05, |
| "loss": 3.8295, |
| "step": 313856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.485319023063321e-05, |
| "loss": 3.8417, |
| "step": 314368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.484480428312269e-05, |
| "loss": 3.8355, |
| "step": 314880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.483641833561217e-05, |
| "loss": 3.8469, |
| "step": 315392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.482803238810165e-05, |
| "loss": 3.8297, |
| "step": 315904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481964644059113e-05, |
| "loss": 3.8435, |
| "step": 316416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.481126049308061e-05, |
| "loss": 3.8354, |
| "step": 316928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.480287454557009e-05, |
| "loss": 3.8349, |
| "step": 317440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.479450497686331e-05, |
| "loss": 3.8333, |
| "step": 317952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.478611902935279e-05, |
| "loss": 3.8313, |
| "step": 318464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.477773308184227e-05, |
| "loss": 3.8414, |
| "step": 318976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476934713433175e-05, |
| "loss": 3.825, |
| "step": 319488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.476096118682122e-05, |
| "loss": 3.8355, |
| "step": 320000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.47525752393107e-05, |
| "loss": 3.8287, |
| "step": 320512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.474420567060392e-05, |
| "loss": 3.8228, |
| "step": 321024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.47358197230934e-05, |
| "loss": 3.8344, |
| "step": 321536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.472743377558287e-05, |
| "loss": 3.8379, |
| "step": 322048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471904782807235e-05, |
| "loss": 3.8304, |
| "step": 322560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.471066188056183e-05, |
| "loss": 3.8273, |
| "step": 323072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.470227593305131e-05, |
| "loss": 3.8303, |
| "step": 323584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.469388998554079e-05, |
| "loss": 3.832, |
| "step": 324096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.468550403803028e-05, |
| "loss": 3.8423, |
| "step": 324608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4677134469323486e-05, |
| "loss": 3.827, |
| "step": 325120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4668748521812966e-05, |
| "loss": 3.8135, |
| "step": 325632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4660362574302446e-05, |
| "loss": 3.8216, |
| "step": 326144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4651976626791926e-05, |
| "loss": 3.8191, |
| "step": 326656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4643607058085135e-05, |
| "loss": 3.8337, |
| "step": 327168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4635221110574615e-05, |
| "loss": 3.8236, |
| "step": 327680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4626835163064095e-05, |
| "loss": 3.8331, |
| "step": 328192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4618449215553575e-05, |
| "loss": 3.8254, |
| "step": 328704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4610079646846784e-05, |
| "loss": 3.8255, |
| "step": 329216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.460171007814e-05, |
| "loss": 3.8344, |
| "step": 329728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.459332413062948e-05, |
| "loss": 3.8155, |
| "step": 330240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.458493818311896e-05, |
| "loss": 3.8078, |
| "step": 330752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.457655223560844e-05, |
| "loss": 3.8212, |
| "step": 331264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.456818266690165e-05, |
| "loss": 3.826, |
| "step": 331776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.455979671939113e-05, |
| "loss": 3.8121, |
| "step": 332288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.455141077188061e-05, |
| "loss": 3.829, |
| "step": 332800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.454302482437009e-05, |
| "loss": 3.8106, |
| "step": 333312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.453463887685957e-05, |
| "loss": 3.8056, |
| "step": 333824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.452625292934905e-05, |
| "loss": 3.8197, |
| "step": 334336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.451788336064226e-05, |
| "loss": 3.8087, |
| "step": 334848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450949741313174e-05, |
| "loss": 3.8296, |
| "step": 335360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.450111146562122e-05, |
| "loss": 3.819, |
| "step": 335872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.44927255181107e-05, |
| "loss": 3.7982, |
| "step": 336384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.448433957060018e-05, |
| "loss": 3.8152, |
| "step": 336896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4475953623089664e-05, |
| "loss": 3.8092, |
| "step": 337408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4467567675579144e-05, |
| "loss": 3.8223, |
| "step": 337920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4459181728068624e-05, |
| "loss": 3.8047, |
| "step": 338432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.445081215936183e-05, |
| "loss": 3.8163, |
| "step": 338944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.444242621185131e-05, |
| "loss": 3.8003, |
| "step": 339456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.443404026434079e-05, |
| "loss": 3.8144, |
| "step": 339968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.442565431683027e-05, |
| "loss": 3.8059, |
| "step": 340480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.441726836931975e-05, |
| "loss": 3.8087, |
| "step": 340992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.440888242180923e-05, |
| "loss": 3.8165, |
| "step": 341504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4400496474298706e-05, |
| "loss": 3.8091, |
| "step": 342016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.439212690559192e-05, |
| "loss": 3.8184, |
| "step": 342528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.43837409580814e-05, |
| "loss": 3.8277, |
| "step": 343040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.437535501057088e-05, |
| "loss": 3.8258, |
| "step": 343552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.436696906306036e-05, |
| "loss": 3.8147, |
| "step": 344064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435859949435358e-05, |
| "loss": 3.8074, |
| "step": 344576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.435022992564679e-05, |
| "loss": 3.8002, |
| "step": 345088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.434184397813627e-05, |
| "loss": 3.8123, |
| "step": 345600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.433345803062575e-05, |
| "loss": 3.8281, |
| "step": 346112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.432507208311523e-05, |
| "loss": 3.8165, |
| "step": 346624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4316702514408436e-05, |
| "loss": 3.8048, |
| "step": 347136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4308316566897916e-05, |
| "loss": 3.7946, |
| "step": 347648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4299930619387396e-05, |
| "loss": 3.8176, |
| "step": 348160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4291544671876876e-05, |
| "loss": 3.7932, |
| "step": 348672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4283158724366356e-05, |
| "loss": 3.8049, |
| "step": 349184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4274772776855836e-05, |
| "loss": 3.8068, |
| "step": 349696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4266386829345316e-05, |
| "loss": 3.8097, |
| "step": 350208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.4258000881834796e-05, |
| "loss": 3.812, |
| "step": 350720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424964769193174e-05, |
| "loss": 3.8158, |
| "step": 351232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.424126174442122e-05, |
| "loss": 3.7994, |
| "step": 351744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.42328757969107e-05, |
| "loss": 3.8085, |
| "step": 352256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.422448984940018e-05, |
| "loss": 3.8075, |
| "step": 352768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4216103901889654e-05, |
| "loss": 3.8005, |
| "step": 353280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4207717954379134e-05, |
| "loss": 3.7988, |
| "step": 353792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4199332006868614e-05, |
| "loss": 3.8149, |
| "step": 354304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.419096243816182e-05, |
| "loss": 3.8138, |
| "step": 354816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.418257649065131e-05, |
| "loss": 3.8145, |
| "step": 355328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.417419054314079e-05, |
| "loss": 3.8038, |
| "step": 355840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.416580459563027e-05, |
| "loss": 3.8095, |
| "step": 356352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.415741864811975e-05, |
| "loss": 3.7967, |
| "step": 356864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.414903270060923e-05, |
| "loss": 3.8106, |
| "step": 357376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.414064675309871e-05, |
| "loss": 3.8132, |
| "step": 357888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.413227718439192e-05, |
| "loss": 3.8044, |
| "step": 358400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.41238912368814e-05, |
| "loss": 3.8188, |
| "step": 358912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.411550528937088e-05, |
| "loss": 3.7879, |
| "step": 359424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.410711934186036e-05, |
| "loss": 3.792, |
| "step": 359936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409874977315357e-05, |
| "loss": 3.7962, |
| "step": 360448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.409036382564305e-05, |
| "loss": 3.7997, |
| "step": 360960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.408197787813253e-05, |
| "loss": 3.8122, |
| "step": 361472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.407359193062201e-05, |
| "loss": 3.7991, |
| "step": 361984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.4065205983111494e-05, |
| "loss": 3.7871, |
| "step": 362496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.40568364144047e-05, |
| "loss": 3.7987, |
| "step": 363008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404845046689418e-05, |
| "loss": 3.8115, |
| "step": 363520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.404006451938366e-05, |
| "loss": 3.8032, |
| "step": 364032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.403167857187314e-05, |
| "loss": 3.8119, |
| "step": 364544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.402329262436262e-05, |
| "loss": 3.7967, |
| "step": 365056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.401492305565583e-05, |
| "loss": 3.7948, |
| "step": 365568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.400653710814531e-05, |
| "loss": 3.8117, |
| "step": 366080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.399815116063479e-05, |
| "loss": 3.7982, |
| "step": 366592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398976521312427e-05, |
| "loss": 3.7941, |
| "step": 367104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.398137926561375e-05, |
| "loss": 3.7939, |
| "step": 367616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.397300969690696e-05, |
| "loss": 3.7921, |
| "step": 368128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.396462374939645e-05, |
| "loss": 3.8019, |
| "step": 368640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.395623780188593e-05, |
| "loss": 3.8006, |
| "step": 369152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.394785185437541e-05, |
| "loss": 3.8089, |
| "step": 369664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393946590686489e-05, |
| "loss": 3.7896, |
| "step": 370176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.393107995935437e-05, |
| "loss": 3.7932, |
| "step": 370688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.392269401184384e-05, |
| "loss": 3.8121, |
| "step": 371200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.391430806433332e-05, |
| "loss": 3.7965, |
| "step": 371712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3905938495626536e-05, |
| "loss": 3.8046, |
| "step": 372224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3897552548116016e-05, |
| "loss": 3.8068, |
| "step": 372736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.388916660060549e-05, |
| "loss": 3.8058, |
| "step": 373248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3880797031898705e-05, |
| "loss": 3.8031, |
| "step": 373760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3872411084388185e-05, |
| "loss": 3.7895, |
| "step": 374272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3864025136877665e-05, |
| "loss": 3.7883, |
| "step": 374784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3855639189367145e-05, |
| "loss": 3.794, |
| "step": 375296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3847253241856625e-05, |
| "loss": 3.7996, |
| "step": 375808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3838867294346105e-05, |
| "loss": 3.8042, |
| "step": 376320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3830481346835585e-05, |
| "loss": 3.8018, |
| "step": 376832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3822111778128794e-05, |
| "loss": 3.8015, |
| "step": 377344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3813725830618274e-05, |
| "loss": 3.7922, |
| "step": 377856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3805339883107754e-05, |
| "loss": 3.7934, |
| "step": 378368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3796953935597234e-05, |
| "loss": 3.7824, |
| "step": 378880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3788567988086714e-05, |
| "loss": 3.8016, |
| "step": 379392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3780182040576194e-05, |
| "loss": 3.8023, |
| "step": 379904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3771796093065674e-05, |
| "loss": 3.8014, |
| "step": 380416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3763410145555154e-05, |
| "loss": 3.787, |
| "step": 380928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.375504057684836e-05, |
| "loss": 3.79, |
| "step": 381440 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.884523630142212, |
| "eval_runtime": 566.2085, |
| "eval_samples_per_second": 673.941, |
| "eval_steps_per_second": 21.061, |
| "step": 381595 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.374667100814158e-05, |
| "loss": 3.8052, |
| "step": 381952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.373828506063106e-05, |
| "loss": 3.7939, |
| "step": 382464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.372989911312054e-05, |
| "loss": 3.7922, |
| "step": 382976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.372151316561002e-05, |
| "loss": 3.786, |
| "step": 383488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.37131272180995e-05, |
| "loss": 3.7883, |
| "step": 384000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.370475764939271e-05, |
| "loss": 3.7789, |
| "step": 384512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.369637170188219e-05, |
| "loss": 3.7811, |
| "step": 385024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.368798575437167e-05, |
| "loss": 3.7823, |
| "step": 385536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.367959980686115e-05, |
| "loss": 3.7876, |
| "step": 386048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.367123023815436e-05, |
| "loss": 3.7949, |
| "step": 386560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.366284429064384e-05, |
| "loss": 3.7851, |
| "step": 387072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.365445834313332e-05, |
| "loss": 3.7849, |
| "step": 387584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.36460723956228e-05, |
| "loss": 3.7881, |
| "step": 388096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.363768644811228e-05, |
| "loss": 3.7911, |
| "step": 388608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.362931687940549e-05, |
| "loss": 3.7834, |
| "step": 389120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.362093093189497e-05, |
| "loss": 3.7769, |
| "step": 389632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.361254498438445e-05, |
| "loss": 3.776, |
| "step": 390144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.360415903687393e-05, |
| "loss": 3.7866, |
| "step": 390656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.359577308936341e-05, |
| "loss": 3.783, |
| "step": 391168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.358738714185289e-05, |
| "loss": 3.7957, |
| "step": 391680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.357900119434237e-05, |
| "loss": 3.7801, |
| "step": 392192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.3570615246831845e-05, |
| "loss": 3.7897, |
| "step": 392704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.356224567812506e-05, |
| "loss": 3.787, |
| "step": 393216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.355387610941827e-05, |
| "loss": 3.7838, |
| "step": 393728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.354549016190776e-05, |
| "loss": 3.7818, |
| "step": 394240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.353710421439724e-05, |
| "loss": 3.7807, |
| "step": 394752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.352871826688672e-05, |
| "loss": 3.7893, |
| "step": 395264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.35203323193762e-05, |
| "loss": 3.7725, |
| "step": 395776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.351194637186568e-05, |
| "loss": 3.7867, |
| "step": 396288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.350356042435515e-05, |
| "loss": 3.776, |
| "step": 396800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3495190855648366e-05, |
| "loss": 3.7686, |
| "step": 397312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3486804908137846e-05, |
| "loss": 3.7851, |
| "step": 397824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.347841896062732e-05, |
| "loss": 3.7843, |
| "step": 398336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.34700330131168e-05, |
| "loss": 3.7836, |
| "step": 398848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3461663444410015e-05, |
| "loss": 3.7787, |
| "step": 399360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3453277496899495e-05, |
| "loss": 3.7769, |
| "step": 399872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3444891549388975e-05, |
| "loss": 3.7801, |
| "step": 400384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3436505601878455e-05, |
| "loss": 3.7947, |
| "step": 400896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3428119654367935e-05, |
| "loss": 3.7772, |
| "step": 401408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3419733706857415e-05, |
| "loss": 3.7641, |
| "step": 401920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3411347759346895e-05, |
| "loss": 3.7722, |
| "step": 402432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3402978190640104e-05, |
| "loss": 3.7682, |
| "step": 402944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3394592243129584e-05, |
| "loss": 3.7827, |
| "step": 403456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3386206295619064e-05, |
| "loss": 3.7749, |
| "step": 403968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3377820348108544e-05, |
| "loss": 3.7787, |
| "step": 404480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3369434400598024e-05, |
| "loss": 3.7788, |
| "step": 404992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3361048453087503e-05, |
| "loss": 3.7748, |
| "step": 405504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.335267888438071e-05, |
| "loss": 3.7852, |
| "step": 406016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.334429293687019e-05, |
| "loss": 3.7653, |
| "step": 406528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.333590698935968e-05, |
| "loss": 3.7561, |
| "step": 407040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.332752104184916e-05, |
| "loss": 3.7746, |
| "step": 407552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.331915147314237e-05, |
| "loss": 3.7711, |
| "step": 408064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.331076552563185e-05, |
| "loss": 3.7676, |
| "step": 408576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.330237957812133e-05, |
| "loss": 3.7794, |
| "step": 409088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.329399363061081e-05, |
| "loss": 3.7598, |
| "step": 409600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.328560768310029e-05, |
| "loss": 3.7617, |
| "step": 410112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.327722173558977e-05, |
| "loss": 3.767, |
| "step": 410624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.326885216688298e-05, |
| "loss": 3.7602, |
| "step": 411136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.326046621937246e-05, |
| "loss": 3.7804, |
| "step": 411648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.325208027186194e-05, |
| "loss": 3.7672, |
| "step": 412160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.324369432435142e-05, |
| "loss": 3.7591, |
| "step": 412672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.32353083768409e-05, |
| "loss": 3.7628, |
| "step": 413184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.322692242933038e-05, |
| "loss": 3.7558, |
| "step": 413696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.321853648181986e-05, |
| "loss": 3.7769, |
| "step": 414208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.321015053430934e-05, |
| "loss": 3.7582, |
| "step": 414720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.320178096560255e-05, |
| "loss": 3.7732, |
| "step": 415232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.319339501809203e-05, |
| "loss": 3.7516, |
| "step": 415744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3185009070581506e-05, |
| "loss": 3.7646, |
| "step": 416256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3176623123070986e-05, |
| "loss": 3.7583, |
| "step": 416768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3168237175560466e-05, |
| "loss": 3.7602, |
| "step": 417280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3159851228049946e-05, |
| "loss": 3.7664, |
| "step": 417792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3151465280539426e-05, |
| "loss": 3.7645, |
| "step": 418304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3143095711832635e-05, |
| "loss": 3.767, |
| "step": 418816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3134709764322115e-05, |
| "loss": 3.7791, |
| "step": 419328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3126323816811595e-05, |
| "loss": 3.7777, |
| "step": 419840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.311795424810481e-05, |
| "loss": 3.7707, |
| "step": 420352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.310956830059429e-05, |
| "loss": 3.7603, |
| "step": 420864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.310118235308377e-05, |
| "loss": 3.7556, |
| "step": 421376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.309279640557325e-05, |
| "loss": 3.7613, |
| "step": 421888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.308442683686646e-05, |
| "loss": 3.7784, |
| "step": 422400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.307604088935594e-05, |
| "loss": 3.769, |
| "step": 422912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.306765494184542e-05, |
| "loss": 3.7588, |
| "step": 423424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.30592689943349e-05, |
| "loss": 3.7466, |
| "step": 423936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.305089942562811e-05, |
| "loss": 3.7737, |
| "step": 424448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.304251347811759e-05, |
| "loss": 3.7416, |
| "step": 424960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.303412753060707e-05, |
| "loss": 3.76, |
| "step": 425472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.302574158309655e-05, |
| "loss": 3.7595, |
| "step": 425984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3017355635586035e-05, |
| "loss": 3.7633, |
| "step": 426496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.3008986066879244e-05, |
| "loss": 3.7633, |
| "step": 427008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.3000600119368724e-05, |
| "loss": 3.7712, |
| "step": 427520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2992214171858204e-05, |
| "loss": 3.7507, |
| "step": 428032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2983828224347684e-05, |
| "loss": 3.7604, |
| "step": 428544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.297545865564089e-05, |
| "loss": 3.7643, |
| "step": 429056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.296707270813037e-05, |
| "loss": 3.7553, |
| "step": 429568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.295868676061985e-05, |
| "loss": 3.7521, |
| "step": 430080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.295030081310933e-05, |
| "loss": 3.7691, |
| "step": 430592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.294193124440254e-05, |
| "loss": 3.7663, |
| "step": 431104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.293354529689202e-05, |
| "loss": 3.7754, |
| "step": 431616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.29251593493815e-05, |
| "loss": 3.7561, |
| "step": 432128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.291677340187099e-05, |
| "loss": 3.7614, |
| "step": 432640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.290838745436047e-05, |
| "loss": 3.7494, |
| "step": 433152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.290001788565368e-05, |
| "loss": 3.7641, |
| "step": 433664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.289163193814316e-05, |
| "loss": 3.7704, |
| "step": 434176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.288324599063264e-05, |
| "loss": 3.7565, |
| "step": 434688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.287486004312212e-05, |
| "loss": 3.772, |
| "step": 435200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.28664740956116e-05, |
| "loss": 3.7419, |
| "step": 435712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.285808814810108e-05, |
| "loss": 3.7506, |
| "step": 436224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.284971857939429e-05, |
| "loss": 3.748, |
| "step": 436736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.284133263188377e-05, |
| "loss": 3.7547, |
| "step": 437248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2832946684373247e-05, |
| "loss": 3.7655, |
| "step": 437760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2824560736862727e-05, |
| "loss": 3.7592, |
| "step": 438272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2816174789352207e-05, |
| "loss": 3.7366, |
| "step": 438784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.280780522064542e-05, |
| "loss": 3.7582, |
| "step": 439296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.27994192731349e-05, |
| "loss": 3.7646, |
| "step": 439808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.279103332562438e-05, |
| "loss": 3.7611, |
| "step": 440320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.278264737811386e-05, |
| "loss": 3.7623, |
| "step": 440832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.277426143060334e-05, |
| "loss": 3.7523, |
| "step": 441344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.276589186189655e-05, |
| "loss": 3.7491, |
| "step": 441856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.275750591438603e-05, |
| "loss": 3.768, |
| "step": 442368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.274911996687551e-05, |
| "loss": 3.7533, |
| "step": 442880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.274073401936499e-05, |
| "loss": 3.7504, |
| "step": 443392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2732348071854464e-05, |
| "loss": 3.7445, |
| "step": 443904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.272397850314768e-05, |
| "loss": 3.7499, |
| "step": 444416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.271559255563716e-05, |
| "loss": 3.7577, |
| "step": 444928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.270720660812664e-05, |
| "loss": 3.7575, |
| "step": 445440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.269882066061612e-05, |
| "loss": 3.7642, |
| "step": 445952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.26904347131056e-05, |
| "loss": 3.7488, |
| "step": 446464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2682065144398816e-05, |
| "loss": 3.7478, |
| "step": 446976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.267367919688829e-05, |
| "loss": 3.768, |
| "step": 447488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.266529324937777e-05, |
| "loss": 3.7506, |
| "step": 448000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.265690730186725e-05, |
| "loss": 3.7587, |
| "step": 448512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.264852135435673e-05, |
| "loss": 3.764, |
| "step": 449024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.264013540684621e-05, |
| "loss": 3.7647, |
| "step": 449536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.263174945933569e-05, |
| "loss": 3.7607, |
| "step": 450048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.26233798906289e-05, |
| "loss": 3.7484, |
| "step": 450560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.261499394311838e-05, |
| "loss": 3.7406, |
| "step": 451072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.260660799560786e-05, |
| "loss": 3.7502, |
| "step": 451584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2598222048097345e-05, |
| "loss": 3.7563, |
| "step": 452096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2589852479390554e-05, |
| "loss": 3.7637, |
| "step": 452608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2581466531880034e-05, |
| "loss": 3.7572, |
| "step": 453120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2573080584369514e-05, |
| "loss": 3.7565, |
| "step": 453632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.2564694636858994e-05, |
| "loss": 3.7525, |
| "step": 454144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.25563250681522e-05, |
| "loss": 3.7522, |
| "step": 454656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.254793912064168e-05, |
| "loss": 3.7415, |
| "step": 455168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.253955317313116e-05, |
| "loss": 3.749, |
| "step": 455680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.253116722562064e-05, |
| "loss": 3.7613, |
| "step": 456192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.252278127811012e-05, |
| "loss": 3.7597, |
| "step": 456704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.25143953305996e-05, |
| "loss": 3.7446, |
| "step": 457216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.250602576189281e-05, |
| "loss": 3.7451, |
| "step": 457728 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.868739366531372, |
| "eval_runtime": 570.4041, |
| "eval_samples_per_second": 668.984, |
| "eval_steps_per_second": 20.906, |
| "step": 457914 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.24976398143823e-05, |
| "loss": 3.7636, |
| "step": 458240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248925386687178e-05, |
| "loss": 3.7501, |
| "step": 458752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.248086791936126e-05, |
| "loss": 3.7465, |
| "step": 459264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.247248197185074e-05, |
| "loss": 3.7468, |
| "step": 459776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.246409602434022e-05, |
| "loss": 3.7413, |
| "step": 460288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.245572645563343e-05, |
| "loss": 3.7391, |
| "step": 460800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.244734050812291e-05, |
| "loss": 3.7373, |
| "step": 461312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243895456061239e-05, |
| "loss": 3.7442, |
| "step": 461824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.243056861310187e-05, |
| "loss": 3.7474, |
| "step": 462336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2422199044395076e-05, |
| "loss": 3.7479, |
| "step": 462848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2413813096884556e-05, |
| "loss": 3.747, |
| "step": 463360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2405427149374036e-05, |
| "loss": 3.7391, |
| "step": 463872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2397041201863516e-05, |
| "loss": 3.7501, |
| "step": 464384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2388655254352996e-05, |
| "loss": 3.7459, |
| "step": 464896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.238028568564621e-05, |
| "loss": 3.7461, |
| "step": 465408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.237189973813569e-05, |
| "loss": 3.7311, |
| "step": 465920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.236351379062517e-05, |
| "loss": 3.7308, |
| "step": 466432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.235512784311465e-05, |
| "loss": 3.7481, |
| "step": 466944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.234675827440786e-05, |
| "loss": 3.7402, |
| "step": 467456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.233837232689734e-05, |
| "loss": 3.7553, |
| "step": 467968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.232998637938682e-05, |
| "loss": 3.7378, |
| "step": 468480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.23216004318763e-05, |
| "loss": 3.75, |
| "step": 468992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2313214484365774e-05, |
| "loss": 3.7443, |
| "step": 469504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.230484491565899e-05, |
| "loss": 3.7425, |
| "step": 470016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.229645896814847e-05, |
| "loss": 3.7384, |
| "step": 470528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.228807302063795e-05, |
| "loss": 3.7384, |
| "step": 471040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.227968707312743e-05, |
| "loss": 3.7486, |
| "step": 471552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2271317504420645e-05, |
| "loss": 3.732, |
| "step": 472064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.2262931556910125e-05, |
| "loss": 3.7442, |
| "step": 472576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.22545456093996e-05, |
| "loss": 3.738, |
| "step": 473088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.224615966188908e-05, |
| "loss": 3.7258, |
| "step": 473600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2237790093182294e-05, |
| "loss": 3.7418, |
| "step": 474112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2229404145671774e-05, |
| "loss": 3.7445, |
| "step": 474624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.222101819816125e-05, |
| "loss": 3.7475, |
| "step": 475136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.221263225065073e-05, |
| "loss": 3.735, |
| "step": 475648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.220424630314021e-05, |
| "loss": 3.7387, |
| "step": 476160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.219587673443342e-05, |
| "loss": 3.738, |
| "step": 476672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21874907869229e-05, |
| "loss": 3.7513, |
| "step": 477184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217910483941238e-05, |
| "loss": 3.7383, |
| "step": 477696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.217071889190186e-05, |
| "loss": 3.7252, |
| "step": 478208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.216234932319507e-05, |
| "loss": 3.7295, |
| "step": 478720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.215396337568455e-05, |
| "loss": 3.7262, |
| "step": 479232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.214557742817403e-05, |
| "loss": 3.7453, |
| "step": 479744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.213719148066351e-05, |
| "loss": 3.7348, |
| "step": 480256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.212882191195672e-05, |
| "loss": 3.7353, |
| "step": 480768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.21204359644462e-05, |
| "loss": 3.7423, |
| "step": 481280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.211205001693568e-05, |
| "loss": 3.7326, |
| "step": 481792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.210366406942516e-05, |
| "loss": 3.7454, |
| "step": 482304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.209529450071838e-05, |
| "loss": 3.7273, |
| "step": 482816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.208690855320786e-05, |
| "loss": 3.7204, |
| "step": 483328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207852260569734e-05, |
| "loss": 3.7338, |
| "step": 483840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.207013665818682e-05, |
| "loss": 3.727, |
| "step": 484352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.20617507106763e-05, |
| "loss": 3.7323, |
| "step": 484864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2053381141969506e-05, |
| "loss": 3.7344, |
| "step": 485376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2044995194458986e-05, |
| "loss": 3.7249, |
| "step": 485888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2036609246948466e-05, |
| "loss": 3.7226, |
| "step": 486400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2028223299437946e-05, |
| "loss": 3.7251, |
| "step": 486912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2019853730731155e-05, |
| "loss": 3.7248, |
| "step": 487424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2011467783220635e-05, |
| "loss": 3.7389, |
| "step": 487936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.2003081835710115e-05, |
| "loss": 3.7283, |
| "step": 488448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1994695888199595e-05, |
| "loss": 3.7188, |
| "step": 488960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.198632631949281e-05, |
| "loss": 3.7254, |
| "step": 489472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.197794037198229e-05, |
| "loss": 3.7121, |
| "step": 489984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196955442447177e-05, |
| "loss": 3.7398, |
| "step": 490496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.196116847696125e-05, |
| "loss": 3.7212, |
| "step": 491008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.195278252945073e-05, |
| "loss": 3.7289, |
| "step": 491520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.194441296074394e-05, |
| "loss": 3.718, |
| "step": 492032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.193602701323342e-05, |
| "loss": 3.7182, |
| "step": 492544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.19276410657229e-05, |
| "loss": 3.723, |
| "step": 493056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191925511821238e-05, |
| "loss": 3.7205, |
| "step": 493568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.191088554950559e-05, |
| "loss": 3.729, |
| "step": 494080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.190249960199507e-05, |
| "loss": 3.7252, |
| "step": 494592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.189411365448455e-05, |
| "loss": 3.7305, |
| "step": 495104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.188572770697403e-05, |
| "loss": 3.7394, |
| "step": 495616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1877341759463515e-05, |
| "loss": 3.7374, |
| "step": 496128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1868972190756724e-05, |
| "loss": 3.7345, |
| "step": 496640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1860586243246204e-05, |
| "loss": 3.7229, |
| "step": 497152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1852200295735684e-05, |
| "loss": 3.7167, |
| "step": 497664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.1843814348225164e-05, |
| "loss": 3.7219, |
| "step": 498176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.183544477951837e-05, |
| "loss": 3.7387, |
| "step": 498688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.182705883200785e-05, |
| "loss": 3.7314, |
| "step": 499200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181867288449733e-05, |
| "loss": 3.7197, |
| "step": 499712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.181028693698681e-05, |
| "loss": 3.7083, |
| "step": 500224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.180191736828002e-05, |
| "loss": 3.7381, |
| "step": 500736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.17935314207695e-05, |
| "loss": 3.7035, |
| "step": 501248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.178514547325898e-05, |
| "loss": 3.722, |
| "step": 501760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.177675952574847e-05, |
| "loss": 3.7243, |
| "step": 502272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.176838995704168e-05, |
| "loss": 3.7217, |
| "step": 502784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.176000400953116e-05, |
| "loss": 3.73, |
| "step": 503296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.175161806202064e-05, |
| "loss": 3.7358, |
| "step": 503808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.174323211451012e-05, |
| "loss": 3.7104, |
| "step": 504320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.173486254580333e-05, |
| "loss": 3.7242, |
| "step": 504832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.172647659829281e-05, |
| "loss": 3.7264, |
| "step": 505344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.171809065078229e-05, |
| "loss": 3.7176, |
| "step": 505856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.170970470327177e-05, |
| "loss": 3.7138, |
| "step": 506368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1701335134564976e-05, |
| "loss": 3.7316, |
| "step": 506880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1692949187054456e-05, |
| "loss": 3.7304, |
| "step": 507392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1684563239543936e-05, |
| "loss": 3.7385, |
| "step": 507904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.167617729203342e-05, |
| "loss": 3.7184, |
| "step": 508416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.166780772332663e-05, |
| "loss": 3.7251, |
| "step": 508928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165942177581611e-05, |
| "loss": 3.7122, |
| "step": 509440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.165103582830559e-05, |
| "loss": 3.7259, |
| "step": 509952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.164264988079507e-05, |
| "loss": 3.7317, |
| "step": 510464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.163426393328455e-05, |
| "loss": 3.7252, |
| "step": 510976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.162589436457776e-05, |
| "loss": 3.7321, |
| "step": 511488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.161750841706724e-05, |
| "loss": 3.7085, |
| "step": 512000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.160912246955672e-05, |
| "loss": 3.7099, |
| "step": 512512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.16007365220462e-05, |
| "loss": 3.7077, |
| "step": 513024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1592350574535674e-05, |
| "loss": 3.7183, |
| "step": 513536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.158398100582889e-05, |
| "loss": 3.7276, |
| "step": 514048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1575595058318376e-05, |
| "loss": 3.7257, |
| "step": 514560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1567209110807856e-05, |
| "loss": 3.6992, |
| "step": 515072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.155882316329733e-05, |
| "loss": 3.7186, |
| "step": 515584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1550453594590545e-05, |
| "loss": 3.7282, |
| "step": 516096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1542067647080025e-05, |
| "loss": 3.7271, |
| "step": 516608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.15336816995695e-05, |
| "loss": 3.7247, |
| "step": 517120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.152529575205898e-05, |
| "loss": 3.718, |
| "step": 517632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1516926183352194e-05, |
| "loss": 3.7156, |
| "step": 518144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1508540235841674e-05, |
| "loss": 3.7269, |
| "step": 518656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.150015428833115e-05, |
| "loss": 3.7201, |
| "step": 519168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.149176834082063e-05, |
| "loss": 3.7139, |
| "step": 519680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1483382393310114e-05, |
| "loss": 3.7079, |
| "step": 520192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.147501282460333e-05, |
| "loss": 3.7137, |
| "step": 520704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.14666268770928e-05, |
| "loss": 3.7211, |
| "step": 521216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.145824092958228e-05, |
| "loss": 3.7201, |
| "step": 521728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144985498207176e-05, |
| "loss": 3.7232, |
| "step": 522240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.144146903456124e-05, |
| "loss": 3.7137, |
| "step": 522752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.143309946585445e-05, |
| "loss": 3.7115, |
| "step": 523264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.142471351834393e-05, |
| "loss": 3.7291, |
| "step": 523776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.141632757083341e-05, |
| "loss": 3.7096, |
| "step": 524288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.140794162332289e-05, |
| "loss": 3.7258, |
| "step": 524800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.13995720546161e-05, |
| "loss": 3.7294, |
| "step": 525312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.139118610710558e-05, |
| "loss": 3.7258, |
| "step": 525824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.138280015959507e-05, |
| "loss": 3.7238, |
| "step": 526336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.137441421208455e-05, |
| "loss": 3.7173, |
| "step": 526848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.136602826457403e-05, |
| "loss": 3.705, |
| "step": 527360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1357658695867237e-05, |
| "loss": 3.7124, |
| "step": 527872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1349272748356717e-05, |
| "loss": 3.7179, |
| "step": 528384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1340886800846197e-05, |
| "loss": 3.7302, |
| "step": 528896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1332500853335676e-05, |
| "loss": 3.7166, |
| "step": 529408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1324131284628886e-05, |
| "loss": 3.7257, |
| "step": 529920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1315745337118366e-05, |
| "loss": 3.7132, |
| "step": 530432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1307359389607845e-05, |
| "loss": 3.7162, |
| "step": 530944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1298973442097325e-05, |
| "loss": 3.7031, |
| "step": 531456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1290587494586805e-05, |
| "loss": 3.7143, |
| "step": 531968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.1282201547076285e-05, |
| "loss": 3.7259, |
| "step": 532480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.12738319783695e-05, |
| "loss": 3.7264, |
| "step": 532992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.126544603085898e-05, |
| "loss": 3.7071, |
| "step": 533504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.125706008334846e-05, |
| "loss": 3.7143, |
| "step": 534016 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.857569456100464, |
| "eval_runtime": 575.5623, |
| "eval_samples_per_second": 662.988, |
| "eval_steps_per_second": 20.719, |
| "step": 534233 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.124867413583794e-05, |
| "loss": 3.732, |
| "step": 534528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.124028818832742e-05, |
| "loss": 3.7154, |
| "step": 535040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.12319022408169e-05, |
| "loss": 3.7108, |
| "step": 535552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.122351629330638e-05, |
| "loss": 3.713, |
| "step": 536064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.121514672459959e-05, |
| "loss": 3.7025, |
| "step": 536576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.120676077708907e-05, |
| "loss": 3.7078, |
| "step": 537088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.119837482957855e-05, |
| "loss": 3.6969, |
| "step": 537600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.118998888206803e-05, |
| "loss": 3.7098, |
| "step": 538112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.118161931336124e-05, |
| "loss": 3.7093, |
| "step": 538624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.117323336585072e-05, |
| "loss": 3.7155, |
| "step": 539136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.11648474183402e-05, |
| "loss": 3.7101, |
| "step": 539648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1156461470829686e-05, |
| "loss": 3.707, |
| "step": 540160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.114807552331916e-05, |
| "loss": 3.7159, |
| "step": 540672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1139705954612375e-05, |
| "loss": 3.7062, |
| "step": 541184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1131320007101855e-05, |
| "loss": 3.7163, |
| "step": 541696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1122934059591335e-05, |
| "loss": 3.6943, |
| "step": 542208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.111454811208081e-05, |
| "loss": 3.6996, |
| "step": 542720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1106178543374024e-05, |
| "loss": 3.7115, |
| "step": 543232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1097792595863504e-05, |
| "loss": 3.7029, |
| "step": 543744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.1089406648352984e-05, |
| "loss": 3.7191, |
| "step": 544256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.108102070084246e-05, |
| "loss": 3.7078, |
| "step": 544768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.107265113213567e-05, |
| "loss": 3.7118, |
| "step": 545280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.106426518462515e-05, |
| "loss": 3.7103, |
| "step": 545792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.105587923711463e-05, |
| "loss": 3.7036, |
| "step": 546304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.104749328960411e-05, |
| "loss": 3.7075, |
| "step": 546816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.103910734209359e-05, |
| "loss": 3.7006, |
| "step": 547328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.103073777338681e-05, |
| "loss": 3.7144, |
| "step": 547840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.102235182587628e-05, |
| "loss": 3.705, |
| "step": 548352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.101396587836576e-05, |
| "loss": 3.7089, |
| "step": 548864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.100557993085524e-05, |
| "loss": 3.7028, |
| "step": 549376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.099721036214846e-05, |
| "loss": 3.6953, |
| "step": 549888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.098882441463793e-05, |
| "loss": 3.7072, |
| "step": 550400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.098043846712741e-05, |
| "loss": 3.7092, |
| "step": 550912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.097205251961689e-05, |
| "loss": 3.7085, |
| "step": 551424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0963682950910106e-05, |
| "loss": 3.7061, |
| "step": 551936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0955297003399586e-05, |
| "loss": 3.7001, |
| "step": 552448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0946911055889066e-05, |
| "loss": 3.7052, |
| "step": 552960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0938525108378546e-05, |
| "loss": 3.7149, |
| "step": 553472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0930155539671755e-05, |
| "loss": 3.7055, |
| "step": 553984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0921769592161235e-05, |
| "loss": 3.6941, |
| "step": 554496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0913383644650715e-05, |
| "loss": 3.6924, |
| "step": 555008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0904997697140195e-05, |
| "loss": 3.6931, |
| "step": 555520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0896628128433404e-05, |
| "loss": 3.7118, |
| "step": 556032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0888242180922884e-05, |
| "loss": 3.7007, |
| "step": 556544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0879856233412364e-05, |
| "loss": 3.6987, |
| "step": 557056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0871470285901844e-05, |
| "loss": 3.7137, |
| "step": 557568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.086310071719506e-05, |
| "loss": 3.6987, |
| "step": 558080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.085471476968454e-05, |
| "loss": 3.71, |
| "step": 558592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.084632882217402e-05, |
| "loss": 3.694, |
| "step": 559104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.08379428746635e-05, |
| "loss": 3.686, |
| "step": 559616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.082957330595671e-05, |
| "loss": 3.6982, |
| "step": 560128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.082118735844619e-05, |
| "loss": 3.6967, |
| "step": 560640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.081280141093567e-05, |
| "loss": 3.693, |
| "step": 561152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.080441546342515e-05, |
| "loss": 3.7012, |
| "step": 561664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.079604589471836e-05, |
| "loss": 3.6936, |
| "step": 562176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.078765994720784e-05, |
| "loss": 3.687, |
| "step": 562688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.077927399969732e-05, |
| "loss": 3.6989, |
| "step": 563200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.07708880521868e-05, |
| "loss": 3.688, |
| "step": 563712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0762518483480014e-05, |
| "loss": 3.7048, |
| "step": 564224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0754132535969494e-05, |
| "loss": 3.6947, |
| "step": 564736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0745746588458974e-05, |
| "loss": 3.6884, |
| "step": 565248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0737360640948453e-05, |
| "loss": 3.6908, |
| "step": 565760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.072899107224166e-05, |
| "loss": 3.6824, |
| "step": 566272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.072060512473114e-05, |
| "loss": 3.7039, |
| "step": 566784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.071221917722062e-05, |
| "loss": 3.6878, |
| "step": 567296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.07038332297101e-05, |
| "loss": 3.6956, |
| "step": 567808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.069544728219958e-05, |
| "loss": 3.6896, |
| "step": 568320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.068707771349279e-05, |
| "loss": 3.6861, |
| "step": 568832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.067869176598227e-05, |
| "loss": 3.6866, |
| "step": 569344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.067030581847175e-05, |
| "loss": 3.6902, |
| "step": 569856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.066191987096124e-05, |
| "loss": 3.6945, |
| "step": 570368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.065353392345072e-05, |
| "loss": 3.6928, |
| "step": 570880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.064516435474393e-05, |
| "loss": 3.6978, |
| "step": 571392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.063677840723341e-05, |
| "loss": 3.7063, |
| "step": 571904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.062839245972289e-05, |
| "loss": 3.7029, |
| "step": 572416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.062000651221237e-05, |
| "loss": 3.6996, |
| "step": 572928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0611636943505576e-05, |
| "loss": 3.6976, |
| "step": 573440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0603250995995056e-05, |
| "loss": 3.6781, |
| "step": 573952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0594865048484536e-05, |
| "loss": 3.6914, |
| "step": 574464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0586479100974016e-05, |
| "loss": 3.7076, |
| "step": 574976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0578109532267225e-05, |
| "loss": 3.6981, |
| "step": 575488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.0569723584756705e-05, |
| "loss": 3.6921, |
| "step": 576000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.056133763724619e-05, |
| "loss": 3.6754, |
| "step": 576512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.05529680685394e-05, |
| "loss": 3.7, |
| "step": 577024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.054458212102888e-05, |
| "loss": 3.6767, |
| "step": 577536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.053619617351836e-05, |
| "loss": 3.6881, |
| "step": 578048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.052781022600784e-05, |
| "loss": 3.6867, |
| "step": 578560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.051942427849732e-05, |
| "loss": 3.6931, |
| "step": 579072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.051105470979053e-05, |
| "loss": 3.6985, |
| "step": 579584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.050266876228001e-05, |
| "loss": 3.6989, |
| "step": 580096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.049428281476949e-05, |
| "loss": 3.6817, |
| "step": 580608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.048589686725897e-05, |
| "loss": 3.6954, |
| "step": 581120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.047752729855218e-05, |
| "loss": 3.6897, |
| "step": 581632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.046914135104166e-05, |
| "loss": 3.6859, |
| "step": 582144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.046075540353114e-05, |
| "loss": 3.6822, |
| "step": 582656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0452369456020625e-05, |
| "loss": 3.6969, |
| "step": 583168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0443999887313835e-05, |
| "loss": 3.6977, |
| "step": 583680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0435613939803314e-05, |
| "loss": 3.7064, |
| "step": 584192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0427227992292794e-05, |
| "loss": 3.6903, |
| "step": 584704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0418842044782274e-05, |
| "loss": 3.6928, |
| "step": 585216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0410472476075483e-05, |
| "loss": 3.6806, |
| "step": 585728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0402086528564963e-05, |
| "loss": 3.6954, |
| "step": 586240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.039370058105444e-05, |
| "loss": 3.6995, |
| "step": 586752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.038531463354392e-05, |
| "loss": 3.6963, |
| "step": 587264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.037694506483713e-05, |
| "loss": 3.697, |
| "step": 587776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.036855911732661e-05, |
| "loss": 3.6806, |
| "step": 588288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.036017316981609e-05, |
| "loss": 3.6768, |
| "step": 588800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.035178722230558e-05, |
| "loss": 3.6788, |
| "step": 589312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.034341765359879e-05, |
| "loss": 3.6847, |
| "step": 589824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.033503170608827e-05, |
| "loss": 3.6976, |
| "step": 590336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.032664575857775e-05, |
| "loss": 3.6934, |
| "step": 590848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.031825981106723e-05, |
| "loss": 3.6647, |
| "step": 591360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.030989024236044e-05, |
| "loss": 3.6933, |
| "step": 591872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.030150429484992e-05, |
| "loss": 3.6972, |
| "step": 592384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.02931183473394e-05, |
| "loss": 3.6914, |
| "step": 592896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.028473239982888e-05, |
| "loss": 3.6938, |
| "step": 593408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0276362831122086e-05, |
| "loss": 3.6889, |
| "step": 593920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0267976883611566e-05, |
| "loss": 3.6811, |
| "step": 594432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0259590936101046e-05, |
| "loss": 3.6963, |
| "step": 594944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.025120498859053e-05, |
| "loss": 3.6889, |
| "step": 595456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.024281904108001e-05, |
| "loss": 3.6829, |
| "step": 595968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.023444947237322e-05, |
| "loss": 3.6752, |
| "step": 596480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.02260635248627e-05, |
| "loss": 3.6842, |
| "step": 596992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.021767757735218e-05, |
| "loss": 3.6909, |
| "step": 597504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0209291629841655e-05, |
| "loss": 3.6868, |
| "step": 598016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.020092206113487e-05, |
| "loss": 3.6967, |
| "step": 598528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.019253611362435e-05, |
| "loss": 3.6827, |
| "step": 599040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.018415016611383e-05, |
| "loss": 3.6818, |
| "step": 599552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0175764218603304e-05, |
| "loss": 3.7011, |
| "step": 600064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.016739464989652e-05, |
| "loss": 3.679, |
| "step": 600576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0159008702386e-05, |
| "loss": 3.6952, |
| "step": 601088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0150622754875486e-05, |
| "loss": 3.7009, |
| "step": 601600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.014223680736496e-05, |
| "loss": 3.6921, |
| "step": 602112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0133867238658175e-05, |
| "loss": 3.6931, |
| "step": 602624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0125481291147655e-05, |
| "loss": 3.6862, |
| "step": 603136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.011709534363713e-05, |
| "loss": 3.6744, |
| "step": 603648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.010870939612661e-05, |
| "loss": 3.6808, |
| "step": 604160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0100339827419824e-05, |
| "loss": 3.689, |
| "step": 604672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.0091953879909304e-05, |
| "loss": 3.6944, |
| "step": 605184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.008356793239878e-05, |
| "loss": 3.6891, |
| "step": 605696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.007518198488826e-05, |
| "loss": 3.6935, |
| "step": 606208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.006679603737774e-05, |
| "loss": 3.6807, |
| "step": 606720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.005842646867095e-05, |
| "loss": 3.6862, |
| "step": 607232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.005004052116043e-05, |
| "loss": 3.6727, |
| "step": 607744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.004165457364991e-05, |
| "loss": 3.684, |
| "step": 608256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.003326862613939e-05, |
| "loss": 3.6937, |
| "step": 608768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.00248990574326e-05, |
| "loss": 3.6958, |
| "step": 609280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.001651310992208e-05, |
| "loss": 3.675, |
| "step": 609792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.000812716241156e-05, |
| "loss": 3.6846, |
| "step": 610304 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.851036787033081, |
| "eval_runtime": 579.0615, |
| "eval_samples_per_second": 658.982, |
| "eval_steps_per_second": 20.594, |
| "step": 610552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999974121490104e-05, |
| "loss": 3.7001, |
| "step": 610816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.999135526739052e-05, |
| "loss": 3.6871, |
| "step": 611328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.998296931988e-05, |
| "loss": 3.6814, |
| "step": 611840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.997458337236948e-05, |
| "loss": 3.6842, |
| "step": 612352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.996621380366269e-05, |
| "loss": 3.6722, |
| "step": 612864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.995782785615218e-05, |
| "loss": 3.6753, |
| "step": 613376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994944190864166e-05, |
| "loss": 3.6743, |
| "step": 613888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.994105596113114e-05, |
| "loss": 3.6779, |
| "step": 614400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.993268639242435e-05, |
| "loss": 3.6789, |
| "step": 614912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.992430044491383e-05, |
| "loss": 3.6856, |
| "step": 615424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.991591449740331e-05, |
| "loss": 3.6791, |
| "step": 615936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.990752854989279e-05, |
| "loss": 3.6804, |
| "step": 616448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9899158981185996e-05, |
| "loss": 3.687, |
| "step": 616960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9890773033675476e-05, |
| "loss": 3.6729, |
| "step": 617472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9882387086164956e-05, |
| "loss": 3.6894, |
| "step": 617984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9874001138654436e-05, |
| "loss": 3.6647, |
| "step": 618496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9865631569947645e-05, |
| "loss": 3.6683, |
| "step": 619008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.985724562243713e-05, |
| "loss": 3.683, |
| "step": 619520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.984885967492661e-05, |
| "loss": 3.6747, |
| "step": 620032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.984047372741609e-05, |
| "loss": 3.6925, |
| "step": 620544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.98321041587093e-05, |
| "loss": 3.6789, |
| "step": 621056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.982371821119878e-05, |
| "loss": 3.6801, |
| "step": 621568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.981533226368826e-05, |
| "loss": 3.6809, |
| "step": 622080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.980694631617774e-05, |
| "loss": 3.6747, |
| "step": 622592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.979857674747095e-05, |
| "loss": 3.6802, |
| "step": 623104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.979019079996043e-05, |
| "loss": 3.6696, |
| "step": 623616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.978180485244991e-05, |
| "loss": 3.6843, |
| "step": 624128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.977341890493939e-05, |
| "loss": 3.6746, |
| "step": 624640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.97650493362326e-05, |
| "loss": 3.6817, |
| "step": 625152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.9756663388722085e-05, |
| "loss": 3.6705, |
| "step": 625664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9748277441211565e-05, |
| "loss": 3.6645, |
| "step": 626176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9739891493701045e-05, |
| "loss": 3.6791, |
| "step": 626688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9731521924994254e-05, |
| "loss": 3.6806, |
| "step": 627200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9723135977483734e-05, |
| "loss": 3.6789, |
| "step": 627712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9714750029973214e-05, |
| "loss": 3.6758, |
| "step": 628224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9706364082462694e-05, |
| "loss": 3.6739, |
| "step": 628736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9697978134952174e-05, |
| "loss": 3.6737, |
| "step": 629248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.968960856624538e-05, |
| "loss": 3.6844, |
| "step": 629760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.968122261873486e-05, |
| "loss": 3.679, |
| "step": 630272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.967283667122434e-05, |
| "loss": 3.6648, |
| "step": 630784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.966445072371382e-05, |
| "loss": 3.6635, |
| "step": 631296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.965608115500703e-05, |
| "loss": 3.6633, |
| "step": 631808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.964769520749652e-05, |
| "loss": 3.682, |
| "step": 632320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9639309259986e-05, |
| "loss": 3.6776, |
| "step": 632832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.963092331247548e-05, |
| "loss": 3.6661, |
| "step": 633344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.962255374376869e-05, |
| "loss": 3.6856, |
| "step": 633856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.961416779625817e-05, |
| "loss": 3.6699, |
| "step": 634368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.960578184874765e-05, |
| "loss": 3.6802, |
| "step": 634880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.959739590123713e-05, |
| "loss": 3.6711, |
| "step": 635392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958902633253034e-05, |
| "loss": 3.6612, |
| "step": 635904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.958064038501982e-05, |
| "loss": 3.6659, |
| "step": 636416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.95722544375093e-05, |
| "loss": 3.6656, |
| "step": 636928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.956386848999878e-05, |
| "loss": 3.6726, |
| "step": 637440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9555498921291986e-05, |
| "loss": 3.6671, |
| "step": 637952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.954711297378147e-05, |
| "loss": 3.6697, |
| "step": 638464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.953872702627095e-05, |
| "loss": 3.6576, |
| "step": 638976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.953034107876043e-05, |
| "loss": 3.6702, |
| "step": 639488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.952197151005364e-05, |
| "loss": 3.6602, |
| "step": 640000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.951358556254312e-05, |
| "loss": 3.6751, |
| "step": 640512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.95051996150326e-05, |
| "loss": 3.6613, |
| "step": 641024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.949681366752208e-05, |
| "loss": 3.6644, |
| "step": 641536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.948844409881529e-05, |
| "loss": 3.6644, |
| "step": 642048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.948005815130477e-05, |
| "loss": 3.6545, |
| "step": 642560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.947167220379425e-05, |
| "loss": 3.6742, |
| "step": 643072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.946328625628373e-05, |
| "loss": 3.6586, |
| "step": 643584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.945491668757694e-05, |
| "loss": 3.6686, |
| "step": 644096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9446530740066426e-05, |
| "loss": 3.6633, |
| "step": 644608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9438144792555906e-05, |
| "loss": 3.6566, |
| "step": 645120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9429758845045386e-05, |
| "loss": 3.6579, |
| "step": 645632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9421389276338595e-05, |
| "loss": 3.6626, |
| "step": 646144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9413003328828075e-05, |
| "loss": 3.6656, |
| "step": 646656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9404617381317555e-05, |
| "loss": 3.6673, |
| "step": 647168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.939623143380703e-05, |
| "loss": 3.6684, |
| "step": 647680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9387861865100244e-05, |
| "loss": 3.6862, |
| "step": 648192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9379475917589724e-05, |
| "loss": 3.6744, |
| "step": 648704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.9371089970079204e-05, |
| "loss": 3.6675, |
| "step": 649216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.936270402256868e-05, |
| "loss": 3.6678, |
| "step": 649728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.935433445386189e-05, |
| "loss": 3.6536, |
| "step": 650240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.934594850635138e-05, |
| "loss": 3.6612, |
| "step": 650752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.933756255884086e-05, |
| "loss": 3.677, |
| "step": 651264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.932917661133033e-05, |
| "loss": 3.6744, |
| "step": 651776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.932080704262355e-05, |
| "loss": 3.6636, |
| "step": 652288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.931242109511303e-05, |
| "loss": 3.6492, |
| "step": 652800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.93040351476025e-05, |
| "loss": 3.6705, |
| "step": 653312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.929564920009198e-05, |
| "loss": 3.6462, |
| "step": 653824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.92872796313852e-05, |
| "loss": 3.662, |
| "step": 654336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927889368387468e-05, |
| "loss": 3.6619, |
| "step": 654848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.927050773636415e-05, |
| "loss": 3.6602, |
| "step": 655360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.926212178885363e-05, |
| "loss": 3.6739, |
| "step": 655872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.925375222014685e-05, |
| "loss": 3.669, |
| "step": 656384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9245366272636333e-05, |
| "loss": 3.6592, |
| "step": 656896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.923698032512581e-05, |
| "loss": 3.6613, |
| "step": 657408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.922859437761529e-05, |
| "loss": 3.6652, |
| "step": 657920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.92202248089085e-05, |
| "loss": 3.6579, |
| "step": 658432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9211838861397976e-05, |
| "loss": 3.6539, |
| "step": 658944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9203452913887456e-05, |
| "loss": 3.6672, |
| "step": 659456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9195066966376936e-05, |
| "loss": 3.6722, |
| "step": 659968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.918669739767015e-05, |
| "loss": 3.6814, |
| "step": 660480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9178311450159625e-05, |
| "loss": 3.6593, |
| "step": 660992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9169925502649105e-05, |
| "loss": 3.6658, |
| "step": 661504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9161539555138585e-05, |
| "loss": 3.6558, |
| "step": 662016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.91531699864318e-05, |
| "loss": 3.6631, |
| "step": 662528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.914478403892128e-05, |
| "loss": 3.6738, |
| "step": 663040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.913639809141076e-05, |
| "loss": 3.6688, |
| "step": 663552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.912801214390024e-05, |
| "loss": 3.6731, |
| "step": 664064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911964257519345e-05, |
| "loss": 3.6518, |
| "step": 664576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.911125662768293e-05, |
| "loss": 3.6525, |
| "step": 665088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.910287068017241e-05, |
| "loss": 3.6517, |
| "step": 665600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.909448473266189e-05, |
| "loss": 3.6579, |
| "step": 666112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.90861151639551e-05, |
| "loss": 3.665, |
| "step": 666624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.907772921644458e-05, |
| "loss": 3.668, |
| "step": 667136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906934326893406e-05, |
| "loss": 3.6368, |
| "step": 667648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.906095732142354e-05, |
| "loss": 3.6656, |
| "step": 668160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9052587752716754e-05, |
| "loss": 3.6694, |
| "step": 668672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9044201805206234e-05, |
| "loss": 3.6634, |
| "step": 669184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9035815857695714e-05, |
| "loss": 3.6669, |
| "step": 669696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.9027429910185194e-05, |
| "loss": 3.666, |
| "step": 670208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.90190603414784e-05, |
| "loss": 3.6509, |
| "step": 670720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.901067439396788e-05, |
| "loss": 3.6685, |
| "step": 671232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.900228844645736e-05, |
| "loss": 3.6679, |
| "step": 671744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.899390249894684e-05, |
| "loss": 3.6528, |
| "step": 672256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.898553293024005e-05, |
| "loss": 3.6484, |
| "step": 672768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.897714698272953e-05, |
| "loss": 3.6575, |
| "step": 673280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.896876103521901e-05, |
| "loss": 3.6604, |
| "step": 673792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.896037508770849e-05, |
| "loss": 3.6588, |
| "step": 674304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.895200551900171e-05, |
| "loss": 3.6679, |
| "step": 674816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.894361957149119e-05, |
| "loss": 3.6599, |
| "step": 675328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.893523362398067e-05, |
| "loss": 3.6518, |
| "step": 675840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.892684767647015e-05, |
| "loss": 3.6753, |
| "step": 676352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.891847810776336e-05, |
| "loss": 3.6518, |
| "step": 676864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.891009216025284e-05, |
| "loss": 3.6678, |
| "step": 677376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.890170621274232e-05, |
| "loss": 3.6783, |
| "step": 677888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8893320265231797e-05, |
| "loss": 3.6646, |
| "step": 678400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8884950696525006e-05, |
| "loss": 3.6675, |
| "step": 678912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8876564749014486e-05, |
| "loss": 3.6603, |
| "step": 679424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8868178801503966e-05, |
| "loss": 3.6489, |
| "step": 679936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.8859792853993446e-05, |
| "loss": 3.6568, |
| "step": 680448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.885142328528666e-05, |
| "loss": 3.6585, |
| "step": 680960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.884303733777614e-05, |
| "loss": 3.6699, |
| "step": 681472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.883465139026562e-05, |
| "loss": 3.6624, |
| "step": 681984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.88262654427551e-05, |
| "loss": 3.663, |
| "step": 682496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.881789587404831e-05, |
| "loss": 3.6606, |
| "step": 683008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880950992653779e-05, |
| "loss": 3.6574, |
| "step": 683520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.880112397902727e-05, |
| "loss": 3.6463, |
| "step": 684032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.879273803151675e-05, |
| "loss": 3.658, |
| "step": 684544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.878436846280996e-05, |
| "loss": 3.6666, |
| "step": 685056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.877598251529944e-05, |
| "loss": 3.6701, |
| "step": 685568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.876759656778892e-05, |
| "loss": 3.6501, |
| "step": 686080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.87592106202784e-05, |
| "loss": 3.6544, |
| "step": 686592 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8464159965515137, |
| "eval_runtime": 319.7597, |
| "eval_samples_per_second": 1193.368, |
| "eval_steps_per_second": 37.294, |
| "step": 686871 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8750841051571615e-05, |
| "loss": 3.6669, |
| "step": 687104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8742455104061095e-05, |
| "loss": 3.6604, |
| "step": 687616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8734069156550575e-05, |
| "loss": 3.6582, |
| "step": 688128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8725683209040055e-05, |
| "loss": 3.6562, |
| "step": 688640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8717313640333264e-05, |
| "loss": 3.6488, |
| "step": 689152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8708927692822744e-05, |
| "loss": 3.6483, |
| "step": 689664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8700541745312224e-05, |
| "loss": 3.6475, |
| "step": 690176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8692155797801704e-05, |
| "loss": 3.65, |
| "step": 690688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.868378622909491e-05, |
| "loss": 3.6505, |
| "step": 691200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.867540028158439e-05, |
| "loss": 3.6608, |
| "step": 691712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.866701433407387e-05, |
| "loss": 3.6498, |
| "step": 692224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.865862838656335e-05, |
| "loss": 3.6538, |
| "step": 692736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.865025881785657e-05, |
| "loss": 3.6619, |
| "step": 693248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.864187287034605e-05, |
| "loss": 3.6478, |
| "step": 693760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.863348692283553e-05, |
| "loss": 3.6586, |
| "step": 694272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.862510097532501e-05, |
| "loss": 3.6388, |
| "step": 694784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.861673140661822e-05, |
| "loss": 3.6417, |
| "step": 695296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.86083454591077e-05, |
| "loss": 3.6585, |
| "step": 695808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.859995951159718e-05, |
| "loss": 3.6508, |
| "step": 696320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.859157356408666e-05, |
| "loss": 3.665, |
| "step": 696832 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.858320399537987e-05, |
| "loss": 3.6499, |
| "step": 697344 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.857481804786935e-05, |
| "loss": 3.6637, |
| "step": 697856 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8566432100358827e-05, |
| "loss": 3.6476, |
| "step": 698368 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8558046152848307e-05, |
| "loss": 3.6486, |
| "step": 698880 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.854967658414152e-05, |
| "loss": 3.6533, |
| "step": 699392 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8541290636631e-05, |
| "loss": 3.65, |
| "step": 699904 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.853290468912048e-05, |
| "loss": 3.6573, |
| "step": 700416 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.852451874160996e-05, |
| "loss": 3.6482, |
| "step": 700928 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.851614917290317e-05, |
| "loss": 3.6549, |
| "step": 701440 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.850776322539265e-05, |
| "loss": 3.6488, |
| "step": 701952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.849937727788213e-05, |
| "loss": 3.6341, |
| "step": 702464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.849099133037161e-05, |
| "loss": 3.6567, |
| "step": 702976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.848262176166482e-05, |
| "loss": 3.654, |
| "step": 703488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.84742358141543e-05, |
| "loss": 3.651, |
| "step": 704000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.846584986664378e-05, |
| "loss": 3.652, |
| "step": 704512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.845746391913326e-05, |
| "loss": 3.6461, |
| "step": 705024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8449094350426476e-05, |
| "loss": 3.6491, |
| "step": 705536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8440708402915956e-05, |
| "loss": 3.6558, |
| "step": 706048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8432322455405436e-05, |
| "loss": 3.6557, |
| "step": 706560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8423936507894916e-05, |
| "loss": 3.6374, |
| "step": 707072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8415550560384396e-05, |
| "loss": 3.6424, |
| "step": 707584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8407180991677605e-05, |
| "loss": 3.642, |
| "step": 708096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8398795044167085e-05, |
| "loss": 3.6496, |
| "step": 708608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8390409096656565e-05, |
| "loss": 3.6537, |
| "step": 709120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8382023149146045e-05, |
| "loss": 3.6411, |
| "step": 709632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8373653580439254e-05, |
| "loss": 3.6586, |
| "step": 710144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8365267632928734e-05, |
| "loss": 3.6428, |
| "step": 710656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8356881685418214e-05, |
| "loss": 3.6573, |
| "step": 711168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8348495737907694e-05, |
| "loss": 3.6445, |
| "step": 711680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.834012616920091e-05, |
| "loss": 3.6367, |
| "step": 712192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.833174022169039e-05, |
| "loss": 3.6441, |
| "step": 712704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.832335427417987e-05, |
| "loss": 3.6357, |
| "step": 713216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.831496832666935e-05, |
| "loss": 3.6511, |
| "step": 713728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.830659875796256e-05, |
| "loss": 3.64, |
| "step": 714240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.829821281045204e-05, |
| "loss": 3.647, |
| "step": 714752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.828982686294152e-05, |
| "loss": 3.6299, |
| "step": 715264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8281440915431e-05, |
| "loss": 3.6438, |
| "step": 715776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.827307134672421e-05, |
| "loss": 3.6331, |
| "step": 716288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.826468539921369e-05, |
| "loss": 3.6517, |
| "step": 716800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.825629945170317e-05, |
| "loss": 3.6368, |
| "step": 717312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.824791350419265e-05, |
| "loss": 3.6393, |
| "step": 717824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823954393548586e-05, |
| "loss": 3.638, |
| "step": 718336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.823115798797534e-05, |
| "loss": 3.6291, |
| "step": 718848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.822277204046482e-05, |
| "loss": 3.6488, |
| "step": 719360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.82143860929543e-05, |
| "loss": 3.6378, |
| "step": 719872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.820601652424751e-05, |
| "loss": 3.6456, |
| "step": 720384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.819763057673699e-05, |
| "loss": 3.6342, |
| "step": 720896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.818924462922647e-05, |
| "loss": 3.6338, |
| "step": 721408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.818087506051968e-05, |
| "loss": 3.632, |
| "step": 721920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.817248911300916e-05, |
| "loss": 3.6406, |
| "step": 722432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.816410316549864e-05, |
| "loss": 3.637, |
| "step": 722944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.815571721798812e-05, |
| "loss": 3.6412, |
| "step": 723456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.814734764928134e-05, |
| "loss": 3.6426, |
| "step": 723968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.813896170177082e-05, |
| "loss": 3.6619, |
| "step": 724480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.81305757542603e-05, |
| "loss": 3.6469, |
| "step": 724992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.812218980674977e-05, |
| "loss": 3.6457, |
| "step": 725504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8113820238042986e-05, |
| "loss": 3.6412, |
| "step": 726016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8105434290532466e-05, |
| "loss": 3.6335, |
| "step": 726528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8097048343021946e-05, |
| "loss": 3.6335, |
| "step": 727040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.808866239551142e-05, |
| "loss": 3.6508, |
| "step": 727552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8080292826804635e-05, |
| "loss": 3.651, |
| "step": 728064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8071906879294115e-05, |
| "loss": 3.6416, |
| "step": 728576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8063520931783595e-05, |
| "loss": 3.6247, |
| "step": 729088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8055134984273075e-05, |
| "loss": 3.6416, |
| "step": 729600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.804676541556629e-05, |
| "loss": 3.623, |
| "step": 730112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.803837946805577e-05, |
| "loss": 3.6371, |
| "step": 730624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8029993520545244e-05, |
| "loss": 3.637, |
| "step": 731136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.8021607573034724e-05, |
| "loss": 3.6369, |
| "step": 731648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.801323800432794e-05, |
| "loss": 3.6478, |
| "step": 732160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.800485205681742e-05, |
| "loss": 3.6421, |
| "step": 732672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.799646610930689e-05, |
| "loss": 3.635, |
| "step": 733184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.798808016179637e-05, |
| "loss": 3.6362, |
| "step": 733696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797971059308959e-05, |
| "loss": 3.6419, |
| "step": 734208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.797132464557907e-05, |
| "loss": 3.633, |
| "step": 734720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.796293869806855e-05, |
| "loss": 3.6322, |
| "step": 735232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.795455275055803e-05, |
| "loss": 3.6413, |
| "step": 735744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7946183181851244e-05, |
| "loss": 3.6473, |
| "step": 736256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.793779723434072e-05, |
| "loss": 3.6574, |
| "step": 736768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.79294112868302e-05, |
| "loss": 3.6354, |
| "step": 737280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.792102533931968e-05, |
| "loss": 3.6447, |
| "step": 737792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7912655770612893e-05, |
| "loss": 3.6336, |
| "step": 738304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7904269823102367e-05, |
| "loss": 3.6397, |
| "step": 738816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7895883875591847e-05, |
| "loss": 3.6409, |
| "step": 739328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7887497928081326e-05, |
| "loss": 3.6467, |
| "step": 739840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.787912835937454e-05, |
| "loss": 3.6485, |
| "step": 740352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7870742411864016e-05, |
| "loss": 3.6284, |
| "step": 740864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.78623564643535e-05, |
| "loss": 3.6297, |
| "step": 741376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.785397051684298e-05, |
| "loss": 3.6269, |
| "step": 741888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.784560094813619e-05, |
| "loss": 3.63, |
| "step": 742400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.783721500062567e-05, |
| "loss": 3.6425, |
| "step": 742912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.782882905311515e-05, |
| "loss": 3.6428, |
| "step": 743424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.782044310560463e-05, |
| "loss": 3.6179, |
| "step": 743936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.781207353689784e-05, |
| "loss": 3.641, |
| "step": 744448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.780368758938732e-05, |
| "loss": 3.6451, |
| "step": 744960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.77953016418768e-05, |
| "loss": 3.6398, |
| "step": 745472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.778691569436628e-05, |
| "loss": 3.642, |
| "step": 745984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.777854612565949e-05, |
| "loss": 3.6391, |
| "step": 746496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.777016017814897e-05, |
| "loss": 3.6304, |
| "step": 747008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.776177423063845e-05, |
| "loss": 3.6409, |
| "step": 747520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7753388283127936e-05, |
| "loss": 3.6426, |
| "step": 748032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7745018714421145e-05, |
| "loss": 3.6358, |
| "step": 748544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7736632766910625e-05, |
| "loss": 3.6201, |
| "step": 749056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7728246819400105e-05, |
| "loss": 3.6343, |
| "step": 749568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7719860871889585e-05, |
| "loss": 3.6356, |
| "step": 750080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7711491303182794e-05, |
| "loss": 3.6382, |
| "step": 750592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7703105355672274e-05, |
| "loss": 3.6371, |
| "step": 751104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7694719408161754e-05, |
| "loss": 3.6427, |
| "step": 751616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7686333460651234e-05, |
| "loss": 3.6286, |
| "step": 752128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.767796389194444e-05, |
| "loss": 3.6493, |
| "step": 752640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.766957794443392e-05, |
| "loss": 3.6312, |
| "step": 753152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.76611919969234e-05, |
| "loss": 3.6436, |
| "step": 753664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.765280604941289e-05, |
| "loss": 3.6509, |
| "step": 754176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.76444364807061e-05, |
| "loss": 3.6397, |
| "step": 754688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.763605053319558e-05, |
| "loss": 3.6431, |
| "step": 755200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.762766458568506e-05, |
| "loss": 3.6383, |
| "step": 755712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.761927863817454e-05, |
| "loss": 3.626, |
| "step": 756224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.761090906946775e-05, |
| "loss": 3.6341, |
| "step": 756736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.760252312195723e-05, |
| "loss": 3.6319, |
| "step": 757248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.759413717444671e-05, |
| "loss": 3.6438, |
| "step": 757760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.758575122693619e-05, |
| "loss": 3.6396, |
| "step": 758272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7577381658229397e-05, |
| "loss": 3.6409, |
| "step": 758784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7568995710718877e-05, |
| "loss": 3.6408, |
| "step": 759296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.7560609763208356e-05, |
| "loss": 3.6327, |
| "step": 759808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.755222381569784e-05, |
| "loss": 3.6222, |
| "step": 760320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.754385424699105e-05, |
| "loss": 3.6333, |
| "step": 760832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.753546829948053e-05, |
| "loss": 3.6446, |
| "step": 761344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.752708235197001e-05, |
| "loss": 3.644, |
| "step": 761856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.751869640445949e-05, |
| "loss": 3.6272, |
| "step": 762368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.75103268357527e-05, |
| "loss": 3.6297, |
| "step": 762880 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8441436290740967, |
| "eval_runtime": 319.558, |
| "eval_samples_per_second": 1194.121, |
| "eval_steps_per_second": 37.317, |
| "step": 763190 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.750194088824218e-05, |
| "loss": 3.6267, |
| "step": 763392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.749355494073166e-05, |
| "loss": 3.6341, |
| "step": 763904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.748516899322114e-05, |
| "loss": 3.6378, |
| "step": 764416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.747678304571062e-05, |
| "loss": 3.6422, |
| "step": 764928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.746841347700383e-05, |
| "loss": 3.6479, |
| "step": 765440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.746002752949331e-05, |
| "loss": 3.6358, |
| "step": 765952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.74516415819828e-05, |
| "loss": 3.6389, |
| "step": 766464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.744325563447228e-05, |
| "loss": 3.6365, |
| "step": 766976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7434886065765486e-05, |
| "loss": 3.6301, |
| "step": 767488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7426500118254966e-05, |
| "loss": 3.633, |
| "step": 768000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7418114170744446e-05, |
| "loss": 3.643, |
| "step": 768512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7409728223233926e-05, |
| "loss": 3.6414, |
| "step": 769024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7401375033330864e-05, |
| "loss": 3.6393, |
| "step": 769536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7392989085820344e-05, |
| "loss": 3.6144, |
| "step": 770048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7384603138309824e-05, |
| "loss": 3.6405, |
| "step": 770560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7376217190799304e-05, |
| "loss": 3.6223, |
| "step": 771072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7367831243288784e-05, |
| "loss": 3.6333, |
| "step": 771584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.7359445295778264e-05, |
| "loss": 3.6312, |
| "step": 772096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.735105934826775e-05, |
| "loss": 3.632, |
| "step": 772608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.734267340075723e-05, |
| "loss": 3.6419, |
| "step": 773120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.733430383205044e-05, |
| "loss": 3.6461, |
| "step": 773632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.732591788453992e-05, |
| "loss": 3.6402, |
| "step": 774144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.73175319370294e-05, |
| "loss": 3.6313, |
| "step": 774656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.730914598951888e-05, |
| "loss": 3.626, |
| "step": 775168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.730077642081209e-05, |
| "loss": 3.6377, |
| "step": 775680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.729239047330157e-05, |
| "loss": 3.6347, |
| "step": 776192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.728400452579105e-05, |
| "loss": 3.6305, |
| "step": 776704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.727561857828053e-05, |
| "loss": 3.6293, |
| "step": 777216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.726724900957374e-05, |
| "loss": 3.6281, |
| "step": 777728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.725886306206322e-05, |
| "loss": 3.6314, |
| "step": 778240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7250477114552704e-05, |
| "loss": 3.62, |
| "step": 778752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7242091167042184e-05, |
| "loss": 3.6436, |
| "step": 779264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.723372159833539e-05, |
| "loss": 3.6388, |
| "step": 779776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.722533565082487e-05, |
| "loss": 3.6365, |
| "step": 780288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.721694970331435e-05, |
| "loss": 3.6312, |
| "step": 780800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.720856375580383e-05, |
| "loss": 3.6285, |
| "step": 781312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.720019418709704e-05, |
| "loss": 3.6353, |
| "step": 781824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.719180823958652e-05, |
| "loss": 3.6294, |
| "step": 782336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7183422292076e-05, |
| "loss": 3.6208, |
| "step": 782848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.717503634456548e-05, |
| "loss": 3.6242, |
| "step": 783360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.716666677585869e-05, |
| "loss": 3.6251, |
| "step": 783872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.715828082834817e-05, |
| "loss": 3.627, |
| "step": 784384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.714989488083766e-05, |
| "loss": 3.6308, |
| "step": 784896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.714150893332714e-05, |
| "loss": 3.6294, |
| "step": 785408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.713313936462035e-05, |
| "loss": 3.6399, |
| "step": 785920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.712475341710983e-05, |
| "loss": 3.6287, |
| "step": 786432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.711636746959931e-05, |
| "loss": 3.6338, |
| "step": 786944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.710798152208879e-05, |
| "loss": 3.6333, |
| "step": 787456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7099611953381996e-05, |
| "loss": 3.6063, |
| "step": 787968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7091226005871476e-05, |
| "loss": 3.6405, |
| "step": 788480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7082840058360956e-05, |
| "loss": 3.6235, |
| "step": 788992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7074454110850436e-05, |
| "loss": 3.6291, |
| "step": 789504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7066084542143645e-05, |
| "loss": 3.6341, |
| "step": 790016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.7057698594633125e-05, |
| "loss": 3.623, |
| "step": 790528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.704931264712261e-05, |
| "loss": 3.6105, |
| "step": 791040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.704092669961209e-05, |
| "loss": 3.623, |
| "step": 791552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.70325571309053e-05, |
| "loss": 3.6245, |
| "step": 792064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.702417118339478e-05, |
| "loss": 3.6199, |
| "step": 792576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.701578523588426e-05, |
| "loss": 3.6358, |
| "step": 793088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.700741566717747e-05, |
| "loss": 3.6234, |
| "step": 793600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.699902971966695e-05, |
| "loss": 3.6161, |
| "step": 794112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.699064377215643e-05, |
| "loss": 3.6441, |
| "step": 794624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.698225782464591e-05, |
| "loss": 3.6105, |
| "step": 795136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.697388825593912e-05, |
| "loss": 3.614, |
| "step": 795648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.69655023084286e-05, |
| "loss": 3.6249, |
| "step": 796160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.695711636091808e-05, |
| "loss": 3.6217, |
| "step": 796672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.694873041340756e-05, |
| "loss": 3.6174, |
| "step": 797184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6940360844700774e-05, |
| "loss": 3.623, |
| "step": 797696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6931974897190254e-05, |
| "loss": 3.6073, |
| "step": 798208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6923588949679734e-05, |
| "loss": 3.615, |
| "step": 798720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6915203002169214e-05, |
| "loss": 3.6338, |
| "step": 799232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.690683343346242e-05, |
| "loss": 3.6212, |
| "step": 799744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.68984474859519e-05, |
| "loss": 3.6336, |
| "step": 800256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.689006153844138e-05, |
| "loss": 3.624, |
| "step": 800768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.688167559093086e-05, |
| "loss": 3.6411, |
| "step": 801280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.687330602222407e-05, |
| "loss": 3.617, |
| "step": 801792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.686492007471355e-05, |
| "loss": 3.6312, |
| "step": 802304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.685653412720303e-05, |
| "loss": 3.6291, |
| "step": 802816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.684814817969251e-05, |
| "loss": 3.6074, |
| "step": 803328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683977861098573e-05, |
| "loss": 3.6339, |
| "step": 803840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.683139266347521e-05, |
| "loss": 3.6268, |
| "step": 804352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.682300671596469e-05, |
| "loss": 3.6267, |
| "step": 804864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.681462076845416e-05, |
| "loss": 3.6293, |
| "step": 805376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.680625119974738e-05, |
| "loss": 3.607, |
| "step": 805888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.679786525223686e-05, |
| "loss": 3.6083, |
| "step": 806400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.678947930472634e-05, |
| "loss": 3.6176, |
| "step": 806912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.678109335721581e-05, |
| "loss": 3.6263, |
| "step": 807424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6772723788509026e-05, |
| "loss": 3.6158, |
| "step": 807936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6764337840998506e-05, |
| "loss": 3.6164, |
| "step": 808448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.6755951893487986e-05, |
| "loss": 3.6204, |
| "step": 808960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6747565945977466e-05, |
| "loss": 3.6252, |
| "step": 809472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.673919637727068e-05, |
| "loss": 3.6173, |
| "step": 809984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.673081042976016e-05, |
| "loss": 3.6152, |
| "step": 810496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6722424482249635e-05, |
| "loss": 3.6345, |
| "step": 811008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6714038534739115e-05, |
| "loss": 3.6347, |
| "step": 811520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.670566896603233e-05, |
| "loss": 3.6278, |
| "step": 812032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.669728301852181e-05, |
| "loss": 3.6113, |
| "step": 812544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6688897071011284e-05, |
| "loss": 3.621, |
| "step": 813056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6680511123500764e-05, |
| "loss": 3.6193, |
| "step": 813568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.667214155479398e-05, |
| "loss": 3.6307, |
| "step": 814080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.666375560728346e-05, |
| "loss": 3.6192, |
| "step": 814592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.665536965977294e-05, |
| "loss": 3.6199, |
| "step": 815104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.664698371226242e-05, |
| "loss": 3.6182, |
| "step": 815616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6638614143555635e-05, |
| "loss": 3.6316, |
| "step": 816128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.663022819604511e-05, |
| "loss": 3.6196, |
| "step": 816640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.662184224853459e-05, |
| "loss": 3.6232, |
| "step": 817152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.661345630102407e-05, |
| "loss": 3.611, |
| "step": 817664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6605086732317284e-05, |
| "loss": 3.6179, |
| "step": 818176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.659670078480676e-05, |
| "loss": 3.6144, |
| "step": 818688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.658831483729624e-05, |
| "loss": 3.611, |
| "step": 819200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.657992888978572e-05, |
| "loss": 3.6162, |
| "step": 819712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6571559321078926e-05, |
| "loss": 3.6168, |
| "step": 820224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.656317337356841e-05, |
| "loss": 3.6232, |
| "step": 820736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.655478742605789e-05, |
| "loss": 3.6141, |
| "step": 821248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.654640147854737e-05, |
| "loss": 3.6259, |
| "step": 821760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.653803190984058e-05, |
| "loss": 3.6309, |
| "step": 822272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.652964596233006e-05, |
| "loss": 3.6211, |
| "step": 822784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.652126001481954e-05, |
| "loss": 3.6321, |
| "step": 823296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.651287406730902e-05, |
| "loss": 3.6146, |
| "step": 823808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.650450449860223e-05, |
| "loss": 3.6158, |
| "step": 824320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.649611855109171e-05, |
| "loss": 3.6155, |
| "step": 824832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.648773260358119e-05, |
| "loss": 3.6093, |
| "step": 825344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.647934665607067e-05, |
| "loss": 3.6203, |
| "step": 825856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.647097708736388e-05, |
| "loss": 3.62, |
| "step": 826368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.646259113985337e-05, |
| "loss": 3.6211, |
| "step": 826880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.645420519234285e-05, |
| "loss": 3.6165, |
| "step": 827392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.644581924483233e-05, |
| "loss": 3.6255, |
| "step": 827904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6437449676125536e-05, |
| "loss": 3.6179, |
| "step": 828416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6429063728615016e-05, |
| "loss": 3.6136, |
| "step": 828928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6420677781104496e-05, |
| "loss": 3.6221, |
| "step": 829440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6412291833593976e-05, |
| "loss": 3.6222, |
| "step": 829952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6403922264887185e-05, |
| "loss": 3.629, |
| "step": 830464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6395536317376665e-05, |
| "loss": 3.6126, |
| "step": 830976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6387150369866145e-05, |
| "loss": 3.6316, |
| "step": 831488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6378764422355625e-05, |
| "loss": 3.618, |
| "step": 832000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6370394853648834e-05, |
| "loss": 3.6246, |
| "step": 832512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.636200890613832e-05, |
| "loss": 3.6253, |
| "step": 833024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.63536229586278e-05, |
| "loss": 3.6131, |
| "step": 833536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.634523701111728e-05, |
| "loss": 3.6129, |
| "step": 834048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.633686744241049e-05, |
| "loss": 3.6298, |
| "step": 834560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.632848149489997e-05, |
| "loss": 3.6067, |
| "step": 835072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.632009554738945e-05, |
| "loss": 3.6223, |
| "step": 835584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.631170959987893e-05, |
| "loss": 3.6211, |
| "step": 836096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.630334003117214e-05, |
| "loss": 3.6175, |
| "step": 836608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.629495408366162e-05, |
| "loss": 3.6103, |
| "step": 837120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.62865681361511e-05, |
| "loss": 3.6211, |
| "step": 837632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.627818218864058e-05, |
| "loss": 3.6212, |
| "step": 838144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.626981261993379e-05, |
| "loss": 3.6211, |
| "step": 838656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.6261426672423274e-05, |
| "loss": 3.6212, |
| "step": 839168 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8373005390167236, |
| "eval_runtime": 303.2718, |
| "eval_samples_per_second": 1258.247, |
| "eval_steps_per_second": 39.321, |
| "step": 839510 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6253040724912754e-05, |
| "loss": 3.6049, |
| "step": 839680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6244654777402234e-05, |
| "loss": 3.6121, |
| "step": 840192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.623628520869544e-05, |
| "loss": 3.6132, |
| "step": 840704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.622789926118492e-05, |
| "loss": 3.617, |
| "step": 841216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.62195133136744e-05, |
| "loss": 3.6263, |
| "step": 841728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.621112736616388e-05, |
| "loss": 3.6109, |
| "step": 842240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.620275779745709e-05, |
| "loss": 3.6167, |
| "step": 842752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.619437184994657e-05, |
| "loss": 3.6132, |
| "step": 843264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.618598590243605e-05, |
| "loss": 3.6051, |
| "step": 843776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.617759995492553e-05, |
| "loss": 3.6063, |
| "step": 844288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.616923038621874e-05, |
| "loss": 3.6177, |
| "step": 844800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.616084443870823e-05, |
| "loss": 3.6225, |
| "step": 845312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.615247487000144e-05, |
| "loss": 3.6144, |
| "step": 845824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.614408892249092e-05, |
| "loss": 3.5953, |
| "step": 846336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.61357029749804e-05, |
| "loss": 3.6157, |
| "step": 846848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.612731702746988e-05, |
| "loss": 3.5982, |
| "step": 847360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.611893107995936e-05, |
| "loss": 3.609, |
| "step": 847872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.611054513244884e-05, |
| "loss": 3.611, |
| "step": 848384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.610215918493832e-05, |
| "loss": 3.6086, |
| "step": 848896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.60937732374278e-05, |
| "loss": 3.6154, |
| "step": 849408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6085403668721006e-05, |
| "loss": 3.6232, |
| "step": 849920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6077017721210486e-05, |
| "loss": 3.6141, |
| "step": 850432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6068631773699966e-05, |
| "loss": 3.6098, |
| "step": 850944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6060245826189446e-05, |
| "loss": 3.6007, |
| "step": 851456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.605187625748266e-05, |
| "loss": 3.6158, |
| "step": 851968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.604349030997214e-05, |
| "loss": 3.6121, |
| "step": 852480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.603510436246162e-05, |
| "loss": 3.6077, |
| "step": 852992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.60267184149511e-05, |
| "loss": 3.6084, |
| "step": 853504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.601834884624431e-05, |
| "loss": 3.6066, |
| "step": 854016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.600996289873379e-05, |
| "loss": 3.6079, |
| "step": 854528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.600157695122327e-05, |
| "loss": 3.602, |
| "step": 855040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.599319100371275e-05, |
| "loss": 3.6121, |
| "step": 855552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.598482143500596e-05, |
| "loss": 3.613, |
| "step": 856064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.597643548749544e-05, |
| "loss": 3.6149, |
| "step": 856576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.596804953998492e-05, |
| "loss": 3.609, |
| "step": 857088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.59596635924744e-05, |
| "loss": 3.6065, |
| "step": 857600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5951294023767615e-05, |
| "loss": 3.6136, |
| "step": 858112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5942908076257095e-05, |
| "loss": 3.6034, |
| "step": 858624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5934522128746575e-05, |
| "loss": 3.5993, |
| "step": 859136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5926136181236055e-05, |
| "loss": 3.6071, |
| "step": 859648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5917766612529264e-05, |
| "loss": 3.5968, |
| "step": 860160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5909380665018744e-05, |
| "loss": 3.6036, |
| "step": 860672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5900994717508224e-05, |
| "loss": 3.6061, |
| "step": 861184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5892608769997704e-05, |
| "loss": 3.6083, |
| "step": 861696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.588423920129091e-05, |
| "loss": 3.6124, |
| "step": 862208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.587585325378039e-05, |
| "loss": 3.6114, |
| "step": 862720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.586746730626987e-05, |
| "loss": 3.6106, |
| "step": 863232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.585908135875935e-05, |
| "loss": 3.606, |
| "step": 863744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.585071179005257e-05, |
| "loss": 3.5847, |
| "step": 864256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.584232584254205e-05, |
| "loss": 3.6165, |
| "step": 864768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.583393989503153e-05, |
| "loss": 3.6025, |
| "step": 865280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.582557032632474e-05, |
| "loss": 3.6065, |
| "step": 865792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.581718437881422e-05, |
| "loss": 3.6105, |
| "step": 866304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.58087984313037e-05, |
| "loss": 3.602, |
| "step": 866816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.580041248379318e-05, |
| "loss": 3.5861, |
| "step": 867328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.579204291508639e-05, |
| "loss": 3.5963, |
| "step": 867840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.578365696757587e-05, |
| "loss": 3.6063, |
| "step": 868352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.577527102006535e-05, |
| "loss": 3.5975, |
| "step": 868864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.576688507255483e-05, |
| "loss": 3.6166, |
| "step": 869376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5758515503848036e-05, |
| "loss": 3.6007, |
| "step": 869888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.575012955633752e-05, |
| "loss": 3.5903, |
| "step": 870400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5741743608827e-05, |
| "loss": 3.6196, |
| "step": 870912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.573335766131648e-05, |
| "loss": 3.5938, |
| "step": 871424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.572498809260969e-05, |
| "loss": 3.5922, |
| "step": 871936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.571660214509917e-05, |
| "loss": 3.6041, |
| "step": 872448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.570821619758865e-05, |
| "loss": 3.5972, |
| "step": 872960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.569983025007813e-05, |
| "loss": 3.5968, |
| "step": 873472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.569146068137134e-05, |
| "loss": 3.6006, |
| "step": 873984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.568307473386082e-05, |
| "loss": 3.5831, |
| "step": 874496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.56746887863503e-05, |
| "loss": 3.5935, |
| "step": 875008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.566630283883978e-05, |
| "loss": 3.6072, |
| "step": 875520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.565793327013299e-05, |
| "loss": 3.6025, |
| "step": 876032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5649547322622476e-05, |
| "loss": 3.6114, |
| "step": 876544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5641161375111956e-05, |
| "loss": 3.6022, |
| "step": 877056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.563277542760143e-05, |
| "loss": 3.6201, |
| "step": 877568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5624405858894645e-05, |
| "loss": 3.5937, |
| "step": 878080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5616019911384125e-05, |
| "loss": 3.6104, |
| "step": 878592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5607633963873605e-05, |
| "loss": 3.6027, |
| "step": 879104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.559924801636308e-05, |
| "loss": 3.5872, |
| "step": 879616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5590878447656294e-05, |
| "loss": 3.61, |
| "step": 880128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5582492500145774e-05, |
| "loss": 3.6091, |
| "step": 880640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.5574106552635254e-05, |
| "loss": 3.6051, |
| "step": 881152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.556572060512473e-05, |
| "loss": 3.6064, |
| "step": 881664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.555735103641794e-05, |
| "loss": 3.5878, |
| "step": 882176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.554896508890743e-05, |
| "loss": 3.5841, |
| "step": 882688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.55405791413969e-05, |
| "loss": 3.5946, |
| "step": 883200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.553219319388638e-05, |
| "loss": 3.6045, |
| "step": 883712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.55238236251796e-05, |
| "loss": 3.5966, |
| "step": 884224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.551543767766908e-05, |
| "loss": 3.5935, |
| "step": 884736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.550705173015855e-05, |
| "loss": 3.5966, |
| "step": 885248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.549866578264803e-05, |
| "loss": 3.6006, |
| "step": 885760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.549029621394125e-05, |
| "loss": 3.5958, |
| "step": 886272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.548191026643072e-05, |
| "loss": 3.5977, |
| "step": 886784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.54735243189202e-05, |
| "loss": 3.6076, |
| "step": 887296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.546513837140968e-05, |
| "loss": 3.6173, |
| "step": 887808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.54567688027029e-05, |
| "loss": 3.6089, |
| "step": 888320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.544838285519238e-05, |
| "loss": 3.591, |
| "step": 888832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.543999690768186e-05, |
| "loss": 3.5949, |
| "step": 889344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.543161096017134e-05, |
| "loss": 3.5991, |
| "step": 889856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.542324139146455e-05, |
| "loss": 3.6084, |
| "step": 890368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5414855443954026e-05, |
| "loss": 3.5997, |
| "step": 890880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5406469496443506e-05, |
| "loss": 3.598, |
| "step": 891392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5398083548932986e-05, |
| "loss": 3.5971, |
| "step": 891904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5389713980226195e-05, |
| "loss": 3.6052, |
| "step": 892416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5381328032715675e-05, |
| "loss": 3.6034, |
| "step": 892928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5372942085205155e-05, |
| "loss": 3.5956, |
| "step": 893440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5364556137694635e-05, |
| "loss": 3.5904, |
| "step": 893952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.535618656898785e-05, |
| "loss": 3.5936, |
| "step": 894464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.534780062147733e-05, |
| "loss": 3.5975, |
| "step": 894976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.533941467396681e-05, |
| "loss": 3.5886, |
| "step": 895488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.533102872645629e-05, |
| "loss": 3.5959, |
| "step": 896000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.53226591577495e-05, |
| "loss": 3.5932, |
| "step": 896512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.531427321023898e-05, |
| "loss": 3.5997, |
| "step": 897024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.530588726272846e-05, |
| "loss": 3.5929, |
| "step": 897536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.529750131521794e-05, |
| "loss": 3.6065, |
| "step": 898048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.528913174651115e-05, |
| "loss": 3.6106, |
| "step": 898560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.528074579900063e-05, |
| "loss": 3.598, |
| "step": 899072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.527235985149011e-05, |
| "loss": 3.607, |
| "step": 899584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5263990282783324e-05, |
| "loss": 3.5999, |
| "step": 900096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5255604335272804e-05, |
| "loss": 3.5954, |
| "step": 900608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5247218387762284e-05, |
| "loss": 3.5948, |
| "step": 901120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5238832440251764e-05, |
| "loss": 3.5863, |
| "step": 901632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.523046287154497e-05, |
| "loss": 3.597, |
| "step": 902144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.522207692403445e-05, |
| "loss": 3.5983, |
| "step": 902656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.521369097652393e-05, |
| "loss": 3.6016, |
| "step": 903168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.520530502901341e-05, |
| "loss": 3.5928, |
| "step": 903680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.519693546030662e-05, |
| "loss": 3.6049, |
| "step": 904192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.51885495127961e-05, |
| "loss": 3.5971, |
| "step": 904704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.518016356528558e-05, |
| "loss": 3.5931, |
| "step": 905216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.517177761777506e-05, |
| "loss": 3.601, |
| "step": 905728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.516340804906828e-05, |
| "loss": 3.603, |
| "step": 906240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.515502210155776e-05, |
| "loss": 3.6079, |
| "step": 906752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.514663615404724e-05, |
| "loss": 3.5898, |
| "step": 907264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.513825020653672e-05, |
| "loss": 3.6135, |
| "step": 907776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.512988063782993e-05, |
| "loss": 3.5943, |
| "step": 908288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.512149469031941e-05, |
| "loss": 3.605, |
| "step": 908800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.511310874280889e-05, |
| "loss": 3.6019, |
| "step": 909312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.510472279529837e-05, |
| "loss": 3.5959, |
| "step": 909824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5096353226591576e-05, |
| "loss": 3.5895, |
| "step": 910336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5087967279081056e-05, |
| "loss": 3.6117, |
| "step": 910848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5079581331570536e-05, |
| "loss": 3.585, |
| "step": 911360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.5071195384060016e-05, |
| "loss": 3.602, |
| "step": 911872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.506282581535323e-05, |
| "loss": 3.5967, |
| "step": 912384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.505443986784271e-05, |
| "loss": 3.597, |
| "step": 912896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.504605392033219e-05, |
| "loss": 3.5905, |
| "step": 913408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.503766797282167e-05, |
| "loss": 3.598, |
| "step": 913920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.502929840411488e-05, |
| "loss": 3.5988, |
| "step": 914432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.502091245660436e-05, |
| "loss": 3.6062, |
| "step": 914944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.501252650909384e-05, |
| "loss": 3.5951, |
| "step": 915456 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.83720064163208, |
| "eval_runtime": 302.3365, |
| "eval_samples_per_second": 1262.14, |
| "eval_steps_per_second": 39.443, |
| "step": 915830 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.500414056158332e-05, |
| "loss": 3.5839, |
| "step": 915968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.49957546140728e-05, |
| "loss": 3.5875, |
| "step": 916480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.498736866656228e-05, |
| "loss": 3.5937, |
| "step": 916992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.497898271905176e-05, |
| "loss": 3.5996, |
| "step": 917504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.497061315034497e-05, |
| "loss": 3.6001, |
| "step": 918016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.496222720283445e-05, |
| "loss": 3.5926, |
| "step": 918528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.495384125532393e-05, |
| "loss": 3.5992, |
| "step": 919040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4945455307813416e-05, |
| "loss": 3.5946, |
| "step": 919552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4937085739106625e-05, |
| "loss": 3.586, |
| "step": 920064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4928699791596105e-05, |
| "loss": 3.5806, |
| "step": 920576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4920313844085585e-05, |
| "loss": 3.6026, |
| "step": 921088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4911927896575065e-05, |
| "loss": 3.6026, |
| "step": 921600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4903574706672e-05, |
| "loss": 3.5944, |
| "step": 922112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.489518875916148e-05, |
| "loss": 3.5739, |
| "step": 922624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.488680281165096e-05, |
| "loss": 3.5951, |
| "step": 923136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.487841686414044e-05, |
| "loss": 3.5759, |
| "step": 923648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.487003091662992e-05, |
| "loss": 3.5922, |
| "step": 924160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.48616449691194e-05, |
| "loss": 3.5898, |
| "step": 924672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.485325902160888e-05, |
| "loss": 3.5869, |
| "step": 925184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.484487307409837e-05, |
| "loss": 3.5964, |
| "step": 925696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.483650350539158e-05, |
| "loss": 3.6021, |
| "step": 926208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.482811755788106e-05, |
| "loss": 3.5907, |
| "step": 926720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.481973161037054e-05, |
| "loss": 3.5896, |
| "step": 927232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.481134566286002e-05, |
| "loss": 3.5855, |
| "step": 927744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.480297609415323e-05, |
| "loss": 3.5942, |
| "step": 928256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.479459014664271e-05, |
| "loss": 3.5912, |
| "step": 928768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.478620419913219e-05, |
| "loss": 3.5827, |
| "step": 929280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.477781825162167e-05, |
| "loss": 3.5891, |
| "step": 929792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4769448682914877e-05, |
| "loss": 3.5883, |
| "step": 930304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.4761062735404357e-05, |
| "loss": 3.586, |
| "step": 930816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4752676787893837e-05, |
| "loss": 3.5858, |
| "step": 931328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.474429084038332e-05, |
| "loss": 3.593, |
| "step": 931840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.473592127167653e-05, |
| "loss": 3.5886, |
| "step": 932352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.472753532416601e-05, |
| "loss": 3.5984, |
| "step": 932864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.471914937665549e-05, |
| "loss": 3.5874, |
| "step": 933376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.471076342914497e-05, |
| "loss": 3.5861, |
| "step": 933888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.470239386043818e-05, |
| "loss": 3.5964, |
| "step": 934400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.469400791292766e-05, |
| "loss": 3.5803, |
| "step": 934912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.468562196541714e-05, |
| "loss": 3.5804, |
| "step": 935424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.467723601790662e-05, |
| "loss": 3.5856, |
| "step": 935936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.466886644919983e-05, |
| "loss": 3.5808, |
| "step": 936448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.466048050168931e-05, |
| "loss": 3.5825, |
| "step": 936960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.465209455417879e-05, |
| "loss": 3.5868, |
| "step": 937472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.464370860666828e-05, |
| "loss": 3.5891, |
| "step": 937984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4635339037961486e-05, |
| "loss": 3.5905, |
| "step": 938496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4626953090450966e-05, |
| "loss": 3.5927, |
| "step": 939008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4618567142940446e-05, |
| "loss": 3.5891, |
| "step": 939520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4610181195429926e-05, |
| "loss": 3.5905, |
| "step": 940032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4601811626723135e-05, |
| "loss": 3.5696, |
| "step": 940544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4593425679212615e-05, |
| "loss": 3.5951, |
| "step": 941056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4585039731702095e-05, |
| "loss": 3.5824, |
| "step": 941568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4576653784191575e-05, |
| "loss": 3.5847, |
| "step": 942080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4568284215484784e-05, |
| "loss": 3.5893, |
| "step": 942592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4559898267974264e-05, |
| "loss": 3.5826, |
| "step": 943104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4551512320463744e-05, |
| "loss": 3.5684, |
| "step": 943616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.454314275175696e-05, |
| "loss": 3.5796, |
| "step": 944128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.453475680424644e-05, |
| "loss": 3.5858, |
| "step": 944640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.452637085673592e-05, |
| "loss": 3.5756, |
| "step": 945152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.45179849092254e-05, |
| "loss": 3.5956, |
| "step": 945664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.450961534051861e-05, |
| "loss": 3.5856, |
| "step": 946176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.450122939300809e-05, |
| "loss": 3.5676, |
| "step": 946688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.449284344549757e-05, |
| "loss": 3.5994, |
| "step": 947200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.448445749798705e-05, |
| "loss": 3.5746, |
| "step": 947712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.447608792928026e-05, |
| "loss": 3.5727, |
| "step": 948224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.446770198176974e-05, |
| "loss": 3.5853, |
| "step": 948736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.445931603425922e-05, |
| "loss": 3.5769, |
| "step": 949248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.44509300867487e-05, |
| "loss": 3.5743, |
| "step": 949760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4442560518041913e-05, |
| "loss": 3.582, |
| "step": 950272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.443417457053139e-05, |
| "loss": 3.5677, |
| "step": 950784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.442578862302087e-05, |
| "loss": 3.5691, |
| "step": 951296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4417402675510346e-05, |
| "loss": 3.5908, |
| "step": 951808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.440903310680356e-05, |
| "loss": 3.5846, |
| "step": 952320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.440064715929304e-05, |
| "loss": 3.5873, |
| "step": 952832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.439226121178252e-05, |
| "loss": 3.5843, |
| "step": 953344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4383875264271995e-05, |
| "loss": 3.5989, |
| "step": 953856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.437550569556521e-05, |
| "loss": 3.5746, |
| "step": 954368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.436711974805469e-05, |
| "loss": 3.5935, |
| "step": 954880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.435873380054417e-05, |
| "loss": 3.5801, |
| "step": 955392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.435034785303365e-05, |
| "loss": 3.572, |
| "step": 955904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.434197828432687e-05, |
| "loss": 3.5858, |
| "step": 956416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.433359233681635e-05, |
| "loss": 3.594, |
| "step": 956928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.432520638930582e-05, |
| "loss": 3.5822, |
| "step": 957440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.43168204417953e-05, |
| "loss": 3.5919, |
| "step": 957952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4308450873088516e-05, |
| "loss": 3.5683, |
| "step": 958464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4300064925577996e-05, |
| "loss": 3.5656, |
| "step": 958976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.429167897806747e-05, |
| "loss": 3.5752, |
| "step": 959488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.428329303055695e-05, |
| "loss": 3.5867, |
| "step": 960000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4274923461850165e-05, |
| "loss": 3.578, |
| "step": 960512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4266537514339645e-05, |
| "loss": 3.5759, |
| "step": 961024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4258151566829125e-05, |
| "loss": 3.5792, |
| "step": 961536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4249765619318605e-05, |
| "loss": 3.5823, |
| "step": 962048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.424139605061182e-05, |
| "loss": 3.5737, |
| "step": 962560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4233010103101294e-05, |
| "loss": 3.5777, |
| "step": 963072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4224624155590774e-05, |
| "loss": 3.5859, |
| "step": 963584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4216238208080254e-05, |
| "loss": 3.5968, |
| "step": 964096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.420786863937347e-05, |
| "loss": 3.5898, |
| "step": 964608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.419948269186294e-05, |
| "loss": 3.5706, |
| "step": 965120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.419109674435242e-05, |
| "loss": 3.5776, |
| "step": 965632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.41827107968419e-05, |
| "loss": 3.5785, |
| "step": 966144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.417434122813511e-05, |
| "loss": 3.5882, |
| "step": 966656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.41659552806246e-05, |
| "loss": 3.5829, |
| "step": 967168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.415756933311408e-05, |
| "loss": 3.5778, |
| "step": 967680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.414918338560356e-05, |
| "loss": 3.579, |
| "step": 968192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.414081381689677e-05, |
| "loss": 3.5841, |
| "step": 968704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.413242786938625e-05, |
| "loss": 3.5855, |
| "step": 969216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.412404192187573e-05, |
| "loss": 3.5786, |
| "step": 969728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.411565597436521e-05, |
| "loss": 3.5729, |
| "step": 970240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4107286405658417e-05, |
| "loss": 3.5727, |
| "step": 970752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4098900458147897e-05, |
| "loss": 3.576, |
| "step": 971264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4090514510637376e-05, |
| "loss": 3.5727, |
| "step": 971776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4082128563126856e-05, |
| "loss": 3.5741, |
| "step": 972288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.4073758994420066e-05, |
| "loss": 3.5741, |
| "step": 972800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.406537304690955e-05, |
| "loss": 3.5814, |
| "step": 973312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.405698709939903e-05, |
| "loss": 3.5715, |
| "step": 973824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.404860115188851e-05, |
| "loss": 3.5855, |
| "step": 974336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.404023158318172e-05, |
| "loss": 3.595, |
| "step": 974848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.40318456356712e-05, |
| "loss": 3.5787, |
| "step": 975360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.402345968816068e-05, |
| "loss": 3.5844, |
| "step": 975872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.401507374065016e-05, |
| "loss": 3.5828, |
| "step": 976384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.400670417194337e-05, |
| "loss": 3.5752, |
| "step": 976896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.399831822443285e-05, |
| "loss": 3.5783, |
| "step": 977408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.398993227692233e-05, |
| "loss": 3.563, |
| "step": 977920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.398154632941181e-05, |
| "loss": 3.5824, |
| "step": 978432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.397317676070502e-05, |
| "loss": 3.5775, |
| "step": 978944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.39647908131945e-05, |
| "loss": 3.5834, |
| "step": 979456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3956404865683986e-05, |
| "loss": 3.5728, |
| "step": 979968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3948018918173466e-05, |
| "loss": 3.5872, |
| "step": 980480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3939649349466675e-05, |
| "loss": 3.5781, |
| "step": 980992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3931263401956155e-05, |
| "loss": 3.5746, |
| "step": 981504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3922877454445635e-05, |
| "loss": 3.5859, |
| "step": 982016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3914491506935115e-05, |
| "loss": 3.5823, |
| "step": 982528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3906121938228324e-05, |
| "loss": 3.5907, |
| "step": 983040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3897735990717804e-05, |
| "loss": 3.574, |
| "step": 983552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3889350043207284e-05, |
| "loss": 3.5919, |
| "step": 984064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.388098047450049e-05, |
| "loss": 3.5777, |
| "step": 984576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.387259452698997e-05, |
| "loss": 3.5841, |
| "step": 985088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.386420857947945e-05, |
| "loss": 3.5858, |
| "step": 985600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.385582263196894e-05, |
| "loss": 3.5769, |
| "step": 986112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.384743668445842e-05, |
| "loss": 3.5726, |
| "step": 986624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.383906711575163e-05, |
| "loss": 3.5918, |
| "step": 987136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.383068116824111e-05, |
| "loss": 3.5695, |
| "step": 987648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.382229522073059e-05, |
| "loss": 3.5807, |
| "step": 988160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.38139256520238e-05, |
| "loss": 3.5799, |
| "step": 988672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.380553970451328e-05, |
| "loss": 3.5786, |
| "step": 989184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.379715375700276e-05, |
| "loss": 3.5754, |
| "step": 989696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.378876780949224e-05, |
| "loss": 3.5767, |
| "step": 990208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3780398240785447e-05, |
| "loss": 3.5815, |
| "step": 990720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3772012293274927e-05, |
| "loss": 3.5832, |
| "step": 991232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3763626345764407e-05, |
| "loss": 3.5801, |
| "step": 991744 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.837082862854004, |
| "eval_runtime": 360.0266, |
| "eval_samples_per_second": 1059.897, |
| "eval_steps_per_second": 33.123, |
| "step": 992150 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.375524039825389e-05, |
| "loss": 3.5721, |
| "step": 992256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.37468708295471e-05, |
| "loss": 3.5672, |
| "step": 992768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.373848488203658e-05, |
| "loss": 3.5722, |
| "step": 993280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.373009893452606e-05, |
| "loss": 3.5867, |
| "step": 993792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.372171298701554e-05, |
| "loss": 3.5775, |
| "step": 994304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.371334341830875e-05, |
| "loss": 3.5768, |
| "step": 994816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.370495747079823e-05, |
| "loss": 3.5799, |
| "step": 995328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.369657152328771e-05, |
| "loss": 3.5768, |
| "step": 995840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.368818557577719e-05, |
| "loss": 3.5643, |
| "step": 996352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.36798160070704e-05, |
| "loss": 3.5661, |
| "step": 996864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.367143005955988e-05, |
| "loss": 3.5819, |
| "step": 997376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.366304411204936e-05, |
| "loss": 3.5823, |
| "step": 997888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3654674543342576e-05, |
| "loss": 3.5827, |
| "step": 998400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3646288595832056e-05, |
| "loss": 3.5532, |
| "step": 998912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3637902648321536e-05, |
| "loss": 3.5731, |
| "step": 999424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3629516700811016e-05, |
| "loss": 3.5612, |
| "step": 999936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3621130753300496e-05, |
| "loss": 3.575, |
| "step": 1000448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3612744805789976e-05, |
| "loss": 3.5712, |
| "step": 1000960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3604358858279456e-05, |
| "loss": 3.5692, |
| "step": 1001472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3595972910768936e-05, |
| "loss": 3.5725, |
| "step": 1001984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3587603342062145e-05, |
| "loss": 3.5905, |
| "step": 1002496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3579217394551625e-05, |
| "loss": 3.5741, |
| "step": 1003008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3570831447041105e-05, |
| "loss": 3.5722, |
| "step": 1003520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.3562445499530585e-05, |
| "loss": 3.5676, |
| "step": 1004032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.35540759308238e-05, |
| "loss": 3.5801, |
| "step": 1004544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.354568998331328e-05, |
| "loss": 3.5735, |
| "step": 1005056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.353730403580276e-05, |
| "loss": 3.5641, |
| "step": 1005568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.352891808829224e-05, |
| "loss": 3.5731, |
| "step": 1006080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.352054851958545e-05, |
| "loss": 3.567, |
| "step": 1006592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.351216257207493e-05, |
| "loss": 3.5695, |
| "step": 1007104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.350377662456441e-05, |
| "loss": 3.5709, |
| "step": 1007616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.349539067705389e-05, |
| "loss": 3.5736, |
| "step": 1008128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.34870211083471e-05, |
| "loss": 3.5717, |
| "step": 1008640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.347863516083658e-05, |
| "loss": 3.578, |
| "step": 1009152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.347024921332606e-05, |
| "loss": 3.5728, |
| "step": 1009664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.346186326581554e-05, |
| "loss": 3.5708, |
| "step": 1010176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3453493697108754e-05, |
| "loss": 3.5764, |
| "step": 1010688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3445107749598234e-05, |
| "loss": 3.5606, |
| "step": 1011200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3436721802087714e-05, |
| "loss": 3.5636, |
| "step": 1011712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3428335854577194e-05, |
| "loss": 3.5689, |
| "step": 1012224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.34199662858704e-05, |
| "loss": 3.5615, |
| "step": 1012736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.341158033835988e-05, |
| "loss": 3.5651, |
| "step": 1013248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.340319439084936e-05, |
| "loss": 3.5706, |
| "step": 1013760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.339480844333884e-05, |
| "loss": 3.5736, |
| "step": 1014272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.338643887463205e-05, |
| "loss": 3.5716, |
| "step": 1014784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.337805292712153e-05, |
| "loss": 3.573, |
| "step": 1015296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.336966697961101e-05, |
| "loss": 3.575, |
| "step": 1015808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.336128103210049e-05, |
| "loss": 3.5691, |
| "step": 1016320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.335291146339371e-05, |
| "loss": 3.5533, |
| "step": 1016832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.334452551588319e-05, |
| "loss": 3.5732, |
| "step": 1017344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.333613956837267e-05, |
| "loss": 3.5677, |
| "step": 1017856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.332775362086214e-05, |
| "loss": 3.5695, |
| "step": 1018368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.331938405215536e-05, |
| "loss": 3.5702, |
| "step": 1018880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.331099810464484e-05, |
| "loss": 3.5622, |
| "step": 1019392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.330261215713432e-05, |
| "loss": 3.5539, |
| "step": 1019904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.329422620962379e-05, |
| "loss": 3.557, |
| "step": 1020416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3285856640917006e-05, |
| "loss": 3.5723, |
| "step": 1020928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3277470693406486e-05, |
| "loss": 3.5556, |
| "step": 1021440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.326908474589596e-05, |
| "loss": 3.5799, |
| "step": 1021952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3260698798385446e-05, |
| "loss": 3.571, |
| "step": 1022464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3252329229678655e-05, |
| "loss": 3.5458, |
| "step": 1022976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.324394328216814e-05, |
| "loss": 3.5801, |
| "step": 1023488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3235557334657615e-05, |
| "loss": 3.5571, |
| "step": 1024000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3227171387147095e-05, |
| "loss": 3.5579, |
| "step": 1024512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.321880181844031e-05, |
| "loss": 3.5614, |
| "step": 1025024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.321041587092979e-05, |
| "loss": 3.5652, |
| "step": 1025536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3202029923419264e-05, |
| "loss": 3.5561, |
| "step": 1026048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3193643975908744e-05, |
| "loss": 3.5664, |
| "step": 1026560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.318527440720196e-05, |
| "loss": 3.5472, |
| "step": 1027072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.317688845969143e-05, |
| "loss": 3.5536, |
| "step": 1027584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.316850251218091e-05, |
| "loss": 3.5738, |
| "step": 1028096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.316011656467039e-05, |
| "loss": 3.5647, |
| "step": 1028608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.315174699596361e-05, |
| "loss": 3.5736, |
| "step": 1029120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.314336104845309e-05, |
| "loss": 3.5629, |
| "step": 1029632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.313497510094257e-05, |
| "loss": 3.5819, |
| "step": 1030144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.312658915343205e-05, |
| "loss": 3.5583, |
| "step": 1030656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3118219584725264e-05, |
| "loss": 3.5752, |
| "step": 1031168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.310983363721474e-05, |
| "loss": 3.5622, |
| "step": 1031680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.310144768970422e-05, |
| "loss": 3.5582, |
| "step": 1032192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.30930617421937e-05, |
| "loss": 3.5646, |
| "step": 1032704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3084692173486906e-05, |
| "loss": 3.582, |
| "step": 1033216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3076306225976386e-05, |
| "loss": 3.5629, |
| "step": 1033728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3067920278465866e-05, |
| "loss": 3.5733, |
| "step": 1034240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3059534330955346e-05, |
| "loss": 3.549, |
| "step": 1034752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.305116476224856e-05, |
| "loss": 3.5499, |
| "step": 1035264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.304277881473804e-05, |
| "loss": 3.558, |
| "step": 1035776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.303439286722752e-05, |
| "loss": 3.5632, |
| "step": 1036288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.3026006919717e-05, |
| "loss": 3.5584, |
| "step": 1036800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.301763735101021e-05, |
| "loss": 3.5618, |
| "step": 1037312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 3.300925140349969e-05, |
| "loss": 3.5594, |
| "step": 1037824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.300086545598917e-05, |
| "loss": 3.5623, |
| "step": 1038336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.299247950847865e-05, |
| "loss": 3.56, |
| "step": 1038848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.298410993977186e-05, |
| "loss": 3.5592, |
| "step": 1039360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.297572399226134e-05, |
| "loss": 3.5685, |
| "step": 1039872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.296733804475082e-05, |
| "loss": 3.5794, |
| "step": 1040384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2958968476044036e-05, |
| "loss": 3.5758, |
| "step": 1040896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2950582528533516e-05, |
| "loss": 3.5529, |
| "step": 1041408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2942196581022996e-05, |
| "loss": 3.5612, |
| "step": 1041920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2933810633512476e-05, |
| "loss": 3.5635, |
| "step": 1042432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2925441064805685e-05, |
| "loss": 3.5713, |
| "step": 1042944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2917055117295165e-05, |
| "loss": 3.5638, |
| "step": 1043456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2908669169784645e-05, |
| "loss": 3.5619, |
| "step": 1043968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2900283222274125e-05, |
| "loss": 3.5628, |
| "step": 1044480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2891913653567334e-05, |
| "loss": 3.5684, |
| "step": 1044992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2883527706056814e-05, |
| "loss": 3.5657, |
| "step": 1045504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2875141758546294e-05, |
| "loss": 3.5633, |
| "step": 1046016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2866755811035774e-05, |
| "loss": 3.5571, |
| "step": 1046528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.285838624232899e-05, |
| "loss": 3.5529, |
| "step": 1047040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.285000029481847e-05, |
| "loss": 3.5635, |
| "step": 1047552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.284161434730795e-05, |
| "loss": 3.5536, |
| "step": 1048064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.283322839979743e-05, |
| "loss": 3.5547, |
| "step": 1048576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.282485883109064e-05, |
| "loss": 3.5562, |
| "step": 1049088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.281647288358012e-05, |
| "loss": 3.5622, |
| "step": 1049600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.28080869360696e-05, |
| "loss": 3.556, |
| "step": 1050112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.279970098855908e-05, |
| "loss": 3.5668, |
| "step": 1050624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.279133141985229e-05, |
| "loss": 3.5741, |
| "step": 1051136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.278294547234177e-05, |
| "loss": 3.5666, |
| "step": 1051648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.277455952483125e-05, |
| "loss": 3.567, |
| "step": 1052160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.276617357732073e-05, |
| "loss": 3.5676, |
| "step": 1052672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.275780400861394e-05, |
| "loss": 3.5573, |
| "step": 1053184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.274941806110342e-05, |
| "loss": 3.5619, |
| "step": 1053696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.27410321135929e-05, |
| "loss": 3.5483, |
| "step": 1054208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.273264616608238e-05, |
| "loss": 3.5639, |
| "step": 1054720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.272427659737559e-05, |
| "loss": 3.5557, |
| "step": 1055232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.271589064986507e-05, |
| "loss": 3.568, |
| "step": 1055744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.270750470235455e-05, |
| "loss": 3.5616, |
| "step": 1056256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.269911875484403e-05, |
| "loss": 3.5653, |
| "step": 1056768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.269074918613724e-05, |
| "loss": 3.5575, |
| "step": 1057280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.268236323862672e-05, |
| "loss": 3.5571, |
| "step": 1057792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.26739772911162e-05, |
| "loss": 3.5643, |
| "step": 1058304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.266559134360568e-05, |
| "loss": 3.5669, |
| "step": 1058816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.26572217748989e-05, |
| "loss": 3.5742, |
| "step": 1059328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.264883582738838e-05, |
| "loss": 3.556, |
| "step": 1059840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.264044987987786e-05, |
| "loss": 3.5714, |
| "step": 1060352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.263206393236734e-05, |
| "loss": 3.561, |
| "step": 1060864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2623694363660546e-05, |
| "loss": 3.5638, |
| "step": 1061376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2615308416150026e-05, |
| "loss": 3.5698, |
| "step": 1061888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2606922468639506e-05, |
| "loss": 3.5599, |
| "step": 1062400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2598536521128986e-05, |
| "loss": 3.5573, |
| "step": 1062912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2590166952422195e-05, |
| "loss": 3.5731, |
| "step": 1063424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2581781004911675e-05, |
| "loss": 3.5574, |
| "step": 1063936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2573395057401155e-05, |
| "loss": 3.5575, |
| "step": 1064448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.2565009109890635e-05, |
| "loss": 3.5676, |
| "step": 1064960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.255663954118385e-05, |
| "loss": 3.5639, |
| "step": 1065472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.254825359367333e-05, |
| "loss": 3.5565, |
| "step": 1065984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.253986764616281e-05, |
| "loss": 3.5651, |
| "step": 1066496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.253148169865229e-05, |
| "loss": 3.5586, |
| "step": 1067008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.25231121299455e-05, |
| "loss": 3.5701, |
| "step": 1067520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 3.251472618243498e-05, |
| "loss": 3.5587, |
| "step": 1068032 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8380465507507324, |
| "eval_runtime": 361.7471, |
| "eval_samples_per_second": 1054.856, |
| "eval_steps_per_second": 32.965, |
| "step": 1068470 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.250634023492446e-05, |
| "loss": 3.5465, |
| "step": 1068544 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.249795428741394e-05, |
| "loss": 3.5505, |
| "step": 1069056 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.248956833990342e-05, |
| "loss": 3.5559, |
| "step": 1069568 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.24811823923929e-05, |
| "loss": 3.5711, |
| "step": 1070080 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.247281282368611e-05, |
| "loss": 3.5613, |
| "step": 1070592 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.246442687617559e-05, |
| "loss": 3.5584, |
| "step": 1071104 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.245604092866507e-05, |
| "loss": 3.5576, |
| "step": 1071616 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.244765498115455e-05, |
| "loss": 3.5596, |
| "step": 1072128 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2439285412447764e-05, |
| "loss": 3.5476, |
| "step": 1072640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2430899464937244e-05, |
| "loss": 3.5557, |
| "step": 1073152 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2422513517426724e-05, |
| "loss": 3.5599, |
| "step": 1073664 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2414127569916204e-05, |
| "loss": 3.5714, |
| "step": 1074176 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.240577438001314e-05, |
| "loss": 3.5634, |
| "step": 1074688 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.239738843250262e-05, |
| "loss": 3.5374, |
| "step": 1075200 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.23890024849921e-05, |
| "loss": 3.5559, |
| "step": 1075712 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.238061653748158e-05, |
| "loss": 3.5427, |
| "step": 1076224 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.237223058997106e-05, |
| "loss": 3.5601, |
| "step": 1076736 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.236384464246054e-05, |
| "loss": 3.5516, |
| "step": 1077248 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.235545869495002e-05, |
| "loss": 3.556, |
| "step": 1077760 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.23470727474395e-05, |
| "loss": 3.552, |
| "step": 1078272 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.233870317873272e-05, |
| "loss": 3.5743, |
| "step": 1078784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.23303172312222e-05, |
| "loss": 3.5546, |
| "step": 1079296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.232193128371168e-05, |
| "loss": 3.5573, |
| "step": 1079808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.231354533620116e-05, |
| "loss": 3.5503, |
| "step": 1080320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.230517576749437e-05, |
| "loss": 3.567, |
| "step": 1080832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.229678981998385e-05, |
| "loss": 3.553, |
| "step": 1081344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.228840387247333e-05, |
| "loss": 3.5488, |
| "step": 1081856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2280017924962807e-05, |
| "loss": 3.5556, |
| "step": 1082368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2271648356256016e-05, |
| "loss": 3.5552, |
| "step": 1082880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.2263262408745496e-05, |
| "loss": 3.5481, |
| "step": 1083392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2254876461234976e-05, |
| "loss": 3.5553, |
| "step": 1083904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2246490513724456e-05, |
| "loss": 3.5527, |
| "step": 1084416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.223812094501767e-05, |
| "loss": 3.5583, |
| "step": 1084928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.222973499750715e-05, |
| "loss": 3.5574, |
| "step": 1085440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.222134904999663e-05, |
| "loss": 3.5612, |
| "step": 1085952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.221296310248611e-05, |
| "loss": 3.5557, |
| "step": 1086464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.220459353377932e-05, |
| "loss": 3.5623, |
| "step": 1086976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.21962075862688e-05, |
| "loss": 3.54, |
| "step": 1087488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.218782163875828e-05, |
| "loss": 3.5491, |
| "step": 1088000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2179435691247753e-05, |
| "loss": 3.5522, |
| "step": 1088512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.217106612254097e-05, |
| "loss": 3.5407, |
| "step": 1089024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.216268017503045e-05, |
| "loss": 3.5492, |
| "step": 1089536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.215429422751993e-05, |
| "loss": 3.558, |
| "step": 1090048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2145924658813145e-05, |
| "loss": 3.5495, |
| "step": 1090560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2137538711302625e-05, |
| "loss": 3.5597, |
| "step": 1091072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2129152763792105e-05, |
| "loss": 3.5558, |
| "step": 1091584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.2120766816281585e-05, |
| "loss": 3.558, |
| "step": 1092096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.211238086877106e-05, |
| "loss": 3.5519, |
| "step": 1092608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.210399492126054e-05, |
| "loss": 3.5387, |
| "step": 1093120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.209560897375002e-05, |
| "loss": 3.5548, |
| "step": 1093632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.20872230262395e-05, |
| "loss": 3.5528, |
| "step": 1094144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.207885345753271e-05, |
| "loss": 3.5565, |
| "step": 1094656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.207046751002219e-05, |
| "loss": 3.5491, |
| "step": 1095168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.206208156251167e-05, |
| "loss": 3.5457, |
| "step": 1095680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.205369561500115e-05, |
| "loss": 3.5387, |
| "step": 1096192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.204532604629436e-05, |
| "loss": 3.5409, |
| "step": 1096704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.203694009878384e-05, |
| "loss": 3.5554, |
| "step": 1097216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.202855415127332e-05, |
| "loss": 3.5388, |
| "step": 1097728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.20201682037628e-05, |
| "loss": 3.5621, |
| "step": 1098240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.201179863505601e-05, |
| "loss": 3.5552, |
| "step": 1098752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.200341268754549e-05, |
| "loss": 3.5309, |
| "step": 1099264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.199502674003497e-05, |
| "loss": 3.5621, |
| "step": 1099776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.198664079252445e-05, |
| "loss": 3.5397, |
| "step": 1100288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.197827122381766e-05, |
| "loss": 3.5456, |
| "step": 1100800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.196988527630714e-05, |
| "loss": 3.5415, |
| "step": 1101312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.196149932879662e-05, |
| "loss": 3.549, |
| "step": 1101824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.19531133812861e-05, |
| "loss": 3.5412, |
| "step": 1102336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1944743812579317e-05, |
| "loss": 3.552, |
| "step": 1102848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1936357865068797e-05, |
| "loss": 3.5273, |
| "step": 1103360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1927971917558276e-05, |
| "loss": 3.5401, |
| "step": 1103872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1919585970047756e-05, |
| "loss": 3.5554, |
| "step": 1104384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1911216401340966e-05, |
| "loss": 3.5484, |
| "step": 1104896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1902846832634175e-05, |
| "loss": 3.5559, |
| "step": 1105408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1894460885123655e-05, |
| "loss": 3.551, |
| "step": 1105920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1886074937613135e-05, |
| "loss": 3.5643, |
| "step": 1106432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1877688990102614e-05, |
| "loss": 3.5446, |
| "step": 1106944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1869303042592094e-05, |
| "loss": 3.5653, |
| "step": 1107456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1860917095081574e-05, |
| "loss": 3.5434, |
| "step": 1107968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.1852531147571054e-05, |
| "loss": 3.5416, |
| "step": 1108480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.184414520006054e-05, |
| "loss": 3.5504, |
| "step": 1108992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.183577563135375e-05, |
| "loss": 3.5607, |
| "step": 1109504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.182738968384323e-05, |
| "loss": 3.5511, |
| "step": 1110016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.181900373633271e-05, |
| "loss": 3.5567, |
| "step": 1110528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.181061778882219e-05, |
| "loss": 3.5357, |
| "step": 1111040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.18022482201154e-05, |
| "loss": 3.5299, |
| "step": 1111552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.179386227260488e-05, |
| "loss": 3.5431, |
| "step": 1112064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.178547632509436e-05, |
| "loss": 3.5495, |
| "step": 1112576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.177709037758384e-05, |
| "loss": 3.5421, |
| "step": 1113088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.176872080887705e-05, |
| "loss": 3.5445, |
| "step": 1113600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.176033486136653e-05, |
| "loss": 3.5465, |
| "step": 1114112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.175194891385601e-05, |
| "loss": 3.5458, |
| "step": 1114624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1743562966345495e-05, |
| "loss": 3.5428, |
| "step": 1115136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1735193397638704e-05, |
| "loss": 3.5424, |
| "step": 1115648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1726807450128184e-05, |
| "loss": 3.5536, |
| "step": 1116160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1718421502617664e-05, |
| "loss": 3.5639, |
| "step": 1116672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.171005193391087e-05, |
| "loss": 3.5571, |
| "step": 1117184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.170166598640035e-05, |
| "loss": 3.5374, |
| "step": 1117696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.169328003888983e-05, |
| "loss": 3.5444, |
| "step": 1118208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.168489409137931e-05, |
| "loss": 3.5469, |
| "step": 1118720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.167652452267252e-05, |
| "loss": 3.5534, |
| "step": 1119232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1668138575162e-05, |
| "loss": 3.5507, |
| "step": 1119744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.165975262765148e-05, |
| "loss": 3.544, |
| "step": 1120256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.165136668014096e-05, |
| "loss": 3.5462, |
| "step": 1120768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.164299711143418e-05, |
| "loss": 3.5504, |
| "step": 1121280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.163461116392366e-05, |
| "loss": 3.552, |
| "step": 1121792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.162622521641314e-05, |
| "loss": 3.5453, |
| "step": 1122304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.161783926890262e-05, |
| "loss": 3.5427, |
| "step": 1122816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1609469700195827e-05, |
| "loss": 3.536, |
| "step": 1123328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1601083752685306e-05, |
| "loss": 3.5467, |
| "step": 1123840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1592697805174786e-05, |
| "loss": 3.5347, |
| "step": 1124352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1584311857664266e-05, |
| "loss": 3.5394, |
| "step": 1124864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1575942288957475e-05, |
| "loss": 3.542, |
| "step": 1125376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1567556341446955e-05, |
| "loss": 3.5473, |
| "step": 1125888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1559170393936435e-05, |
| "loss": 3.5418, |
| "step": 1126400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1550784446425915e-05, |
| "loss": 3.5495, |
| "step": 1126912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.154241487771913e-05, |
| "loss": 3.5559, |
| "step": 1127424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.153402893020861e-05, |
| "loss": 3.5541, |
| "step": 1127936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.152564298269809e-05, |
| "loss": 3.5517, |
| "step": 1128448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.151725703518757e-05, |
| "loss": 3.5492, |
| "step": 1128960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.150888746648078e-05, |
| "loss": 3.5437, |
| "step": 1129472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.150050151897026e-05, |
| "loss": 3.544, |
| "step": 1129984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.149211557145974e-05, |
| "loss": 3.5294, |
| "step": 1130496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.148372962394922e-05, |
| "loss": 3.5498, |
| "step": 1131008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.147536005524243e-05, |
| "loss": 3.5421, |
| "step": 1131520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.146697410773191e-05, |
| "loss": 3.5528, |
| "step": 1132032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.145858816022139e-05, |
| "loss": 3.543, |
| "step": 1132544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.145020221271087e-05, |
| "loss": 3.5522, |
| "step": 1133056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1441832644004085e-05, |
| "loss": 3.5413, |
| "step": 1133568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1433446696493565e-05, |
| "loss": 3.5424, |
| "step": 1134080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1425060748983045e-05, |
| "loss": 3.5502, |
| "step": 1134592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1416674801472525e-05, |
| "loss": 3.5487, |
| "step": 1135104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1408305232765734e-05, |
| "loss": 3.5582, |
| "step": 1135616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1399919285255214e-05, |
| "loss": 3.5424, |
| "step": 1136128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1391533337744694e-05, |
| "loss": 3.5522, |
| "step": 1136640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1383147390234174e-05, |
| "loss": 3.5487, |
| "step": 1137152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.137477782152738e-05, |
| "loss": 3.5452, |
| "step": 1137664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.136639187401686e-05, |
| "loss": 3.5549, |
| "step": 1138176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.135800592650634e-05, |
| "loss": 3.5429, |
| "step": 1138688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.134961997899582e-05, |
| "loss": 3.5476, |
| "step": 1139200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.134125041028904e-05, |
| "loss": 3.5546, |
| "step": 1139712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.133286446277852e-05, |
| "loss": 3.5418, |
| "step": 1140224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1324478515268e-05, |
| "loss": 3.5417, |
| "step": 1140736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.131609256775748e-05, |
| "loss": 3.5506, |
| "step": 1141248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.130772299905069e-05, |
| "loss": 3.5433, |
| "step": 1141760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.129933705154017e-05, |
| "loss": 3.5411, |
| "step": 1142272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.129095110402965e-05, |
| "loss": 3.5475, |
| "step": 1142784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.128256515651913e-05, |
| "loss": 3.5406, |
| "step": 1143296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1274195587812336e-05, |
| "loss": 3.5602, |
| "step": 1143808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1265809640301816e-05, |
| "loss": 3.541, |
| "step": 1144320 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8387787342071533, |
| "eval_runtime": 306.01, |
| "eval_samples_per_second": 1246.989, |
| "eval_steps_per_second": 38.969, |
| "step": 1144790 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1257423692791296e-05, |
| "loss": 3.507, |
| "step": 1144832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1249037745280776e-05, |
| "loss": 3.5435, |
| "step": 1145344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1240651797770256e-05, |
| "loss": 3.5326, |
| "step": 1145856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1232265850259736e-05, |
| "loss": 3.5574, |
| "step": 1146368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.122389628155295e-05, |
| "loss": 3.5447, |
| "step": 1146880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.121551033404243e-05, |
| "loss": 3.5484, |
| "step": 1147392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1207124386531905e-05, |
| "loss": 3.541, |
| "step": 1147904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1198738439021385e-05, |
| "loss": 3.5432, |
| "step": 1148416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.11903688703146e-05, |
| "loss": 3.533, |
| "step": 1148928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.118198292280408e-05, |
| "loss": 3.5402, |
| "step": 1149440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1173596975293554e-05, |
| "loss": 3.5419, |
| "step": 1149952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1165211027783034e-05, |
| "loss": 3.5546, |
| "step": 1150464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.115685783787998e-05, |
| "loss": 3.5498, |
| "step": 1150976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1148471890369466e-05, |
| "loss": 3.5237, |
| "step": 1151488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1140085942858946e-05, |
| "loss": 3.539, |
| "step": 1152000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1131699995348426e-05, |
| "loss": 3.5304, |
| "step": 1152512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1123314047837906e-05, |
| "loss": 3.5409, |
| "step": 1153024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.111492810032738e-05, |
| "loss": 3.5345, |
| "step": 1153536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.110654215281686e-05, |
| "loss": 3.5399, |
| "step": 1154048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.109815620530634e-05, |
| "loss": 3.5367, |
| "step": 1154560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1089786636599555e-05, |
| "loss": 3.5599, |
| "step": 1155072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.108140068908903e-05, |
| "loss": 3.5447, |
| "step": 1155584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.107301474157851e-05, |
| "loss": 3.5357, |
| "step": 1156096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.106462879406799e-05, |
| "loss": 3.5354, |
| "step": 1156608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1056259225361204e-05, |
| "loss": 3.5542, |
| "step": 1157120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1047873277850684e-05, |
| "loss": 3.5374, |
| "step": 1157632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1039487330340164e-05, |
| "loss": 3.5326, |
| "step": 1158144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1031101382829644e-05, |
| "loss": 3.541, |
| "step": 1158656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.102273181412285e-05, |
| "loss": 3.5361, |
| "step": 1159168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.101434586661233e-05, |
| "loss": 3.5332, |
| "step": 1159680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.100595991910181e-05, |
| "loss": 3.5403, |
| "step": 1160192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.099757397159129e-05, |
| "loss": 3.5373, |
| "step": 1160704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.09892044028845e-05, |
| "loss": 3.5428, |
| "step": 1161216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.098081845537398e-05, |
| "loss": 3.5432, |
| "step": 1161728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.097243250786346e-05, |
| "loss": 3.5449, |
| "step": 1162240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.096404656035294e-05, |
| "loss": 3.5403, |
| "step": 1162752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.095567699164616e-05, |
| "loss": 3.5455, |
| "step": 1163264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.094729104413564e-05, |
| "loss": 3.5267, |
| "step": 1163776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.093890509662512e-05, |
| "loss": 3.5339, |
| "step": 1164288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.09305191491146e-05, |
| "loss": 3.5358, |
| "step": 1164800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0922149580407806e-05, |
| "loss": 3.5287, |
| "step": 1165312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0913763632897286e-05, |
| "loss": 3.5337, |
| "step": 1165824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0905377685386766e-05, |
| "loss": 3.5433, |
| "step": 1166336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0897008116679975e-05, |
| "loss": 3.5334, |
| "step": 1166848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0888622169169455e-05, |
| "loss": 3.5413, |
| "step": 1167360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0880236221658935e-05, |
| "loss": 3.5417, |
| "step": 1167872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0871850274148415e-05, |
| "loss": 3.5414, |
| "step": 1168384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0863464326637895e-05, |
| "loss": 3.5381, |
| "step": 1168896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0855078379127375e-05, |
| "loss": 3.5221, |
| "step": 1169408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0846692431616855e-05, |
| "loss": 3.5379, |
| "step": 1169920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.083830648410634e-05, |
| "loss": 3.5396, |
| "step": 1170432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.082993691539955e-05, |
| "loss": 3.5388, |
| "step": 1170944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.082155096788903e-05, |
| "loss": 3.5369, |
| "step": 1171456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.081316502037851e-05, |
| "loss": 3.5281, |
| "step": 1171968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.080477907286799e-05, |
| "loss": 3.5209, |
| "step": 1172480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.07964095041612e-05, |
| "loss": 3.527, |
| "step": 1172992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.078802355665068e-05, |
| "loss": 3.5397, |
| "step": 1173504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.077963760914016e-05, |
| "loss": 3.5281, |
| "step": 1174016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.077125166162964e-05, |
| "loss": 3.5435, |
| "step": 1174528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.076288209292285e-05, |
| "loss": 3.5385, |
| "step": 1175040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.075449614541233e-05, |
| "loss": 3.5232, |
| "step": 1175552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.074611019790181e-05, |
| "loss": 3.5458, |
| "step": 1176064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0737724250391295e-05, |
| "loss": 3.5229, |
| "step": 1176576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0729354681684505e-05, |
| "loss": 3.5285, |
| "step": 1177088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0720968734173985e-05, |
| "loss": 3.5264, |
| "step": 1177600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0712582786663465e-05, |
| "loss": 3.5377, |
| "step": 1178112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0704196839152944e-05, |
| "loss": 3.5235, |
| "step": 1178624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0695827270446154e-05, |
| "loss": 3.5376, |
| "step": 1179136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0687441322935634e-05, |
| "loss": 3.5136, |
| "step": 1179648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0679055375425113e-05, |
| "loss": 3.5241, |
| "step": 1180160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0670669427914593e-05, |
| "loss": 3.5408, |
| "step": 1180672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.06622998592078e-05, |
| "loss": 3.5328, |
| "step": 1181184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.065391391169728e-05, |
| "loss": 3.5375, |
| "step": 1181696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.064552796418676e-05, |
| "loss": 3.5385, |
| "step": 1182208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.063714201667624e-05, |
| "loss": 3.5479, |
| "step": 1182720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.062877244796946e-05, |
| "loss": 3.5304, |
| "step": 1183232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.062038650045894e-05, |
| "loss": 3.5492, |
| "step": 1183744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.061200055294842e-05, |
| "loss": 3.5268, |
| "step": 1184256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.06036146054379e-05, |
| "loss": 3.5257, |
| "step": 1184768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.059524503673111e-05, |
| "loss": 3.5358, |
| "step": 1185280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.058685908922059e-05, |
| "loss": 3.5425, |
| "step": 1185792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.057847314171007e-05, |
| "loss": 3.537, |
| "step": 1186304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.057008719419955e-05, |
| "loss": 3.541, |
| "step": 1186816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0561717625492756e-05, |
| "loss": 3.5228, |
| "step": 1187328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0553331677982236e-05, |
| "loss": 3.5144, |
| "step": 1187840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0544945730471716e-05, |
| "loss": 3.5274, |
| "step": 1188352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.053657616176493e-05, |
| "loss": 3.5342, |
| "step": 1188864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.052819021425441e-05, |
| "loss": 3.5268, |
| "step": 1189376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.051980426674389e-05, |
| "loss": 3.5273, |
| "step": 1189888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0511418319233372e-05, |
| "loss": 3.5352, |
| "step": 1190400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.050304875052658e-05, |
| "loss": 3.5266, |
| "step": 1190912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.049466280301606e-05, |
| "loss": 3.528, |
| "step": 1191424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.048627685550554e-05, |
| "loss": 3.5281, |
| "step": 1191936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.047789090799502e-05, |
| "loss": 3.5378, |
| "step": 1192448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0469521339288233e-05, |
| "loss": 3.548, |
| "step": 1192960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0461135391777713e-05, |
| "loss": 3.5436, |
| "step": 1193472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0452749444267193e-05, |
| "loss": 3.5189, |
| "step": 1193984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0444363496756673e-05, |
| "loss": 3.532, |
| "step": 1194496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0435993928049882e-05, |
| "loss": 3.5319, |
| "step": 1195008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0427607980539362e-05, |
| "loss": 3.5359, |
| "step": 1195520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0419222033028842e-05, |
| "loss": 3.5368, |
| "step": 1196032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0410836085518326e-05, |
| "loss": 3.5276, |
| "step": 1196544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0402466516811535e-05, |
| "loss": 3.5306, |
| "step": 1197056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0394080569301015e-05, |
| "loss": 3.5364, |
| "step": 1197568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0385694621790495e-05, |
| "loss": 3.5355, |
| "step": 1198080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0377308674279974e-05, |
| "loss": 3.5322, |
| "step": 1198592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0368939105573187e-05, |
| "loss": 3.5263, |
| "step": 1199104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0360553158062667e-05, |
| "loss": 3.5214, |
| "step": 1199616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0352167210552147e-05, |
| "loss": 3.5344, |
| "step": 1200128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0343781263041627e-05, |
| "loss": 3.5196, |
| "step": 1200640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0335411694334836e-05, |
| "loss": 3.5226, |
| "step": 1201152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0327025746824316e-05, |
| "loss": 3.5257, |
| "step": 1201664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0318639799313796e-05, |
| "loss": 3.5363, |
| "step": 1202176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.031025385180328e-05, |
| "loss": 3.5283, |
| "step": 1202688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0301884283096488e-05, |
| "loss": 3.5292, |
| "step": 1203200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0293498335585968e-05, |
| "loss": 3.5454, |
| "step": 1203712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0285112388075448e-05, |
| "loss": 3.5388, |
| "step": 1204224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0276726440564928e-05, |
| "loss": 3.5351, |
| "step": 1204736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.026835687185814e-05, |
| "loss": 3.5342, |
| "step": 1205248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.025997092434762e-05, |
| "loss": 3.5301, |
| "step": 1205760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.02515849768371e-05, |
| "loss": 3.5278, |
| "step": 1206272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0243199029326574e-05, |
| "loss": 3.517, |
| "step": 1206784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.023482946061979e-05, |
| "loss": 3.5326, |
| "step": 1207296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.022644351310927e-05, |
| "loss": 3.5294, |
| "step": 1207808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.021805756559875e-05, |
| "loss": 3.5332, |
| "step": 1208320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0209671618088226e-05, |
| "loss": 3.5282, |
| "step": 1208832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0201302049381442e-05, |
| "loss": 3.5347, |
| "step": 1209344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0192916101870922e-05, |
| "loss": 3.5298, |
| "step": 1209856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0184530154360402e-05, |
| "loss": 3.527, |
| "step": 1210368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.017614420684988e-05, |
| "loss": 3.5357, |
| "step": 1210880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.016777463814309e-05, |
| "loss": 3.5338, |
| "step": 1211392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0159388690632574e-05, |
| "loss": 3.5452, |
| "step": 1211904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0151002743122047e-05, |
| "loss": 3.5295, |
| "step": 1212416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0142616795611527e-05, |
| "loss": 3.5365, |
| "step": 1212928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0134247226904743e-05, |
| "loss": 3.5351, |
| "step": 1213440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0125861279394223e-05, |
| "loss": 3.5316, |
| "step": 1213952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.01174753318837e-05, |
| "loss": 3.5347, |
| "step": 1214464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.010908938437318e-05, |
| "loss": 3.5315, |
| "step": 1214976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0100719815666396e-05, |
| "loss": 3.5334, |
| "step": 1215488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0092333868155876e-05, |
| "loss": 3.5392, |
| "step": 1216000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.008394792064535e-05, |
| "loss": 3.5285, |
| "step": 1216512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.007556197313483e-05, |
| "loss": 3.5252, |
| "step": 1217024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0067192404428045e-05, |
| "loss": 3.532, |
| "step": 1217536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.005880645691752e-05, |
| "loss": 3.53, |
| "step": 1218048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0050420509407e-05, |
| "loss": 3.5271, |
| "step": 1218560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.004203456189648e-05, |
| "loss": 3.5367, |
| "step": 1219072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0033664993189697e-05, |
| "loss": 3.5259, |
| "step": 1219584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0025279045679173e-05, |
| "loss": 3.5423, |
| "step": 1220096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.0016893098168653e-05, |
| "loss": 3.5279, |
| "step": 1220608 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.840581178665161, |
| "eval_runtime": 302.771, |
| "eval_samples_per_second": 1260.329, |
| "eval_steps_per_second": 39.386, |
| "step": 1221110 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.0008507150658133e-05, |
| "loss": 3.4931, |
| "step": 1221120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.0000121203147613e-05, |
| "loss": 3.5226, |
| "step": 1221632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9991735255637093e-05, |
| "loss": 3.5179, |
| "step": 1222144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9983349308126573e-05, |
| "loss": 3.5394, |
| "step": 1222656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9974979739419782e-05, |
| "loss": 3.5326, |
| "step": 1223168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9966593791909266e-05, |
| "loss": 3.532, |
| "step": 1223680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9958207844398746e-05, |
| "loss": 3.5274, |
| "step": 1224192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9949821896888226e-05, |
| "loss": 3.5291, |
| "step": 1224704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9941452328181435e-05, |
| "loss": 3.5144, |
| "step": 1225216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9933066380670915e-05, |
| "loss": 3.5279, |
| "step": 1225728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9924680433160395e-05, |
| "loss": 3.5263, |
| "step": 1226240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9916294485649875e-05, |
| "loss": 3.5438, |
| "step": 1226752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9907941295746823e-05, |
| "loss": 3.5387, |
| "step": 1227264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9899555348236296e-05, |
| "loss": 3.5098, |
| "step": 1227776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9891169400725776e-05, |
| "loss": 3.5223, |
| "step": 1228288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9882783453215256e-05, |
| "loss": 3.5196, |
| "step": 1228800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9874397505704736e-05, |
| "loss": 3.5227, |
| "step": 1229312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.986601155819422e-05, |
| "loss": 3.5255, |
| "step": 1229824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.98576256106837e-05, |
| "loss": 3.5293, |
| "step": 1230336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.984923966317318e-05, |
| "loss": 3.5198, |
| "step": 1230848 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.984087009446639e-05, |
| "loss": 3.5442, |
| "step": 1231360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.983248414695587e-05, |
| "loss": 3.529, |
| "step": 1231872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9824098199445348e-05, |
| "loss": 3.5228, |
| "step": 1232384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9815712251934828e-05, |
| "loss": 3.5191, |
| "step": 1232896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.980734268322804e-05, |
| "loss": 3.5414, |
| "step": 1233408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.979895673571752e-05, |
| "loss": 3.5204, |
| "step": 1233920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9790570788207e-05, |
| "loss": 3.521, |
| "step": 1234432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.978218484069648e-05, |
| "loss": 3.5284, |
| "step": 1234944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.977381527198969e-05, |
| "loss": 3.5205, |
| "step": 1235456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9765429324479173e-05, |
| "loss": 3.5213, |
| "step": 1235968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9757043376968653e-05, |
| "loss": 3.5245, |
| "step": 1236480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9748657429458133e-05, |
| "loss": 3.5236, |
| "step": 1236992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9740287860751342e-05, |
| "loss": 3.526, |
| "step": 1237504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9731901913240822e-05, |
| "loss": 3.5265, |
| "step": 1238016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9723515965730302e-05, |
| "loss": 3.5295, |
| "step": 1238528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9715130018219782e-05, |
| "loss": 3.5265, |
| "step": 1239040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9706760449512994e-05, |
| "loss": 3.5312, |
| "step": 1239552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9698374502002474e-05, |
| "loss": 3.5137, |
| "step": 1240064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9689988554491954e-05, |
| "loss": 3.521, |
| "step": 1240576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9681602606981434e-05, |
| "loss": 3.5151, |
| "step": 1241088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9673233038274643e-05, |
| "loss": 3.5188, |
| "step": 1241600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9664847090764127e-05, |
| "loss": 3.5153, |
| "step": 1242112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9656461143253607e-05, |
| "loss": 3.5303, |
| "step": 1242624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9648091574546816e-05, |
| "loss": 3.5187, |
| "step": 1243136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9639705627036296e-05, |
| "loss": 3.5278, |
| "step": 1243648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9631319679525776e-05, |
| "loss": 3.5284, |
| "step": 1244160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9622933732015256e-05, |
| "loss": 3.5293, |
| "step": 1244672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9614547784504736e-05, |
| "loss": 3.5215, |
| "step": 1245184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.960616183699422e-05, |
| "loss": 3.5071, |
| "step": 1245696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.95977758894837e-05, |
| "loss": 3.5203, |
| "step": 1246208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.958938994197318e-05, |
| "loss": 3.5296, |
| "step": 1246720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9581020373266388e-05, |
| "loss": 3.5245, |
| "step": 1247232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9572634425755868e-05, |
| "loss": 3.5176, |
| "step": 1247744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9564248478245348e-05, |
| "loss": 3.5151, |
| "step": 1248256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9555862530734828e-05, |
| "loss": 3.513, |
| "step": 1248768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.954749296202804e-05, |
| "loss": 3.5069, |
| "step": 1249280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.953910701451752e-05, |
| "loss": 3.5288, |
| "step": 1249792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9530721067007e-05, |
| "loss": 3.5117, |
| "step": 1250304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.952233511949648e-05, |
| "loss": 3.5259, |
| "step": 1250816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.951396555078969e-05, |
| "loss": 3.5237, |
| "step": 1251328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9505579603279173e-05, |
| "loss": 3.5056, |
| "step": 1251840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9497193655768653e-05, |
| "loss": 3.5324, |
| "step": 1252352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9488807708258126e-05, |
| "loss": 3.5106, |
| "step": 1252864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.948043813955134e-05, |
| "loss": 3.5187, |
| "step": 1253376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.947205219204082e-05, |
| "loss": 3.5084, |
| "step": 1253888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.94636662445303e-05, |
| "loss": 3.5265, |
| "step": 1254400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9455280297019778e-05, |
| "loss": 3.5114, |
| "step": 1254912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9446910728312994e-05, |
| "loss": 3.5171, |
| "step": 1255424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9438524780802474e-05, |
| "loss": 3.5013, |
| "step": 1255936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9430138833291954e-05, |
| "loss": 3.5094, |
| "step": 1256448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9421752885781427e-05, |
| "loss": 3.5213, |
| "step": 1256960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9413383317074643e-05, |
| "loss": 3.5254, |
| "step": 1257472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9404997369564123e-05, |
| "loss": 3.5207, |
| "step": 1257984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.93966114220536e-05, |
| "loss": 3.5235, |
| "step": 1258496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.938822547454308e-05, |
| "loss": 3.5342, |
| "step": 1259008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9379855905836295e-05, |
| "loss": 3.5166, |
| "step": 1259520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9371469958325775e-05, |
| "loss": 3.5343, |
| "step": 1260032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9363084010815252e-05, |
| "loss": 3.5146, |
| "step": 1260544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9354698063304732e-05, |
| "loss": 3.5134, |
| "step": 1261056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9346328494597948e-05, |
| "loss": 3.5204, |
| "step": 1261568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9337942547087428e-05, |
| "loss": 3.5278, |
| "step": 1262080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.93295565995769e-05, |
| "loss": 3.522, |
| "step": 1262592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.932117065206638e-05, |
| "loss": 3.53, |
| "step": 1263104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9312801083359597e-05, |
| "loss": 3.5123, |
| "step": 1263616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9304415135849073e-05, |
| "loss": 3.4968, |
| "step": 1264128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9296029188338553e-05, |
| "loss": 3.5143, |
| "step": 1264640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9287643240828033e-05, |
| "loss": 3.5182, |
| "step": 1265152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.927927367212125e-05, |
| "loss": 3.5104, |
| "step": 1265664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9270887724610725e-05, |
| "loss": 3.5117, |
| "step": 1266176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.9262501777100205e-05, |
| "loss": 3.5223, |
| "step": 1266688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9254115829589685e-05, |
| "loss": 3.5139, |
| "step": 1267200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.92457462608829e-05, |
| "loss": 3.5145, |
| "step": 1267712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9237360313372374e-05, |
| "loss": 3.515, |
| "step": 1268224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9228974365861854e-05, |
| "loss": 3.5209, |
| "step": 1268736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.922060479715507e-05, |
| "loss": 3.5339, |
| "step": 1269248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9212218849644547e-05, |
| "loss": 3.5288, |
| "step": 1269760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9203832902134027e-05, |
| "loss": 3.5053, |
| "step": 1270272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9195446954623507e-05, |
| "loss": 3.5142, |
| "step": 1270784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9187077385916723e-05, |
| "loss": 3.5198, |
| "step": 1271296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9178691438406196e-05, |
| "loss": 3.5229, |
| "step": 1271808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9170305490895676e-05, |
| "loss": 3.5228, |
| "step": 1272320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.916191954338516e-05, |
| "loss": 3.5146, |
| "step": 1272832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9153549974678375e-05, |
| "loss": 3.5129, |
| "step": 1273344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9145164027167848e-05, |
| "loss": 3.523, |
| "step": 1273856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9136778079657328e-05, |
| "loss": 3.5244, |
| "step": 1274368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9128392132146808e-05, |
| "loss": 3.5128, |
| "step": 1274880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.912002256344002e-05, |
| "loss": 3.5176, |
| "step": 1275392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.91116366159295e-05, |
| "loss": 3.5101, |
| "step": 1275904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.910325066841898e-05, |
| "loss": 3.5136, |
| "step": 1276416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.909486472090846e-05, |
| "loss": 3.5075, |
| "step": 1276928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.908649515220167e-05, |
| "loss": 3.5084, |
| "step": 1277440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.907810920469115e-05, |
| "loss": 3.5084, |
| "step": 1277952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.906972325718063e-05, |
| "loss": 3.5213, |
| "step": 1278464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9061337309670113e-05, |
| "loss": 3.5149, |
| "step": 1278976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9052967740963322e-05, |
| "loss": 3.5147, |
| "step": 1279488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9044581793452802e-05, |
| "loss": 3.5297, |
| "step": 1280000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9036195845942282e-05, |
| "loss": 3.5262, |
| "step": 1280512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9027809898431762e-05, |
| "loss": 3.5205, |
| "step": 1281024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9019440329724974e-05, |
| "loss": 3.5214, |
| "step": 1281536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9011054382214454e-05, |
| "loss": 3.5153, |
| "step": 1282048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9002668434703934e-05, |
| "loss": 3.5109, |
| "step": 1282560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8994282487193414e-05, |
| "loss": 3.5066, |
| "step": 1283072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8985912918486623e-05, |
| "loss": 3.5124, |
| "step": 1283584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8977526970976103e-05, |
| "loss": 3.5158, |
| "step": 1284096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8969141023465583e-05, |
| "loss": 3.52, |
| "step": 1284608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8960755075955066e-05, |
| "loss": 3.5137, |
| "step": 1285120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8952385507248276e-05, |
| "loss": 3.5217, |
| "step": 1285632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8943999559737756e-05, |
| "loss": 3.518, |
| "step": 1286144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8935613612227235e-05, |
| "loss": 3.5161, |
| "step": 1286656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8927227664716715e-05, |
| "loss": 3.5117, |
| "step": 1287168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8918858096009928e-05, |
| "loss": 3.5276, |
| "step": 1287680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8910472148499408e-05, |
| "loss": 3.5244, |
| "step": 1288192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8902086200988888e-05, |
| "loss": 3.5174, |
| "step": 1288704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8893700253478368e-05, |
| "loss": 3.5175, |
| "step": 1289216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8885330684771577e-05, |
| "loss": 3.524, |
| "step": 1289728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8876944737261057e-05, |
| "loss": 3.5189, |
| "step": 1290240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8868558789750537e-05, |
| "loss": 3.5206, |
| "step": 1290752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.886017284224002e-05, |
| "loss": 3.5136, |
| "step": 1291264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.885180327353323e-05, |
| "loss": 3.5194, |
| "step": 1291776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.884341732602271e-05, |
| "loss": 3.5237, |
| "step": 1292288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.883503137851219e-05, |
| "loss": 3.5134, |
| "step": 1292800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.882664543100167e-05, |
| "loss": 3.511, |
| "step": 1293312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.881827586229488e-05, |
| "loss": 3.5174, |
| "step": 1293824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.880988991478436e-05, |
| "loss": 3.5183, |
| "step": 1294336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.880150396727384e-05, |
| "loss": 3.507, |
| "step": 1294848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.879311801976332e-05, |
| "loss": 3.5238, |
| "step": 1295360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.878474845105653e-05, |
| "loss": 3.5157, |
| "step": 1295872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.877636250354601e-05, |
| "loss": 3.5248, |
| "step": 1296384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.876797655603549e-05, |
| "loss": 3.5187, |
| "step": 1296896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8759590608524974e-05, |
| "loss": 3.5065, |
| "step": 1297408 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8407561779022217, |
| "eval_runtime": 321.091, |
| "eval_samples_per_second": 1188.42, |
| "eval_steps_per_second": 37.139, |
| "step": 1297430 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8751221039818183e-05, |
| "loss": 3.5096, |
| "step": 1297920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8742835092307663e-05, |
| "loss": 3.506, |
| "step": 1298432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8734449144797143e-05, |
| "loss": 3.5259, |
| "step": 1298944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8726063197286623e-05, |
| "loss": 3.5152, |
| "step": 1299456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8717693628579835e-05, |
| "loss": 3.5207, |
| "step": 1299968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8709307681069315e-05, |
| "loss": 3.5165, |
| "step": 1300480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8700921733558795e-05, |
| "loss": 3.51, |
| "step": 1300992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8692535786048275e-05, |
| "loss": 3.5026, |
| "step": 1301504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8684166217341484e-05, |
| "loss": 3.5168, |
| "step": 1302016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8675780269830964e-05, |
| "loss": 3.507, |
| "step": 1302528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8667394322320444e-05, |
| "loss": 3.531, |
| "step": 1303040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8659024753613657e-05, |
| "loss": 3.516, |
| "step": 1303552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8650655184906866e-05, |
| "loss": 3.5042, |
| "step": 1304064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8642269237396346e-05, |
| "loss": 3.5033, |
| "step": 1304576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8633883289885826e-05, |
| "loss": 3.5093, |
| "step": 1305088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8625497342375306e-05, |
| "loss": 3.503, |
| "step": 1305600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.861711139486479e-05, |
| "loss": 3.5129, |
| "step": 1306112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.860872544735427e-05, |
| "loss": 3.5149, |
| "step": 1306624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.860033949984375e-05, |
| "loss": 3.5027, |
| "step": 1307136 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8591969931136958e-05, |
| "loss": 3.5333, |
| "step": 1307648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8583583983626438e-05, |
| "loss": 3.5131, |
| "step": 1308160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8575198036115918e-05, |
| "loss": 3.5123, |
| "step": 1308672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8566812088605398e-05, |
| "loss": 3.5059, |
| "step": 1309184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.855844251989861e-05, |
| "loss": 3.5248, |
| "step": 1309696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.855005657238809e-05, |
| "loss": 3.5051, |
| "step": 1310208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.854167062487757e-05, |
| "loss": 3.5115, |
| "step": 1310720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.853328467736705e-05, |
| "loss": 3.5097, |
| "step": 1311232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.852491510866026e-05, |
| "loss": 3.51, |
| "step": 1311744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.8516529161149743e-05, |
| "loss": 3.5048, |
| "step": 1312256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8508143213639223e-05, |
| "loss": 3.5107, |
| "step": 1312768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8499757266128702e-05, |
| "loss": 3.5129, |
| "step": 1313280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.849138769742191e-05, |
| "loss": 3.5095, |
| "step": 1313792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.848300174991139e-05, |
| "loss": 3.5123, |
| "step": 1314304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.847461580240087e-05, |
| "loss": 3.5177, |
| "step": 1314816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.846622985489035e-05, |
| "loss": 3.514, |
| "step": 1315328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8457860286183564e-05, |
| "loss": 3.5166, |
| "step": 1315840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8449474338673044e-05, |
| "loss": 3.501, |
| "step": 1316352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8441088391162524e-05, |
| "loss": 3.5104, |
| "step": 1316864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8432702443652004e-05, |
| "loss": 3.501, |
| "step": 1317376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8424332874945213e-05, |
| "loss": 3.5074, |
| "step": 1317888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8415946927434696e-05, |
| "loss": 3.5018, |
| "step": 1318400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8407560979924176e-05, |
| "loss": 3.515, |
| "step": 1318912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8399175032413656e-05, |
| "loss": 3.5055, |
| "step": 1319424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8390805463706865e-05, |
| "loss": 3.5162, |
| "step": 1319936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8382419516196345e-05, |
| "loss": 3.5118, |
| "step": 1320448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8374033568685825e-05, |
| "loss": 3.5165, |
| "step": 1320960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8365647621175305e-05, |
| "loss": 3.5076, |
| "step": 1321472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8357278052468518e-05, |
| "loss": 3.4979, |
| "step": 1321984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8348892104957998e-05, |
| "loss": 3.5057, |
| "step": 1322496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8340506157447478e-05, |
| "loss": 3.5181, |
| "step": 1323008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8332120209936957e-05, |
| "loss": 3.5092, |
| "step": 1323520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8323750641230167e-05, |
| "loss": 3.5046, |
| "step": 1324032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8315364693719647e-05, |
| "loss": 3.4973, |
| "step": 1324544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.830697874620913e-05, |
| "loss": 3.5028, |
| "step": 1325056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.829859279869861e-05, |
| "loss": 3.4887, |
| "step": 1325568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.829022322999182e-05, |
| "loss": 3.5202, |
| "step": 1326080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.82818372824813e-05, |
| "loss": 3.4946, |
| "step": 1326592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.827345133497078e-05, |
| "loss": 3.5104, |
| "step": 1327104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.826506538746026e-05, |
| "loss": 3.5118, |
| "step": 1327616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.825669581875347e-05, |
| "loss": 3.4936, |
| "step": 1328128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.824830987124295e-05, |
| "loss": 3.5134, |
| "step": 1328640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.823992392373243e-05, |
| "loss": 3.4981, |
| "step": 1329152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.823153797622191e-05, |
| "loss": 3.5058, |
| "step": 1329664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.822316840751512e-05, |
| "loss": 3.4925, |
| "step": 1330176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.82147824600046e-05, |
| "loss": 3.5134, |
| "step": 1330688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8206396512494084e-05, |
| "loss": 3.4975, |
| "step": 1331200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8198010564983563e-05, |
| "loss": 3.5052, |
| "step": 1331712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8189640996276773e-05, |
| "loss": 3.486, |
| "step": 1332224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8181255048766253e-05, |
| "loss": 3.4945, |
| "step": 1332736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8172869101255732e-05, |
| "loss": 3.507, |
| "step": 1333248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8164483153745212e-05, |
| "loss": 3.5097, |
| "step": 1333760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8156113585038425e-05, |
| "loss": 3.5097, |
| "step": 1334272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8147727637527905e-05, |
| "loss": 3.5097, |
| "step": 1334784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8139341690017385e-05, |
| "loss": 3.5158, |
| "step": 1335296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8130972121310594e-05, |
| "loss": 3.5054, |
| "step": 1335808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8122586173800074e-05, |
| "loss": 3.5176, |
| "step": 1336320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8114200226289554e-05, |
| "loss": 3.5064, |
| "step": 1336832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8105814278779037e-05, |
| "loss": 3.5013, |
| "step": 1337344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8097444710072246e-05, |
| "loss": 3.5026, |
| "step": 1337856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8089058762561726e-05, |
| "loss": 3.514, |
| "step": 1338368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8080672815051206e-05, |
| "loss": 3.5096, |
| "step": 1338880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8072286867540686e-05, |
| "loss": 3.5126, |
| "step": 1339392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.80639172988339e-05, |
| "loss": 3.504, |
| "step": 1339904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.805553135132338e-05, |
| "loss": 3.4821, |
| "step": 1340416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.804714540381286e-05, |
| "loss": 3.4973, |
| "step": 1340928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.803875945630234e-05, |
| "loss": 3.5037, |
| "step": 1341440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8030389887595548e-05, |
| "loss": 3.5002, |
| "step": 1341952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8022003940085028e-05, |
| "loss": 3.4983, |
| "step": 1342464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.8013617992574508e-05, |
| "loss": 3.5093, |
| "step": 1342976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.800523204506399e-05, |
| "loss": 3.4988, |
| "step": 1343488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.79968624763572e-05, |
| "loss": 3.4958, |
| "step": 1344000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.798847652884668e-05, |
| "loss": 3.5035, |
| "step": 1344512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.798009058133616e-05, |
| "loss": 3.5096, |
| "step": 1345024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.797170463382564e-05, |
| "loss": 3.5163, |
| "step": 1345536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7963335065118852e-05, |
| "loss": 3.5184, |
| "step": 1346048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7954949117608332e-05, |
| "loss": 3.4903, |
| "step": 1346560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7946563170097812e-05, |
| "loss": 3.5068, |
| "step": 1347072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7938177222587285e-05, |
| "loss": 3.5038, |
| "step": 1347584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.79298076538805e-05, |
| "loss": 3.5074, |
| "step": 1348096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.792142170636998e-05, |
| "loss": 3.5139, |
| "step": 1348608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.791303575885946e-05, |
| "loss": 3.4959, |
| "step": 1349120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7904649811348938e-05, |
| "loss": 3.5006, |
| "step": 1349632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7896280242642154e-05, |
| "loss": 3.5107, |
| "step": 1350144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7887894295131634e-05, |
| "loss": 3.5131, |
| "step": 1350656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7879508347621114e-05, |
| "loss": 3.4967, |
| "step": 1351168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.787112240011059e-05, |
| "loss": 3.5075, |
| "step": 1351680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7862752831403806e-05, |
| "loss": 3.4964, |
| "step": 1352192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7854366883893286e-05, |
| "loss": 3.499, |
| "step": 1352704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.784598093638276e-05, |
| "loss": 3.4932, |
| "step": 1353216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.783759498887224e-05, |
| "loss": 3.4954, |
| "step": 1353728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7829225420165455e-05, |
| "loss": 3.4941, |
| "step": 1354240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7820839472654935e-05, |
| "loss": 3.5042, |
| "step": 1354752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.781245352514441e-05, |
| "loss": 3.5019, |
| "step": 1355264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.780406757763389e-05, |
| "loss": 3.5015, |
| "step": 1355776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7795698008927107e-05, |
| "loss": 3.5158, |
| "step": 1356288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7787312061416587e-05, |
| "loss": 3.5142, |
| "step": 1356800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.777892611390606e-05, |
| "loss": 3.5035, |
| "step": 1357312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7770540166395544e-05, |
| "loss": 3.5085, |
| "step": 1357824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.776217059768876e-05, |
| "loss": 3.5023, |
| "step": 1358336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7753784650178233e-05, |
| "loss": 3.4978, |
| "step": 1358848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7745398702667713e-05, |
| "loss": 3.4925, |
| "step": 1359360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7737012755157193e-05, |
| "loss": 3.5006, |
| "step": 1359872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.772864318645041e-05, |
| "loss": 3.5045, |
| "step": 1360384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7720257238939885e-05, |
| "loss": 3.5067, |
| "step": 1360896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7711871291429365e-05, |
| "loss": 3.5033, |
| "step": 1361408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7703485343918845e-05, |
| "loss": 3.5045, |
| "step": 1361920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.769511577521206e-05, |
| "loss": 3.5034, |
| "step": 1362432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7686729827701534e-05, |
| "loss": 3.5026, |
| "step": 1362944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7678343880191014e-05, |
| "loss": 3.4989, |
| "step": 1363456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.766997431148423e-05, |
| "loss": 3.5148, |
| "step": 1363968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7661588363973707e-05, |
| "loss": 3.5125, |
| "step": 1364480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7653202416463186e-05, |
| "loss": 3.505, |
| "step": 1364992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7644816468952666e-05, |
| "loss": 3.5045, |
| "step": 1365504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7636446900245882e-05, |
| "loss": 3.5083, |
| "step": 1366016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.762806095273536e-05, |
| "loss": 3.504, |
| "step": 1366528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.761967500522484e-05, |
| "loss": 3.5118, |
| "step": 1367040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.761128905771432e-05, |
| "loss": 3.5003, |
| "step": 1367552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7602919489007528e-05, |
| "loss": 3.5049, |
| "step": 1368064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7594533541497008e-05, |
| "loss": 3.5049, |
| "step": 1368576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7586147593986488e-05, |
| "loss": 3.5034, |
| "step": 1369088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7577761646475968e-05, |
| "loss": 3.499, |
| "step": 1369600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.756939207776918e-05, |
| "loss": 3.5054, |
| "step": 1370112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.756100613025866e-05, |
| "loss": 3.5041, |
| "step": 1370624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.755262018274814e-05, |
| "loss": 3.4934, |
| "step": 1371136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.754423423523762e-05, |
| "loss": 3.5134, |
| "step": 1371648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.753586466653083e-05, |
| "loss": 3.5021, |
| "step": 1372160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7527478719020313e-05, |
| "loss": 3.5071, |
| "step": 1372672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7519092771509793e-05, |
| "loss": 3.5046, |
| "step": 1373184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.7510706823999272e-05, |
| "loss": 3.4926, |
| "step": 1373696 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8426108360290527, |
| "eval_runtime": 316.1505, |
| "eval_samples_per_second": 1206.992, |
| "eval_steps_per_second": 37.719, |
| "step": 1373750 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7502320876488752e-05, |
| "loss": 3.4947, |
| "step": 1374208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7493934928978232e-05, |
| "loss": 3.4935, |
| "step": 1374720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7485548981467712e-05, |
| "loss": 3.5124, |
| "step": 1375232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7477163033957192e-05, |
| "loss": 3.4976, |
| "step": 1375744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7468793465250405e-05, |
| "loss": 3.5104, |
| "step": 1376256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7460407517739885e-05, |
| "loss": 3.4978, |
| "step": 1376768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7452021570229365e-05, |
| "loss": 3.5017, |
| "step": 1377280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7443652001522574e-05, |
| "loss": 3.4854, |
| "step": 1377792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7435266054012054e-05, |
| "loss": 3.5027, |
| "step": 1378304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7426880106501534e-05, |
| "loss": 3.4989, |
| "step": 1378816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7418494158991014e-05, |
| "loss": 3.5158, |
| "step": 1379328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7410124590284226e-05, |
| "loss": 3.5038, |
| "step": 1379840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7401755021577435e-05, |
| "loss": 3.4909, |
| "step": 1380352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7393369074066915e-05, |
| "loss": 3.4914, |
| "step": 1380864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7384983126556395e-05, |
| "loss": 3.5008, |
| "step": 1381376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7376597179045875e-05, |
| "loss": 3.4817, |
| "step": 1381888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7368211231535355e-05, |
| "loss": 3.4991, |
| "step": 1382400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.735982528402484e-05, |
| "loss": 3.5, |
| "step": 1382912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.735143933651432e-05, |
| "loss": 3.4933, |
| "step": 1383424 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7343069767807527e-05, |
| "loss": 3.5202, |
| "step": 1383936 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7334683820297007e-05, |
| "loss": 3.4983, |
| "step": 1384448 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7326297872786487e-05, |
| "loss": 3.4997, |
| "step": 1384960 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.7317911925275967e-05, |
| "loss": 3.4939, |
| "step": 1385472 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.730954235656918e-05, |
| "loss": 3.5125, |
| "step": 1385984 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.730115640905866e-05, |
| "loss": 3.4857, |
| "step": 1386496 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.729278684035187e-05, |
| "loss": 3.5014, |
| "step": 1387008 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.728440089284135e-05, |
| "loss": 3.4912, |
| "step": 1387520 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.727601494533083e-05, |
| "loss": 3.4963, |
| "step": 1388032 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.726762899782031e-05, |
| "loss": 3.4962, |
| "step": 1388544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7259243050309792e-05, |
| "loss": 3.4951, |
| "step": 1389056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7250857102799272e-05, |
| "loss": 3.5, |
| "step": 1389568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7242471155288752e-05, |
| "loss": 3.4985, |
| "step": 1390080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7234085207778232e-05, |
| "loss": 3.4938, |
| "step": 1390592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.722571563907144e-05, |
| "loss": 3.5089, |
| "step": 1391104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.721732969156092e-05, |
| "loss": 3.4973, |
| "step": 1391616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.72089437440504e-05, |
| "loss": 3.5022, |
| "step": 1392128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7200557796539884e-05, |
| "loss": 3.4903, |
| "step": 1392640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7192188227833093e-05, |
| "loss": 3.4949, |
| "step": 1393152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7183802280322573e-05, |
| "loss": 3.4865, |
| "step": 1393664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7175416332812053e-05, |
| "loss": 3.4945, |
| "step": 1394176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7167030385301533e-05, |
| "loss": 3.4897, |
| "step": 1394688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7158660816594746e-05, |
| "loss": 3.5042, |
| "step": 1395200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7150274869084226e-05, |
| "loss": 3.4911, |
| "step": 1395712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7141888921573706e-05, |
| "loss": 3.4985, |
| "step": 1396224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7133502974063186e-05, |
| "loss": 3.5041, |
| "step": 1396736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7125133405356395e-05, |
| "loss": 3.4999, |
| "step": 1397248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7116747457845875e-05, |
| "loss": 3.4957, |
| "step": 1397760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7108361510335355e-05, |
| "loss": 3.4832, |
| "step": 1398272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7099975562824838e-05, |
| "loss": 3.4929, |
| "step": 1398784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7091605994118047e-05, |
| "loss": 3.5005, |
| "step": 1399296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7083220046607527e-05, |
| "loss": 3.4959, |
| "step": 1399808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7074834099097007e-05, |
| "loss": 3.4966, |
| "step": 1400320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7066448151586487e-05, |
| "loss": 3.4812, |
| "step": 1400832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.70580785828797e-05, |
| "loss": 3.4923, |
| "step": 1401344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.704969263536918e-05, |
| "loss": 3.4752, |
| "step": 1401856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.704130668785866e-05, |
| "loss": 3.5064, |
| "step": 1402368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.703292074034814e-05, |
| "loss": 3.4801, |
| "step": 1402880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.702455117164135e-05, |
| "loss": 3.5032, |
| "step": 1403392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.701616522413083e-05, |
| "loss": 3.494, |
| "step": 1403904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.7007779276620308e-05, |
| "loss": 3.48, |
| "step": 1404416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6999393329109785e-05, |
| "loss": 3.5036, |
| "step": 1404928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6991023760403e-05, |
| "loss": 3.4882, |
| "step": 1405440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.698263781289248e-05, |
| "loss": 3.4889, |
| "step": 1405952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.697425186538196e-05, |
| "loss": 3.4807, |
| "step": 1406464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6965865917871437e-05, |
| "loss": 3.5014, |
| "step": 1406976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6957496349164653e-05, |
| "loss": 3.4858, |
| "step": 1407488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6949110401654133e-05, |
| "loss": 3.4906, |
| "step": 1408000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6940724454143606e-05, |
| "loss": 3.4737, |
| "step": 1408512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6932338506633086e-05, |
| "loss": 3.4839, |
| "step": 1409024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6923968937926302e-05, |
| "loss": 3.4905, |
| "step": 1409536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6915582990415782e-05, |
| "loss": 3.4973, |
| "step": 1410048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.690719704290526e-05, |
| "loss": 3.4924, |
| "step": 1410560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.689881109539474e-05, |
| "loss": 3.5008, |
| "step": 1411072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6890441526687954e-05, |
| "loss": 3.4982, |
| "step": 1411584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6882055579177434e-05, |
| "loss": 3.4937, |
| "step": 1412096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6873669631666908e-05, |
| "loss": 3.5014, |
| "step": 1412608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6865300062960123e-05, |
| "loss": 3.4971, |
| "step": 1413120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6856914115449607e-05, |
| "loss": 3.488, |
| "step": 1413632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.684852816793908e-05, |
| "loss": 3.4909, |
| "step": 1414144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.684014222042856e-05, |
| "loss": 3.4991, |
| "step": 1414656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6831772651721776e-05, |
| "loss": 3.4994, |
| "step": 1415168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6823386704211256e-05, |
| "loss": 3.5005, |
| "step": 1415680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6815000756700732e-05, |
| "loss": 3.491, |
| "step": 1416192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6806614809190212e-05, |
| "loss": 3.4681, |
| "step": 1416704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6798245240483428e-05, |
| "loss": 3.486, |
| "step": 1417216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6789859292972908e-05, |
| "loss": 3.4923, |
| "step": 1417728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.678147334546238e-05, |
| "loss": 3.4877, |
| "step": 1418240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.677308739795186e-05, |
| "loss": 3.4861, |
| "step": 1418752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.6764717829245077e-05, |
| "loss": 3.4958, |
| "step": 1419264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6756331881734554e-05, |
| "loss": 3.4892, |
| "step": 1419776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6747945934224034e-05, |
| "loss": 3.4792, |
| "step": 1420288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6739559986713514e-05, |
| "loss": 3.489, |
| "step": 1420800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.673119041800673e-05, |
| "loss": 3.5018, |
| "step": 1421312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6722804470496206e-05, |
| "loss": 3.499, |
| "step": 1421824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6714418522985686e-05, |
| "loss": 3.5059, |
| "step": 1422336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6706032575475166e-05, |
| "loss": 3.4822, |
| "step": 1422848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6697663006768382e-05, |
| "loss": 3.4931, |
| "step": 1423360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6689277059257855e-05, |
| "loss": 3.4911, |
| "step": 1423872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6680891111747335e-05, |
| "loss": 3.4939, |
| "step": 1424384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6672505164236815e-05, |
| "loss": 3.4987, |
| "step": 1424896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6664135595530027e-05, |
| "loss": 3.4861, |
| "step": 1425408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6655749648019507e-05, |
| "loss": 3.4886, |
| "step": 1425920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6647363700508987e-05, |
| "loss": 3.497, |
| "step": 1426432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6638977752998467e-05, |
| "loss": 3.5015, |
| "step": 1426944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6630608184291676e-05, |
| "loss": 3.4837, |
| "step": 1427456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.662222223678116e-05, |
| "loss": 3.4903, |
| "step": 1427968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.661383628927064e-05, |
| "loss": 3.4894, |
| "step": 1428480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.660545034176012e-05, |
| "loss": 3.4884, |
| "step": 1428992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.659708077305333e-05, |
| "loss": 3.4779, |
| "step": 1429504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.658869482554281e-05, |
| "loss": 3.4834, |
| "step": 1430016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.658030887803229e-05, |
| "loss": 3.4854, |
| "step": 1430528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.657192293052177e-05, |
| "loss": 3.4906, |
| "step": 1431040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.656355336181498e-05, |
| "loss": 3.4863, |
| "step": 1431552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.655516741430446e-05, |
| "loss": 3.4897, |
| "step": 1432064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.654678146679394e-05, |
| "loss": 3.5016, |
| "step": 1432576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.653839551928342e-05, |
| "loss": 3.5013, |
| "step": 1433088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.653002595057663e-05, |
| "loss": 3.4911, |
| "step": 1433600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6521640003066113e-05, |
| "loss": 3.4978, |
| "step": 1434112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6513254055555593e-05, |
| "loss": 3.4895, |
| "step": 1434624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6504868108045073e-05, |
| "loss": 3.4828, |
| "step": 1435136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6496498539338282e-05, |
| "loss": 3.4836, |
| "step": 1435648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6488112591827762e-05, |
| "loss": 3.4842, |
| "step": 1436160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6479726644317242e-05, |
| "loss": 3.4896, |
| "step": 1436672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6471340696806722e-05, |
| "loss": 3.497, |
| "step": 1437184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6462971128099935e-05, |
| "loss": 3.4892, |
| "step": 1437696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6454585180589415e-05, |
| "loss": 3.4902, |
| "step": 1438208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6446199233078895e-05, |
| "loss": 3.4923, |
| "step": 1438720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6437813285568375e-05, |
| "loss": 3.4885, |
| "step": 1439232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6429443716861584e-05, |
| "loss": 3.4845, |
| "step": 1439744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6421057769351064e-05, |
| "loss": 3.5015, |
| "step": 1440256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6412671821840547e-05, |
| "loss": 3.5012, |
| "step": 1440768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6404285874330027e-05, |
| "loss": 3.4948, |
| "step": 1441280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6395916305623236e-05, |
| "loss": 3.4889, |
| "step": 1441792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6387530358112716e-05, |
| "loss": 3.4918, |
| "step": 1442304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6379144410602196e-05, |
| "loss": 3.4916, |
| "step": 1442816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6370758463091676e-05, |
| "loss": 3.507, |
| "step": 1443328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.636238889438489e-05, |
| "loss": 3.4895, |
| "step": 1443840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6354002946874368e-05, |
| "loss": 3.4902, |
| "step": 1444352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6345616999363848e-05, |
| "loss": 3.4897, |
| "step": 1444864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6337231051853328e-05, |
| "loss": 3.4913, |
| "step": 1445376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6328861483146537e-05, |
| "loss": 3.4868, |
| "step": 1445888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6320475535636017e-05, |
| "loss": 3.4921, |
| "step": 1446400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.63120895881255e-05, |
| "loss": 3.4947, |
| "step": 1446912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.630370364061498e-05, |
| "loss": 3.4787, |
| "step": 1447424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.629533407190819e-05, |
| "loss": 3.4969, |
| "step": 1447936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.628694812439767e-05, |
| "loss": 3.493, |
| "step": 1448448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.627856217688715e-05, |
| "loss": 3.4912, |
| "step": 1448960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.627017622937663e-05, |
| "loss": 3.4903, |
| "step": 1449472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6261806660669842e-05, |
| "loss": 3.4816, |
| "step": 1449984 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.84440279006958, |
| "eval_runtime": 306.5123, |
| "eval_samples_per_second": 1244.945, |
| "eval_steps_per_second": 38.905, |
| "step": 1450070 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6253420713159322e-05, |
| "loss": 3.4878, |
| "step": 1450496 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6245034765648802e-05, |
| "loss": 3.4813, |
| "step": 1451008 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6236648818138282e-05, |
| "loss": 3.5007, |
| "step": 1451520 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.622827924943149e-05, |
| "loss": 3.485, |
| "step": 1452032 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.621989330192097e-05, |
| "loss": 3.5005, |
| "step": 1452544 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6211507354410454e-05, |
| "loss": 3.4818, |
| "step": 1453056 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6203137785703663e-05, |
| "loss": 3.4917, |
| "step": 1453568 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6194751838193143e-05, |
| "loss": 3.4752, |
| "step": 1454080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6186365890682623e-05, |
| "loss": 3.492, |
| "step": 1454592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6177979943172103e-05, |
| "loss": 3.4825, |
| "step": 1455104 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6169610374465316e-05, |
| "loss": 3.5021, |
| "step": 1455616 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6161224426954796e-05, |
| "loss": 3.4862, |
| "step": 1456128 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6152854858248005e-05, |
| "loss": 3.4765, |
| "step": 1456640 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6144468910737485e-05, |
| "loss": 3.4788, |
| "step": 1457152 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6136082963226965e-05, |
| "loss": 3.4894, |
| "step": 1457664 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6127697015716445e-05, |
| "loss": 3.4739, |
| "step": 1458176 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6119311068205925e-05, |
| "loss": 3.4836, |
| "step": 1458688 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6110925120695408e-05, |
| "loss": 3.4852, |
| "step": 1459200 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6102539173184888e-05, |
| "loss": 3.481, |
| "step": 1459712 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6094169604478097e-05, |
| "loss": 3.5067, |
| "step": 1460224 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6085783656967577e-05, |
| "loss": 3.4884, |
| "step": 1460736 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6077397709457057e-05, |
| "loss": 3.4848, |
| "step": 1461248 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6069011761946537e-05, |
| "loss": 3.4828, |
| "step": 1461760 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.606064219323975e-05, |
| "loss": 3.4984, |
| "step": 1462272 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.605225624572923e-05, |
| "loss": 3.4771, |
| "step": 1462784 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.604388667702244e-05, |
| "loss": 3.4839, |
| "step": 1463296 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.603550072951192e-05, |
| "loss": 3.4819, |
| "step": 1463808 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6027114782001398e-05, |
| "loss": 3.4844, |
| "step": 1464320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.6018728834490878e-05, |
| "loss": 3.4824, |
| "step": 1464832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.601034288698036e-05, |
| "loss": 3.4798, |
| "step": 1465344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.600195693946984e-05, |
| "loss": 3.4904, |
| "step": 1465856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.599357099195932e-05, |
| "loss": 3.4852, |
| "step": 1466368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.59851850444488e-05, |
| "loss": 3.4855, |
| "step": 1466880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.597681547574201e-05, |
| "loss": 3.4948, |
| "step": 1467392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.596842952823149e-05, |
| "loss": 3.4834, |
| "step": 1467904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.596004358072097e-05, |
| "loss": 3.4909, |
| "step": 1468416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5951657633210454e-05, |
| "loss": 3.4739, |
| "step": 1468928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5943288064503663e-05, |
| "loss": 3.4861, |
| "step": 1469440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5934902116993143e-05, |
| "loss": 3.4746, |
| "step": 1469952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5926516169482623e-05, |
| "loss": 3.481, |
| "step": 1470464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5918130221972103e-05, |
| "loss": 3.477, |
| "step": 1470976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5909760653265315e-05, |
| "loss": 3.492, |
| "step": 1471488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5901374705754795e-05, |
| "loss": 3.4781, |
| "step": 1472000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5892988758244275e-05, |
| "loss": 3.4833, |
| "step": 1472512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5884602810733755e-05, |
| "loss": 3.4954, |
| "step": 1473024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5876233242026964e-05, |
| "loss": 3.4885, |
| "step": 1473536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5867847294516444e-05, |
| "loss": 3.4863, |
| "step": 1474048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5859461347005924e-05, |
| "loss": 3.4688, |
| "step": 1474560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5851075399495407e-05, |
| "loss": 3.4784, |
| "step": 1475072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5842705830788617e-05, |
| "loss": 3.4876, |
| "step": 1475584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5834319883278097e-05, |
| "loss": 3.484, |
| "step": 1476096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5825933935767576e-05, |
| "loss": 3.4823, |
| "step": 1476608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5817547988257053e-05, |
| "loss": 3.4737, |
| "step": 1477120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.580917841955027e-05, |
| "loss": 3.479, |
| "step": 1477632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.580079247203975e-05, |
| "loss": 3.4621, |
| "step": 1478144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.579240652452923e-05, |
| "loss": 3.4877, |
| "step": 1478656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5784020577018702e-05, |
| "loss": 3.4718, |
| "step": 1479168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5775651008311918e-05, |
| "loss": 3.4842, |
| "step": 1479680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5767265060801398e-05, |
| "loss": 3.4869, |
| "step": 1480192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5758879113290878e-05, |
| "loss": 3.4694, |
| "step": 1480704 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5750493165780354e-05, |
| "loss": 3.4854, |
| "step": 1481216 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.574212359707357e-05, |
| "loss": 3.4786, |
| "step": 1481728 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.573373764956305e-05, |
| "loss": 3.4779, |
| "step": 1482240 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5725351702052523e-05, |
| "loss": 3.4694, |
| "step": 1482752 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5716965754542007e-05, |
| "loss": 3.4847, |
| "step": 1483264 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.570859618583522e-05, |
| "loss": 3.4745, |
| "step": 1483776 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5700210238324703e-05, |
| "loss": 3.479, |
| "step": 1484288 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5691824290814176e-05, |
| "loss": 3.4632, |
| "step": 1484800 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.568345472210739e-05, |
| "loss": 3.4676, |
| "step": 1485312 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.567506877459687e-05, |
| "loss": 3.4804, |
| "step": 1485824 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5666682827086348e-05, |
| "loss": 3.4875, |
| "step": 1486336 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5658296879575828e-05, |
| "loss": 3.4771, |
| "step": 1486848 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5649927310869044e-05, |
| "loss": 3.4902, |
| "step": 1487360 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5641541363358524e-05, |
| "loss": 3.4902, |
| "step": 1487872 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5633155415847997e-05, |
| "loss": 3.4813, |
| "step": 1488384 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5624769468337477e-05, |
| "loss": 3.4862, |
| "step": 1488896 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5616399899630693e-05, |
| "loss": 3.4841, |
| "step": 1489408 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5608013952120173e-05, |
| "loss": 3.4777, |
| "step": 1489920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.559962800460965e-05, |
| "loss": 3.477, |
| "step": 1490432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.559124205709913e-05, |
| "loss": 3.4865, |
| "step": 1490944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5582872488392345e-05, |
| "loss": 3.4871, |
| "step": 1491456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5574486540881822e-05, |
| "loss": 3.4866, |
| "step": 1491968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5566100593371302e-05, |
| "loss": 3.4788, |
| "step": 1492480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5557714645860782e-05, |
| "loss": 3.458, |
| "step": 1492992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5549345077153998e-05, |
| "loss": 3.4733, |
| "step": 1493504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.554095912964347e-05, |
| "loss": 3.4747, |
| "step": 1494016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.553257318213295e-05, |
| "loss": 3.4806, |
| "step": 1494528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.552418723462243e-05, |
| "loss": 3.4714, |
| "step": 1495040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.5515817665915647e-05, |
| "loss": 3.4818, |
| "step": 1495552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5507431718405123e-05, |
| "loss": 3.4768, |
| "step": 1496064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5499045770894603e-05, |
| "loss": 3.4673, |
| "step": 1496576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5490659823384083e-05, |
| "loss": 3.4791, |
| "step": 1497088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5482290254677292e-05, |
| "loss": 3.4859, |
| "step": 1497600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5473904307166772e-05, |
| "loss": 3.4838, |
| "step": 1498112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5465518359656255e-05, |
| "loss": 3.4934, |
| "step": 1498624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5457132412145735e-05, |
| "loss": 3.4738, |
| "step": 1499136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5448762843438945e-05, |
| "loss": 3.4781, |
| "step": 1499648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5440376895928424e-05, |
| "loss": 3.4809, |
| "step": 1500160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5431990948417904e-05, |
| "loss": 3.482, |
| "step": 1500672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5423605000907384e-05, |
| "loss": 3.4841, |
| "step": 1501184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5415235432200597e-05, |
| "loss": 3.4756, |
| "step": 1501696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5406849484690077e-05, |
| "loss": 3.4753, |
| "step": 1502208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5398463537179557e-05, |
| "loss": 3.483, |
| "step": 1502720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5390077589669037e-05, |
| "loss": 3.4876, |
| "step": 1503232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5381708020962246e-05, |
| "loss": 3.4711, |
| "step": 1503744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5373322073451726e-05, |
| "loss": 3.4811, |
| "step": 1504256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.536493612594121e-05, |
| "loss": 3.4771, |
| "step": 1504768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.535655017843069e-05, |
| "loss": 3.4757, |
| "step": 1505280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5348196988527634e-05, |
| "loss": 3.4659, |
| "step": 1505792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5339811041017114e-05, |
| "loss": 3.4739, |
| "step": 1506304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5331425093506594e-05, |
| "loss": 3.4753, |
| "step": 1506816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.532303914599607e-05, |
| "loss": 3.4758, |
| "step": 1507328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.531465319848555e-05, |
| "loss": 3.4769, |
| "step": 1507840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.530626725097503e-05, |
| "loss": 3.4747, |
| "step": 1508352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.529788130346451e-05, |
| "loss": 3.4889, |
| "step": 1508864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.528949535595399e-05, |
| "loss": 3.4893, |
| "step": 1509376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.52811257872472e-05, |
| "loss": 3.4784, |
| "step": 1509888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.527273983973668e-05, |
| "loss": 3.4844, |
| "step": 1510400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5264353892226163e-05, |
| "loss": 3.4779, |
| "step": 1510912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5255967944715643e-05, |
| "loss": 3.4739, |
| "step": 1511424 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5247598376008852e-05, |
| "loss": 3.4693, |
| "step": 1511936 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5239212428498332e-05, |
| "loss": 3.4721, |
| "step": 1512448 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5230826480987812e-05, |
| "loss": 3.4769, |
| "step": 1512960 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5222440533477292e-05, |
| "loss": 3.4849, |
| "step": 1513472 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5214070964770504e-05, |
| "loss": 3.4779, |
| "step": 1513984 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5205685017259984e-05, |
| "loss": 3.4826, |
| "step": 1514496 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5197299069749464e-05, |
| "loss": 3.4758, |
| "step": 1515008 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5188913122238944e-05, |
| "loss": 3.4747, |
| "step": 1515520 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5180543553532153e-05, |
| "loss": 3.4738, |
| "step": 1516032 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5172157606021633e-05, |
| "loss": 3.4895, |
| "step": 1516544 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5163771658511116e-05, |
| "loss": 3.4924, |
| "step": 1517056 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5155385711000596e-05, |
| "loss": 3.4785, |
| "step": 1517568 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5147016142293806e-05, |
| "loss": 3.4762, |
| "step": 1518080 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5138630194783285e-05, |
| "loss": 3.4812, |
| "step": 1518592 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5130244247272765e-05, |
| "loss": 3.4773, |
| "step": 1519104 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5121858299762245e-05, |
| "loss": 3.4924, |
| "step": 1519616 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5113488731055458e-05, |
| "loss": 3.4748, |
| "step": 1520128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5105102783544938e-05, |
| "loss": 3.4815, |
| "step": 1520640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5096716836034418e-05, |
| "loss": 3.477, |
| "step": 1521152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5088330888523898e-05, |
| "loss": 3.4822, |
| "step": 1521664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5079961319817107e-05, |
| "loss": 3.4734, |
| "step": 1522176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.5071575372306587e-05, |
| "loss": 3.4758, |
| "step": 1522688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.506318942479607e-05, |
| "loss": 3.4812, |
| "step": 1523200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.505481985608928e-05, |
| "loss": 3.4703, |
| "step": 1523712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.504643390857876e-05, |
| "loss": 3.4818, |
| "step": 1524224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.503804796106824e-05, |
| "loss": 3.486, |
| "step": 1524736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.502966201355772e-05, |
| "loss": 3.4796, |
| "step": 1525248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.502129244485093e-05, |
| "loss": 3.477, |
| "step": 1525760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.501290649734041e-05, |
| "loss": 3.4687, |
| "step": 1526272 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8461496829986572, |
| "eval_runtime": 306.6904, |
| "eval_samples_per_second": 1244.222, |
| "eval_steps_per_second": 38.883, |
| "step": 1526390 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.500452054982989e-05, |
| "loss": 3.4675, |
| "step": 1526784 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.499613460231937e-05, |
| "loss": 3.4662, |
| "step": 1527296 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.498774865480885e-05, |
| "loss": 3.4853, |
| "step": 1527808 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.497936270729833e-05, |
| "loss": 3.4758, |
| "step": 1528320 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.497099313859154e-05, |
| "loss": 3.4905, |
| "step": 1528832 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4962607191081024e-05, |
| "loss": 3.4693, |
| "step": 1529344 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4954221243570504e-05, |
| "loss": 3.4809, |
| "step": 1529856 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4945835296059984e-05, |
| "loss": 3.4603, |
| "step": 1530368 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4937465727353193e-05, |
| "loss": 3.4786, |
| "step": 1530880 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4929079779842673e-05, |
| "loss": 3.4658, |
| "step": 1531392 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4920693832332153e-05, |
| "loss": 3.4878, |
| "step": 1531904 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4912307884821633e-05, |
| "loss": 3.4787, |
| "step": 1532416 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4903954694918574e-05, |
| "loss": 3.4636, |
| "step": 1532928 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4895568747408054e-05, |
| "loss": 3.4684, |
| "step": 1533440 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4887182799897534e-05, |
| "loss": 3.4779, |
| "step": 1533952 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4878796852387014e-05, |
| "loss": 3.4625, |
| "step": 1534464 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4870410904876494e-05, |
| "loss": 3.4701, |
| "step": 1534976 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4862024957365977e-05, |
| "loss": 3.4741, |
| "step": 1535488 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4853639009855457e-05, |
| "loss": 3.4707, |
| "step": 1536000 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4845253062344934e-05, |
| "loss": 3.4928, |
| "step": 1536512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4836883493638146e-05, |
| "loss": 3.4751, |
| "step": 1537024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4828497546127626e-05, |
| "loss": 3.4713, |
| "step": 1537536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4820111598617106e-05, |
| "loss": 3.4734, |
| "step": 1538048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.481174202991032e-05, |
| "loss": 3.4823, |
| "step": 1538560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.48033560823998e-05, |
| "loss": 3.4679, |
| "step": 1539072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4794986513693008e-05, |
| "loss": 3.4754, |
| "step": 1539584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4786600566182488e-05, |
| "loss": 3.4678, |
| "step": 1540096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4778214618671968e-05, |
| "loss": 3.4691, |
| "step": 1540608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4769828671161448e-05, |
| "loss": 3.4691, |
| "step": 1541120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.4761442723650928e-05, |
| "loss": 3.4694, |
| "step": 1541632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4753056776140408e-05, |
| "loss": 3.4722, |
| "step": 1542144 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4744670828629888e-05, |
| "loss": 3.4771, |
| "step": 1542656 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4736284881119368e-05, |
| "loss": 3.4746, |
| "step": 1543168 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.472791531241258e-05, |
| "loss": 3.4825, |
| "step": 1543680 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4719529364902057e-05, |
| "loss": 3.4655, |
| "step": 1544192 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.471114341739154e-05, |
| "loss": 3.4768, |
| "step": 1544704 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.470275746988102e-05, |
| "loss": 3.4652, |
| "step": 1545216 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.469438790117423e-05, |
| "loss": 3.4748, |
| "step": 1545728 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.468600195366371e-05, |
| "loss": 3.4622, |
| "step": 1546240 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.467761600615319e-05, |
| "loss": 3.4666, |
| "step": 1546752 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.466923005864267e-05, |
| "loss": 3.4651, |
| "step": 1547264 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.466086048993588e-05, |
| "loss": 3.482, |
| "step": 1547776 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.465247454242536e-05, |
| "loss": 3.4678, |
| "step": 1548288 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.464408859491484e-05, |
| "loss": 3.4666, |
| "step": 1548800 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.463570264740432e-05, |
| "loss": 3.4843, |
| "step": 1549312 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.462733307869753e-05, |
| "loss": 3.4751, |
| "step": 1549824 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.461894713118701e-05, |
| "loss": 3.4739, |
| "step": 1550336 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4610561183676494e-05, |
| "loss": 3.4629, |
| "step": 1550848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4602175236165974e-05, |
| "loss": 3.4592, |
| "step": 1551360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4593805667459183e-05, |
| "loss": 3.478, |
| "step": 1551872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4585419719948663e-05, |
| "loss": 3.4722, |
| "step": 1552384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4577033772438143e-05, |
| "loss": 3.4641, |
| "step": 1552896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4568647824927623e-05, |
| "loss": 3.4625, |
| "step": 1553408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4560278256220835e-05, |
| "loss": 3.4683, |
| "step": 1553920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4551892308710315e-05, |
| "loss": 3.4513, |
| "step": 1554432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4543506361199795e-05, |
| "loss": 3.474, |
| "step": 1554944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4535120413689275e-05, |
| "loss": 3.4609, |
| "step": 1555456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4526734466178755e-05, |
| "loss": 3.4691, |
| "step": 1555968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4518348518668235e-05, |
| "loss": 3.4747, |
| "step": 1556480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.450996257115771e-05, |
| "loss": 3.4597, |
| "step": 1556992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4501576623647195e-05, |
| "loss": 3.4706, |
| "step": 1557504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4493207054940407e-05, |
| "loss": 3.4664, |
| "step": 1558016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4484821107429884e-05, |
| "loss": 3.4613, |
| "step": 1558528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4476435159919364e-05, |
| "loss": 3.4634, |
| "step": 1559040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4468049212408844e-05, |
| "loss": 3.4684, |
| "step": 1559552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4459679643702056e-05, |
| "loss": 3.4633, |
| "step": 1560064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4451293696191536e-05, |
| "loss": 3.4662, |
| "step": 1560576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4442907748681016e-05, |
| "loss": 3.4559, |
| "step": 1561088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4434521801170496e-05, |
| "loss": 3.4555, |
| "step": 1561600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.442615223246371e-05, |
| "loss": 3.4659, |
| "step": 1562112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4417766284953185e-05, |
| "loss": 3.4774, |
| "step": 1562624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4409380337442665e-05, |
| "loss": 3.4655, |
| "step": 1563136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.440099438993215e-05, |
| "loss": 3.4773, |
| "step": 1563648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4392624821225358e-05, |
| "loss": 3.4725, |
| "step": 1564160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4384238873714837e-05, |
| "loss": 3.4698, |
| "step": 1564672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.4375852926204317e-05, |
| "loss": 3.4721, |
| "step": 1565184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.436748335749753e-05, |
| "loss": 3.4779, |
| "step": 1565696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.435909740998701e-05, |
| "loss": 3.4658, |
| "step": 1566208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.435071146247649e-05, |
| "loss": 3.4656, |
| "step": 1566720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.434232551496597e-05, |
| "loss": 3.4755, |
| "step": 1567232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.433395594625918e-05, |
| "loss": 3.4736, |
| "step": 1567744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.432556999874866e-05, |
| "loss": 3.4722, |
| "step": 1568256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.431718405123814e-05, |
| "loss": 3.4698, |
| "step": 1568768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.430879810372762e-05, |
| "loss": 3.4502, |
| "step": 1569280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.430042853502083e-05, |
| "loss": 3.4567, |
| "step": 1569792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.429204258751031e-05, |
| "loss": 3.4692, |
| "step": 1570304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.428365663999979e-05, |
| "loss": 3.4671, |
| "step": 1570816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.427527069248927e-05, |
| "loss": 3.4627, |
| "step": 1571328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.426690112378248e-05, |
| "loss": 3.4685, |
| "step": 1571840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4258515176271964e-05, |
| "loss": 3.4646, |
| "step": 1572352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4250129228761444e-05, |
| "loss": 3.4575, |
| "step": 1572864 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4241743281250923e-05, |
| "loss": 3.4639, |
| "step": 1573376 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4233373712544133e-05, |
| "loss": 3.4693, |
| "step": 1573888 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4224987765033613e-05, |
| "loss": 3.4781, |
| "step": 1574400 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4216601817523092e-05, |
| "loss": 3.475, |
| "step": 1574912 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4208215870012572e-05, |
| "loss": 3.4702, |
| "step": 1575424 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4199846301305785e-05, |
| "loss": 3.4573, |
| "step": 1575936 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4191460353795265e-05, |
| "loss": 3.4735, |
| "step": 1576448 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4183074406284745e-05, |
| "loss": 3.4661, |
| "step": 1576960 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4174688458774225e-05, |
| "loss": 3.4738, |
| "step": 1577472 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4166318890067434e-05, |
| "loss": 3.4665, |
| "step": 1577984 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4157932942556917e-05, |
| "loss": 3.4578, |
| "step": 1578496 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4149546995046397e-05, |
| "loss": 3.4722, |
| "step": 1579008 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4141161047535877e-05, |
| "loss": 3.476, |
| "step": 1579520 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4132791478829086e-05, |
| "loss": 3.4642, |
| "step": 1580032 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4124405531318566e-05, |
| "loss": 3.4652, |
| "step": 1580544 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4116019583808046e-05, |
| "loss": 3.466, |
| "step": 1581056 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4107633636297526e-05, |
| "loss": 3.4593, |
| "step": 1581568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.409928044639447e-05, |
| "loss": 3.4535, |
| "step": 1582080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.409089449888395e-05, |
| "loss": 3.4657, |
| "step": 1582592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4082508551373428e-05, |
| "loss": 3.4621, |
| "step": 1583104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4074122603862908e-05, |
| "loss": 3.4616, |
| "step": 1583616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4065736656352388e-05, |
| "loss": 3.4632, |
| "step": 1584128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.405735070884187e-05, |
| "loss": 3.4631, |
| "step": 1584640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.404896476133135e-05, |
| "loss": 3.4789, |
| "step": 1585152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.404057881382083e-05, |
| "loss": 3.4813, |
| "step": 1585664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.403220924511404e-05, |
| "loss": 3.4676, |
| "step": 1586176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.402382329760352e-05, |
| "loss": 3.4725, |
| "step": 1586688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4015437350093e-05, |
| "loss": 3.4628, |
| "step": 1587200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.400705140258248e-05, |
| "loss": 3.4603, |
| "step": 1587712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3998681833875692e-05, |
| "loss": 3.46, |
| "step": 1588224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3990295886365172e-05, |
| "loss": 3.4615, |
| "step": 1588736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3981909938854652e-05, |
| "loss": 3.4657, |
| "step": 1589248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3973523991344132e-05, |
| "loss": 3.47, |
| "step": 1589760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.396515442263734e-05, |
| "loss": 3.4668, |
| "step": 1590272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3956768475126825e-05, |
| "loss": 3.4701, |
| "step": 1590784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3948382527616305e-05, |
| "loss": 3.4639, |
| "step": 1591296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.393999658010578e-05, |
| "loss": 3.463, |
| "step": 1591808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3931627011398994e-05, |
| "loss": 3.4626, |
| "step": 1592320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3923241063888474e-05, |
| "loss": 3.4771, |
| "step": 1592832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3914855116377953e-05, |
| "loss": 3.4779, |
| "step": 1593344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3906469168867433e-05, |
| "loss": 3.4707, |
| "step": 1593856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3898099600160646e-05, |
| "loss": 3.4636, |
| "step": 1594368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3889713652650126e-05, |
| "loss": 3.4707, |
| "step": 1594880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3881327705139606e-05, |
| "loss": 3.4643, |
| "step": 1595392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3872941757629082e-05, |
| "loss": 3.478, |
| "step": 1595904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3864572188922295e-05, |
| "loss": 3.465, |
| "step": 1596416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3856186241411775e-05, |
| "loss": 3.4678, |
| "step": 1596928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3847800293901255e-05, |
| "loss": 3.468, |
| "step": 1597440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3839414346390735e-05, |
| "loss": 3.4736, |
| "step": 1597952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3831044777683947e-05, |
| "loss": 3.4602, |
| "step": 1598464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3822658830173427e-05, |
| "loss": 3.4614, |
| "step": 1598976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3814272882662904e-05, |
| "loss": 3.4698, |
| "step": 1599488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.380590331395612e-05, |
| "loss": 3.46, |
| "step": 1600000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.37975173664456e-05, |
| "loss": 3.4683, |
| "step": 1600512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.378913141893508e-05, |
| "loss": 3.4749, |
| "step": 1601024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3780745471424556e-05, |
| "loss": 3.4668, |
| "step": 1601536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.377237590271777e-05, |
| "loss": 3.461, |
| "step": 1602048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.376398995520725e-05, |
| "loss": 3.4613, |
| "step": 1602560 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8465778827667236, |
| "eval_runtime": 303.3778, |
| "eval_samples_per_second": 1257.808, |
| "eval_steps_per_second": 39.307, |
| "step": 1602710 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.375560400769673e-05, |
| "loss": 3.4643, |
| "step": 1603072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.374721806018621e-05, |
| "loss": 3.4564, |
| "step": 1603584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.373884849147942e-05, |
| "loss": 3.4725, |
| "step": 1604096 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.37304625439689e-05, |
| "loss": 3.4672, |
| "step": 1604608 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3722076596458377e-05, |
| "loss": 3.4747, |
| "step": 1605120 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3713690648947857e-05, |
| "loss": 3.4625, |
| "step": 1605632 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3705321080241073e-05, |
| "loss": 3.4668, |
| "step": 1606144 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.369693513273055e-05, |
| "loss": 3.4501, |
| "step": 1606656 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.368854918522003e-05, |
| "loss": 3.4644, |
| "step": 1607168 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.368016323770951e-05, |
| "loss": 3.4585, |
| "step": 1607680 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3671793669002722e-05, |
| "loss": 3.4701, |
| "step": 1608192 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3663407721492202e-05, |
| "loss": 3.4675, |
| "step": 1608704 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3655038152785415e-05, |
| "loss": 3.451, |
| "step": 1609216 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3646652205274895e-05, |
| "loss": 3.4578, |
| "step": 1609728 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3638266257764375e-05, |
| "loss": 3.4655, |
| "step": 1610240 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.362988031025385e-05, |
| "loss": 3.4517, |
| "step": 1610752 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.362149436274333e-05, |
| "loss": 3.4583, |
| "step": 1611264 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.361310841523281e-05, |
| "loss": 3.4613, |
| "step": 1611776 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3604722467722294e-05, |
| "loss": 3.4601, |
| "step": 1612288 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3596336520211774e-05, |
| "loss": 3.4771, |
| "step": 1612800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3587966951504983e-05, |
| "loss": 3.4634, |
| "step": 1613312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3579581003994463e-05, |
| "loss": 3.4616, |
| "step": 1613824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3571195056483943e-05, |
| "loss": 3.4621, |
| "step": 1614336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3562809108973423e-05, |
| "loss": 3.4642, |
| "step": 1614848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3554439540266636e-05, |
| "loss": 3.4575, |
| "step": 1615360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3546053592756116e-05, |
| "loss": 3.4654, |
| "step": 1615872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3537667645245596e-05, |
| "loss": 3.4579, |
| "step": 1616384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3529298076538805e-05, |
| "loss": 3.4565, |
| "step": 1616896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3520912129028285e-05, |
| "loss": 3.4563, |
| "step": 1617408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.3512526181517765e-05, |
| "loss": 3.4617, |
| "step": 1617920 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3504140234007248e-05, |
| "loss": 3.459, |
| "step": 1618432 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3495770665300457e-05, |
| "loss": 3.465, |
| "step": 1618944 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3487384717789937e-05, |
| "loss": 3.4616, |
| "step": 1619456 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3478998770279417e-05, |
| "loss": 3.4692, |
| "step": 1619968 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3470612822768897e-05, |
| "loss": 3.456, |
| "step": 1620480 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.346224325406211e-05, |
| "loss": 3.4648, |
| "step": 1620992 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.345385730655159e-05, |
| "loss": 3.457, |
| "step": 1621504 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.344547135904107e-05, |
| "loss": 3.4604, |
| "step": 1622016 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.343708541153055e-05, |
| "loss": 3.4482, |
| "step": 1622528 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.342871584282376e-05, |
| "loss": 3.4537, |
| "step": 1623040 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.342032989531324e-05, |
| "loss": 3.4556, |
| "step": 1623552 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.341194394780272e-05, |
| "loss": 3.4664, |
| "step": 1624064 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.34035580002922e-05, |
| "loss": 3.455, |
| "step": 1624576 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.339518843158541e-05, |
| "loss": 3.4581, |
| "step": 1625088 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.338680248407489e-05, |
| "loss": 3.4751, |
| "step": 1625600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.337841653656437e-05, |
| "loss": 3.4612, |
| "step": 1626112 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.337003058905385e-05, |
| "loss": 3.4621, |
| "step": 1626624 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.336166102034706e-05, |
| "loss": 3.4526, |
| "step": 1627136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3353275072836543e-05, |
| "loss": 3.4406, |
| "step": 1627648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3344889125326023e-05, |
| "loss": 3.4674, |
| "step": 1628160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3336503177815503e-05, |
| "loss": 3.4601, |
| "step": 1628672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3328133609108712e-05, |
| "loss": 3.4536, |
| "step": 1629184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3319747661598192e-05, |
| "loss": 3.4541, |
| "step": 1629696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3311378092891405e-05, |
| "loss": 3.46, |
| "step": 1630208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3302992145380885e-05, |
| "loss": 3.4363, |
| "step": 1630720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3294606197870365e-05, |
| "loss": 3.4632, |
| "step": 1631232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3286220250359844e-05, |
| "loss": 3.4476, |
| "step": 1631744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3277834302849324e-05, |
| "loss": 3.4573, |
| "step": 1632256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.32694483553388e-05, |
| "loss": 3.462, |
| "step": 1632768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.326106240782828e-05, |
| "loss": 3.4535, |
| "step": 1633280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3252676460317764e-05, |
| "loss": 3.454, |
| "step": 1633792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3244306891610977e-05, |
| "loss": 3.4585, |
| "step": 1634304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3235920944100453e-05, |
| "loss": 3.4459, |
| "step": 1634816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3227534996589933e-05, |
| "loss": 3.4532, |
| "step": 1635328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3219149049079413e-05, |
| "loss": 3.4553, |
| "step": 1635840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3210779480372626e-05, |
| "loss": 3.458, |
| "step": 1636352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3202393532862106e-05, |
| "loss": 3.4511, |
| "step": 1636864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3194007585351586e-05, |
| "loss": 3.4442, |
| "step": 1637376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3185621637841066e-05, |
| "loss": 3.4437, |
| "step": 1637888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3177252069134275e-05, |
| "loss": 3.4553, |
| "step": 1638400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3168866121623755e-05, |
| "loss": 3.4617, |
| "step": 1638912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3160480174113235e-05, |
| "loss": 3.4531, |
| "step": 1639424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3152094226602718e-05, |
| "loss": 3.4683, |
| "step": 1639936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3143724657895927e-05, |
| "loss": 3.4646, |
| "step": 1640448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3135338710385407e-05, |
| "loss": 3.4594, |
| "step": 1640960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3126952762874887e-05, |
| "loss": 3.4566, |
| "step": 1641472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3118566815364367e-05, |
| "loss": 3.464, |
| "step": 1641984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.311019724665758e-05, |
| "loss": 3.4568, |
| "step": 1642496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.310181129914706e-05, |
| "loss": 3.4524, |
| "step": 1643008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.309342535163654e-05, |
| "loss": 3.4636, |
| "step": 1643520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.308503940412602e-05, |
| "loss": 3.4604, |
| "step": 1644032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.307666983541923e-05, |
| "loss": 3.4621, |
| "step": 1644544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.306828388790871e-05, |
| "loss": 3.4563, |
| "step": 1645056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.3059897940398188e-05, |
| "loss": 3.4379, |
| "step": 1645568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.30515283716914e-05, |
| "loss": 3.4435, |
| "step": 1646080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.304314242418088e-05, |
| "loss": 3.4532, |
| "step": 1646592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.303475647667036e-05, |
| "loss": 3.457, |
| "step": 1647104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.302637052915984e-05, |
| "loss": 3.4508, |
| "step": 1647616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.301800096045305e-05, |
| "loss": 3.456, |
| "step": 1648128 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3009615012942533e-05, |
| "loss": 3.4521, |
| "step": 1648640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.3001229065432013e-05, |
| "loss": 3.4479, |
| "step": 1649152 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2992843117921493e-05, |
| "loss": 3.4564, |
| "step": 1649664 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2984473549214702e-05, |
| "loss": 3.4534, |
| "step": 1650176 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2976087601704182e-05, |
| "loss": 3.4662, |
| "step": 1650688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2967701654193662e-05, |
| "loss": 3.4657, |
| "step": 1651200 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2959315706683142e-05, |
| "loss": 3.4605, |
| "step": 1651712 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2950946137976354e-05, |
| "loss": 3.4455, |
| "step": 1652224 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2942560190465834e-05, |
| "loss": 3.4568, |
| "step": 1652736 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2934174242955314e-05, |
| "loss": 3.4557, |
| "step": 1653248 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2925788295444794e-05, |
| "loss": 3.4625, |
| "step": 1653760 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2917418726738003e-05, |
| "loss": 3.4557, |
| "step": 1654272 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2909032779227483e-05, |
| "loss": 3.4462, |
| "step": 1654784 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2900646831716967e-05, |
| "loss": 3.4627, |
| "step": 1655296 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2892260884206447e-05, |
| "loss": 3.4574, |
| "step": 1655808 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2883891315499656e-05, |
| "loss": 3.4553, |
| "step": 1656320 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2875505367989136e-05, |
| "loss": 3.4567, |
| "step": 1656832 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2867119420478616e-05, |
| "loss": 3.4513, |
| "step": 1657344 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2858733472968096e-05, |
| "loss": 3.4488, |
| "step": 1657856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.285038028306504e-05, |
| "loss": 3.4397, |
| "step": 1658368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2841994335554517e-05, |
| "loss": 3.4538, |
| "step": 1658880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2833608388043997e-05, |
| "loss": 3.4557, |
| "step": 1659392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2825222440533477e-05, |
| "loss": 3.4502, |
| "step": 1659904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2816836493022957e-05, |
| "loss": 3.4521, |
| "step": 1660416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2808450545512437e-05, |
| "loss": 3.4475, |
| "step": 1660928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.280006459800192e-05, |
| "loss": 3.4672, |
| "step": 1661440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.27916786504914e-05, |
| "loss": 3.4684, |
| "step": 1661952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.278330908178461e-05, |
| "loss": 3.4586, |
| "step": 1662464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.277492313427409e-05, |
| "loss": 3.4618, |
| "step": 1662976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.276653718676357e-05, |
| "loss": 3.4514, |
| "step": 1663488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.275815123925305e-05, |
| "loss": 3.4499, |
| "step": 1664000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2749781670546262e-05, |
| "loss": 3.4468, |
| "step": 1664512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2741395723035742e-05, |
| "loss": 3.4507, |
| "step": 1665024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.273300977552522e-05, |
| "loss": 3.4561, |
| "step": 1665536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2724623828014698e-05, |
| "loss": 3.4552, |
| "step": 1666048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.271625425930791e-05, |
| "loss": 3.4569, |
| "step": 1666560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.270786831179739e-05, |
| "loss": 3.4542, |
| "step": 1667072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2699482364286874e-05, |
| "loss": 3.4541, |
| "step": 1667584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.269109641677635e-05, |
| "loss": 3.4532, |
| "step": 1668096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2682726848069563e-05, |
| "loss": 3.4545, |
| "step": 1668608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2674340900559043e-05, |
| "loss": 3.4584, |
| "step": 1669120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.266595495304852e-05, |
| "loss": 3.465, |
| "step": 1669632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2657569005538003e-05, |
| "loss": 3.461, |
| "step": 1670144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2649199436831215e-05, |
| "loss": 3.4484, |
| "step": 1670656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2640813489320695e-05, |
| "loss": 3.4627, |
| "step": 1671168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2632427541810172e-05, |
| "loss": 3.4531, |
| "step": 1671680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2624041594299652e-05, |
| "loss": 3.4627, |
| "step": 1672192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2615672025592864e-05, |
| "loss": 3.4544, |
| "step": 1672704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2607286078082344e-05, |
| "loss": 3.457, |
| "step": 1673216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2598900130571824e-05, |
| "loss": 3.453, |
| "step": 1673728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2590514183061304e-05, |
| "loss": 3.4619, |
| "step": 1674240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2582144614354517e-05, |
| "loss": 3.4501, |
| "step": 1674752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2573758666843993e-05, |
| "loss": 3.4529, |
| "step": 1675264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2565372719333473e-05, |
| "loss": 3.4507, |
| "step": 1675776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2556986771822957e-05, |
| "loss": 3.4527, |
| "step": 1676288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.254861720311617e-05, |
| "loss": 3.4569, |
| "step": 1676800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2540231255605646e-05, |
| "loss": 3.4566, |
| "step": 1677312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2531845308095126e-05, |
| "loss": 3.4616, |
| "step": 1677824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2523459360584606e-05, |
| "loss": 3.445, |
| "step": 1678336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.2515089791877818e-05, |
| "loss": 3.4541, |
| "step": 1678848 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.8492212295532227, |
| "eval_runtime": 303.192, |
| "eval_samples_per_second": 1258.579, |
| "eval_steps_per_second": 39.332, |
| "step": 1679030 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2506703844367298e-05, |
| "loss": 3.4472, |
| "step": 1679360 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2498317896856778e-05, |
| "loss": 3.4419, |
| "step": 1679872 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.248994832814999e-05, |
| "loss": 3.4605, |
| "step": 1680384 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2481562380639467e-05, |
| "loss": 3.4555, |
| "step": 1680896 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2473176433128947e-05, |
| "loss": 3.4632, |
| "step": 1681408 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2464790485618427e-05, |
| "loss": 3.4494, |
| "step": 1681920 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2456404538107907e-05, |
| "loss": 3.4506, |
| "step": 1682432 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.244801859059739e-05, |
| "loss": 3.4457, |
| "step": 1682944 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.243963264308687e-05, |
| "loss": 3.4514, |
| "step": 1683456 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.243124669557635e-05, |
| "loss": 3.4485, |
| "step": 1683968 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.242287712686956e-05, |
| "loss": 3.4569, |
| "step": 1684480 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.241449117935904e-05, |
| "loss": 3.4587, |
| "step": 1684992 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2406121610652252e-05, |
| "loss": 3.4402, |
| "step": 1685504 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.239773566314173e-05, |
| "loss": 3.4455, |
| "step": 1686016 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.238934971563121e-05, |
| "loss": 3.4575, |
| "step": 1686528 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.238096376812069e-05, |
| "loss": 3.4375, |
| "step": 1687040 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.237257782061017e-05, |
| "loss": 3.4454, |
| "step": 1687552 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2364191873099648e-05, |
| "loss": 3.451, |
| "step": 1688064 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2355805925589128e-05, |
| "loss": 3.4466, |
| "step": 1688576 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.234741997807861e-05, |
| "loss": 3.4659, |
| "step": 1689088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2339050409371824e-05, |
| "loss": 3.4533, |
| "step": 1689600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.23306644618613e-05, |
| "loss": 3.4528, |
| "step": 1690112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.232227851435078e-05, |
| "loss": 3.4499, |
| "step": 1690624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.231389256684026e-05, |
| "loss": 3.4527, |
| "step": 1691136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2305522998133473e-05, |
| "loss": 3.4481, |
| "step": 1691648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2297137050622953e-05, |
| "loss": 3.4521, |
| "step": 1692160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2288751103112433e-05, |
| "loss": 3.4483, |
| "step": 1692672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2280365155601913e-05, |
| "loss": 3.441, |
| "step": 1693184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2271995586895122e-05, |
| "loss": 3.4477, |
| "step": 1693696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.2263609639384602e-05, |
| "loss": 3.4477, |
| "step": 1694208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2255223691874082e-05, |
| "loss": 3.4517, |
| "step": 1694720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2246854123167298e-05, |
| "loss": 3.4513, |
| "step": 1695232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2238468175656774e-05, |
| "loss": 3.4532, |
| "step": 1695744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2230082228146254e-05, |
| "loss": 3.4593, |
| "step": 1696256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2221696280635734e-05, |
| "loss": 3.4458, |
| "step": 1696768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2213326711928943e-05, |
| "loss": 3.4507, |
| "step": 1697280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2204940764418427e-05, |
| "loss": 3.4491, |
| "step": 1697792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2196554816907906e-05, |
| "loss": 3.4449, |
| "step": 1698304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2188168869397386e-05, |
| "loss": 3.4386, |
| "step": 1698816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2179799300690596e-05, |
| "loss": 3.4438, |
| "step": 1699328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2171413353180075e-05, |
| "loss": 3.4421, |
| "step": 1699840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2163027405669555e-05, |
| "loss": 3.4518, |
| "step": 1700352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2154641458159035e-05, |
| "loss": 3.447, |
| "step": 1700864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2146271889452248e-05, |
| "loss": 3.4441, |
| "step": 1701376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2137885941941728e-05, |
| "loss": 3.4651, |
| "step": 1701888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2129499994431208e-05, |
| "loss": 3.4484, |
| "step": 1702400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2121114046920688e-05, |
| "loss": 3.4486, |
| "step": 1702912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2112744478213897e-05, |
| "loss": 3.4407, |
| "step": 1703424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2104358530703377e-05, |
| "loss": 3.4303, |
| "step": 1703936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.209597258319286e-05, |
| "loss": 3.4582, |
| "step": 1704448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.208758663568234e-05, |
| "loss": 3.4476, |
| "step": 1704960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.207921706697555e-05, |
| "loss": 3.4449, |
| "step": 1705472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.207083111946503e-05, |
| "loss": 3.4425, |
| "step": 1705984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.206246155075824e-05, |
| "loss": 3.4503, |
| "step": 1706496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.205407560324772e-05, |
| "loss": 3.4253, |
| "step": 1707008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.20456896557372e-05, |
| "loss": 3.4441, |
| "step": 1707520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.203730370822668e-05, |
| "loss": 3.4433, |
| "step": 1708032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.202891776071616e-05, |
| "loss": 3.4406, |
| "step": 1708544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.202053181320564e-05, |
| "loss": 3.4546, |
| "step": 1709056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.201214586569512e-05, |
| "loss": 3.4395, |
| "step": 1709568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.2003759918184598e-05, |
| "loss": 3.4422, |
| "step": 1710080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1995390349477814e-05, |
| "loss": 3.4511, |
| "step": 1710592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1987004401967294e-05, |
| "loss": 3.4394, |
| "step": 1711104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1978618454456774e-05, |
| "loss": 3.4385, |
| "step": 1711616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.197023250694625e-05, |
| "loss": 3.444, |
| "step": 1712128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1961862938239463e-05, |
| "loss": 3.4454, |
| "step": 1712640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1953476990728943e-05, |
| "loss": 3.442, |
| "step": 1713152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1945091043218423e-05, |
| "loss": 3.4363, |
| "step": 1713664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1936705095707903e-05, |
| "loss": 3.4262, |
| "step": 1714176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1928335527001115e-05, |
| "loss": 3.4408, |
| "step": 1714688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1919949579490595e-05, |
| "loss": 3.4525, |
| "step": 1715200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.191156363198007e-05, |
| "loss": 3.4414, |
| "step": 1715712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.190317768446955e-05, |
| "loss": 3.4589, |
| "step": 1716224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1894808115762767e-05, |
| "loss": 3.453, |
| "step": 1716736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1886422168252247e-05, |
| "loss": 3.4484, |
| "step": 1717248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1878036220741724e-05, |
| "loss": 3.4402, |
| "step": 1717760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1869650273231204e-05, |
| "loss": 3.4587, |
| "step": 1718272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1861280704524416e-05, |
| "loss": 3.4453, |
| "step": 1718784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1852894757013896e-05, |
| "loss": 3.4386, |
| "step": 1719296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1844508809503376e-05, |
| "loss": 3.4551, |
| "step": 1719808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1836122861992856e-05, |
| "loss": 3.4465, |
| "step": 1720320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.182775329328607e-05, |
| "loss": 3.4516, |
| "step": 1720832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1819367345775545e-05, |
| "loss": 3.4434, |
| "step": 1721344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1810981398265025e-05, |
| "loss": 3.4265, |
| "step": 1721856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1802595450754505e-05, |
| "loss": 3.4315, |
| "step": 1722368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.179422588204772e-05, |
| "loss": 3.4406, |
| "step": 1722880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1785839934537198e-05, |
| "loss": 3.4463, |
| "step": 1723392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.1777453987026678e-05, |
| "loss": 3.4418, |
| "step": 1723904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 2.176908441831989e-05, |
| "loss": 3.4408, |
| "step": 1724416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.17607148496131e-05, |
| "loss": 3.4447, |
| "step": 1724928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1752328902102583e-05, |
| "loss": 3.4322, |
| "step": 1725440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1743942954592063e-05, |
| "loss": 3.441, |
| "step": 1725952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1735557007081542e-05, |
| "loss": 3.4399, |
| "step": 1726464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.172717105957102e-05, |
| "loss": 3.4581, |
| "step": 1726976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.17187851120605e-05, |
| "loss": 3.4581, |
| "step": 1727488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.171039916454998e-05, |
| "loss": 3.4472, |
| "step": 1728000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.170201321703946e-05, |
| "loss": 3.4383, |
| "step": 1728512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.169364364833267e-05, |
| "loss": 3.4478, |
| "step": 1729024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.168525770082215e-05, |
| "loss": 3.4416, |
| "step": 1729536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.167687175331163e-05, |
| "loss": 3.4525, |
| "step": 1730048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.166848580580111e-05, |
| "loss": 3.4405, |
| "step": 1730560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.166011623709432e-05, |
| "loss": 3.44, |
| "step": 1731072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.16517302895838e-05, |
| "loss": 3.4477, |
| "step": 1731584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1643344342073284e-05, |
| "loss": 3.4525, |
| "step": 1732096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1634958394562764e-05, |
| "loss": 3.4409, |
| "step": 1732608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1626588825855973e-05, |
| "loss": 3.4463, |
| "step": 1733120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1618202878345453e-05, |
| "loss": 3.4365, |
| "step": 1733632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1609816930834933e-05, |
| "loss": 3.4407, |
| "step": 1734144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1601447362128145e-05, |
| "loss": 3.4274, |
| "step": 1734656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1593061414617625e-05, |
| "loss": 3.4435, |
| "step": 1735168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1584675467107105e-05, |
| "loss": 3.4422, |
| "step": 1735680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1576289519596585e-05, |
| "loss": 3.4385, |
| "step": 1736192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1567903572086065e-05, |
| "loss": 3.4431, |
| "step": 1736704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1559517624575545e-05, |
| "loss": 3.4364, |
| "step": 1737216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.155113167706502e-05, |
| "loss": 3.4525, |
| "step": 1737728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1542745729554505e-05, |
| "loss": 3.4587, |
| "step": 1738240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1534376160847717e-05, |
| "loss": 3.4472, |
| "step": 1738752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1525990213337197e-05, |
| "loss": 3.4493, |
| "step": 1739264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1517604265826674e-05, |
| "loss": 3.4424, |
| "step": 1739776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1509218318316154e-05, |
| "loss": 3.4387, |
| "step": 1740288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1500848749609366e-05, |
| "loss": 3.4322, |
| "step": 1740800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1492462802098846e-05, |
| "loss": 3.4398, |
| "step": 1741312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1484076854588326e-05, |
| "loss": 3.4437, |
| "step": 1741824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1475690907077806e-05, |
| "loss": 3.4447, |
| "step": 1742336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.146732133837102e-05, |
| "loss": 3.4461, |
| "step": 1742848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1458935390860495e-05, |
| "loss": 3.4407, |
| "step": 1743360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1450549443349975e-05, |
| "loss": 3.4486, |
| "step": 1743872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.144216349583946e-05, |
| "loss": 3.4384, |
| "step": 1744384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.143379392713267e-05, |
| "loss": 3.4455, |
| "step": 1744896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1425407979622148e-05, |
| "loss": 3.4453, |
| "step": 1745408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1417022032111627e-05, |
| "loss": 3.4514, |
| "step": 1745920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1408636084601107e-05, |
| "loss": 3.4506, |
| "step": 1746432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.140026651589432e-05, |
| "loss": 3.4402, |
| "step": 1746944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.13918805683838e-05, |
| "loss": 3.4514, |
| "step": 1747456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.138349462087328e-05, |
| "loss": 3.4413, |
| "step": 1747968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.137510867336276e-05, |
| "loss": 3.4506, |
| "step": 1748480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.136673910465597e-05, |
| "loss": 3.4438, |
| "step": 1748992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.135835315714545e-05, |
| "loss": 3.4515, |
| "step": 1749504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.134996720963493e-05, |
| "loss": 3.4388, |
| "step": 1750016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1341581262124412e-05, |
| "loss": 3.4475, |
| "step": 1750528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.133321169341762e-05, |
| "loss": 3.438, |
| "step": 1751040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.13248257459071e-05, |
| "loss": 3.4428, |
| "step": 1751552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.131643979839658e-05, |
| "loss": 3.4431, |
| "step": 1752064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.130805385088606e-05, |
| "loss": 3.4433, |
| "step": 1752576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1299684282179274e-05, |
| "loss": 3.4426, |
| "step": 1753088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1291298334668754e-05, |
| "loss": 3.4445, |
| "step": 1753600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1282912387158233e-05, |
| "loss": 3.4526, |
| "step": 1754112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1274526439647713e-05, |
| "loss": 3.4325, |
| "step": 1754624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1266156870940923e-05, |
| "loss": 3.4461, |
| "step": 1755136 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 3.8516488075256348, |
| "eval_runtime": 305.6026, |
| "eval_samples_per_second": 1248.651, |
| "eval_steps_per_second": 39.021, |
| "step": 1755350 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1257770923430403e-05, |
| "loss": 3.4407, |
| "step": 1755648 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1249384975919882e-05, |
| "loss": 3.4309, |
| "step": 1756160 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1241015407213095e-05, |
| "loss": 3.4475, |
| "step": 1756672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1232629459702575e-05, |
| "loss": 3.4439, |
| "step": 1757184 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1224243512192055e-05, |
| "loss": 3.4513, |
| "step": 1757696 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1215857564681535e-05, |
| "loss": 3.4385, |
| "step": 1758208 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1207471617171015e-05, |
| "loss": 3.4397, |
| "step": 1758720 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1199085669660495e-05, |
| "loss": 3.4369, |
| "step": 1759232 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1190699722149975e-05, |
| "loss": 3.4406, |
| "step": 1759744 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1182313774639455e-05, |
| "loss": 3.4326, |
| "step": 1760256 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1173944205932667e-05, |
| "loss": 3.449, |
| "step": 1760768 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1165558258422147e-05, |
| "loss": 3.4446, |
| "step": 1761280 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1157188689715356e-05, |
| "loss": 3.4338, |
| "step": 1761792 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1148802742204836e-05, |
| "loss": 3.4302, |
| "step": 1762304 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1140416794694316e-05, |
| "loss": 3.4423, |
| "step": 1762816 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.11320308471838e-05, |
| "loss": 3.4281, |
| "step": 1763328 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1123644899673276e-05, |
| "loss": 3.4361, |
| "step": 1763840 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1115258952162756e-05, |
| "loss": 3.4369, |
| "step": 1764352 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1106873004652236e-05, |
| "loss": 3.4396, |
| "step": 1764864 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1098487057141716e-05, |
| "loss": 3.4511, |
| "step": 1765376 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.109011748843493e-05, |
| "loss": 3.4461, |
| "step": 1765888 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1081731540924408e-05, |
| "loss": 3.4364, |
| "step": 1766400 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1073345593413888e-05, |
| "loss": 3.4396, |
| "step": 1766912 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1064959645903368e-05, |
| "loss": 3.4413, |
| "step": 1767424 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1056590077196577e-05, |
| "loss": 3.4338, |
| "step": 1767936 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1048204129686057e-05, |
| "loss": 3.4439, |
| "step": 1768448 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.103983456097927e-05, |
| "loss": 3.4373, |
| "step": 1768960 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.103144861346875e-05, |
| "loss": 3.4301, |
| "step": 1769472 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.102306266595823e-05, |
| "loss": 3.4344, |
| "step": 1769984 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.101467671844771e-05, |
| "loss": 3.4403, |
| "step": 1770496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.100629077093719e-05, |
| "loss": 3.4388, |
| "step": 1771008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.099790482342667e-05, |
| "loss": 3.4391, |
| "step": 1771520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.098951887591615e-05, |
| "loss": 3.4448, |
| "step": 1772032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0981149307209362e-05, |
| "loss": 3.449, |
| "step": 1772544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0972763359698842e-05, |
| "loss": 3.4304, |
| "step": 1773056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0964377412188322e-05, |
| "loss": 3.4414, |
| "step": 1773568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0955991464677802e-05, |
| "loss": 3.4409, |
| "step": 1774080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.094762189597101e-05, |
| "loss": 3.4332, |
| "step": 1774592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.093923594846049e-05, |
| "loss": 3.4285, |
| "step": 1775104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.093085000094997e-05, |
| "loss": 3.4305, |
| "step": 1775616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0922464053439454e-05, |
| "loss": 3.4345, |
| "step": 1776128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0914094484732663e-05, |
| "loss": 3.441, |
| "step": 1776640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0905708537222143e-05, |
| "loss": 3.4341, |
| "step": 1777152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0897322589711623e-05, |
| "loss": 3.4319, |
| "step": 1777664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.08889366422011e-05, |
| "loss": 3.4533, |
| "step": 1778176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0880567073494316e-05, |
| "loss": 3.4413, |
| "step": 1778688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0872181125983796e-05, |
| "loss": 3.4403, |
| "step": 1779200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0863795178473276e-05, |
| "loss": 3.4287, |
| "step": 1779712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0855409230962752e-05, |
| "loss": 3.418, |
| "step": 1780224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0847039662255965e-05, |
| "loss": 3.4494, |
| "step": 1780736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0838653714745445e-05, |
| "loss": 3.4347, |
| "step": 1781248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0830267767234925e-05, |
| "loss": 3.4338, |
| "step": 1781760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0821881819724404e-05, |
| "loss": 3.4381, |
| "step": 1782272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0813528629821346e-05, |
| "loss": 3.4326, |
| "step": 1782784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0805142682310826e-05, |
| "loss": 3.417, |
| "step": 1783296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0796756734800306e-05, |
| "loss": 3.4288, |
| "step": 1783808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0788370787289786e-05, |
| "loss": 3.4344, |
| "step": 1784320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.077998483977927e-05, |
| "loss": 3.431, |
| "step": 1784832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.077159889226875e-05, |
| "loss": 3.4418, |
| "step": 1785344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0763212944758226e-05, |
| "loss": 3.4289, |
| "step": 1785856 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0754826997247706e-05, |
| "loss": 3.4295, |
| "step": 1786368 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0746457428540918e-05, |
| "loss": 3.4425, |
| "step": 1786880 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0738071481030398e-05, |
| "loss": 3.4248, |
| "step": 1787392 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0729685533519878e-05, |
| "loss": 3.4316, |
| "step": 1787904 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0721299586009358e-05, |
| "loss": 3.4305, |
| "step": 1788416 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.071293001730257e-05, |
| "loss": 3.4311, |
| "step": 1788928 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0704544069792047e-05, |
| "loss": 3.4297, |
| "step": 1789440 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0696158122281527e-05, |
| "loss": 3.4286, |
| "step": 1789952 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0687772174771007e-05, |
| "loss": 3.4153, |
| "step": 1790464 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0679402606064223e-05, |
| "loss": 3.4261, |
| "step": 1790976 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.06710166585537e-05, |
| "loss": 3.4439, |
| "step": 1791488 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.066263071104318e-05, |
| "loss": 3.4267, |
| "step": 1792000 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.065424476353266e-05, |
| "loss": 3.4512, |
| "step": 1792512 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0645875194825872e-05, |
| "loss": 3.439, |
| "step": 1793024 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0637489247315352e-05, |
| "loss": 3.4438, |
| "step": 1793536 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0629103299804832e-05, |
| "loss": 3.4288, |
| "step": 1794048 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0620717352294312e-05, |
| "loss": 3.443, |
| "step": 1794560 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.061234778358752e-05, |
| "loss": 3.4385, |
| "step": 1795072 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0603978214880733e-05, |
| "loss": 3.4245, |
| "step": 1795584 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0595592267370213e-05, |
| "loss": 3.4458, |
| "step": 1796096 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0587206319859693e-05, |
| "loss": 3.4347, |
| "step": 1796608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0578820372349173e-05, |
| "loss": 3.4385, |
| "step": 1797120 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0570434424838653e-05, |
| "loss": 3.437, |
| "step": 1797632 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0562048477328133e-05, |
| "loss": 3.4139, |
| "step": 1798144 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0553662529817613e-05, |
| "loss": 3.4188, |
| "step": 1798656 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0545292961110822e-05, |
| "loss": 3.4262, |
| "step": 1799168 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0536907013600306e-05, |
| "loss": 3.4354, |
| "step": 1799680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0528521066089786e-05, |
| "loss": 3.4322, |
| "step": 1800192 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.0520135118579265e-05, |
| "loss": 3.4295, |
| "step": 1800704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0511765549872475e-05, |
| "loss": 3.4334, |
| "step": 1801216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0503379602361955e-05, |
| "loss": 3.426, |
| "step": 1801728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0494993654851434e-05, |
| "loss": 3.4287, |
| "step": 1802240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0486607707340914e-05, |
| "loss": 3.4296, |
| "step": 1802752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0478221759830394e-05, |
| "loss": 3.4465, |
| "step": 1803264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0469835812319878e-05, |
| "loss": 3.4519, |
| "step": 1803776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0461449864809354e-05, |
| "loss": 3.4326, |
| "step": 1804288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0453063917298834e-05, |
| "loss": 3.4253, |
| "step": 1804800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0444694348592047e-05, |
| "loss": 3.437, |
| "step": 1805312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0436308401081523e-05, |
| "loss": 3.4319, |
| "step": 1805824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0427922453571007e-05, |
| "loss": 3.4404, |
| "step": 1806336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0419536506060487e-05, |
| "loss": 3.4306, |
| "step": 1806848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.04111669373537e-05, |
| "loss": 3.4295, |
| "step": 1807360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0402780989843176e-05, |
| "loss": 3.4371, |
| "step": 1807872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0394395042332656e-05, |
| "loss": 3.4392, |
| "step": 1808384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0386009094822136e-05, |
| "loss": 3.4289, |
| "step": 1808896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0377639526115348e-05, |
| "loss": 3.4331, |
| "step": 1809408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0369253578604828e-05, |
| "loss": 3.4283, |
| "step": 1809920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0360867631094308e-05, |
| "loss": 3.4279, |
| "step": 1810432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.035249806238752e-05, |
| "loss": 3.4205, |
| "step": 1810944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0344112114876997e-05, |
| "loss": 3.4303, |
| "step": 1811456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0335726167366477e-05, |
| "loss": 3.4337, |
| "step": 1811968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.032734021985596e-05, |
| "loss": 3.4241, |
| "step": 1812480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.031895427234544e-05, |
| "loss": 3.4324, |
| "step": 1812992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.031056832483492e-05, |
| "loss": 3.4291, |
| "step": 1813504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.03021823773244e-05, |
| "loss": 3.4395, |
| "step": 1814016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.029379642981388e-05, |
| "loss": 3.4468, |
| "step": 1814528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.028542686110709e-05, |
| "loss": 3.438, |
| "step": 1815040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.027704091359657e-05, |
| "loss": 3.4375, |
| "step": 1815552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0268654966086053e-05, |
| "loss": 3.4299, |
| "step": 1816064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0260269018575532e-05, |
| "loss": 3.4305, |
| "step": 1816576 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.025189944986874e-05, |
| "loss": 3.4206, |
| "step": 1817088 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.024351350235822e-05, |
| "loss": 3.4284, |
| "step": 1817600 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.02351275548477e-05, |
| "loss": 3.4332, |
| "step": 1818112 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0226741607337178e-05, |
| "loss": 3.4322, |
| "step": 1818624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0218372038630394e-05, |
| "loss": 3.4305, |
| "step": 1819136 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0209986091119874e-05, |
| "loss": 3.4311, |
| "step": 1819648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0201600143609354e-05, |
| "loss": 3.44, |
| "step": 1820160 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.019321419609883e-05, |
| "loss": 3.4294, |
| "step": 1820672 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0184844627392043e-05, |
| "loss": 3.4315, |
| "step": 1821184 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0176458679881523e-05, |
| "loss": 3.4326, |
| "step": 1821696 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0168072732371003e-05, |
| "loss": 3.4398, |
| "step": 1822208 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0159686784860483e-05, |
| "loss": 3.4435, |
| "step": 1822720 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0151317216153695e-05, |
| "loss": 3.4291, |
| "step": 1823232 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0142931268643175e-05, |
| "loss": 3.4409, |
| "step": 1823744 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0134545321132652e-05, |
| "loss": 3.4318, |
| "step": 1824256 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0126159373622132e-05, |
| "loss": 3.4394, |
| "step": 1824768 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0117789804915348e-05, |
| "loss": 3.4288, |
| "step": 1825280 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0109403857404828e-05, |
| "loss": 3.4384, |
| "step": 1825792 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0101017909894304e-05, |
| "loss": 3.4301, |
| "step": 1826304 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0092631962383784e-05, |
| "loss": 3.4405, |
| "step": 1826816 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0084262393676997e-05, |
| "loss": 3.4249, |
| "step": 1827328 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0075876446166477e-05, |
| "loss": 3.4317, |
| "step": 1827840 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0067490498655956e-05, |
| "loss": 3.4344, |
| "step": 1828352 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0059104551145436e-05, |
| "loss": 3.4331, |
| "step": 1828864 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.005073498243865e-05, |
| "loss": 3.4291, |
| "step": 1829376 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0042349034928125e-05, |
| "loss": 3.434, |
| "step": 1829888 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0033963087417605e-05, |
| "loss": 3.4394, |
| "step": 1830400 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.0025577139907085e-05, |
| "loss": 3.4223, |
| "step": 1830912 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.00172075712003e-05, |
| "loss": 3.4383, |
| "step": 1831424 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 3.852069854736328, |
| "eval_runtime": 304.7158, |
| "eval_samples_per_second": 1252.285, |
| "eval_steps_per_second": 39.135, |
| "step": 1831670 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 1.2742391509820744e+18, |
| "trial_name": null, |
| "trial_params": null |
| } |