{
  "best_global_step": 14058,
  "best_metric": 0.6273267865180969,
  "best_model_checkpoint": "./mcqa_qwen3_letter_m3/checkpoint-14058",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 14058,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.007113640405477503,
      "grad_norm": 20.266469955444336,
      "learning_rate": 4.551920341394026e-07,
      "loss": 2.1439,
      "step": 50
    },
    {
      "epoch": 0.014227280810955006,
      "grad_norm": 21.066892623901367,
      "learning_rate": 9.293504030346136e-07,
      "loss": 1.4471,
      "step": 100
    },
    {
      "epoch": 0.02134092121643251,
      "grad_norm": 18.918508529663086,
      "learning_rate": 1.4035087719298246e-06,
      "loss": 1.3067,
      "step": 150
    },
    {
      "epoch": 0.028454561621910013,
      "grad_norm": 9.563344955444336,
      "learning_rate": 1.8681839734471315e-06,
      "loss": 1.2745,
      "step": 200
    },
    {
      "epoch": 0.035568202027387516,
      "grad_norm": 20.408035278320312,
      "learning_rate": 2.3423423423423424e-06,
      "loss": 1.2323,
      "step": 250
    },
    {
      "epoch": 0.04268184243286502,
      "grad_norm": 17.020856857299805,
      "learning_rate": 2.8165007112375536e-06,
      "loss": 1.2476,
      "step": 300
    },
    {
      "epoch": 0.04979548283834252,
      "grad_norm": 10.942744255065918,
      "learning_rate": 3.2906590801327647e-06,
      "loss": 1.2084,
      "step": 350
    },
    {
      "epoch": 0.056909123243820026,
      "grad_norm": 11.239660263061523,
      "learning_rate": 3.7648174490279754e-06,
      "loss": 1.1453,
      "step": 400
    },
    {
      "epoch": 0.06402276364929753,
      "grad_norm": 8.421320915222168,
      "learning_rate": 4.2389758179231865e-06,
      "loss": 1.1257,
      "step": 450
    },
    {
      "epoch": 0.07113640405477503,
      "grad_norm": 7.429409027099609,
      "learning_rate": 4.713134186818398e-06,
      "loss": 1.0896,
      "step": 500
    },
    {
      "epoch": 0.07825004446025254,
      "grad_norm": 8.3453950881958,
      "learning_rate": 5.187292555713609e-06,
      "loss": 1.0589,
      "step": 550
    },
    {
      "epoch": 0.08536368486573004,
      "grad_norm": 9.157875061035156,
      "learning_rate": 5.66145092460882e-06,
      "loss": 1.0628,
      "step": 600
    },
    {
      "epoch": 0.09247732527120754,
      "grad_norm": 9.339110374450684,
      "learning_rate": 6.135609293504031e-06,
      "loss": 1.0211,
      "step": 650
    },
    {
      "epoch": 0.09959096567668504,
      "grad_norm": 7.010004043579102,
      "learning_rate": 6.609767662399243e-06,
      "loss": 1.0684,
      "step": 700
    },
    {
      "epoch": 0.10670460608216255,
      "grad_norm": 8.618551254272461,
      "learning_rate": 7.0839260312944525e-06,
      "loss": 1.0467,
      "step": 750
    },
    {
      "epoch": 0.11381824648764005,
      "grad_norm": 9.206182479858398,
      "learning_rate": 7.558084400189663e-06,
      "loss": 1.0855,
      "step": 800
    },
    {
      "epoch": 0.12093188689311755,
      "grad_norm": 11.882657051086426,
      "learning_rate": 8.032242769084875e-06,
      "loss": 0.9897,
      "step": 850
    },
    {
      "epoch": 0.12804552729859506,
      "grad_norm": 6.381322860717773,
      "learning_rate": 8.506401137980086e-06,
      "loss": 1.0227,
      "step": 900
    },
    {
      "epoch": 0.13515916770407255,
      "grad_norm": 7.592447280883789,
      "learning_rate": 8.980559506875298e-06,
      "loss": 0.9508,
      "step": 950
    },
    {
      "epoch": 0.14227280810955006,
      "grad_norm": 10.622550010681152,
      "learning_rate": 9.454717875770508e-06,
      "loss": 0.9916,
      "step": 1000
    },
    {
      "epoch": 0.14938644851502755,
      "grad_norm": 16.836315155029297,
      "learning_rate": 9.92887624466572e-06,
      "loss": 0.9838,
      "step": 1050
    },
    {
      "epoch": 0.15650008892050507,
      "grad_norm": 8.732917785644531,
      "learning_rate": 1.040303461356093e-05,
      "loss": 0.9678,
      "step": 1100
    },
    {
      "epoch": 0.16361372932598256,
      "grad_norm": 10.75668716430664,
      "learning_rate": 1.0877192982456142e-05,
      "loss": 0.9535,
      "step": 1150
    },
    {
      "epoch": 0.17072736973146008,
      "grad_norm": 10.604599952697754,
      "learning_rate": 1.1351351351351352e-05,
      "loss": 1.0164,
      "step": 1200
    },
    {
      "epoch": 0.17784101013693757,
      "grad_norm": 11.192643165588379,
      "learning_rate": 1.1825509720246564e-05,
      "loss": 0.9811,
      "step": 1250
    },
    {
      "epoch": 0.18495465054241508,
      "grad_norm": 11.667268753051758,
      "learning_rate": 1.2299668089141774e-05,
      "loss": 0.9329,
      "step": 1300
    },
    {
      "epoch": 0.19206829094789257,
      "grad_norm": 12.122210502624512,
      "learning_rate": 1.2773826458036987e-05,
      "loss": 0.9693,
      "step": 1350
    },
    {
      "epoch": 0.1991819313533701,
      "grad_norm": 10.696669578552246,
      "learning_rate": 1.3247984826932197e-05,
      "loss": 0.9118,
      "step": 1400
    },
    {
      "epoch": 0.20629557175884758,
      "grad_norm": 9.4218168258667,
      "learning_rate": 1.3722143195827407e-05,
      "loss": 0.9242,
      "step": 1450
    },
    {
      "epoch": 0.2134092121643251,
      "grad_norm": 15.12156867980957,
      "learning_rate": 1.4196301564722618e-05,
      "loss": 0.9161,
      "step": 1500
    },
    {
      "epoch": 0.22052285256980259,
      "grad_norm": 10.341136932373047,
      "learning_rate": 1.4670459933617828e-05,
      "loss": 0.9112,
      "step": 1550
    },
    {
      "epoch": 0.2276364929752801,
      "grad_norm": 8.621809005737305,
      "learning_rate": 1.5144618302513041e-05,
      "loss": 0.923,
      "step": 1600
    },
    {
      "epoch": 0.2347501333807576,
      "grad_norm": 8.037166595458984,
      "learning_rate": 1.561877667140825e-05,
      "loss": 0.8646,
      "step": 1650
    },
    {
      "epoch": 0.2418637737862351,
      "grad_norm": 12.160765647888184,
      "learning_rate": 1.6092935040303463e-05,
      "loss": 0.8811,
      "step": 1700
    },
    {
      "epoch": 0.2489774141917126,
      "grad_norm": 9.850648880004883,
      "learning_rate": 1.6567093409198673e-05,
      "loss": 0.9158,
      "step": 1750
    },
    {
      "epoch": 0.2560910545971901,
      "grad_norm": 6.849952220916748,
      "learning_rate": 1.7041251778093886e-05,
      "loss": 0.8903,
      "step": 1800
    },
    {
      "epoch": 0.26320469500266763,
      "grad_norm": 7.7248334884643555,
      "learning_rate": 1.7515410146989096e-05,
      "loss": 0.8853,
      "step": 1850
    },
    {
      "epoch": 0.2703183354081451,
      "grad_norm": 5.612997531890869,
      "learning_rate": 1.7989568515884306e-05,
      "loss": 0.8407,
      "step": 1900
    },
    {
      "epoch": 0.2774319758136226,
      "grad_norm": 6.731159210205078,
      "learning_rate": 1.846372688477952e-05,
      "loss": 0.8447,
      "step": 1950
    },
    {
      "epoch": 0.28454561621910013,
      "grad_norm": 8.783773422241211,
      "learning_rate": 1.893788525367473e-05,
      "loss": 0.8201,
      "step": 2000
    },
    {
      "epoch": 0.29165925662457765,
      "grad_norm": 8.748090744018555,
      "learning_rate": 1.9412043622569942e-05,
      "loss": 0.8202,
      "step": 2050
    },
    {
      "epoch": 0.2987728970300551,
      "grad_norm": 6.985095977783203,
      "learning_rate": 1.9886201991465152e-05,
      "loss": 0.8686,
      "step": 2100
    },
    {
      "epoch": 0.3058865374355326,
      "grad_norm": 9.010405540466309,
      "learning_rate": 1.9959947299077735e-05,
      "loss": 0.8541,
      "step": 2150
    },
    {
      "epoch": 0.31300017784101014,
      "grad_norm": 10.884967803955078,
      "learning_rate": 1.9907246376811596e-05,
      "loss": 0.9021,
      "step": 2200
    },
    {
      "epoch": 0.32011381824648766,
      "grad_norm": 6.696805953979492,
      "learning_rate": 1.9854545454545456e-05,
      "loss": 0.8352,
      "step": 2250
    },
    {
      "epoch": 0.3272274586519651,
      "grad_norm": 6.158838272094727,
      "learning_rate": 1.9801844532279317e-05,
      "loss": 0.8078,
      "step": 2300
    },
    {
      "epoch": 0.33434109905744264,
      "grad_norm": 10.230331420898438,
      "learning_rate": 1.9749143610013177e-05,
      "loss": 0.8659,
      "step": 2350
    },
    {
      "epoch": 0.34145473946292015,
      "grad_norm": 7.0724945068359375,
      "learning_rate": 1.9696442687747038e-05,
      "loss": 0.862,
      "step": 2400
    },
    {
      "epoch": 0.34856837986839767,
      "grad_norm": 9.820209503173828,
      "learning_rate": 1.9643741765480898e-05,
      "loss": 0.8036,
      "step": 2450
    },
    {
      "epoch": 0.35568202027387513,
      "grad_norm": 6.326645374298096,
      "learning_rate": 1.959104084321476e-05,
      "loss": 0.8255,
      "step": 2500
    },
    {
      "epoch": 0.36279566067935265,
      "grad_norm": 4.881910800933838,
      "learning_rate": 1.953939393939394e-05,
      "loss": 0.861,
      "step": 2550
    },
    {
      "epoch": 0.36990930108483017,
      "grad_norm": 6.977218151092529,
      "learning_rate": 1.94866930171278e-05,
      "loss": 0.8487,
      "step": 2600
    },
    {
      "epoch": 0.3770229414903077,
      "grad_norm": 6.354854106903076,
      "learning_rate": 1.943399209486166e-05,
      "loss": 0.8122,
      "step": 2650
    },
    {
      "epoch": 0.38413658189578515,
      "grad_norm": 6.679897308349609,
      "learning_rate": 1.9381291172595522e-05,
      "loss": 0.8239,
      "step": 2700
    },
    {
      "epoch": 0.39125022230126266,
      "grad_norm": 7.958705902099609,
      "learning_rate": 1.9328590250329382e-05,
      "loss": 0.7808,
      "step": 2750
    },
    {
      "epoch": 0.3983638627067402,
      "grad_norm": 6.450643062591553,
      "learning_rate": 1.9275889328063243e-05,
      "loss": 0.8359,
      "step": 2800
    },
    {
      "epoch": 0.4054775031122177,
      "grad_norm": 5.986859321594238,
      "learning_rate": 1.9223188405797103e-05,
      "loss": 0.8278,
      "step": 2850
    },
    {
      "epoch": 0.41259114351769516,
      "grad_norm": 8.40491771697998,
      "learning_rate": 1.9170487483530964e-05,
      "loss": 0.7932,
      "step": 2900
    },
    {
      "epoch": 0.4197047839231727,
      "grad_norm": 7.491307735443115,
      "learning_rate": 1.9117786561264824e-05,
      "loss": 0.7932,
      "step": 2950
    },
    {
      "epoch": 0.4268184243286502,
      "grad_norm": 5.475683689117432,
      "learning_rate": 1.9065085638998684e-05,
      "loss": 0.8292,
      "step": 3000
    },
    {
      "epoch": 0.4339320647341277,
      "grad_norm": 6.489156723022461,
      "learning_rate": 1.9012384716732545e-05,
      "loss": 0.7585,
      "step": 3050
    },
    {
      "epoch": 0.44104570513960517,
      "grad_norm": 6.417465686798096,
      "learning_rate": 1.8959683794466402e-05,
      "loss": 0.7628,
      "step": 3100
    },
    {
      "epoch": 0.4481593455450827,
      "grad_norm": 4.215211868286133,
      "learning_rate": 1.8906982872200266e-05,
      "loss": 0.7418,
      "step": 3150
    },
    {
      "epoch": 0.4552729859505602,
      "grad_norm": 5.503742218017578,
      "learning_rate": 1.8854281949934126e-05,
      "loss": 0.8346,
      "step": 3200
    },
    {
      "epoch": 0.4623866263560377,
      "grad_norm": 5.3524651527404785,
      "learning_rate": 1.8801581027667987e-05,
      "loss": 0.8285,
      "step": 3250
    },
    {
      "epoch": 0.4695002667615152,
      "grad_norm": 5.5221781730651855,
      "learning_rate": 1.8748880105401844e-05,
      "loss": 0.7363,
      "step": 3300
    },
    {
      "epoch": 0.4766139071669927,
      "grad_norm": 7.9843268394470215,
      "learning_rate": 1.8696179183135704e-05,
      "loss": 0.7964,
      "step": 3350
    },
    {
      "epoch": 0.4837275475724702,
      "grad_norm": 6.0546417236328125,
      "learning_rate": 1.8643478260869568e-05,
      "loss": 0.7696,
      "step": 3400
    },
    {
      "epoch": 0.49084118797794773,
      "grad_norm": 5.521228313446045,
      "learning_rate": 1.859077733860343e-05,
      "loss": 0.7309,
      "step": 3450
    },
    {
      "epoch": 0.4979548283834252,
      "grad_norm": 9.225383758544922,
      "learning_rate": 1.853807641633729e-05,
      "loss": 0.7869,
      "step": 3500
    },
    {
      "epoch": 0.5050684687889028,
      "grad_norm": 4.0471601486206055,
      "learning_rate": 1.8485375494071146e-05,
      "loss": 0.7323,
      "step": 3550
    },
    {
      "epoch": 0.5121821091943802,
      "grad_norm": 4.831805229187012,
      "learning_rate": 1.8432674571805006e-05,
      "loss": 0.7795,
      "step": 3600
    },
    {
      "epoch": 0.5192957495998577,
      "grad_norm": 5.822267055511475,
      "learning_rate": 1.837997364953887e-05,
      "loss": 0.712,
      "step": 3650
    },
    {
      "epoch": 0.5264093900053353,
      "grad_norm": 4.78722620010376,
      "learning_rate": 1.832727272727273e-05,
      "loss": 0.7408,
      "step": 3700
    },
    {
      "epoch": 0.5335230304108127,
      "grad_norm": 5.776618480682373,
      "learning_rate": 1.8274571805006588e-05,
      "loss": 0.6803,
      "step": 3750
    },
    {
      "epoch": 0.5406366708162902,
      "grad_norm": 11.762743949890137,
      "learning_rate": 1.8221870882740448e-05,
      "loss": 0.7343,
      "step": 3800
    },
    {
      "epoch": 0.5477503112217678,
      "grad_norm": 3.884990692138672,
      "learning_rate": 1.816916996047431e-05,
      "loss": 0.7551,
      "step": 3850
    },
    {
      "epoch": 0.5548639516272452,
      "grad_norm": 9.219622611999512,
      "learning_rate": 1.8116469038208173e-05,
      "loss": 0.764,
      "step": 3900
    },
    {
      "epoch": 0.5619775920327228,
      "grad_norm": 7.808120250701904,
      "learning_rate": 1.806376811594203e-05,
      "loss": 0.745,
      "step": 3950
    },
    {
      "epoch": 0.5690912324382003,
      "grad_norm": 7.188344955444336,
      "learning_rate": 1.801106719367589e-05,
      "loss": 0.7642,
      "step": 4000
    },
    {
      "epoch": 0.5762048728436777,
      "grad_norm": 4.781955242156982,
      "learning_rate": 1.795836627140975e-05,
      "loss": 0.7089,
      "step": 4050
    },
    {
      "epoch": 0.5833185132491553,
      "grad_norm": 4.489506721496582,
      "learning_rate": 1.790566534914361e-05,
      "loss": 0.7512,
      "step": 4100
    },
    {
      "epoch": 0.5904321536546328,
      "grad_norm": 5.0979905128479,
      "learning_rate": 1.785296442687747e-05,
      "loss": 0.7753,
      "step": 4150
    },
    {
      "epoch": 0.5975457940601102,
      "grad_norm": 5.302441596984863,
      "learning_rate": 1.7800263504611332e-05,
      "loss": 0.7412,
      "step": 4200
    },
    {
      "epoch": 0.6046594344655878,
      "grad_norm": 4.942580223083496,
      "learning_rate": 1.7747562582345192e-05,
      "loss": 0.7474,
      "step": 4250
    },
    {
      "epoch": 0.6117730748710652,
      "grad_norm": 4.685510158538818,
      "learning_rate": 1.7694861660079053e-05,
      "loss": 0.7173,
      "step": 4300
    },
    {
      "epoch": 0.6188867152765428,
      "grad_norm": 4.8454999923706055,
      "learning_rate": 1.7642160737812913e-05,
      "loss": 0.7207,
      "step": 4350
    },
    {
      "epoch": 0.6260003556820203,
      "grad_norm": 7.190736293792725,
      "learning_rate": 1.7589459815546774e-05,
      "loss": 0.754,
      "step": 4400
    },
    {
      "epoch": 0.6331139960874977,
      "grad_norm": 7.0048675537109375,
      "learning_rate": 1.7536758893280634e-05,
      "loss": 0.7282,
      "step": 4450
    },
    {
      "epoch": 0.6402276364929753,
      "grad_norm": 5.777256011962891,
      "learning_rate": 1.7484057971014495e-05,
      "loss": 0.6597,
      "step": 4500
    },
    {
      "epoch": 0.6473412768984528,
      "grad_norm": 8.8043851852417,
      "learning_rate": 1.7431357048748355e-05,
      "loss": 0.6672,
      "step": 4550
    },
    {
      "epoch": 0.6544549173039302,
      "grad_norm": 7.159558296203613,
      "learning_rate": 1.7378656126482215e-05,
      "loss": 0.7266,
      "step": 4600
    },
    {
      "epoch": 0.6615685577094078,
      "grad_norm": 5.110541820526123,
      "learning_rate": 1.7325955204216076e-05,
      "loss": 0.6399,
      "step": 4650
    },
    {
      "epoch": 0.6686821981148853,
      "grad_norm": 3.4727582931518555,
      "learning_rate": 1.7273254281949936e-05,
      "loss": 0.68,
      "step": 4700
    },
    {
      "epoch": 0.6757958385203628,
      "grad_norm": 5.788763999938965,
      "learning_rate": 1.7220553359683797e-05,
      "loss": 0.7349,
      "step": 4750
    },
    {
      "epoch": 0.6829094789258403,
      "grad_norm": 4.407217979431152,
      "learning_rate": 1.7167852437417657e-05,
      "loss": 0.695,
      "step": 4800
    },
    {
      "epoch": 0.6900231193313178,
      "grad_norm": 4.685910224914551,
      "learning_rate": 1.7115151515151514e-05,
      "loss": 0.7816,
      "step": 4850
    },
    {
      "epoch": 0.6971367597367953,
      "grad_norm": 2.9753873348236084,
      "learning_rate": 1.7062450592885378e-05,
      "loss": 0.6595,
      "step": 4900
    },
    {
      "epoch": 0.7042504001422728,
      "grad_norm": 3.8326473236083984,
      "learning_rate": 1.700974967061924e-05,
      "loss": 0.6875,
      "step": 4950
    },
    {
      "epoch": 0.7113640405477503,
      "grad_norm": 5.974637985229492,
      "learning_rate": 1.69570487483531e-05,
      "loss": 0.7414,
      "step": 5000
    },
    {
      "epoch": 0.7184776809532278,
      "grad_norm": 3.4977550506591797,
      "learning_rate": 1.6904347826086956e-05,
      "loss": 0.6832,
      "step": 5050
    },
    {
      "epoch": 0.7255913213587053,
      "grad_norm": 5.954402446746826,
      "learning_rate": 1.6851646903820817e-05,
      "loss": 0.7257,
      "step": 5100
    },
    {
      "epoch": 0.7327049617641829,
      "grad_norm": 4.272510528564453,
      "learning_rate": 1.679894598155468e-05,
      "loss": 0.6808,
      "step": 5150
    },
    {
      "epoch": 0.7398186021696603,
      "grad_norm": 4.155337810516357,
      "learning_rate": 1.674624505928854e-05,
      "loss": 0.7062,
      "step": 5200
    },
    {
      "epoch": 0.7469322425751378,
      "grad_norm": 4.898423671722412,
      "learning_rate": 1.6693544137022398e-05,
      "loss": 0.6549,
      "step": 5250
    },
    {
      "epoch": 0.7540458829806154,
      "grad_norm": 6.021943092346191,
      "learning_rate": 1.664084321475626e-05,
      "loss": 0.6886,
      "step": 5300
    },
    {
      "epoch": 0.7611595233860928,
      "grad_norm": 4.547012805938721,
      "learning_rate": 1.658814229249012e-05,
      "loss": 0.6653,
      "step": 5350
    },
    {
      "epoch": 0.7682731637915703,
      "grad_norm": 3.5163490772247314,
      "learning_rate": 1.6535441370223983e-05,
      "loss": 0.6549,
      "step": 5400
    },
    {
      "epoch": 0.7753868041970479,
      "grad_norm": 4.255113124847412,
      "learning_rate": 1.648274044795784e-05,
      "loss": 0.6473,
      "step": 5450
    },
    {
      "epoch": 0.7825004446025253,
      "grad_norm": 8.696562767028809,
      "learning_rate": 1.64300395256917e-05,
      "loss": 0.67,
      "step": 5500
    },
    {
      "epoch": 0.7896140850080029,
      "grad_norm": 3.098172903060913,
      "learning_rate": 1.637733860342556e-05,
      "loss": 0.6626,
      "step": 5550
    },
    {
      "epoch": 0.7967277254134804,
      "grad_norm": 5.950908660888672,
      "learning_rate": 1.632463768115942e-05,
      "loss": 0.7476,
      "step": 5600
    },
    {
      "epoch": 0.8038413658189578,
      "grad_norm": 5.9870686531066895,
      "learning_rate": 1.627193675889328e-05,
      "loss": 0.7006,
      "step": 5650
    },
    {
      "epoch": 0.8109550062244354,
      "grad_norm": 8.210447311401367,
      "learning_rate": 1.6219235836627142e-05,
      "loss": 0.6104,
      "step": 5700
    },
    {
      "epoch": 0.8180686466299129,
      "grad_norm": 2.8877291679382324,
      "learning_rate": 1.6166534914361002e-05,
      "loss": 0.6183,
      "step": 5750
    },
    {
      "epoch": 0.8251822870353903,
      "grad_norm": 3.5387609004974365,
      "learning_rate": 1.6113833992094863e-05,
      "loss": 0.6816,
      "step": 5800
    },
    {
      "epoch": 0.8322959274408679,
      "grad_norm": 3.3335936069488525,
      "learning_rate": 1.6061133069828723e-05,
      "loss": 0.6948,
      "step": 5850
    },
    {
      "epoch": 0.8394095678463454,
      "grad_norm": 3.515953302383423,
      "learning_rate": 1.6008432147562584e-05,
      "loss": 0.6664,
      "step": 5900
    },
    {
      "epoch": 0.8465232082518229,
      "grad_norm": 2.2991325855255127,
      "learning_rate": 1.5955731225296444e-05,
      "loss": 0.6363,
      "step": 5950
    },
    {
      "epoch": 0.8536368486573004,
      "grad_norm": 5.262946128845215,
      "learning_rate": 1.5903030303030305e-05,
      "loss": 0.7375,
      "step": 6000
    },
    {
      "epoch": 0.8607504890627778,
      "grad_norm": 4.494121074676514,
      "learning_rate": 1.5850329380764165e-05,
      "loss": 0.6818,
      "step": 6050
    },
    {
      "epoch": 0.8678641294682554,
      "grad_norm": 4.698576927185059,
      "learning_rate": 1.5797628458498026e-05,
      "loss": 0.7246,
      "step": 6100
    },
    {
      "epoch": 0.8749777698737329,
      "grad_norm": 4.571809768676758,
      "learning_rate": 1.5744927536231883e-05,
      "loss": 0.6112,
      "step": 6150
    },
    {
      "epoch": 0.8820914102792103,
      "grad_norm": 7.1781110763549805,
      "learning_rate": 1.5692226613965746e-05,
      "loss": 0.6593,
      "step": 6200
    },
    {
      "epoch": 0.8892050506846879,
      "grad_norm": 5.867689609527588,
      "learning_rate": 1.5639525691699607e-05,
      "loss": 0.6491,
      "step": 6250
    },
    {
      "epoch": 0.8963186910901654,
      "grad_norm": 5.427711009979248,
      "learning_rate": 1.5586824769433467e-05,
      "loss": 0.6419,
      "step": 6300
    },
    {
      "epoch": 0.903432331495643,
      "grad_norm": 5.6072869300842285,
      "learning_rate": 1.5534123847167328e-05,
      "loss": 0.6443,
      "step": 6350
    },
    {
      "epoch": 0.9105459719011204,
      "grad_norm": 4.355414867401123,
      "learning_rate": 1.5481422924901185e-05,
      "loss": 0.7021,
      "step": 6400
    },
    {
      "epoch": 0.9176596123065979,
      "grad_norm": 4.039976596832275,
      "learning_rate": 1.542872200263505e-05,
      "loss": 0.7351,
      "step": 6450
    },
    {
      "epoch": 0.9247732527120754,
      "grad_norm": 3.736238956451416,
      "learning_rate": 1.537602108036891e-05,
      "loss": 0.6629,
      "step": 6500
    },
    {
      "epoch": 0.9318868931175529,
      "grad_norm": 3.91310453414917,
      "learning_rate": 1.532332015810277e-05,
      "loss": 0.6856,
      "step": 6550
    },
    {
      "epoch": 0.9390005335230304,
      "grad_norm": 5.676443576812744,
      "learning_rate": 1.5270619235836627e-05,
      "loss": 0.6344,
      "step": 6600
    },
    {
      "epoch": 0.9461141739285079,
      "grad_norm": 3.6127190589904785,
      "learning_rate": 1.5217918313570487e-05,
      "loss": 0.6845,
      "step": 6650
    },
    {
      "epoch": 0.9532278143339854,
      "grad_norm": 4.957319259643555,
      "learning_rate": 1.516521739130435e-05,
      "loss": 0.7074,
      "step": 6700
    },
    {
      "epoch": 0.960341454739463,
      "grad_norm": 3.351602554321289,
      "learning_rate": 1.511251646903821e-05,
      "loss": 0.6722,
      "step": 6750
    },
    {
      "epoch": 0.9674550951449404,
      "grad_norm": 5.316050052642822,
      "learning_rate": 1.505981554677207e-05,
      "loss": 0.5993,
      "step": 6800
    },
    {
      "epoch": 0.9745687355504179,
      "grad_norm": 3.5226705074310303,
      "learning_rate": 1.5007114624505929e-05,
      "loss": 0.6663,
      "step": 6850
    },
    {
      "epoch": 0.9816823759558955,
      "grad_norm": 4.123106479644775,
      "learning_rate": 1.495441370223979e-05,
      "loss": 0.6865,
      "step": 6900
    },
    {
      "epoch": 0.9887960163613729,
      "grad_norm": 3.942507743835449,
      "learning_rate": 1.4901712779973652e-05,
      "loss": 0.6535,
      "step": 6950
    },
    {
      "epoch": 0.9959096567668504,
      "grad_norm": 4.765227317810059,
      "learning_rate": 1.4849011857707512e-05,
      "loss": 0.5895,
      "step": 7000
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.6490142345428467,
      "eval_runtime": 321.1223,
      "eval_samples_per_second": 16.414,
      "eval_steps_per_second": 2.052,
      "step": 7029
    },
    {
| "epoch": 1.0029877289703006, | |
| "grad_norm": 3.913954496383667, | |
| "learning_rate": 1.479631093544137e-05, | |
| "loss": 0.5676, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.010101369375778, | |
| "grad_norm": 2.70705246925354, | |
| "learning_rate": 1.4743610013175231e-05, | |
| "loss": 0.5268, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.0172150097812556, | |
| "grad_norm": 5.550088882446289, | |
| "learning_rate": 1.4690909090909092e-05, | |
| "loss": 0.507, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.024328650186733, | |
| "grad_norm": 6.876676082611084, | |
| "learning_rate": 1.4638208168642954e-05, | |
| "loss": 0.5285, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.0314422905922105, | |
| "grad_norm": 2.911832809448242, | |
| "learning_rate": 1.4585507246376813e-05, | |
| "loss": 0.5357, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.0385559309976882, | |
| "grad_norm": 5.395831108093262, | |
| "learning_rate": 1.4532806324110673e-05, | |
| "loss": 0.5442, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 1.0456695714031656, | |
| "grad_norm": 8.814352035522461, | |
| "learning_rate": 1.4480105401844533e-05, | |
| "loss": 0.5394, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 1.052783211808643, | |
| "grad_norm": 2.947053909301758, | |
| "learning_rate": 1.4427404479578392e-05, | |
| "loss": 0.5549, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 1.0598968522141206, | |
| "grad_norm": 5.564446926116943, | |
| "learning_rate": 1.4374703557312254e-05, | |
| "loss": 0.5328, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 1.067010492619598, | |
| "grad_norm": 4.452253341674805, | |
| "learning_rate": 1.4322002635046115e-05, | |
| "loss": 0.5197, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 1.0741241330250757, | |
| "grad_norm": 3.506099224090576, | |
| "learning_rate": 1.4269301712779975e-05, | |
| "loss": 0.5509, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 1.0812377734305532, | |
| "grad_norm": 4.477612018585205, | |
| "learning_rate": 1.4216600790513834e-05, | |
| "loss": 0.5005, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 1.0883514138360306, | |
| "grad_norm": 4.912115097045898, | |
| "learning_rate": 1.4163899868247694e-05, | |
| "loss": 0.5351, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 1.095465054241508, | |
| "grad_norm": 4.499685764312744, | |
| "learning_rate": 1.4111198945981557e-05, | |
| "loss": 0.5218, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 1.1025786946469855, | |
| "grad_norm": 4.6450934410095215, | |
| "learning_rate": 1.4058498023715417e-05, | |
| "loss": 0.5195, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 1.109692335052463, | |
| "grad_norm": 4.883353233337402, | |
| "learning_rate": 1.4005797101449276e-05, | |
| "loss": 0.5464, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 1.1168059754579407, | |
| "grad_norm": 3.312135934829712, | |
| "learning_rate": 1.3953096179183136e-05, | |
| "loss": 0.4766, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 1.1239196158634182, | |
| "grad_norm": 2.180053234100342, | |
| "learning_rate": 1.3900395256916997e-05, | |
| "loss": 0.5742, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 1.1310332562688956, | |
| "grad_norm": 4.708530426025391, | |
| "learning_rate": 1.3847694334650859e-05, | |
| "loss": 0.5499, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 1.138146896674373, | |
| "grad_norm": 3.369694709777832, | |
| "learning_rate": 1.379499341238472e-05, | |
| "loss": 0.4839, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 1.1452605370798505, | |
| "grad_norm": 3.1507697105407715, | |
| "learning_rate": 1.3742292490118578e-05, | |
| "loss": 0.5118, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 1.1523741774853282, | |
| "grad_norm": 3.773084878921509, | |
| "learning_rate": 1.3689591567852438e-05, | |
| "loss": 0.5406, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 1.1594878178908057, | |
| "grad_norm": 3.446446180343628, | |
| "learning_rate": 1.3636890645586299e-05, | |
| "loss": 0.5175, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 1.1666014582962831, | |
| "grad_norm": 5.0827155113220215, | |
| "learning_rate": 1.3584189723320161e-05, | |
| "loss": 0.4963, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 1.1737150987017606, | |
| "grad_norm": 5.854413032531738, | |
| "learning_rate": 1.353148880105402e-05, | |
| "loss": 0.6038, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 1.180828739107238, | |
| "grad_norm": 4.135270595550537, | |
| "learning_rate": 1.347878787878788e-05, | |
| "loss": 0.5876, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 1.1879423795127155, | |
| "grad_norm": 4.9584197998046875, | |
| "learning_rate": 1.342608695652174e-05, | |
| "loss": 0.5272, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 1.1950560199181932, | |
| "grad_norm": 2.152341842651367, | |
| "learning_rate": 1.33733860342556e-05, | |
| "loss": 0.5146, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 1.2021696603236707, | |
| "grad_norm": 3.346052885055542, | |
| "learning_rate": 1.3320685111989462e-05, | |
| "loss": 0.4661, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 1.2092833007291481, | |
| "grad_norm": 4.768698692321777, | |
| "learning_rate": 1.3267984189723322e-05, | |
| "loss": 0.5354, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 1.2163969411346256, | |
| "grad_norm": 5.182507514953613, | |
| "learning_rate": 1.3215283267457183e-05, | |
| "loss": 0.4845, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 1.223510581540103, | |
| "grad_norm": 4.584784030914307, | |
| "learning_rate": 1.3162582345191041e-05, | |
| "loss": 0.5261, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 1.2306242219455807, | |
| "grad_norm": 3.520695209503174, | |
| "learning_rate": 1.3110935441370225e-05, | |
| "loss": 0.5735, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 1.2377378623510582, | |
| "grad_norm": 5.633211612701416, | |
| "learning_rate": 1.3058234519104085e-05, | |
| "loss": 0.516, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 1.2448515027565357, | |
| "grad_norm": 5.559876441955566, | |
| "learning_rate": 1.3005533596837944e-05, | |
| "loss": 0.5597, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 1.2519651431620131, | |
| "grad_norm": 4.474053382873535, | |
| "learning_rate": 1.2952832674571806e-05, | |
| "loss": 0.5171, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 1.2590787835674906, | |
| "grad_norm": 3.866060733795166, | |
| "learning_rate": 1.2900131752305667e-05, | |
| "loss": 0.5057, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 1.266192423972968, | |
| "grad_norm": 5.632756233215332, | |
| "learning_rate": 1.2847430830039527e-05, | |
| "loss": 0.5514, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 1.2733060643784457, | |
| "grad_norm": 4.323223114013672, | |
| "learning_rate": 1.2794729907773386e-05, | |
| "loss": 0.5052, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 1.2804197047839232, | |
| "grad_norm": 3.6960432529449463, | |
| "learning_rate": 1.2742028985507246e-05, | |
| "loss": 0.5295, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 1.2875333451894007, | |
| "grad_norm": 3.777792453765869, | |
| "learning_rate": 1.2689328063241108e-05, | |
| "loss": 0.49, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 1.2946469855948781, | |
| "grad_norm": 2.103578805923462, | |
| "learning_rate": 1.2636627140974969e-05, | |
| "loss": 0.5437, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 1.3017606260003558, | |
| "grad_norm": 5.730409622192383, | |
| "learning_rate": 1.258392621870883e-05, | |
| "loss": 0.532, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 1.3088742664058333, | |
| "grad_norm": 6.736247539520264, | |
| "learning_rate": 1.2532279314888011e-05, | |
| "loss": 0.554, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 1.3159879068113107, | |
| "grad_norm": 2.0751116275787354, | |
| "learning_rate": 1.2479578392621872e-05, | |
| "loss": 0.5128, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 1.3231015472167882, | |
| "grad_norm": 2.6663055419921875, | |
| "learning_rate": 1.2426877470355732e-05, | |
| "loss": 0.5251, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 1.3302151876222656, | |
| "grad_norm": 4.778317928314209, | |
| "learning_rate": 1.2374176548089594e-05, | |
| "loss": 0.5219, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 1.337328828027743, | |
| "grad_norm": 5.950105667114258, | |
| "learning_rate": 1.2321475625823453e-05, | |
| "loss": 0.5192, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 1.3444424684332206, | |
| "grad_norm": 3.5832479000091553, | |
| "learning_rate": 1.2268774703557313e-05, | |
| "loss": 0.5185, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 1.3515561088386983, | |
| "grad_norm": 2.7390801906585693, | |
| "learning_rate": 1.2216073781291174e-05, | |
| "loss": 0.5375, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 1.3586697492441757, | |
| "grad_norm": 2.688469648361206, | |
| "learning_rate": 1.2163372859025033e-05, | |
| "loss": 0.5352, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 1.3657833896496532, | |
| "grad_norm": 4.956066608428955, | |
| "learning_rate": 1.2110671936758893e-05, | |
| "loss": 0.5341, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 1.3728970300551306, | |
| "grad_norm": 5.483800411224365, | |
| "learning_rate": 1.2057971014492755e-05, | |
| "loss": 0.5644, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 1.3800106704606083, | |
| "grad_norm": 4.272674083709717, | |
| "learning_rate": 1.2005270092226616e-05, | |
| "loss": 0.5044, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 1.3871243108660858, | |
| "grad_norm": 2.5622191429138184, | |
| "learning_rate": 1.1952569169960474e-05, | |
| "loss": 0.5171, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 1.3942379512715632, | |
| "grad_norm": 7.005847454071045, | |
| "learning_rate": 1.1899868247694335e-05, | |
| "loss": 0.5156, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 1.4013515916770407, | |
| "grad_norm": 4.284529685974121, | |
| "learning_rate": 1.1847167325428195e-05, | |
| "loss": 0.5833, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 1.4084652320825182, | |
| "grad_norm": 3.930690288543701, | |
| "learning_rate": 1.1794466403162057e-05, | |
| "loss": 0.5219, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 1.4155788724879956, | |
| "grad_norm": 4.899880886077881, | |
| "learning_rate": 1.1741765480895916e-05, | |
| "loss": 0.5605, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 1.4226925128934733, | |
| "grad_norm": 5.612455368041992, | |
| "learning_rate": 1.1689064558629777e-05, | |
| "loss": 0.5408, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 1.4298061532989508, | |
| "grad_norm": 2.502410411834717, | |
| "learning_rate": 1.1636363636363637e-05, | |
| "loss": 0.5052, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 1.4369197937044282, | |
| "grad_norm": 4.965051174163818, | |
| "learning_rate": 1.1583662714097496e-05, | |
| "loss": 0.5036, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 1.4440334341099057, | |
| "grad_norm": 4.5783371925354, | |
| "learning_rate": 1.1530961791831358e-05, | |
| "loss": 0.5474, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 1.4511470745153834, | |
| "grad_norm": 5.443408012390137, | |
| "learning_rate": 1.1478260869565218e-05, | |
| "loss": 0.5018, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 1.4582607149208608, | |
| "grad_norm": 5.504450798034668, | |
| "learning_rate": 1.1425559947299079e-05, | |
| "loss": 0.4877, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 1.4653743553263383, | |
| "grad_norm": 3.819101572036743, | |
| "learning_rate": 1.137285902503294e-05, | |
| "loss": 0.526, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 1.4724879957318158, | |
| "grad_norm": 5.407709121704102, | |
| "learning_rate": 1.1320158102766798e-05, | |
| "loss": 0.4949, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 1.4796016361372932, | |
| "grad_norm": 4.124641418457031, | |
| "learning_rate": 1.126745718050066e-05, | |
| "loss": 0.5923, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 1.4867152765427707, | |
| "grad_norm": 4.266039848327637, | |
| "learning_rate": 1.121475625823452e-05, | |
| "loss": 0.5545, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 1.4938289169482482, | |
| "grad_norm": 4.402031421661377, | |
| "learning_rate": 1.1162055335968381e-05, | |
| "loss": 0.505, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 1.5009425573537256, | |
| "grad_norm": 6.378383159637451, | |
| "learning_rate": 1.110935441370224e-05, | |
| "loss": 0.5015, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 1.5080561977592033, | |
| "grad_norm": 4.023849964141846, | |
| "learning_rate": 1.10566534914361e-05, | |
| "loss": 0.5371, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 1.5151698381646808, | |
| "grad_norm": 4.202026844024658, | |
| "learning_rate": 1.1003952569169962e-05, | |
| "loss": 0.5527, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 1.5222834785701584, | |
| "grad_norm": 4.9058685302734375, | |
| "learning_rate": 1.0951251646903823e-05, | |
| "loss": 0.5193, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 1.529397118975636, | |
| "grad_norm": 5.402507781982422, | |
| "learning_rate": 1.0898550724637682e-05, | |
| "loss": 0.542, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 1.5365107593811134, | |
| "grad_norm": 4.229278087615967, | |
| "learning_rate": 1.0845849802371542e-05, | |
| "loss": 0.5284, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 1.5436243997865908, | |
| "grad_norm": 3.6707725524902344, | |
| "learning_rate": 1.0793148880105403e-05, | |
| "loss": 0.561, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 1.5507380401920683, | |
| "grad_norm": 3.6205899715423584, | |
| "learning_rate": 1.0740447957839265e-05, | |
| "loss": 0.4981, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 1.5578516805975458, | |
| "grad_norm": 4.793003559112549, | |
| "learning_rate": 1.0687747035573123e-05, | |
| "loss": 0.5062, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 1.5649653210030232, | |
| "grad_norm": 3.2560999393463135, | |
| "learning_rate": 1.0635046113306984e-05, | |
| "loss": 0.5305, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 1.5720789614085007, | |
| "grad_norm": 4.039142608642578, | |
| "learning_rate": 1.0582345191040844e-05, | |
| "loss": 0.5412, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 1.5791926018139784, | |
| "grad_norm": 4.209068298339844, | |
| "learning_rate": 1.0529644268774703e-05, | |
| "loss": 0.5014, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 1.5863062422194558, | |
| "grad_norm": 3.390684127807617, | |
| "learning_rate": 1.0476943346508565e-05, | |
| "loss": 0.5605, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 1.5934198826249333, | |
| "grad_norm": 3.7350914478302, | |
| "learning_rate": 1.0424242424242426e-05, | |
| "loss": 0.5021, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 1.600533523030411, | |
| "grad_norm": 3.5504932403564453, | |
| "learning_rate": 1.0371541501976286e-05, | |
| "loss": 0.4967, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 1.6076471634358884, | |
| "grad_norm": 3.8655166625976562, | |
| "learning_rate": 1.0318840579710145e-05, | |
| "loss": 0.4857, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 1.6147608038413659, | |
| "grad_norm": 6.647157669067383, | |
| "learning_rate": 1.0266139657444005e-05, | |
| "loss": 0.5033, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 1.6218744442468433, | |
| "grad_norm": 5.384164810180664, | |
| "learning_rate": 1.0213438735177868e-05, | |
| "loss": 0.5486, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 1.6289880846523208, | |
| "grad_norm": 4.699065685272217, | |
| "learning_rate": 1.016179183135705e-05, | |
| "loss": 0.5565, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 1.6361017250577983, | |
| "grad_norm": 4.786224365234375, | |
| "learning_rate": 1.010909090909091e-05, | |
| "loss": 0.5155, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 1.6432153654632757, | |
| "grad_norm": 4.30881929397583, | |
| "learning_rate": 1.005638998682477e-05, | |
| "loss": 0.5303, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 1.6503290058687532, | |
| "grad_norm": 4.938950538635254, | |
| "learning_rate": 1.000368906455863e-05, | |
| "loss": 0.4891, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 1.6574426462742309, | |
| "grad_norm": 4.875250339508057, | |
| "learning_rate": 9.950988142292491e-06, | |
| "loss": 0.531, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 1.6645562866797083, | |
| "grad_norm": 5.96549654006958, | |
| "learning_rate": 9.898287220026352e-06, | |
| "loss": 0.5092, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 1.6716699270851858, | |
| "grad_norm": 5.401658535003662, | |
| "learning_rate": 9.845586297760212e-06, | |
| "loss": 0.5018, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 1.6787835674906635, | |
| "grad_norm": 4.49137020111084, | |
| "learning_rate": 9.792885375494072e-06, | |
| "loss": 0.4877, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 1.685897207896141, | |
| "grad_norm": 3.0070083141326904, | |
| "learning_rate": 9.740184453227933e-06, | |
| "loss": 0.4884, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 1.6930108483016184, | |
| "grad_norm": 3.979234218597412, | |
| "learning_rate": 9.687483530961793e-06, | |
| "loss": 0.5013, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 1.7001244887070959, | |
| "grad_norm": 3.876878499984741, | |
| "learning_rate": 9.634782608695654e-06, | |
| "loss": 0.5322, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 1.7072381291125733, | |
| "grad_norm": 5.199528694152832, | |
| "learning_rate": 9.582081686429513e-06, | |
| "loss": 0.5052, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 1.7143517695180508, | |
| "grad_norm": 3.97282075881958, | |
| "learning_rate": 9.529380764163375e-06, | |
| "loss": 0.4887, | |
| "step": 12050 | |
| }, | |
| { | |
| "epoch": 1.7214654099235283, | |
| "grad_norm": 4.776126384735107, | |
| "learning_rate": 9.476679841897233e-06, | |
| "loss": 0.5384, | |
| "step": 12100 | |
| }, | |
| { | |
| "epoch": 1.7285790503290057, | |
| "grad_norm": 6.539297580718994, | |
| "learning_rate": 9.423978919631094e-06, | |
| "loss": 0.5283, | |
| "step": 12150 | |
| }, | |
| { | |
| "epoch": 1.7356926907344834, | |
| "grad_norm": 5.380597114562988, | |
| "learning_rate": 9.371277997364954e-06, | |
| "loss": 0.5481, | |
| "step": 12200 | |
| }, | |
| { | |
| "epoch": 1.7428063311399609, | |
| "grad_norm": 4.773643493652344, | |
| "learning_rate": 9.318577075098815e-06, | |
| "loss": 0.4815, | |
| "step": 12250 | |
| }, | |
| { | |
| "epoch": 1.7499199715454383, | |
| "grad_norm": 1.9399245977401733, | |
| "learning_rate": 9.265876152832675e-06, | |
| "loss": 0.515, | |
| "step": 12300 | |
| }, | |
| { | |
| "epoch": 1.757033611950916, | |
| "grad_norm": 3.586733102798462, | |
| "learning_rate": 9.213175230566536e-06, | |
| "loss": 0.4856, | |
| "step": 12350 | |
| }, | |
| { | |
| "epoch": 1.7641472523563935, | |
| "grad_norm": 3.9114813804626465, | |
| "learning_rate": 9.160474308300396e-06, | |
| "loss": 0.5114, | |
| "step": 12400 | |
| }, | |
| { | |
| "epoch": 1.771260892761871, | |
| "grad_norm": 3.8798410892486572, | |
| "learning_rate": 9.107773386034257e-06, | |
| "loss": 0.4939, | |
| "step": 12450 | |
| }, | |
| { | |
| "epoch": 1.7783745331673484, | |
| "grad_norm": 2.8337159156799316, | |
| "learning_rate": 9.055072463768117e-06, | |
| "loss": 0.4807, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 1.7854881735728259, | |
| "grad_norm": 4.102101802825928, | |
| "learning_rate": 9.002371541501978e-06, | |
| "loss": 0.5632, | |
| "step": 12550 | |
| }, | |
| { | |
| "epoch": 1.7926018139783033, | |
| "grad_norm": 2.918848991394043, | |
| "learning_rate": 8.949670619235838e-06, | |
| "loss": 0.5012, | |
| "step": 12600 | |
| }, | |
| { | |
| "epoch": 1.7997154543837808, | |
| "grad_norm": 3.708796739578247, | |
| "learning_rate": 8.896969696969697e-06, | |
| "loss": 0.5159, | |
| "step": 12650 | |
| }, | |
| { | |
| "epoch": 1.8068290947892582, | |
| "grad_norm": 4.967949867248535, | |
| "learning_rate": 8.844268774703559e-06, | |
| "loss": 0.5037, | |
| "step": 12700 | |
| }, | |
| { | |
| "epoch": 1.813942735194736, | |
| "grad_norm": 2.7646732330322266, | |
| "learning_rate": 8.791567852437418e-06, | |
| "loss": 0.4842, | |
| "step": 12750 | |
| }, | |
| { | |
| "epoch": 1.8210563756002134, | |
| "grad_norm": 3.667774200439453, | |
| "learning_rate": 8.73886693017128e-06, | |
| "loss": 0.4744, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 1.828170016005691, | |
| "grad_norm": 5.304251670837402, | |
| "learning_rate": 8.686166007905139e-06, | |
| "loss": 0.521, | |
| "step": 12850 | |
| }, | |
| { | |
| "epoch": 1.8352836564111685, | |
| "grad_norm": 4.663897514343262, | |
| "learning_rate": 8.633465085638999e-06, | |
| "loss": 0.5143, | |
| "step": 12900 | |
| }, | |
| { | |
| "epoch": 1.842397296816646, | |
| "grad_norm": 6.192160129547119, | |
| "learning_rate": 8.58076416337286e-06, | |
| "loss": 0.5489, | |
| "step": 12950 | |
| }, | |
| { | |
| "epoch": 1.8495109372221235, | |
| "grad_norm": 3.5136263370513916, | |
| "learning_rate": 8.52806324110672e-06, | |
| "loss": 0.5124, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 1.856624577627601, | |
| "grad_norm": 3.0261971950531006, | |
| "learning_rate": 8.47536231884058e-06, | |
| "loss": 0.5295, | |
| "step": 13050 | |
| }, | |
| { | |
| "epoch": 1.8637382180330784, | |
| "grad_norm": 3.9308557510375977, | |
| "learning_rate": 8.42266139657444e-06, | |
| "loss": 0.5209, | |
| "step": 13100 | |
| }, | |
| { | |
| "epoch": 1.8708518584385558, | |
| "grad_norm": 4.964597225189209, | |
| "learning_rate": 8.369960474308301e-06, | |
| "loss": 0.5187, | |
| "step": 13150 | |
| }, | |
| { | |
| "epoch": 1.8779654988440333, | |
| "grad_norm": 5.161786079406738, | |
| "learning_rate": 8.317259552042162e-06, | |
| "loss": 0.5207, | |
| "step": 13200 | |
| }, | |
| { | |
| "epoch": 1.885079139249511, | |
| "grad_norm": 2.5828144550323486, | |
| "learning_rate": 8.264558629776022e-06, | |
| "loss": 0.4598, | |
| "step": 13250 | |
| }, | |
| { | |
| "epoch": 1.8921927796549884, | |
| "grad_norm": 3.169773817062378, | |
| "learning_rate": 8.211857707509883e-06, | |
| "loss": 0.531, | |
| "step": 13300 | |
| }, | |
| { | |
| "epoch": 1.899306420060466, | |
| "grad_norm": 3.6591272354125977, | |
| "learning_rate": 8.159156785243743e-06, | |
| "loss": 0.5226, | |
| "step": 13350 | |
| }, | |
| { | |
| "epoch": 1.9064200604659436, | |
| "grad_norm": 2.735844850540161, | |
| "learning_rate": 8.106455862977602e-06, | |
| "loss": 0.5808, | |
| "step": 13400 | |
| }, | |
| { | |
| "epoch": 1.913533700871421, | |
| "grad_norm": 5.649113655090332, | |
| "learning_rate": 8.053754940711464e-06, | |
| "loss": 0.5154, | |
| "step": 13450 | |
| }, | |
| { | |
| "epoch": 1.9206473412768985, | |
| "grad_norm": 2.830268621444702, | |
| "learning_rate": 8.001054018445323e-06, | |
| "loss": 0.5017, | |
| "step": 13500 | |
| }, | |
| { | |
| "epoch": 1.927760981682376, | |
| "grad_norm": 5.496303558349609, | |
| "learning_rate": 7.948353096179183e-06, | |
| "loss": 0.4824, | |
| "step": 13550 | |
| }, | |
| { | |
| "epoch": 1.9348746220878534, | |
| "grad_norm": 3.1559128761291504, | |
| "learning_rate": 7.895652173913044e-06, | |
| "loss": 0.5465, | |
| "step": 13600 | |
| }, | |
| { | |
| "epoch": 1.941988262493331, | |
| "grad_norm": 3.265587091445923, | |
| "learning_rate": 7.842951251646904e-06, | |
| "loss": 0.4673, | |
| "step": 13650 | |
| }, | |
| { | |
| "epoch": 1.9491019028988084, | |
| "grad_norm": 6.156945705413818, | |
| "learning_rate": 7.790250329380764e-06, | |
| "loss": 0.5558, | |
| "step": 13700 | |
| }, | |
| { | |
| "epoch": 1.9562155433042858, | |
| "grad_norm": 7.1588029861450195, | |
| "learning_rate": 7.737549407114625e-06, | |
| "loss": 0.5013, | |
| "step": 13750 | |
| }, | |
| { | |
| "epoch": 1.9633291837097635, | |
| "grad_norm": 3.9235057830810547, | |
| "learning_rate": 7.684848484848485e-06, | |
| "loss": 0.5214, | |
| "step": 13800 | |
| }, | |
| { | |
| "epoch": 1.970442824115241, | |
| "grad_norm": 3.9342610836029053, | |
| "learning_rate": 7.632147562582346e-06, | |
| "loss": 0.5147, | |
| "step": 13850 | |
| }, | |
| { | |
| "epoch": 1.9775564645207184, | |
| "grad_norm": 2.9213314056396484, | |
| "learning_rate": 7.579446640316206e-06, | |
| "loss": 0.4756, | |
| "step": 13900 | |
| }, | |
| { | |
| "epoch": 1.984670104926196, | |
| "grad_norm": 2.211418390274048, | |
| "learning_rate": 7.526745718050067e-06, | |
| "loss": 0.4862, | |
| "step": 13950 | |
| }, | |
| { | |
| "epoch": 1.9917837453316736, | |
| "grad_norm": 3.4956717491149902, | |
| "learning_rate": 7.474044795783927e-06, | |
| "loss": 0.5235, | |
| "step": 14000 | |
| }, | |
| { | |
| "epoch": 1.998897385737151, | |
| "grad_norm": 4.755037784576416, | |
| "learning_rate": 7.421343873517787e-06, | |
| "loss": 0.5176, | |
| "step": 14050 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_loss": 0.6273267865180969, | |
| "eval_runtime": 321.2086, | |
| "eval_samples_per_second": 16.41, | |
| "eval_steps_per_second": 2.052, | |
| "step": 14058 | |
| } | |
| ], | |
| "logging_steps": 50, | |
| "max_steps": 21084, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 7.608571407630336e+16, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |