{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.670148318362231,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.016711928138709004,
      "grad_norm": 1.12109375,
      "learning_rate": 5e-06,
      "loss": 1.1894,
      "step": 10
    },
    {
      "epoch": 0.03342385627741801,
      "grad_norm": 0.7248284816741943,
      "learning_rate": 1.0555555555555555e-05,
      "loss": 1.1506,
      "step": 20
    },
    {
      "epoch": 0.05013578441612701,
      "grad_norm": 1.1556353569030762,
      "learning_rate": 1.6111111111111115e-05,
      "loss": 1.1033,
      "step": 30
    },
    {
      "epoch": 0.06684771255483601,
      "grad_norm": 0.4772647023200989,
      "learning_rate": 2.1666666666666667e-05,
      "loss": 0.9371,
      "step": 40
    },
    {
      "epoch": 0.08355964069354502,
      "grad_norm": 0.27529847621917725,
      "learning_rate": 2.7222222222222223e-05,
      "loss": 0.8313,
      "step": 50
    },
    {
      "epoch": 0.10027156883225402,
      "grad_norm": 0.22090303897857666,
      "learning_rate": 3.277777777777778e-05,
      "loss": 0.7724,
      "step": 60
    },
    {
      "epoch": 0.11698349697096302,
      "grad_norm": 0.21275630593299866,
      "learning_rate": 3.8333333333333334e-05,
      "loss": 0.733,
      "step": 70
    },
    {
      "epoch": 0.13369542510967203,
      "grad_norm": 0.2134302407503128,
      "learning_rate": 4.388888888888889e-05,
      "loss": 0.7134,
      "step": 80
    },
    {
      "epoch": 0.15040735324838103,
      "grad_norm": 0.18984167277812958,
      "learning_rate": 4.9444444444444446e-05,
      "loss": 0.6959,
      "step": 90
    },
    {
      "epoch": 0.16711928138709004,
      "grad_norm": 0.2290167659521103,
      "learning_rate": 5.500000000000001e-05,
      "loss": 0.6518,
      "step": 100
    },
    {
      "epoch": 0.18383120952579904,
      "grad_norm": 0.2181967943906784,
      "learning_rate": 6.055555555555555e-05,
      "loss": 0.6313,
      "step": 110
    },
    {
      "epoch": 0.20054313766450804,
      "grad_norm": 0.26380836963653564,
      "learning_rate": 6.611111111111111e-05,
      "loss": 0.6059,
      "step": 120
    },
    {
      "epoch": 0.21725506580321705,
      "grad_norm": 0.25219714641571045,
      "learning_rate": 7.166666666666667e-05,
      "loss": 0.6613,
      "step": 130
    },
    {
      "epoch": 0.23396699394192605,
      "grad_norm": 0.24282532930374146,
      "learning_rate": 7.722222222222223e-05,
      "loss": 0.6676,
      "step": 140
    },
    {
      "epoch": 0.2506789220806351,
      "grad_norm": 0.28735119104385376,
      "learning_rate": 8.277777777777778e-05,
      "loss": 0.647,
      "step": 150
    },
    {
      "epoch": 0.26739085021934406,
      "grad_norm": 0.3064824938774109,
      "learning_rate": 8.833333333333333e-05,
      "loss": 0.6222,
      "step": 160
    },
    {
      "epoch": 0.2841027783580531,
      "grad_norm": 0.33395013213157654,
      "learning_rate": 9.388888888888889e-05,
      "loss": 0.6109,
      "step": 170
    },
    {
      "epoch": 0.30081470649676206,
      "grad_norm": 0.32029104232788086,
      "learning_rate": 9.944444444444446e-05,
      "loss": 0.6942,
      "step": 180
    },
    {
      "epoch": 0.3175266346354711,
      "grad_norm": 0.29911231994628906,
      "learning_rate": 9.999235647539953e-05,
      "loss": 0.6312,
      "step": 190
    },
    {
      "epoch": 0.33423856277418007,
      "grad_norm": 0.3333311676979065,
      "learning_rate": 9.996593741531468e-05,
      "loss": 0.6118,
      "step": 200
    },
    {
      "epoch": 0.3509504909128891,
      "grad_norm": 0.31720009446144104,
      "learning_rate": 9.992065842489567e-05,
      "loss": 0.5505,
      "step": 210
    },
    {
      "epoch": 0.3676624190515981,
      "grad_norm": 0.30165988206863403,
      "learning_rate": 9.985653659495773e-05,
      "loss": 0.6274,
      "step": 220
    },
    {
      "epoch": 0.3843743471903071,
      "grad_norm": 0.31748878955841064,
      "learning_rate": 9.977359612865423e-05,
      "loss": 0.5831,
      "step": 230
    },
    {
      "epoch": 0.4010862753290161,
      "grad_norm": 0.5018420815467834,
      "learning_rate": 9.967186833234101e-05,
      "loss": 0.6331,
      "step": 240
    },
    {
      "epoch": 0.4177982034677251,
      "grad_norm": 0.3082675635814667,
      "learning_rate": 9.955139160375959e-05,
      "loss": 0.5884,
      "step": 250
    },
    {
      "epoch": 0.4345101316064341,
      "grad_norm": 0.36464694142341614,
      "learning_rate": 9.941221141754385e-05,
      "loss": 0.5993,
      "step": 260
    },
    {
      "epoch": 0.4512220597451431,
      "grad_norm": 0.36485177278518677,
      "learning_rate": 9.925438030805518e-05,
      "loss": 0.5846,
      "step": 270
    },
    {
      "epoch": 0.4679339878838521,
      "grad_norm": 0.3401179015636444,
      "learning_rate": 9.907795784955327e-05,
      "loss": 0.61,
      "step": 280
    },
    {
      "epoch": 0.48464591602256113,
      "grad_norm": 0.31191331148147583,
      "learning_rate": 9.888301063370934e-05,
      "loss": 0.5872,
      "step": 290
    },
    {
      "epoch": 0.5013578441612702,
      "grad_norm": 0.2694801688194275,
      "learning_rate": 9.866961224447075e-05,
      "loss": 0.5905,
      "step": 300
    },
    {
      "epoch": 0.5180697722999791,
      "grad_norm": 0.323143869638443,
      "learning_rate": 9.843784323028638e-05,
      "loss": 0.6213,
      "step": 310
    },
    {
      "epoch": 0.5347817004386881,
      "grad_norm": 0.30978456139564514,
      "learning_rate": 9.818779107370309e-05,
      "loss": 0.5602,
      "step": 320
    },
    {
      "epoch": 0.5514936285773971,
      "grad_norm": 0.36156347393989563,
      "learning_rate": 9.791955015834492e-05,
      "loss": 0.617,
      "step": 330
    },
    {
      "epoch": 0.5682055567161062,
      "grad_norm": 0.3188885450363159,
      "learning_rate": 9.763322173328753e-05,
      "loss": 0.6133,
      "step": 340
    },
    {
      "epoch": 0.5849174848548151,
      "grad_norm": 3.012507915496826,
      "learning_rate": 9.732891387484104e-05,
      "loss": 0.5401,
      "step": 350
    },
    {
      "epoch": 0.6016294129935241,
      "grad_norm": 0.3500335216522217,
      "learning_rate": 9.700674144575614e-05,
      "loss": 0.5994,
      "step": 360
    },
    {
      "epoch": 0.6183413411322332,
      "grad_norm": 0.38718223571777344,
      "learning_rate": 9.666682605186835e-05,
      "loss": 0.5362,
      "step": 370
    },
    {
      "epoch": 0.6350532692709422,
      "grad_norm": 0.32986128330230713,
      "learning_rate": 9.63092959961973e-05,
      "loss": 0.596,
      "step": 380
    },
    {
      "epoch": 0.6517651974096511,
      "grad_norm": 0.29063621163368225,
      "learning_rate": 9.593428623051792e-05,
      "loss": 0.5578,
      "step": 390
    },
    {
      "epoch": 0.6684771255483601,
      "grad_norm": 0.3483268916606903,
      "learning_rate": 9.554193830442229e-05,
      "loss": 0.6073,
      "step": 400
    },
    {
      "epoch": 0.6851890536870692,
      "grad_norm": 0.33267414569854736,
      "learning_rate": 9.513240031189067e-05,
      "loss": 0.5327,
      "step": 410
    },
    {
      "epoch": 0.7019009818257782,
      "grad_norm": 0.3286801874637604,
      "learning_rate": 9.470582683539285e-05,
      "loss": 0.5884,
      "step": 420
    },
    {
      "epoch": 0.7186129099644871,
      "grad_norm": 0.359754353761673,
      "learning_rate": 9.42623788875399e-05,
      "loss": 0.6042,
      "step": 430
    },
    {
      "epoch": 0.7353248381031962,
      "grad_norm": 0.36169129610061646,
      "learning_rate": 9.380222385030915e-05,
      "loss": 0.4902,
      "step": 440
    },
    {
      "epoch": 0.7520367662419052,
      "grad_norm": 0.31606003642082214,
      "learning_rate": 9.332553541186485e-05,
      "loss": 0.5816,
      "step": 450
    },
    {
      "epoch": 0.7687486943806142,
      "grad_norm": 0.3151916265487671,
      "learning_rate": 9.283249350099859e-05,
      "loss": 0.6368,
      "step": 460
    },
    {
      "epoch": 0.7854606225193231,
      "grad_norm": 0.2840172350406647,
      "learning_rate": 9.23232842192142e-05,
      "loss": 0.5906,
      "step": 470
    },
    {
      "epoch": 0.8021725506580322,
      "grad_norm": 0.30779144167900085,
      "learning_rate": 9.179809977048248e-05,
      "loss": 0.5955,
      "step": 480
    },
    {
      "epoch": 0.8188844787967412,
      "grad_norm": 0.31077197194099426,
      "learning_rate": 9.125713838869299e-05,
      "loss": 0.5831,
      "step": 490
    },
    {
      "epoch": 0.8355964069354502,
      "grad_norm": 0.33284345269203186,
      "learning_rate": 9.070060426282925e-05,
      "loss": 0.6071,
      "step": 500
    },
    {
      "epoch": 0.8523083350741592,
      "grad_norm": 0.33886072039604187,
      "learning_rate": 9.012870745989663e-05,
      "loss": 0.5619,
      "step": 510
    },
    {
      "epoch": 0.8690202632128682,
      "grad_norm": 0.3419983386993408,
      "learning_rate": 8.954166384563127e-05,
      "loss": 0.5264,
      "step": 520
    },
    {
      "epoch": 0.8857321913515772,
      "grad_norm": 0.34447890520095825,
      "learning_rate": 8.893969500302031e-05,
      "loss": 0.6026,
      "step": 530
    },
    {
      "epoch": 0.9024441194902862,
      "grad_norm": 0.28169864416122437,
      "learning_rate": 8.832302814866416e-05,
      "loss": 0.5829,
      "step": 540
    },
    {
      "epoch": 0.9191560476289952,
      "grad_norm": 0.36382725834846497,
      "learning_rate": 8.76918960470122e-05,
      "loss": 0.5803,
      "step": 550
    },
    {
      "epoch": 0.9358679757677042,
      "grad_norm": 0.3617050349712372,
      "learning_rate": 8.704653692250466e-05,
      "loss": 0.5373,
      "step": 560
    },
    {
      "epoch": 0.9525799039064132,
      "grad_norm": 0.35659492015838623,
      "learning_rate": 8.638719436965325e-05,
      "loss": 0.5686,
      "step": 570
    },
    {
      "epoch": 0.9692918320451223,
      "grad_norm": 0.35579752922058105,
      "learning_rate": 8.571411726109519e-05,
      "loss": 0.5691,
      "step": 580
    },
    {
      "epoch": 0.9860037601838312,
      "grad_norm": 0.31393831968307495,
      "learning_rate": 8.50275596536546e-05,
      "loss": 0.543,
      "step": 590
    },
    {
      "epoch": 1.0016711928138708,
      "grad_norm": 0.3395228683948517,
      "learning_rate": 8.432778069244749e-05,
      "loss": 0.5515,
      "step": 600
    },
    {
      "epoch": 1.01838312095258,
      "grad_norm": 0.32482120394706726,
      "learning_rate": 8.361504451306585e-05,
      "loss": 0.5337,
      "step": 610
    },
    {
      "epoch": 1.035095049091289,
      "grad_norm": 0.34779229760169983,
      "learning_rate": 8.288962014187811e-05,
      "loss": 0.5536,
      "step": 620
    },
    {
      "epoch": 1.0518069772299978,
      "grad_norm": 0.28236305713653564,
      "learning_rate": 8.21517813944837e-05,
      "loss": 0.4698,
      "step": 630
    },
    {
      "epoch": 1.068518905368707,
      "grad_norm": 0.3089806139469147,
      "learning_rate": 8.14018067723597e-05,
      "loss": 0.5274,
      "step": 640
    },
    {
      "epoch": 1.085230833507416,
      "grad_norm": 0.38024547696113586,
      "learning_rate": 8.063997935773885e-05,
      "loss": 0.5214,
      "step": 650
    },
    {
      "epoch": 1.101942761646125,
      "grad_norm": 0.38784193992614746,
      "learning_rate": 7.986658670675861e-05,
      "loss": 0.5455,
      "step": 660
    },
    {
      "epoch": 1.118654689784834,
      "grad_norm": 0.35978567600250244,
      "learning_rate": 7.908192074092136e-05,
      "loss": 0.5489,
      "step": 670
    },
    {
      "epoch": 1.1353666179235429,
      "grad_norm": 0.38467374444007874,
      "learning_rate": 7.828627763690697e-05,
      "loss": 0.5604,
      "step": 680
    },
    {
      "epoch": 1.152078546062252,
      "grad_norm": 0.42285487055778503,
      "learning_rate": 7.747995771477928e-05,
      "loss": 0.5591,
      "step": 690
    },
    {
      "epoch": 1.168790474200961,
      "grad_norm": 0.46918097138404846,
      "learning_rate": 7.666326532462842e-05,
      "loss": 0.5201,
      "step": 700
    },
    {
      "epoch": 1.1855024023396699,
      "grad_norm": 0.4248163104057312,
      "learning_rate": 7.583650873169232e-05,
      "loss": 0.5233,
      "step": 710
    },
    {
      "epoch": 1.202214330478379,
      "grad_norm": 0.3677292764186859,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.5565,
      "step": 720
    },
    {
      "epoch": 1.218926258617088,
      "grad_norm": 0.4452461302280426,
      "learning_rate": 7.41540548745814e-05,
      "loss": 0.5223,
      "step": 730
    },
    {
      "epoch": 1.2356381867557968,
      "grad_norm": 0.46008288860321045,
      "learning_rate": 7.329899266228748e-05,
      "loss": 0.5347,
      "step": 740
    },
    {
      "epoch": 1.252350114894506,
      "grad_norm": 0.32713180780410767,
      "learning_rate": 7.243513611126608e-05,
      "loss": 0.514,
      "step": 750
    },
    {
      "epoch": 1.269062043033215,
      "grad_norm": 0.39738771319389343,
      "learning_rate": 7.156281128913871e-05,
      "loss": 0.5196,
      "step": 760
    },
    {
      "epoch": 1.285773971171924,
      "grad_norm": 0.48977628350257874,
      "learning_rate": 7.068234745992456e-05,
      "loss": 0.5087,
      "step": 770
    },
    {
      "epoch": 1.302485899310633,
      "grad_norm": 0.3981214165687561,
      "learning_rate": 6.979407695975776e-05,
      "loss": 0.4929,
      "step": 780
    },
    {
      "epoch": 1.3191978274493419,
      "grad_norm": 0.4314100444316864,
      "learning_rate": 6.889833507144532e-05,
      "loss": 0.5511,
      "step": 790
    },
    {
      "epoch": 1.335909755588051,
      "grad_norm": 0.42734798789024353,
      "learning_rate": 6.799545989791268e-05,
      "loss": 0.4975,
      "step": 800
    },
    {
      "epoch": 1.35262168372676,
      "grad_norm": 0.44379082322120667,
      "learning_rate": 6.708579223458475e-05,
      "loss": 0.5344,
      "step": 810
    },
    {
      "epoch": 1.369333611865469,
      "grad_norm": 0.41279590129852295,
      "learning_rate": 6.616967544075077e-05,
      "loss": 0.5416,
      "step": 820
    },
    {
      "epoch": 1.386045540004178,
      "grad_norm": 0.4385620057582855,
      "learning_rate": 6.524745530996137e-05,
      "loss": 0.548,
      "step": 830
    },
    {
      "epoch": 1.402757468142887,
      "grad_norm": 0.40365514159202576,
      "learning_rate": 6.431947993950682e-05,
      "loss": 0.5893,
      "step": 840
    },
    {
      "epoch": 1.4194693962815959,
      "grad_norm": 0.4191945791244507,
      "learning_rate": 6.338609959902569e-05,
      "loss": 0.551,
      "step": 850
    },
    {
      "epoch": 1.436181324420305,
      "grad_norm": 0.43660128116607666,
      "learning_rate": 6.244766659829351e-05,
      "loss": 0.5275,
      "step": 860
    },
    {
      "epoch": 1.452893252559014,
      "grad_norm": 0.4230160713195801,
      "learning_rate": 6.150453515424153e-05,
      "loss": 0.5485,
      "step": 870
    },
    {
      "epoch": 1.469605180697723,
      "grad_norm": 0.4209563434123993,
      "learning_rate": 6.055706125725542e-05,
      "loss": 0.5864,
      "step": 880
    },
    {
      "epoch": 1.486317108836432,
      "grad_norm": 0.4084739089012146,
      "learning_rate": 5.9605602536804673e-05,
      "loss": 0.5408,
      "step": 890
    },
    {
      "epoch": 1.503029036975141,
      "grad_norm": 0.3635924756526947,
      "learning_rate": 5.865051812645329e-05,
      "loss": 0.5245,
      "step": 900
    },
    {
      "epoch": 1.51974096511385,
      "grad_norm": 0.4919414222240448,
      "learning_rate": 5.7692168528302807e-05,
      "loss": 0.5483,
      "step": 910
    },
    {
      "epoch": 1.536452893252559,
      "grad_norm": 0.42606762051582336,
      "learning_rate": 5.673091547691866e-05,
      "loss": 0.4793,
      "step": 920
    },
    {
      "epoch": 1.553164821391268,
      "grad_norm": 0.3808182179927826,
      "learning_rate": 5.576712180279133e-05,
      "loss": 0.5265,
      "step": 930
    },
    {
      "epoch": 1.569876749529977,
      "grad_norm": 0.4470883011817932,
      "learning_rate": 5.480115129538409e-05,
      "loss": 0.5438,
      "step": 940
    },
    {
      "epoch": 1.586588677668686,
      "grad_norm": 0.39399048686027527,
      "learning_rate": 5.383336856581833e-05,
      "loss": 0.5604,
      "step": 950
    },
    {
      "epoch": 1.6033006058073949,
      "grad_norm": 0.4587889015674591,
      "learning_rate": 5.2864138909249176e-05,
      "loss": 0.5358,
      "step": 960
    },
    {
      "epoch": 1.620012533946104,
      "grad_norm": 0.4291887879371643,
      "learning_rate": 5.189382816698263e-05,
      "loss": 0.5607,
      "step": 970
    },
    {
      "epoch": 1.6367244620848131,
      "grad_norm": 0.4326860308647156,
      "learning_rate": 5.0922802588386766e-05,
      "loss": 0.548,
      "step": 980
    },
    {
      "epoch": 1.653436390223522,
      "grad_norm": 0.4679637849330902,
      "learning_rate": 4.9951428692648664e-05,
      "loss": 0.5565,
      "step": 990
    },
    {
      "epoch": 1.670148318362231,
      "grad_norm": 0.4345496594905853,
      "learning_rate": 4.898007313042975e-05,
      "loss": 0.5003,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 1797,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.777769911925801e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}