{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9987827145465613,
  "eval_steps": 500,
  "global_step": 1642,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012172854534388313,
      "grad_norm": 1.0562976598739624,
      "learning_rate": 2e-05,
      "loss": 2.4546,
      "step": 10
    },
    {
      "epoch": 0.024345709068776627,
      "grad_norm": 0.8776949048042297,
      "learning_rate": 4e-05,
      "loss": 2.3127,
      "step": 20
    },
    {
      "epoch": 0.036518563603164945,
      "grad_norm": 0.5755876302719116,
      "learning_rate": 6e-05,
      "loss": 1.9313,
      "step": 30
    },
    {
      "epoch": 0.048691418137553254,
      "grad_norm": 0.5708982944488525,
      "learning_rate": 8e-05,
      "loss": 1.7055,
      "step": 40
    },
    {
      "epoch": 0.06086427267194157,
      "grad_norm": 0.5251428484916687,
      "learning_rate": 0.0001,
      "loss": 1.4222,
      "step": 50
    },
    {
      "epoch": 0.07303712720632989,
      "grad_norm": 0.3596666157245636,
      "learning_rate": 0.00012,
      "loss": 1.1843,
      "step": 60
    },
    {
      "epoch": 0.0852099817407182,
      "grad_norm": 0.3925166130065918,
      "learning_rate": 0.00014,
      "loss": 1.0588,
      "step": 70
    },
    {
      "epoch": 0.09738283627510651,
      "grad_norm": 0.326300710439682,
      "learning_rate": 0.00016,
      "loss": 1.0132,
      "step": 80
    },
    {
      "epoch": 0.10955569080949483,
      "grad_norm": 0.4090663492679596,
      "learning_rate": 0.00018,
      "loss": 1.0263,
      "step": 90
    },
    {
      "epoch": 0.12172854534388314,
      "grad_norm": 0.4432264566421509,
      "learning_rate": 0.0002,
      "loss": 0.9522,
      "step": 100
    },
    {
      "epoch": 0.13390139987827146,
      "grad_norm": 0.3732856810092926,
      "learning_rate": 0.00019997924675658132,
      "loss": 0.9245,
      "step": 110
    },
    {
      "epoch": 0.14607425441265978,
      "grad_norm": 0.4144698679447174,
      "learning_rate": 0.00019991699564026757,
      "loss": 0.9304,
      "step": 120
    },
    {
      "epoch": 0.15824710894704808,
      "grad_norm": 0.4099937379360199,
      "learning_rate": 0.00019981327248931008,
      "loss": 0.9071,
      "step": 130
    },
    {
      "epoch": 0.1704199634814364,
      "grad_norm": 0.3997822403907776,
      "learning_rate": 0.00019966812035554488,
      "loss": 0.8639,
      "step": 140
    },
    {
      "epoch": 0.18259281801582472,
      "grad_norm": 0.34451448917388916,
      "learning_rate": 0.00019948159948652324,
      "loss": 0.8899,
      "step": 150
    },
    {
      "epoch": 0.19476567255021301,
      "grad_norm": 0.35060638189315796,
      "learning_rate": 0.00019925378730050518,
      "loss": 0.8583,
      "step": 160
    },
    {
      "epoch": 0.20693852708460134,
      "grad_norm": 0.34680619835853577,
      "learning_rate": 0.00019898477835432566,
      "loss": 0.9045,
      "step": 170
    },
    {
      "epoch": 0.21911138161898966,
      "grad_norm": 0.33215051889419556,
      "learning_rate": 0.0001986746843041475,
      "loss": 0.8594,
      "step": 180
    },
    {
      "epoch": 0.23128423615337795,
      "grad_norm": 0.31248751282691956,
      "learning_rate": 0.00019832363385911684,
      "loss": 0.8649,
      "step": 190
    },
    {
      "epoch": 0.24345709068776628,
      "grad_norm": 0.3884786367416382,
      "learning_rate": 0.00019793177272794043,
      "loss": 0.8331,
      "step": 200
    },
    {
      "epoch": 0.2556299452221546,
      "grad_norm": 0.36668214201927185,
      "learning_rate": 0.00019749926355840716,
      "loss": 0.8754,
      "step": 210
    },
    {
      "epoch": 0.2678027997565429,
      "grad_norm": 0.38938990235328674,
      "learning_rate": 0.00019702628586987846,
      "loss": 0.8369,
      "step": 220
    },
    {
      "epoch": 0.27997565429093124,
      "grad_norm": 0.3469957411289215,
      "learning_rate": 0.0001965130359787764,
      "loss": 0.8395,
      "step": 230
    },
    {
      "epoch": 0.29214850882531956,
      "grad_norm": 0.3345205783843994,
      "learning_rate": 0.00019595972691709948,
      "loss": 0.8863,
      "step": 240
    },
    {
      "epoch": 0.30432136335970783,
      "grad_norm": 0.4035792350769043,
      "learning_rate": 0.00019536658834400057,
      "loss": 0.7724,
      "step": 250
    },
    {
      "epoch": 0.31649421789409615,
      "grad_norm": 0.37310659885406494,
      "learning_rate": 0.00019473386645046343,
      "loss": 0.8441,
      "step": 260
    },
    {
      "epoch": 0.3286670724284845,
      "grad_norm": 0.3561219573020935,
      "learning_rate": 0.00019406182385711752,
      "loss": 0.8246,
      "step": 270
    },
    {
      "epoch": 0.3408399269628728,
      "grad_norm": 0.3558606207370758,
      "learning_rate": 0.00019335073950523335,
      "loss": 0.7924,
      "step": 280
    },
    {
      "epoch": 0.3530127814972611,
      "grad_norm": 0.3646792471408844,
      "learning_rate": 0.00019260090854094389,
      "loss": 0.8221,
      "step": 290
    },
    {
      "epoch": 0.36518563603164944,
      "grad_norm": 0.39410504698753357,
      "learning_rate": 0.00019181264219273958,
      "loss": 0.8122,
      "step": 300
    },
    {
      "epoch": 0.37735849056603776,
      "grad_norm": 0.4360694885253906,
      "learning_rate": 0.00019098626764228852,
      "loss": 0.7864,
      "step": 310
    },
    {
      "epoch": 0.38953134510042603,
      "grad_norm": 0.411204993724823,
      "learning_rate": 0.00019012212788863475,
      "loss": 0.8117,
      "step": 320
    },
    {
      "epoch": 0.40170419963481435,
      "grad_norm": 0.37499114871025085,
      "learning_rate": 0.00018922058160583132,
      "loss": 0.8203,
      "step": 330
    },
    {
      "epoch": 0.4138770541692027,
      "grad_norm": 0.3543803095817566,
      "learning_rate": 0.00018828200299406746,
      "loss": 0.804,
      "step": 340
    },
    {
      "epoch": 0.426049908703591,
      "grad_norm": 0.4047829210758209,
      "learning_rate": 0.0001873067816243511,
      "loss": 0.8417,
      "step": 350
    },
    {
      "epoch": 0.4382227632379793,
      "grad_norm": 0.3936086893081665,
      "learning_rate": 0.00018629532227681174,
      "loss": 0.7673,
      "step": 360
    },
    {
      "epoch": 0.45039561777236764,
      "grad_norm": 0.40592247247695923,
      "learning_rate": 0.0001852480447726903,
      "loss": 0.7866,
      "step": 370
    },
    {
      "epoch": 0.4625684723067559,
      "grad_norm": 0.35532569885253906,
      "learning_rate": 0.00018416538380008616,
      "loss": 0.7698,
      "step": 380
    },
    {
      "epoch": 0.47474132684114423,
      "grad_norm": 0.38175126910209656,
      "learning_rate": 0.00018304778873353344,
      "loss": 0.7507,
      "step": 390
    },
    {
      "epoch": 0.48691418137553255,
      "grad_norm": 0.35053545236587524,
      "learning_rate": 0.00018189572344748132,
      "loss": 0.7901,
      "step": 400
    },
    {
      "epoch": 0.4990870359099209,
      "grad_norm": 0.35815027356147766,
      "learning_rate": 0.00018070966612375612,
      "loss": 0.7696,
      "step": 410
    },
    {
      "epoch": 0.5112598904443092,
      "grad_norm": 0.3681068420410156,
      "learning_rate": 0.0001794901090530848,
      "loss": 0.7892,
      "step": 420
    },
    {
      "epoch": 0.5234327449786975,
      "grad_norm": 0.4286554157733917,
      "learning_rate": 0.00017823755843076233,
      "loss": 0.7534,
      "step": 430
    },
    {
      "epoch": 0.5356055995130858,
      "grad_norm": 0.4017205536365509,
      "learning_rate": 0.00017695253414654795,
      "loss": 0.7361,
      "step": 440
    },
    {
      "epoch": 0.5477784540474742,
      "grad_norm": 0.40098506212234497,
      "learning_rate": 0.00017563556956887703,
      "loss": 0.7421,
      "step": 450
    },
    {
      "epoch": 0.5599513085818625,
      "grad_norm": 0.37902459502220154,
      "learning_rate": 0.00017428721132347862,
      "loss": 0.7823,
      "step": 460
    },
    {
      "epoch": 0.5721241631162508,
      "grad_norm": 0.3943566083908081,
      "learning_rate": 0.0001729080190664904,
      "loss": 0.7696,
      "step": 470
    },
    {
      "epoch": 0.5842970176506391,
      "grad_norm": 0.37313807010650635,
      "learning_rate": 0.00017149856525216502,
      "loss": 0.7464,
      "step": 480
    },
    {
      "epoch": 0.5964698721850273,
      "grad_norm": 0.40370795130729675,
      "learning_rate": 0.0001700594348952643,
      "loss": 0.7785,
      "step": 490
    },
    {
      "epoch": 0.6086427267194157,
      "grad_norm": 0.3891587257385254,
      "learning_rate": 0.00016859122532824047,
      "loss": 0.7407,
      "step": 500
    },
    {
      "epoch": 0.620815581253804,
      "grad_norm": 0.37328803539276123,
      "learning_rate": 0.0001670945459533042,
      "loss": 0.7819,
      "step": 510
    },
    {
      "epoch": 0.6329884357881923,
      "grad_norm": 0.40735870599746704,
      "learning_rate": 0.00016557001798948325,
      "loss": 0.7937,
      "step": 520
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 0.3800922930240631,
      "learning_rate": 0.00016401827421477623,
      "loss": 0.7491,
      "step": 530
    },
    {
      "epoch": 0.657334144856969,
      "grad_norm": 0.3687744736671448,
      "learning_rate": 0.00016243995870350873,
      "loss": 0.7917,
      "step": 540
    },
    {
      "epoch": 0.6695069993913573,
      "grad_norm": 0.4083586633205414,
      "learning_rate": 0.00016083572655900072,
      "loss": 0.7581,
      "step": 550
    },
    {
      "epoch": 0.6816798539257456,
      "grad_norm": 0.33832359313964844,
      "learning_rate": 0.0001592062436416561,
      "loss": 0.7775,
      "step": 560
    },
    {
      "epoch": 0.6938527084601339,
      "grad_norm": 0.3860386610031128,
      "learning_rate": 0.0001575521862925875,
      "loss": 0.7528,
      "step": 570
    },
    {
      "epoch": 0.7060255629945222,
      "grad_norm": 0.38392528891563416,
      "learning_rate": 0.0001558742410528907,
      "loss": 0.7426,
      "step": 580
    },
    {
      "epoch": 0.7181984175289106,
      "grad_norm": 0.36063849925994873,
      "learning_rate": 0.00015417310437868586,
      "loss": 0.7148,
      "step": 590
    },
    {
      "epoch": 0.7303712720632989,
      "grad_norm": 0.41407009959220886,
      "learning_rate": 0.00015244948235204264,
      "loss": 0.7526,
      "step": 600
    },
    {
      "epoch": 0.7425441265976872,
      "grad_norm": 0.3566502332687378,
      "learning_rate": 0.00015070409038791076,
      "loss": 0.7052,
      "step": 610
    },
    {
      "epoch": 0.7547169811320755,
      "grad_norm": 0.40627434849739075,
      "learning_rate": 0.00014893765293717598,
      "loss": 0.6922,
      "step": 620
    },
    {
      "epoch": 0.7668898356664637,
      "grad_norm": 0.43230217695236206,
      "learning_rate": 0.00014715090318596636,
      "loss": 0.7096,
      "step": 630
    },
    {
      "epoch": 0.7790626902008521,
      "grad_norm": 0.4245879352092743,
      "learning_rate": 0.00014534458275133213,
      "loss": 0.7204,
      "step": 640
    },
    {
      "epoch": 0.7912355447352404,
      "grad_norm": 0.4921364486217499,
      "learning_rate": 0.00014351944137342679,
      "loss": 0.745,
      "step": 650
    },
    {
      "epoch": 0.8034083992696287,
      "grad_norm": 0.37731364369392395,
      "learning_rate": 0.0001416762366043161,
      "loss": 0.7759,
      "step": 660
    },
    {
      "epoch": 0.815581253804017,
      "grad_norm": 0.3670937716960907,
      "learning_rate": 0.0001398157334935449,
      "loss": 0.6828,
      "step": 670
    },
    {
      "epoch": 0.8277541083384053,
      "grad_norm": 0.3592926561832428,
      "learning_rate": 0.00013793870427059203,
      "loss": 0.7364,
      "step": 680
    },
    {
      "epoch": 0.8399269628727937,
      "grad_norm": 0.37360280752182007,
      "learning_rate": 0.00013604592802434485,
      "loss": 0.6936,
      "step": 690
    },
    {
      "epoch": 0.852099817407182,
      "grad_norm": 0.3820659816265106,
      "learning_rate": 0.0001341381903797268,
      "loss": 0.702,
      "step": 700
    },
    {
      "epoch": 0.8642726719415703,
      "grad_norm": 0.408120721578598,
      "learning_rate": 0.0001322162831716123,
      "loss": 0.7196,
      "step": 710
    },
    {
      "epoch": 0.8764455264759586,
      "grad_norm": 0.40797159075737,
      "learning_rate": 0.00013028100411616368,
      "loss": 0.7137,
      "step": 720
    },
    {
      "epoch": 0.888618381010347,
      "grad_norm": 0.3850896656513214,
      "learning_rate": 0.0001283331564797274,
      "loss": 0.7252,
      "step": 730
    },
    {
      "epoch": 0.9007912355447353,
      "grad_norm": 0.3789777457714081,
      "learning_rate": 0.0001263735487454262,
      "loss": 0.7393,
      "step": 740
    },
    {
      "epoch": 0.9129640900791236,
      "grad_norm": 0.39587724208831787,
      "learning_rate": 0.00012440299427758647,
      "loss": 0.7277,
      "step": 750
    },
    {
      "epoch": 0.9251369446135118,
      "grad_norm": 0.4247297942638397,
      "learning_rate": 0.00012242231098413898,
      "loss": 0.6669,
      "step": 760
    },
    {
      "epoch": 0.9373097991479001,
      "grad_norm": 0.43642064929008484,
      "learning_rate": 0.00012043232097713424,
      "loss": 0.7166,
      "step": 770
    },
    {
      "epoch": 0.9494826536822885,
      "grad_norm": 0.4219953119754791,
      "learning_rate": 0.00011843385023151255,
      "loss": 0.7014,
      "step": 780
    },
    {
      "epoch": 0.9616555082166768,
      "grad_norm": 0.39331114292144775,
      "learning_rate": 0.0001164277282422709,
      "loss": 0.6979,
      "step": 790
    },
    {
      "epoch": 0.9738283627510651,
      "grad_norm": 0.4079013466835022,
      "learning_rate": 0.00011441478768016871,
      "loss": 0.646,
      "step": 800
    },
    {
      "epoch": 0.9860012172854534,
      "grad_norm": 0.4337855577468872,
      "learning_rate": 0.00011239586404611542,
      "loss": 0.7214,
      "step": 810
    },
    {
      "epoch": 0.9981740718198417,
      "grad_norm": 0.36248165369033813,
      "learning_rate": 0.00011037179532438344,
      "loss": 0.7285,
      "step": 820
    },
    {
      "epoch": 1.01034692635423,
      "grad_norm": 0.4411465525627136,
      "learning_rate": 0.00010834342163479034,
      "loss": 0.7249,
      "step": 830
    },
    {
      "epoch": 1.0225197808886184,
      "grad_norm": 0.4417564272880554,
      "learning_rate": 0.00010631158488399462,
      "loss": 0.678,
      "step": 840
    },
    {
      "epoch": 1.0346926354230066,
      "grad_norm": 0.44863495230674744,
      "learning_rate": 0.0001042771284160498,
      "loss": 0.6996,
      "step": 850
    },
    {
      "epoch": 1.046865489957395,
      "grad_norm": 0.46448981761932373,
      "learning_rate": 0.00010224089666236194,
      "loss": 0.6493,
      "step": 860
    },
    {
      "epoch": 1.0590383444917832,
      "grad_norm": 0.48122501373291016,
      "learning_rate": 0.0001002037347911959,
      "loss": 0.7234,
      "step": 870
    },
    {
      "epoch": 1.0712111990261717,
      "grad_norm": 0.45439159870147705,
      "learning_rate": 9.816648835687556e-05,
      "loss": 0.6589,
      "step": 880
    },
    {
      "epoch": 1.08338405356056,
      "grad_norm": 0.41301119327545166,
      "learning_rate": 9.613000294882405e-05,
      "loss": 0.6915,
      "step": 890
    },
    {
      "epoch": 1.0955569080949483,
      "grad_norm": 0.41401615738868713,
      "learning_rate": 9.409512384058915e-05,
      "loss": 0.7036,
      "step": 900
    },
    {
      "epoch": 1.1077297626293365,
      "grad_norm": 0.4313485324382782,
      "learning_rate": 9.206269563900013e-05,
      "loss": 0.711,
      "step": 910
    },
    {
      "epoch": 1.119902617163725,
      "grad_norm": 0.4550848603248596,
      "learning_rate": 9.003356193360098e-05,
      "loss": 0.6601,
      "step": 920
    },
    {
      "epoch": 1.1320754716981132,
      "grad_norm": 0.43733838200569153,
      "learning_rate": 8.800856494650601e-05,
      "loss": 0.6937,
      "step": 930
    },
    {
      "epoch": 1.1442483262325016,
      "grad_norm": 0.4673764407634735,
      "learning_rate": 8.598854518282311e-05,
      "loss": 0.6603,
      "step": 940
    },
    {
      "epoch": 1.1564211807668898,
      "grad_norm": 0.4688016176223755,
      "learning_rate": 8.39743410817896e-05,
      "loss": 0.6291,
      "step": 950
    },
    {
      "epoch": 1.168594035301278,
      "grad_norm": 0.43298402428627014,
      "learning_rate": 8.196678866876561e-05,
      "loss": 0.6701,
      "step": 960
    },
    {
      "epoch": 1.1807668898356665,
      "grad_norm": 0.43098828196525574,
      "learning_rate": 7.996672120822918e-05,
      "loss": 0.6706,
      "step": 970
    },
    {
      "epoch": 1.192939744370055,
      "grad_norm": 0.44411736726760864,
      "learning_rate": 7.797496885791753e-05,
      "loss": 0.6736,
      "step": 980
    },
    {
      "epoch": 1.205112598904443,
      "grad_norm": 0.5002148747444153,
      "learning_rate": 7.599235832425781e-05,
      "loss": 0.6215,
      "step": 990
    },
    {
      "epoch": 1.2172854534388313,
      "grad_norm": 0.4465004503726959,
      "learning_rate": 7.401971251923014e-05,
      "loss": 0.6843,
      "step": 1000
    },
    {
      "epoch": 1.2294583079732198,
      "grad_norm": 0.4540722966194153,
      "learning_rate": 7.205785021880602e-05,
      "loss": 0.6767,
      "step": 1010
    },
    {
      "epoch": 1.241631162507608,
      "grad_norm": 0.5289450883865356,
      "learning_rate": 7.01075857231029e-05,
      "loss": 0.6769,
      "step": 1020
    },
    {
      "epoch": 1.2538040170419964,
      "grad_norm": 0.4757733941078186,
      "learning_rate": 6.816972851839701e-05,
      "loss": 0.6836,
      "step": 1030
    },
    {
      "epoch": 1.2659768715763846,
      "grad_norm": 0.45475292205810547,
      "learning_rate": 6.624508294113387e-05,
      "loss": 0.6965,
      "step": 1040
    },
    {
      "epoch": 1.278149726110773,
      "grad_norm": 0.4642918109893799,
      "learning_rate": 6.433444784407679e-05,
      "loss": 0.6689,
      "step": 1050
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 0.4626942276954651,
      "learning_rate": 6.243861626473073e-05,
      "loss": 0.6499,
      "step": 1060
    },
    {
      "epoch": 1.3024954351795497,
      "grad_norm": 0.4092578589916229,
      "learning_rate": 6.055837509618072e-05,
      "loss": 0.6308,
      "step": 1070
    },
    {
      "epoch": 1.314668289713938,
      "grad_norm": 0.45399782061576843,
      "learning_rate": 5.869450476047984e-05,
      "loss": 0.6516,
      "step": 1080
    },
    {
      "epoch": 1.326841144248326,
      "grad_norm": 0.510785698890686,
      "learning_rate": 5.684777888472359e-05,
      "loss": 0.6631,
      "step": 1090
    },
    {
      "epoch": 1.3390139987827145,
      "grad_norm": 0.5002163648605347,
      "learning_rate": 5.501896397994465e-05,
      "loss": 0.6329,
      "step": 1100
    },
    {
      "epoch": 1.351186853317103,
      "grad_norm": 0.5203436613082886,
      "learning_rate": 5.320881912296067e-05,
      "loss": 0.6865,
      "step": 1110
    },
    {
      "epoch": 1.3633597078514912,
      "grad_norm": 0.4759369492530823,
      "learning_rate": 5.141809564130847e-05,
      "loss": 0.6427,
      "step": 1120
    },
    {
      "epoch": 1.3755325623858794,
      "grad_norm": 0.46041440963745117,
      "learning_rate": 4.964753680139424e-05,
      "loss": 0.6733,
      "step": 1130
    },
    {
      "epoch": 1.3877054169202678,
      "grad_norm": 0.455905020236969,
      "learning_rate": 4.7897877499989784e-05,
      "loss": 0.6654,
      "step": 1140
    },
    {
      "epoch": 1.399878271454656,
      "grad_norm": 0.46310657262802124,
      "learning_rate": 4.616984395920282e-05,
      "loss": 0.643,
      "step": 1150
    },
    {
      "epoch": 1.4120511259890445,
      "grad_norm": 0.48385167121887207,
      "learning_rate": 4.446415342504742e-05,
      "loss": 0.6365,
      "step": 1160
    },
    {
      "epoch": 1.4242239805234327,
      "grad_norm": 0.5654993057250977,
      "learning_rate": 4.27815138697407e-05,
      "loss": 0.6635,
      "step": 1170
    },
    {
      "epoch": 1.4363968350578211,
      "grad_norm": 0.43409934639930725,
      "learning_rate": 4.112262369784816e-05,
      "loss": 0.6914,
      "step": 1180
    },
    {
      "epoch": 1.4485696895922093,
      "grad_norm": 0.4706696569919586,
      "learning_rate": 3.9488171456400623e-05,
      "loss": 0.6672,
      "step": 1190
    },
    {
      "epoch": 1.4607425441265978,
      "grad_norm": 0.4703215956687927,
      "learning_rate": 3.787883554910262e-05,
      "loss": 0.637,
      "step": 1200
    },
    {
      "epoch": 1.472915398660986,
      "grad_norm": 0.4684227406978607,
      "learning_rate": 3.629528395475063e-05,
      "loss": 0.6469,
      "step": 1210
    },
    {
      "epoch": 1.4850882531953742,
      "grad_norm": 0.4506467580795288,
      "learning_rate": 3.4738173949978735e-05,
      "loss": 0.5811,
      "step": 1220
    },
    {
      "epoch": 1.4972611077297626,
      "grad_norm": 0.44935739040374756,
      "learning_rate": 3.3208151836446077e-05,
      "loss": 0.5933,
      "step": 1230
    },
    {
      "epoch": 1.509433962264151,
      "grad_norm": 0.5078376531600952,
      "learning_rate": 3.170585267257985e-05,
      "loss": 0.6546,
      "step": 1240
    },
    {
      "epoch": 1.5216068167985393,
      "grad_norm": 0.5375810861587524,
      "learning_rate": 3.023190000998479e-05,
      "loss": 0.6287,
      "step": 1250
    },
    {
      "epoch": 1.5337796713329275,
      "grad_norm": 0.4622966945171356,
      "learning_rate": 2.878690563462877e-05,
      "loss": 0.5885,
      "step": 1260
    },
    {
      "epoch": 1.545952525867316,
      "grad_norm": 0.47352567315101624,
      "learning_rate": 2.7371469312911978e-05,
      "loss": 0.6373,
      "step": 1270
    },
    {
      "epoch": 1.5581253804017043,
      "grad_norm": 0.4676053822040558,
      "learning_rate": 2.5986178542724982e-05,
      "loss": 0.6852,
      "step": 1280
    },
    {
      "epoch": 1.5702982349360926,
      "grad_norm": 0.5178486108779907,
      "learning_rate": 2.463160830959894e-05,
      "loss": 0.655,
      "step": 1290
    },
    {
      "epoch": 1.5824710894704808,
      "grad_norm": 0.4831840693950653,
      "learning_rate": 2.3308320848049437e-05,
      "loss": 0.6853,
      "step": 1300
    },
    {
      "epoch": 1.5946439440048692,
      "grad_norm": 0.5412158370018005,
      "learning_rate": 2.2016865408212452e-05,
      "loss": 0.6504,
      "step": 1310
    },
    {
      "epoch": 1.6068167985392574,
      "grad_norm": 0.4840323030948639,
      "learning_rate": 2.075777802787019e-05,
      "loss": 0.6829,
      "step": 1320
    },
    {
      "epoch": 1.6189896530736458,
      "grad_norm": 0.4864979684352875,
      "learning_rate": 1.953158130996039e-05,
      "loss": 0.6589,
      "step": 1330
    },
    {
      "epoch": 1.631162507608034,
      "grad_norm": 0.5091675519943237,
      "learning_rate": 1.833878420566243e-05,
      "loss": 0.6589,
      "step": 1340
    },
    {
      "epoch": 1.6433353621424223,
      "grad_norm": 0.5042529106140137,
      "learning_rate": 1.7179881803149355e-05,
      "loss": 0.5867,
      "step": 1350
    },
    {
      "epoch": 1.6555082166768107,
      "grad_norm": 0.5029342174530029,
      "learning_rate": 1.605535512209435e-05,
      "loss": 0.652,
      "step": 1360
    },
    {
      "epoch": 1.6676810712111991,
      "grad_norm": 0.5203830003738403,
      "learning_rate": 1.496567091401625e-05,
      "loss": 0.6404,
      "step": 1370
    },
    {
      "epoch": 1.6798539257455873,
      "grad_norm": 0.44688454270362854,
      "learning_rate": 1.3911281468547432e-05,
      "loss": 0.6528,
      "step": 1380
    },
    {
      "epoch": 1.6920267802799756,
      "grad_norm": 0.4528455138206482,
      "learning_rate": 1.2892624425704326e-05,
      "loss": 0.6499,
      "step": 1390
    },
    {
      "epoch": 1.704199634814364,
      "grad_norm": 0.5175755620002747,
      "learning_rate": 1.191012259423835e-05,
      "loss": 0.6553,
      "step": 1400
    },
    {
      "epoch": 1.7163724893487524,
      "grad_norm": 0.5325374603271484,
      "learning_rate": 1.0964183776142833e-05,
      "loss": 0.668,
      "step": 1410
    },
    {
      "epoch": 1.7285453438831406,
      "grad_norm": 0.4976743757724762,
      "learning_rate": 1.0055200597388792e-05,
      "loss": 0.6214,
      "step": 1420
    },
    {
      "epoch": 1.7407181984175288,
      "grad_norm": 0.484122097492218,
      "learning_rate": 9.18355034495968e-06,
      "loss": 0.6254,
      "step": 1430
    },
    {
      "epoch": 1.7528910529519173,
      "grad_norm": 0.4987303912639618,
      "learning_rate": 8.349594810252792e-06,
      "loss": 0.6376,
      "step": 1440
    },
    {
      "epoch": 1.7650639074863055,
      "grad_norm": 0.5294932723045349,
      "learning_rate": 7.553680138912378e-06,
      "loss": 0.6181,
      "step": 1450
    },
    {
      "epoch": 1.777236762020694,
      "grad_norm": 0.48353898525238037,
      "learning_rate": 6.796136687156696e-06,
      "loss": 0.6102,
      "step": 1460
    },
    {
      "epoch": 1.7894096165550821,
      "grad_norm": 0.4669087529182434,
      "learning_rate": 6.077278884658888e-06,
      "loss": 0.6455,
      "step": 1470
    },
    {
      "epoch": 1.8015824710894703,
      "grad_norm": 0.4892638921737671,
      "learning_rate": 5.3974051040380895e-06,
      "loss": 0.6367,
      "step": 1480
    },
    {
      "epoch": 1.8137553256238588,
      "grad_norm": 0.45729324221611023,
      "learning_rate": 4.756797537015589e-06,
      "loss": 0.5949,
      "step": 1490
    },
    {
      "epoch": 1.8259281801582472,
      "grad_norm": 0.43028926849365234,
      "learning_rate": 4.1557220772868725e-06,
      "loss": 0.6474,
      "step": 1500
    },
    {
      "epoch": 1.8381010346926354,
      "grad_norm": 0.5260388851165771,
      "learning_rate": 3.594428210158496e-06,
      "loss": 0.6355,
      "step": 1510
    },
    {
      "epoch": 1.8502738892270236,
      "grad_norm": 0.46215394139289856,
      "learning_rate": 3.073148908995538e-06,
      "loss": 0.6707,
      "step": 1520
    },
    {
      "epoch": 1.862446743761412,
      "grad_norm": 0.5043140649795532,
      "learning_rate": 2.592100538522524e-06,
      "loss": 0.6277,
      "step": 1530
    },
    {
      "epoch": 1.8746195982958005,
      "grad_norm": 0.49733731150627136,
      "learning_rate": 2.1514827650180423e-06,
      "loss": 0.6523,
      "step": 1540
    },
    {
      "epoch": 1.8867924528301887,
      "grad_norm": 0.5108760595321655,
      "learning_rate": 1.7514784734402268e-06,
      "loss": 0.6456,
      "step": 1550
    },
    {
      "epoch": 1.898965307364577,
      "grad_norm": 0.5145827531814575,
      "learning_rate": 1.3922536915177532e-06,
      "loss": 0.6488,
      "step": 1560
    },
    {
      "epoch": 1.9111381618989653,
      "grad_norm": 0.4664909541606903,
      "learning_rate": 1.0739575208373987e-06,
      "loss": 0.6454,
      "step": 1570
    },
    {
      "epoch": 1.9233110164333538,
      "grad_norm": 0.4939674139022827,
      "learning_rate": 7.96722074957379e-07,
      "loss": 0.6468,
      "step": 1580
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 0.5013584494590759,
      "learning_rate": 5.606624245715453e-07,
      "loss": 0.6921,
      "step": 1590
    },
    {
      "epoch": 1.9476567255021302,
      "grad_norm": 0.5231553912162781,
      "learning_rate": 3.658765497476124e-07,
      "loss": 0.6091,
      "step": 1600
    },
    {
      "epoch": 1.9598295800365184,
      "grad_norm": 0.44986918568611145,
      "learning_rate": 2.1244529925906754e-07,
      "loss": 0.6854,
      "step": 1610
    },
    {
      "epoch": 1.9720024345709068,
      "grad_norm": 0.5024383664131165,
      "learning_rate": 1.0043235702770215e-07,
      "loss": 0.6481,
      "step": 1620
    },
    {
      "epoch": 1.9841752891052953,
      "grad_norm": 0.45827868580818176,
      "learning_rate": 2.9884215690634266e-08,
      "loss": 0.6115,
      "step": 1630
    },
    {
      "epoch": 1.9963481436396835,
      "grad_norm": 0.588126003742218,
      "learning_rate": 8.30157302877943e-10,
      "loss": 0.6485,
      "step": 1640
    }
  ],
  "logging_steps": 10,
  "max_steps": 1642,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.191334335517491e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}