{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.6305170239596469,
"eval_steps": 500,
"global_step": 5000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0012610340479192938,
"grad_norm": 15.448720932006836,
"learning_rate": 3.6e-06,
"loss": 0.915,
"step": 10
},
{
"epoch": 0.0025220680958385876,
"grad_norm": 6.090061664581299,
"learning_rate": 7.6e-06,
"loss": 0.6303,
"step": 20
},
{
"epoch": 0.0037831021437578815,
"grad_norm": 4.4645609855651855,
"learning_rate": 1.16e-05,
"loss": 0.3184,
"step": 30
},
{
"epoch": 0.005044136191677175,
"grad_norm": 2.623128890991211,
"learning_rate": 1.56e-05,
"loss": 0.2592,
"step": 40
},
{
"epoch": 0.006305170239596469,
"grad_norm": 2.312946081161499,
"learning_rate": 1.9600000000000002e-05,
"loss": 0.2221,
"step": 50
},
{
"epoch": 0.007566204287515763,
"grad_norm": 3.100372076034546,
"learning_rate": 2.36e-05,
"loss": 0.2353,
"step": 60
},
{
"epoch": 0.008827238335435058,
"grad_norm": 1.9989323616027832,
"learning_rate": 2.7600000000000003e-05,
"loss": 0.2113,
"step": 70
},
{
"epoch": 0.01008827238335435,
"grad_norm": 2.2940664291381836,
"learning_rate": 3.16e-05,
"loss": 0.1711,
"step": 80
},
{
"epoch": 0.011349306431273645,
"grad_norm": 1.9798835515975952,
"learning_rate": 3.56e-05,
"loss": 0.1391,
"step": 90
},
{
"epoch": 0.012610340479192938,
"grad_norm": 1.177800178527832,
"learning_rate": 3.960000000000001e-05,
"loss": 0.1631,
"step": 100
},
{
"epoch": 0.013871374527112233,
"grad_norm": 1.7671740055084229,
"learning_rate": 4.36e-05,
"loss": 0.2098,
"step": 110
},
{
"epoch": 0.015132408575031526,
"grad_norm": 2.2396202087402344,
"learning_rate": 4.76e-05,
"loss": 0.1826,
"step": 120
},
{
"epoch": 0.01639344262295082,
"grad_norm": 2.7950632572174072,
"learning_rate": 5.16e-05,
"loss": 0.1943,
"step": 130
},
{
"epoch": 0.017654476670870115,
"grad_norm": 2.6440370082855225,
"learning_rate": 5.560000000000001e-05,
"loss": 0.1514,
"step": 140
},
{
"epoch": 0.018915510718789406,
"grad_norm": 2.414039373397827,
"learning_rate": 5.96e-05,
"loss": 0.151,
"step": 150
},
{
"epoch": 0.0201765447667087,
"grad_norm": 1.7065895795822144,
"learning_rate": 6.36e-05,
"loss": 0.1507,
"step": 160
},
{
"epoch": 0.021437578814627996,
"grad_norm": 2.5152862071990967,
"learning_rate": 6.76e-05,
"loss": 0.1638,
"step": 170
},
{
"epoch": 0.02269861286254729,
"grad_norm": 2.9150664806365967,
"learning_rate": 7.16e-05,
"loss": 0.152,
"step": 180
},
{
"epoch": 0.02395964691046658,
"grad_norm": 2.840017557144165,
"learning_rate": 7.560000000000001e-05,
"loss": 0.1411,
"step": 190
},
{
"epoch": 0.025220680958385876,
"grad_norm": 1.9643855094909668,
"learning_rate": 7.960000000000001e-05,
"loss": 0.121,
"step": 200
},
{
"epoch": 0.02648171500630517,
"grad_norm": 1.9341304302215576,
"learning_rate": 8.36e-05,
"loss": 0.146,
"step": 210
},
{
"epoch": 0.027742749054224466,
"grad_norm": 2.080127239227295,
"learning_rate": 8.76e-05,
"loss": 0.1667,
"step": 220
},
{
"epoch": 0.029003783102143757,
"grad_norm": 1.2154450416564941,
"learning_rate": 9.16e-05,
"loss": 0.1566,
"step": 230
},
{
"epoch": 0.03026481715006305,
"grad_norm": 3.8702995777130127,
"learning_rate": 9.56e-05,
"loss": 0.1687,
"step": 240
},
{
"epoch": 0.031525851197982346,
"grad_norm": 2.737008810043335,
"learning_rate": 9.960000000000001e-05,
"loss": 0.1374,
"step": 250
},
{
"epoch": 0.03278688524590164,
"grad_norm": 1.4026472568511963,
"learning_rate": 9.999911419878559e-05,
"loss": 0.1596,
"step": 260
},
{
"epoch": 0.034047919293820936,
"grad_norm": 2.736366033554077,
"learning_rate": 9.999605221019081e-05,
"loss": 0.1592,
"step": 270
},
{
"epoch": 0.03530895334174023,
"grad_norm": 2.688608407974243,
"learning_rate": 9.999080323230761e-05,
"loss": 0.1544,
"step": 280
},
{
"epoch": 0.03656998738965952,
"grad_norm": 3.0117437839508057,
"learning_rate": 9.998336749474329e-05,
"loss": 0.1441,
"step": 290
},
{
"epoch": 0.03783102143757881,
"grad_norm": 2.537014961242676,
"learning_rate": 9.997374532276107e-05,
"loss": 0.1421,
"step": 300
},
{
"epoch": 0.03909205548549811,
"grad_norm": 1.7813339233398438,
"learning_rate": 9.996193713726596e-05,
"loss": 0.1208,
"step": 310
},
{
"epoch": 0.0403530895334174,
"grad_norm": 1.780358910560608,
"learning_rate": 9.994794345478624e-05,
"loss": 0.1297,
"step": 320
},
{
"epoch": 0.0416141235813367,
"grad_norm": 1.8027393817901611,
"learning_rate": 9.99317648874509e-05,
"loss": 0.1159,
"step": 330
},
{
"epoch": 0.04287515762925599,
"grad_norm": 2.2942054271698,
"learning_rate": 9.991340214296292e-05,
"loss": 0.1409,
"step": 340
},
{
"epoch": 0.044136191677175286,
"grad_norm": 1.8020310401916504,
"learning_rate": 9.989285602456819e-05,
"loss": 0.1573,
"step": 350
},
{
"epoch": 0.04539722572509458,
"grad_norm": 2.051375150680542,
"learning_rate": 9.98701274310205e-05,
"loss": 0.1063,
"step": 360
},
{
"epoch": 0.04665825977301387,
"grad_norm": 3.008836030960083,
"learning_rate": 9.984521735654218e-05,
"loss": 0.1231,
"step": 370
},
{
"epoch": 0.04791929382093316,
"grad_norm": 1.7396783828735352,
"learning_rate": 9.981812689078057e-05,
"loss": 0.1372,
"step": 380
},
{
"epoch": 0.04918032786885246,
"grad_norm": 1.3061403036117554,
"learning_rate": 9.978885721876041e-05,
"loss": 0.1237,
"step": 390
},
{
"epoch": 0.05044136191677175,
"grad_norm": 1.926888346672058,
"learning_rate": 9.975740962083198e-05,
"loss": 0.1112,
"step": 400
},
{
"epoch": 0.05170239596469105,
"grad_norm": 1.6264052391052246,
"learning_rate": 9.972378547261504e-05,
"loss": 0.1168,
"step": 410
},
{
"epoch": 0.05296343001261034,
"grad_norm": 2.228379964828491,
"learning_rate": 9.968798624493885e-05,
"loss": 0.1035,
"step": 420
},
{
"epoch": 0.05422446406052964,
"grad_norm": 1.805611491203308,
"learning_rate": 9.965001350377753e-05,
"loss": 0.0847,
"step": 430
},
{
"epoch": 0.05548549810844893,
"grad_norm": 2.9379208087921143,
"learning_rate": 9.960986891018183e-05,
"loss": 0.1202,
"step": 440
},
{
"epoch": 0.05674653215636822,
"grad_norm": 2.2286455631256104,
"learning_rate": 9.95675542202063e-05,
"loss": 0.1365,
"step": 450
},
{
"epoch": 0.058007566204287514,
"grad_norm": 2.3481640815734863,
"learning_rate": 9.952307128483256e-05,
"loss": 0.1122,
"step": 460
},
{
"epoch": 0.05926860025220681,
"grad_norm": 2.3531336784362793,
"learning_rate": 9.947642204988835e-05,
"loss": 0.1245,
"step": 470
},
{
"epoch": 0.0605296343001261,
"grad_norm": 1.371448040008545,
"learning_rate": 9.942760855596226e-05,
"loss": 0.1118,
"step": 480
},
{
"epoch": 0.0617906683480454,
"grad_norm": 1.3643913269042969,
"learning_rate": 9.937663293831471e-05,
"loss": 0.0913,
"step": 490
},
{
"epoch": 0.06305170239596469,
"grad_norm": 1.3660390377044678,
"learning_rate": 9.932349742678433e-05,
"loss": 0.1095,
"step": 500
},
{
"epoch": 0.06431273644388398,
"grad_norm": 1.235795497894287,
"learning_rate": 9.926820434569051e-05,
"loss": 0.1061,
"step": 510
},
{
"epoch": 0.06557377049180328,
"grad_norm": 2.3357131481170654,
"learning_rate": 9.921075611373179e-05,
"loss": 0.102,
"step": 520
},
{
"epoch": 0.06683480453972257,
"grad_norm": 1.6956318616867065,
"learning_rate": 9.915115524387988e-05,
"loss": 0.1051,
"step": 530
},
{
"epoch": 0.06809583858764187,
"grad_norm": 0.8027364611625671,
"learning_rate": 9.908940434326997e-05,
"loss": 0.1107,
"step": 540
},
{
"epoch": 0.06935687263556116,
"grad_norm": 1.4370204210281372,
"learning_rate": 9.902550611308645e-05,
"loss": 0.0948,
"step": 550
},
{
"epoch": 0.07061790668348046,
"grad_norm": 1.3123650550842285,
"learning_rate": 9.895946334844494e-05,
"loss": 0.0964,
"step": 560
},
{
"epoch": 0.07187894073139975,
"grad_norm": 2.1670567989349365,
"learning_rate": 9.889127893826989e-05,
"loss": 0.1093,
"step": 570
},
{
"epoch": 0.07313997477931904,
"grad_norm": 2.2896504402160645,
"learning_rate": 9.882095586516831e-05,
"loss": 0.1053,
"step": 580
},
{
"epoch": 0.07440100882723834,
"grad_norm": 1.490241289138794,
"learning_rate": 9.874849720529921e-05,
"loss": 0.1163,
"step": 590
},
{
"epoch": 0.07566204287515763,
"grad_norm": 2.4362456798553467,
"learning_rate": 9.867390612823914e-05,
"loss": 0.1013,
"step": 600
},
{
"epoch": 0.07692307692307693,
"grad_norm": 1.6140224933624268,
"learning_rate": 9.859718589684344e-05,
"loss": 0.0909,
"step": 610
},
{
"epoch": 0.07818411097099622,
"grad_norm": 2.315236806869507,
"learning_rate": 9.851833986710353e-05,
"loss": 0.0843,
"step": 620
},
{
"epoch": 0.07944514501891552,
"grad_norm": 1.5628085136413574,
"learning_rate": 9.843737148800023e-05,
"loss": 0.0843,
"step": 630
},
{
"epoch": 0.0807061790668348,
"grad_norm": 2.52994704246521,
"learning_rate": 9.835428430135271e-05,
"loss": 0.0799,
"step": 640
},
{
"epoch": 0.08196721311475409,
"grad_norm": 1.4020142555236816,
"learning_rate": 9.82690819416637e-05,
"loss": 0.0971,
"step": 650
},
{
"epoch": 0.0832282471626734,
"grad_norm": 1.480144739151001,
"learning_rate": 9.818176813596041e-05,
"loss": 0.0776,
"step": 660
},
{
"epoch": 0.08448928121059268,
"grad_norm": 1.6455413103103638,
"learning_rate": 9.809234670363159e-05,
"loss": 0.0897,
"step": 670
},
{
"epoch": 0.08575031525851198,
"grad_norm": 1.6539134979248047,
"learning_rate": 9.800082155626034e-05,
"loss": 0.1046,
"step": 680
},
{
"epoch": 0.08701134930643127,
"grad_norm": 1.8283783197402954,
"learning_rate": 9.790719669745312e-05,
"loss": 0.0845,
"step": 690
},
{
"epoch": 0.08827238335435057,
"grad_norm": 1.7916957139968872,
"learning_rate": 9.781147622266455e-05,
"loss": 0.0932,
"step": 700
},
{
"epoch": 0.08953341740226986,
"grad_norm": 1.744341254234314,
"learning_rate": 9.771366431901831e-05,
"loss": 0.0834,
"step": 710
},
{
"epoch": 0.09079445145018916,
"grad_norm": 1.4329490661621094,
"learning_rate": 9.761376526512394e-05,
"loss": 0.0659,
"step": 720
},
{
"epoch": 0.09205548549810845,
"grad_norm": 1.6136714220046997,
"learning_rate": 9.751178343088963e-05,
"loss": 0.1164,
"step": 730
},
{
"epoch": 0.09331651954602774,
"grad_norm": 1.8176523447036743,
"learning_rate": 9.740772327733123e-05,
"loss": 0.116,
"step": 740
},
{
"epoch": 0.09457755359394704,
"grad_norm": 1.5235085487365723,
"learning_rate": 9.730158935637697e-05,
"loss": 0.0832,
"step": 750
},
{
"epoch": 0.09583858764186633,
"grad_norm": 2.193366527557373,
"learning_rate": 9.719338631066834e-05,
"loss": 0.0896,
"step": 760
},
{
"epoch": 0.09709962168978563,
"grad_norm": 3.576033115386963,
"learning_rate": 9.708311887335713e-05,
"loss": 0.1041,
"step": 770
},
{
"epoch": 0.09836065573770492,
"grad_norm": 2.032519578933716,
"learning_rate": 9.697079186789823e-05,
"loss": 0.1102,
"step": 780
},
{
"epoch": 0.09962168978562422,
"grad_norm": 2.3417623043060303,
"learning_rate": 9.685641020783876e-05,
"loss": 0.0962,
"step": 790
},
{
"epoch": 0.1008827238335435,
"grad_norm": 1.609004020690918,
"learning_rate": 9.67399788966031e-05,
"loss": 0.0926,
"step": 800
},
{
"epoch": 0.1021437578814628,
"grad_norm": 1.1974939107894897,
"learning_rate": 9.662150302727395e-05,
"loss": 0.0762,
"step": 810
},
{
"epoch": 0.1034047919293821,
"grad_norm": 1.543877124786377,
"learning_rate": 9.650098778236968e-05,
"loss": 0.0847,
"step": 820
},
{
"epoch": 0.10466582597730138,
"grad_norm": 1.5203806161880493,
"learning_rate": 9.637843843361749e-05,
"loss": 0.085,
"step": 830
},
{
"epoch": 0.10592686002522068,
"grad_norm": 1.5826364755630493,
"learning_rate": 9.62538603417229e-05,
"loss": 0.0966,
"step": 840
},
{
"epoch": 0.10718789407313997,
"grad_norm": 2.522489309310913,
"learning_rate": 9.612725895613526e-05,
"loss": 0.1168,
"step": 850
},
{
"epoch": 0.10844892812105927,
"grad_norm": 1.4813344478607178,
"learning_rate": 9.599863981480926e-05,
"loss": 0.1122,
"step": 860
},
{
"epoch": 0.10970996216897856,
"grad_norm": 0.8094097375869751,
"learning_rate": 9.586800854396283e-05,
"loss": 0.0955,
"step": 870
},
{
"epoch": 0.11097099621689786,
"grad_norm": 1.2435733079910278,
"learning_rate": 9.573537085783095e-05,
"loss": 0.0869,
"step": 880
},
{
"epoch": 0.11223203026481715,
"grad_norm": 1.8356126546859741,
"learning_rate": 9.560073255841571e-05,
"loss": 0.0878,
"step": 890
},
{
"epoch": 0.11349306431273644,
"grad_norm": 1.1623339653015137,
"learning_rate": 9.546409953523247e-05,
"loss": 0.0642,
"step": 900
},
{
"epoch": 0.11475409836065574,
"grad_norm": 2.5689473152160645,
"learning_rate": 9.532547776505229e-05,
"loss": 0.0775,
"step": 910
},
{
"epoch": 0.11601513240857503,
"grad_norm": 1.428094744682312,
"learning_rate": 9.518487331164048e-05,
"loss": 0.098,
"step": 920
},
{
"epoch": 0.11727616645649433,
"grad_norm": 1.1375477313995361,
"learning_rate": 9.504229232549134e-05,
"loss": 0.0936,
"step": 930
},
{
"epoch": 0.11853720050441362,
"grad_norm": 1.4177823066711426,
"learning_rate": 9.489774104355909e-05,
"loss": 0.0742,
"step": 940
},
{
"epoch": 0.11979823455233292,
"grad_norm": 2.013066530227661,
"learning_rate": 9.475122578898507e-05,
"loss": 0.1032,
"step": 950
},
{
"epoch": 0.1210592686002522,
"grad_norm": 1.1708323955535889,
"learning_rate": 9.460275297082119e-05,
"loss": 0.0895,
"step": 960
},
{
"epoch": 0.1223203026481715,
"grad_norm": 1.1229428052902222,
"learning_rate": 9.445232908374948e-05,
"loss": 0.0803,
"step": 970
},
{
"epoch": 0.1235813366960908,
"grad_norm": 2.447211742401123,
"learning_rate": 9.429996070779808e-05,
"loss": 0.0927,
"step": 980
},
{
"epoch": 0.12484237074401008,
"grad_norm": 1.2103554010391235,
"learning_rate": 9.414565450805333e-05,
"loss": 0.0841,
"step": 990
},
{
"epoch": 0.12610340479192939,
"grad_norm": 1.4568835496902466,
"learning_rate": 9.398941723436831e-05,
"loss": 0.0804,
"step": 1000
},
{
"epoch": 0.1273644388398487,
"grad_norm": 1.8346503973007202,
"learning_rate": 9.383125572106752e-05,
"loss": 0.0846,
"step": 1010
},
{
"epoch": 0.12862547288776796,
"grad_norm": 1.5656734704971313,
"learning_rate": 9.367117688664791e-05,
"loss": 0.0875,
"step": 1020
},
{
"epoch": 0.12988650693568726,
"grad_norm": 1.1312291622161865,
"learning_rate": 9.35091877334763e-05,
"loss": 0.1047,
"step": 1030
},
{
"epoch": 0.13114754098360656,
"grad_norm": 1.185917854309082,
"learning_rate": 9.334529534748297e-05,
"loss": 0.0896,
"step": 1040
},
{
"epoch": 0.13240857503152584,
"grad_norm": 1.4013923406600952,
"learning_rate": 9.317950689785188e-05,
"loss": 0.1043,
"step": 1050
},
{
"epoch": 0.13366960907944514,
"grad_norm": 0.9797882437705994,
"learning_rate": 9.301182963670688e-05,
"loss": 0.0819,
"step": 1060
},
{
"epoch": 0.13493064312736444,
"grad_norm": 2.0887033939361572,
"learning_rate": 9.284227089879456e-05,
"loss": 0.0927,
"step": 1070
},
{
"epoch": 0.13619167717528374,
"grad_norm": 1.1331026554107666,
"learning_rate": 9.26708381011634e-05,
"loss": 0.0423,
"step": 1080
},
{
"epoch": 0.13745271122320302,
"grad_norm": 1.637695550918579,
"learning_rate": 9.249753874283937e-05,
"loss": 0.0916,
"step": 1090
},
{
"epoch": 0.13871374527112232,
"grad_norm": 1.2803550958633423,
"learning_rate": 9.232238040449779e-05,
"loss": 0.0732,
"step": 1100
},
{
"epoch": 0.13997477931904162,
"grad_norm": 1.011741280555725,
"learning_rate": 9.214537074813181e-05,
"loss": 0.1006,
"step": 1110
},
{
"epoch": 0.14123581336696092,
"grad_norm": 2.508837938308716,
"learning_rate": 9.196651751671724e-05,
"loss": 0.0901,
"step": 1120
},
{
"epoch": 0.1424968474148802,
"grad_norm": 1.011094093322754,
"learning_rate": 9.178582853387384e-05,
"loss": 0.0809,
"step": 1130
},
{
"epoch": 0.1437578814627995,
"grad_norm": 0.7687975168228149,
"learning_rate": 9.160331170352304e-05,
"loss": 0.0833,
"step": 1140
},
{
"epoch": 0.1450189155107188,
"grad_norm": 2.2561843395233154,
"learning_rate": 9.141897500954229e-05,
"loss": 0.0957,
"step": 1150
},
{
"epoch": 0.14627994955863807,
"grad_norm": 1.989361047744751,
"learning_rate": 9.123282651541576e-05,
"loss": 0.0769,
"step": 1160
},
{
"epoch": 0.14754098360655737,
"grad_norm": 2.0755341053009033,
"learning_rate": 9.104487436388161e-05,
"loss": 0.0789,
"step": 1170
},
{
"epoch": 0.14880201765447668,
"grad_norm": 2.3149120807647705,
"learning_rate": 9.085512677657582e-05,
"loss": 0.0878,
"step": 1180
},
{
"epoch": 0.15006305170239598,
"grad_norm": 1.0671385526657104,
"learning_rate": 9.066359205367258e-05,
"loss": 0.0798,
"step": 1190
},
{
"epoch": 0.15132408575031525,
"grad_norm": 1.0731128454208374,
"learning_rate": 9.047027857352112e-05,
"loss": 0.0833,
"step": 1200
},
{
"epoch": 0.15258511979823455,
"grad_norm": 1.041377305984497,
"learning_rate": 9.027519479227935e-05,
"loss": 0.0839,
"step": 1210
},
{
"epoch": 0.15384615384615385,
"grad_norm": 1.3868672847747803,
"learning_rate": 9.007834924354383e-05,
"loss": 0.0736,
"step": 1220
},
{
"epoch": 0.15510718789407313,
"grad_norm": 2.201692819595337,
"learning_rate": 8.987975053797655e-05,
"loss": 0.0723,
"step": 1230
},
{
"epoch": 0.15636822194199243,
"grad_norm": 1.6578350067138672,
"learning_rate": 8.967940736292825e-05,
"loss": 0.0959,
"step": 1240
},
{
"epoch": 0.15762925598991173,
"grad_norm": 1.1587399244308472,
"learning_rate": 8.947732848205846e-05,
"loss": 0.0674,
"step": 1250
},
{
"epoch": 0.15889029003783103,
"grad_norm": 1.663224220275879,
"learning_rate": 8.927352273495204e-05,
"loss": 0.083,
"step": 1260
},
{
"epoch": 0.1601513240857503,
"grad_norm": 2.5776238441467285,
"learning_rate": 8.906799903673265e-05,
"loss": 0.0761,
"step": 1270
},
{
"epoch": 0.1614123581336696,
"grad_norm": 1.3080676794052124,
"learning_rate": 8.88607663776726e-05,
"loss": 0.0712,
"step": 1280
},
{
"epoch": 0.1626733921815889,
"grad_norm": 1.5829815864562988,
"learning_rate": 8.865183382279978e-05,
"loss": 0.074,
"step": 1290
},
{
"epoch": 0.16393442622950818,
"grad_norm": 1.6632649898529053,
"learning_rate": 8.844121051150096e-05,
"loss": 0.0947,
"step": 1300
},
{
"epoch": 0.16519546027742749,
"grad_norm": 1.201285719871521,
"learning_rate": 8.822890565712211e-05,
"loss": 0.0665,
"step": 1310
},
{
"epoch": 0.1664564943253468,
"grad_norm": 1.4638184309005737,
"learning_rate": 8.801492854656536e-05,
"loss": 0.0896,
"step": 1320
},
{
"epoch": 0.1677175283732661,
"grad_norm": 2.047971725463867,
"learning_rate": 8.779928853988268e-05,
"loss": 0.0654,
"step": 1330
},
{
"epoch": 0.16897856242118536,
"grad_norm": 1.328118085861206,
"learning_rate": 8.758199506986655e-05,
"loss": 0.0771,
"step": 1340
},
{
"epoch": 0.17023959646910466,
"grad_norm": 1.4696358442306519,
"learning_rate": 8.73630576416373e-05,
"loss": 0.0452,
"step": 1350
},
{
"epoch": 0.17150063051702397,
"grad_norm": 1.5903834104537964,
"learning_rate": 8.714248583222726e-05,
"loss": 0.0694,
"step": 1360
},
{
"epoch": 0.17276166456494324,
"grad_norm": 1.3543390035629272,
"learning_rate": 8.692028929016196e-05,
"loss": 0.07,
"step": 1370
},
{
"epoch": 0.17402269861286254,
"grad_norm": 1.2121193408966064,
"learning_rate": 8.669647773503797e-05,
"loss": 0.0659,
"step": 1380
},
{
"epoch": 0.17528373266078184,
"grad_norm": 1.7679033279418945,
"learning_rate": 8.647106095709773e-05,
"loss": 0.0807,
"step": 1390
},
{
"epoch": 0.17654476670870115,
"grad_norm": 1.5584529638290405,
"learning_rate": 8.624404881680139e-05,
"loss": 0.08,
"step": 1400
},
{
"epoch": 0.17780580075662042,
"grad_norm": 1.0032943487167358,
"learning_rate": 8.601545124439535e-05,
"loss": 0.0509,
"step": 1410
},
{
"epoch": 0.17906683480453972,
"grad_norm": 1.3927818536758423,
"learning_rate": 8.5785278239478e-05,
"loss": 0.0683,
"step": 1420
},
{
"epoch": 0.18032786885245902,
"grad_norm": 1.626572847366333,
"learning_rate": 8.555353987056224e-05,
"loss": 0.0819,
"step": 1430
},
{
"epoch": 0.18158890290037832,
"grad_norm": 1.9200503826141357,
"learning_rate": 8.532024627463505e-05,
"loss": 0.09,
"step": 1440
},
{
"epoch": 0.1828499369482976,
"grad_norm": 0.8141462802886963,
"learning_rate": 8.508540765671407e-05,
"loss": 0.075,
"step": 1450
},
{
"epoch": 0.1841109709962169,
"grad_norm": 1.4652893543243408,
"learning_rate": 8.484903428940121e-05,
"loss": 0.0743,
"step": 1460
},
{
"epoch": 0.1853720050441362,
"grad_norm": 1.668524980545044,
"learning_rate": 8.461113651243334e-05,
"loss": 0.0727,
"step": 1470
},
{
"epoch": 0.18663303909205547,
"grad_norm": 0.9817781448364258,
"learning_rate": 8.437172473222987e-05,
"loss": 0.0611,
"step": 1480
},
{
"epoch": 0.18789407313997478,
"grad_norm": 1.9690488576889038,
"learning_rate": 8.413080942143767e-05,
"loss": 0.0765,
"step": 1490
},
{
"epoch": 0.18915510718789408,
"grad_norm": 1.770183801651001,
"learning_rate": 8.388840111847288e-05,
"loss": 0.043,
"step": 1500
},
{
"epoch": 0.19041614123581338,
"grad_norm": 1.5847400426864624,
"learning_rate": 8.364451042705998e-05,
"loss": 0.0565,
"step": 1510
},
{
"epoch": 0.19167717528373265,
"grad_norm": 1.2131879329681396,
"learning_rate": 8.33991480157679e-05,
"loss": 0.0644,
"step": 1520
},
{
"epoch": 0.19293820933165196,
"grad_norm": 1.2576392889022827,
"learning_rate": 8.315232461754338e-05,
"loss": 0.0571,
"step": 1530
},
{
"epoch": 0.19419924337957126,
"grad_norm": 1.8759030103683472,
"learning_rate": 8.290405102924144e-05,
"loss": 0.0776,
"step": 1540
},
{
"epoch": 0.19546027742749053,
"grad_norm": 1.6383674144744873,
"learning_rate": 8.265433811115316e-05,
"loss": 0.0905,
"step": 1550
},
{
"epoch": 0.19672131147540983,
"grad_norm": 0.776569664478302,
"learning_rate": 8.240319678653049e-05,
"loss": 0.0705,
"step": 1560
},
{
"epoch": 0.19798234552332913,
"grad_norm": 0.8885311484336853,
"learning_rate": 8.215063804110857e-05,
"loss": 0.0547,
"step": 1570
},
{
"epoch": 0.19924337957124844,
"grad_norm": 1.2598435878753662,
"learning_rate": 8.189667292262512e-05,
"loss": 0.0745,
"step": 1580
},
{
"epoch": 0.2005044136191677,
"grad_norm": 1.2729694843292236,
"learning_rate": 8.164131254033716e-05,
"loss": 0.0512,
"step": 1590
},
{
"epoch": 0.201765447667087,
"grad_norm": 1.0678434371948242,
"learning_rate": 8.138456806453503e-05,
"loss": 0.0621,
"step": 1600
},
{
"epoch": 0.2030264817150063,
"grad_norm": 0.6534672975540161,
"learning_rate": 8.112645072605386e-05,
"loss": 0.0614,
"step": 1610
},
{
"epoch": 0.2042875157629256,
"grad_norm": 1.5475647449493408,
"learning_rate": 8.086697181578222e-05,
"loss": 0.0771,
"step": 1620
},
{
"epoch": 0.2055485498108449,
"grad_norm": 0.9357134699821472,
"learning_rate": 8.060614268416823e-05,
"loss": 0.0588,
"step": 1630
},
{
"epoch": 0.2068095838587642,
"grad_norm": 2.568519115447998,
"learning_rate": 8.034397474072309e-05,
"loss": 0.0844,
"step": 1640
},
{
"epoch": 0.2080706179066835,
"grad_norm": 1.516713261604309,
"learning_rate": 8.008047945352193e-05,
"loss": 0.1124,
"step": 1650
},
{
"epoch": 0.20933165195460277,
"grad_norm": 1.8209099769592285,
"learning_rate": 7.981566834870225e-05,
"loss": 0.0757,
"step": 1660
},
{
"epoch": 0.21059268600252207,
"grad_norm": 0.958635687828064,
"learning_rate": 7.954955300995961e-05,
"loss": 0.0486,
"step": 1670
},
{
"epoch": 0.21185372005044137,
"grad_norm": 1.2683565616607666,
"learning_rate": 7.928214507804104e-05,
"loss": 0.0736,
"step": 1680
},
{
"epoch": 0.21311475409836064,
"grad_norm": 0.9750335812568665,
"learning_rate": 7.901345625023576e-05,
"loss": 0.0711,
"step": 1690
},
{
"epoch": 0.21437578814627994,
"grad_norm": 0.8680040240287781,
"learning_rate": 7.874349827986354e-05,
"loss": 0.0721,
"step": 1700
},
{
"epoch": 0.21563682219419925,
"grad_norm": 1.3047752380371094,
"learning_rate": 7.847228297576053e-05,
"loss": 0.0599,
"step": 1710
},
{
"epoch": 0.21689785624211855,
"grad_norm": 0.6582440733909607,
"learning_rate": 7.819982220176276e-05,
"loss": 0.0841,
"step": 1720
},
{
"epoch": 0.21815889029003782,
"grad_norm": 1.2942705154418945,
"learning_rate": 7.792612787618714e-05,
"loss": 0.0566,
"step": 1730
},
{
"epoch": 0.21941992433795712,
"grad_norm": 1.9159135818481445,
"learning_rate": 7.765121197131009e-05,
"loss": 0.0674,
"step": 1740
},
{
"epoch": 0.22068095838587642,
"grad_norm": 1.4682698249816895,
"learning_rate": 7.737508651284391e-05,
"loss": 0.0604,
"step": 1750
},
{
"epoch": 0.22194199243379573,
"grad_norm": 0.7456977963447571,
"learning_rate": 7.709776357941069e-05,
"loss": 0.0561,
"step": 1760
},
{
"epoch": 0.223203026481715,
"grad_norm": 1.2020699977874756,
"learning_rate": 7.681925530201392e-05,
"loss": 0.0665,
"step": 1770
},
{
"epoch": 0.2244640605296343,
"grad_norm": 0.7044927477836609,
"learning_rate": 7.65395738635079e-05,
"loss": 0.05,
"step": 1780
},
{
"epoch": 0.2257250945775536,
"grad_norm": 1.5077685117721558,
"learning_rate": 7.62587314980648e-05,
"loss": 0.0721,
"step": 1790
},
{
"epoch": 0.22698612862547288,
"grad_norm": 1.6477768421173096,
"learning_rate": 7.597674049063947e-05,
"loss": 0.0606,
"step": 1800
},
{
"epoch": 0.22824716267339218,
"grad_norm": 1.2261930704116821,
"learning_rate": 7.569361317643211e-05,
"loss": 0.0622,
"step": 1810
},
{
"epoch": 0.22950819672131148,
"grad_norm": 0.9352103471755981,
"learning_rate": 7.540936194034865e-05,
"loss": 0.0625,
"step": 1820
},
{
"epoch": 0.23076923076923078,
"grad_norm": 1.0798161029815674,
"learning_rate": 7.512399921645901e-05,
"loss": 0.0575,
"step": 1830
},
{
"epoch": 0.23203026481715006,
"grad_norm": 0.976698100566864,
"learning_rate": 7.483753748745317e-05,
"loss": 0.0626,
"step": 1840
},
{
"epoch": 0.23329129886506936,
"grad_norm": 1.3206651210784912,
"learning_rate": 7.454998928409516e-05,
"loss": 0.0614,
"step": 1850
},
{
"epoch": 0.23455233291298866,
"grad_norm": 0.8597652316093445,
"learning_rate": 7.426136718467493e-05,
"loss": 0.0653,
"step": 1860
},
{
"epoch": 0.23581336696090793,
"grad_norm": 1.1862742900848389,
"learning_rate": 7.397168381445812e-05,
"loss": 0.0567,
"step": 1870
},
{
"epoch": 0.23707440100882723,
"grad_norm": 1.1431993246078491,
"learning_rate": 7.368095184513377e-05,
"loss": 0.057,
"step": 1880
},
{
"epoch": 0.23833543505674654,
"grad_norm": 1.3535854816436768,
"learning_rate": 7.338918399426005e-05,
"loss": 0.0678,
"step": 1890
},
{
"epoch": 0.23959646910466584,
"grad_norm": 1.463699221611023,
"learning_rate": 7.309639302470801e-05,
"loss": 0.0661,
"step": 1900
},
{
"epoch": 0.2408575031525851,
"grad_norm": 1.0674207210540771,
"learning_rate": 7.280259174410312e-05,
"loss": 0.0696,
"step": 1910
},
{
"epoch": 0.2421185372005044,
"grad_norm": 1.7966514825820923,
"learning_rate": 7.250779300426517e-05,
"loss": 0.0606,
"step": 1920
},
{
"epoch": 0.24337957124842372,
"grad_norm": 1.4426531791687012,
"learning_rate": 7.22120097006461e-05,
"loss": 0.0754,
"step": 1930
},
{
"epoch": 0.244640605296343,
"grad_norm": 1.5177613496780396,
"learning_rate": 7.191525477176577e-05,
"loss": 0.0721,
"step": 1940
},
{
"epoch": 0.2459016393442623,
"grad_norm": 1.237687587738037,
"learning_rate": 7.161754119864616e-05,
"loss": 0.0531,
"step": 1950
},
{
"epoch": 0.2471626733921816,
"grad_norm": 1.1148518323898315,
"learning_rate": 7.131888200424339e-05,
"loss": 0.0602,
"step": 1960
},
{
"epoch": 0.2484237074401009,
"grad_norm": 0.7612186074256897,
"learning_rate": 7.101929025287816e-05,
"loss": 0.051,
"step": 1970
},
{
"epoch": 0.24968474148802017,
"grad_norm": 1.2145267724990845,
"learning_rate": 7.071877904966423e-05,
"loss": 0.0511,
"step": 1980
},
{
"epoch": 0.2509457755359395,
"grad_norm": 0.9511802196502686,
"learning_rate": 7.04173615399351e-05,
"loss": 0.0469,
"step": 1990
},
{
"epoch": 0.25220680958385877,
"grad_norm": 0.98260897397995,
"learning_rate": 7.011505090866913e-05,
"loss": 0.051,
"step": 2000
},
{
"epoch": 0.25346784363177804,
"grad_norm": 1.010575771331787,
"learning_rate": 6.981186037991271e-05,
"loss": 0.0505,
"step": 2010
},
{
"epoch": 0.2547288776796974,
"grad_norm": 1.0129963159561157,
"learning_rate": 6.950780321620174e-05,
"loss": 0.0719,
"step": 2020
},
{
"epoch": 0.25598991172761665,
"grad_norm": 1.9137223958969116,
"learning_rate": 6.920289271798157e-05,
"loss": 0.0845,
"step": 2030
},
{
"epoch": 0.2572509457755359,
"grad_norm": 1.41978120803833,
"learning_rate": 6.889714222302517e-05,
"loss": 0.0724,
"step": 2040
},
{
"epoch": 0.25851197982345525,
"grad_norm": 0.8272780179977417,
"learning_rate": 6.85905651058497e-05,
"loss": 0.0495,
"step": 2050
},
{
"epoch": 0.2597730138713745,
"grad_norm": 0.9778422713279724,
"learning_rate": 6.82831747771314e-05,
"loss": 0.0514,
"step": 2060
},
{
"epoch": 0.2610340479192938,
"grad_norm": 0.9014256596565247,
"learning_rate": 6.797498468311907e-05,
"loss": 0.0555,
"step": 2070
},
{
"epoch": 0.26229508196721313,
"grad_norm": 1.9740866422653198,
"learning_rate": 6.766600830504585e-05,
"loss": 0.0579,
"step": 2080
},
{
"epoch": 0.2635561160151324,
"grad_norm": 0.653318464756012,
"learning_rate": 6.735625915853942e-05,
"loss": 0.0529,
"step": 2090
},
{
"epoch": 0.2648171500630517,
"grad_norm": 0.5321911573410034,
"learning_rate": 6.70457507930309e-05,
"loss": 0.0506,
"step": 2100
},
{
"epoch": 0.266078184110971,
"grad_norm": 1.1539849042892456,
"learning_rate": 6.673449679116215e-05,
"loss": 0.0617,
"step": 2110
},
{
"epoch": 0.2673392181588903,
"grad_norm": 1.4305249452590942,
"learning_rate": 6.642251076819148e-05,
"loss": 0.062,
"step": 2120
},
{
"epoch": 0.2686002522068096,
"grad_norm": 1.937014102935791,
"learning_rate": 6.610980637139827e-05,
"loss": 0.0766,
"step": 2130
},
{
"epoch": 0.2698612862547289,
"grad_norm": 1.4543286561965942,
"learning_rate": 6.579639727948583e-05,
"loss": 0.058,
"step": 2140
},
{
"epoch": 0.27112232030264816,
"grad_norm": 1.0391244888305664,
"learning_rate": 6.548229720198315e-05,
"loss": 0.07,
"step": 2150
},
{
"epoch": 0.2723833543505675,
"grad_norm": 1.2010960578918457,
"learning_rate": 6.516751987864517e-05,
"loss": 0.0576,
"step": 2160
},
{
"epoch": 0.27364438839848676,
"grad_norm": 1.0839611291885376,
"learning_rate": 6.485207907885175e-05,
"loss": 0.0654,
"step": 2170
},
{
"epoch": 0.27490542244640603,
"grad_norm": 1.0279420614242554,
"learning_rate": 6.453598860100536e-05,
"loss": 0.0678,
"step": 2180
},
{
"epoch": 0.27616645649432536,
"grad_norm": 0.9727810621261597,
"learning_rate": 6.421926227192749e-05,
"loss": 0.0489,
"step": 2190
},
{
"epoch": 0.27742749054224464,
"grad_norm": 1.4229283332824707,
"learning_rate": 6.390191394625381e-05,
"loss": 0.0827,
"step": 2200
},
{
"epoch": 0.2786885245901639,
"grad_norm": 1.5392534732818604,
"learning_rate": 6.358395750582817e-05,
"loss": 0.0502,
"step": 2210
},
{
"epoch": 0.27994955863808324,
"grad_norm": 1.3767160177230835,
"learning_rate": 6.326540685909532e-05,
"loss": 0.0717,
"step": 2220
},
{
"epoch": 0.2812105926860025,
"grad_norm": 0.9939086437225342,
"learning_rate": 6.294627594049249e-05,
"loss": 0.0381,
"step": 2230
},
{
"epoch": 0.28247162673392184,
"grad_norm": 0.9147608280181885,
"learning_rate": 6.262657870983989e-05,
"loss": 0.0401,
"step": 2240
},
{
"epoch": 0.2837326607818411,
"grad_norm": 1.236060619354248,
"learning_rate": 6.230632915173009e-05,
"loss": 0.052,
"step": 2250
},
{
"epoch": 0.2849936948297604,
"grad_norm": 0.844493567943573,
"learning_rate": 6.198554127491622e-05,
"loss": 0.0604,
"step": 2260
},
{
"epoch": 0.2862547288776797,
"grad_norm": 1.0324779748916626,
"learning_rate": 6.166422911169923e-05,
"loss": 0.0676,
"step": 2270
},
{
"epoch": 0.287515762925599,
"grad_norm": 0.8622553944587708,
"learning_rate": 6.1342406717314e-05,
"loss": 0.0597,
"step": 2280
},
{
"epoch": 0.28877679697351827,
"grad_norm": 1.1971062421798706,
"learning_rate": 6.102008816931466e-05,
"loss": 0.0505,
"step": 2290
},
{
"epoch": 0.2900378310214376,
"grad_norm": 1.792452335357666,
"learning_rate": 6.069728756695866e-05,
"loss": 0.0568,
"step": 2300
},
{
"epoch": 0.29129886506935687,
"grad_norm": 1.1744405031204224,
"learning_rate": 6.037401903059008e-05,
"loss": 0.0504,
"step": 2310
},
{
"epoch": 0.29255989911727615,
"grad_norm": 1.1916894912719727,
"learning_rate": 6.005029670102195e-05,
"loss": 0.0577,
"step": 2320
},
{
"epoch": 0.2938209331651955,
"grad_norm": 0.9856624603271484,
"learning_rate": 5.972613473891766e-05,
"loss": 0.0608,
"step": 2330
},
{
"epoch": 0.29508196721311475,
"grad_norm": 0.9632654786109924,
"learning_rate": 5.940154732417158e-05,
"loss": 0.0535,
"step": 2340
},
{
"epoch": 0.296343001261034,
"grad_norm": 0.692136824131012,
"learning_rate": 5.907654865528876e-05,
"loss": 0.0471,
"step": 2350
},
{
"epoch": 0.29760403530895335,
"grad_norm": 1.9872313737869263,
"learning_rate": 5.875115294876381e-05,
"loss": 0.0676,
"step": 2360
},
{
"epoch": 0.2988650693568726,
"grad_norm": 0.9287277460098267,
"learning_rate": 5.842537443845908e-05,
"loss": 0.0585,
"step": 2370
},
{
"epoch": 0.30012610340479196,
"grad_norm": 0.984683632850647,
"learning_rate": 5.809922737498198e-05,
"loss": 0.0573,
"step": 2380
},
{
"epoch": 0.30138713745271123,
"grad_norm": 1.1549715995788574,
"learning_rate": 5.777272602506165e-05,
"loss": 0.0571,
"step": 2390
},
{
"epoch": 0.3026481715006305,
"grad_norm": 0.9611703753471375,
"learning_rate": 5.744588467092483e-05,
"loss": 0.0592,
"step": 2400
},
{
"epoch": 0.30390920554854983,
"grad_norm": 0.6672961115837097,
"learning_rate": 5.7118717609671194e-05,
"loss": 0.0933,
"step": 2410
},
{
"epoch": 0.3051702395964691,
"grad_norm": 1.5556656122207642,
"learning_rate": 5.679123915264786e-05,
"loss": 0.0634,
"step": 2420
},
{
"epoch": 0.3064312736443884,
"grad_norm": 0.737484872341156,
"learning_rate": 5.646346362482342e-05,
"loss": 0.0448,
"step": 2430
},
{
"epoch": 0.3076923076923077,
"grad_norm": 0.9914329648017883,
"learning_rate": 5.613540536416132e-05,
"loss": 0.0532,
"step": 2440
},
{
"epoch": 0.308953341740227,
"grad_norm": 1.0218828916549683,
"learning_rate": 5.5807078720992645e-05,
"loss": 0.051,
"step": 2450
},
{
"epoch": 0.31021437578814626,
"grad_norm": 1.627498984336853,
"learning_rate": 5.547849805738836e-05,
"loss": 0.0393,
"step": 2460
},
{
"epoch": 0.3114754098360656,
"grad_norm": 1.6867691278457642,
"learning_rate": 5.514967774653118e-05,
"loss": 0.075,
"step": 2470
},
{
"epoch": 0.31273644388398486,
"grad_norm": 1.1736292839050293,
"learning_rate": 5.482063217208674e-05,
"loss": 0.0536,
"step": 2480
},
{
"epoch": 0.31399747793190413,
"grad_norm": 0.6760343909263611,
"learning_rate": 5.449137572757439e-05,
"loss": 0.0545,
"step": 2490
},
{
"epoch": 0.31525851197982346,
"grad_norm": 0.88604336977005,
"learning_rate": 5.4161922815737696e-05,
"loss": 0.0713,
"step": 2500
},
{
"epoch": 0.31651954602774274,
"grad_norm": 0.6906472444534302,
"learning_rate": 5.3832287847914276e-05,
"loss": 0.05,
"step": 2510
},
{
"epoch": 0.31778058007566207,
"grad_norm": 0.7496006488800049,
"learning_rate": 5.35024852434055e-05,
"loss": 0.0581,
"step": 2520
},
{
"epoch": 0.31904161412358134,
"grad_norm": 1.3291327953338623,
"learning_rate": 5.317252942884567e-05,
"loss": 0.0548,
"step": 2530
},
{
"epoch": 0.3203026481715006,
"grad_norm": 1.1537989377975464,
"learning_rate": 5.284243483757109e-05,
"loss": 0.0516,
"step": 2540
},
{
"epoch": 0.32156368221941994,
"grad_norm": 1.2708510160446167,
"learning_rate": 5.2512215908988484e-05,
"loss": 0.0603,
"step": 2550
},
{
"epoch": 0.3228247162673392,
"grad_norm": 0.4326554238796234,
"learning_rate": 5.218188708794357e-05,
"loss": 0.0483,
"step": 2560
},
{
"epoch": 0.3240857503152585,
"grad_norm": 1.142648696899414,
"learning_rate": 5.18514628240891e-05,
"loss": 0.065,
"step": 2570
},
{
"epoch": 0.3253467843631778,
"grad_norm": 0.7592518329620361,
"learning_rate": 5.1520957571252795e-05,
"loss": 0.0326,
"step": 2580
},
{
"epoch": 0.3266078184110971,
"grad_norm": 1.01433527469635,
"learning_rate": 5.1190385786805106e-05,
"loss": 0.0625,
"step": 2590
},
{
"epoch": 0.32786885245901637,
"grad_norm": 0.751297116279602,
"learning_rate": 5.085976193102677e-05,
"loss": 0.0363,
"step": 2600
},
{
"epoch": 0.3291298865069357,
"grad_norm": 1.2161413431167603,
"learning_rate": 5.052910046647634e-05,
"loss": 0.0489,
"step": 2610
},
{
"epoch": 0.33039092055485497,
"grad_norm": 0.8380374908447266,
"learning_rate": 5.0198415857357464e-05,
"loss": 0.0513,
"step": 2620
},
{
"epoch": 0.3316519546027743,
"grad_norm": 1.255977988243103,
"learning_rate": 4.9867722568886223e-05,
"loss": 0.0571,
"step": 2630
},
{
"epoch": 0.3329129886506936,
"grad_norm": 0.7204821705818176,
"learning_rate": 4.9537035066658314e-05,
"loss": 0.0589,
"step": 2640
},
{
"epoch": 0.33417402269861285,
"grad_norm": 0.7960894107818604,
"learning_rate": 4.920636781601638e-05,
"loss": 0.0437,
"step": 2650
},
{
"epoch": 0.3354350567465322,
"grad_norm": 0.922639012336731,
"learning_rate": 4.88757352814172e-05,
"loss": 0.0435,
"step": 2660
},
{
"epoch": 0.33669609079445145,
"grad_norm": 0.42557841539382935,
"learning_rate": 4.8545151925798924e-05,
"loss": 0.0359,
"step": 2670
},
{
"epoch": 0.3379571248423707,
"grad_norm": 1.0862234830856323,
"learning_rate": 4.821463220994848e-05,
"loss": 0.0567,
"step": 2680
},
{
"epoch": 0.33921815889029006,
"grad_norm": 1.273342490196228,
"learning_rate": 4.788419059186895e-05,
"loss": 0.0464,
"step": 2690
},
{
"epoch": 0.34047919293820933,
"grad_norm": 0.7602908611297607,
"learning_rate": 4.7553841526147205e-05,
"loss": 0.0731,
"step": 2700
},
{
"epoch": 0.3417402269861286,
"grad_norm": 1.2073414325714111,
"learning_rate": 4.722359946332156e-05,
"loss": 0.0446,
"step": 2710
},
{
"epoch": 0.34300126103404793,
"grad_norm": 1.0555393695831299,
"learning_rate": 4.6893478849249654e-05,
"loss": 0.0467,
"step": 2720
},
{
"epoch": 0.3442622950819672,
"grad_norm": 1.1533141136169434,
"learning_rate": 4.656349412447664e-05,
"loss": 0.0461,
"step": 2730
},
{
"epoch": 0.3455233291298865,
"grad_norm": 0.9922979474067688,
"learning_rate": 4.623365972360337e-05,
"loss": 0.0452,
"step": 2740
},
{
"epoch": 0.3467843631778058,
"grad_norm": 0.6133615970611572,
"learning_rate": 4.590399007465503e-05,
"loss": 0.0372,
"step": 2750
},
{
"epoch": 0.3480453972257251,
"grad_norm": 0.8727760910987854,
"learning_rate": 4.557449959845005e-05,
"loss": 0.0355,
"step": 2760
},
{
"epoch": 0.3493064312736444,
"grad_norm": 0.6977477073669434,
"learning_rate": 4.524520270796927e-05,
"loss": 0.0501,
"step": 2770
},
{
"epoch": 0.3505674653215637,
"grad_norm": 0.6289255619049072,
"learning_rate": 4.491611380772545e-05,
"loss": 0.0514,
"step": 2780
},
{
"epoch": 0.35182849936948296,
"grad_norm": 0.5768862962722778,
"learning_rate": 4.458724729313318e-05,
"loss": 0.03,
"step": 2790
},
{
"epoch": 0.3530895334174023,
"grad_norm": 0.8174225687980652,
"learning_rate": 4.42586175498792e-05,
"loss": 0.0501,
"step": 2800
},
{
"epoch": 0.35435056746532156,
"grad_norm": 0.8189591765403748,
"learning_rate": 4.3930238953293094e-05,
"loss": 0.0347,
"step": 2810
},
{
"epoch": 0.35561160151324084,
"grad_norm": 0.864609956741333,
"learning_rate": 4.360212586771847e-05,
"loss": 0.0386,
"step": 2820
},
{
"epoch": 0.35687263556116017,
"grad_norm": 0.9554038643836975,
"learning_rate": 4.327429264588463e-05,
"loss": 0.0542,
"step": 2830
},
{
"epoch": 0.35813366960907944,
"grad_norm": 1.16752028465271,
"learning_rate": 4.2946753628278725e-05,
"loss": 0.0476,
"step": 2840
},
{
"epoch": 0.3593947036569987,
"grad_norm": 1.0468887090682983,
"learning_rate": 4.2619523142518474e-05,
"loss": 0.0392,
"step": 2850
},
{
"epoch": 0.36065573770491804,
"grad_norm": 0.7712638974189758,
"learning_rate": 4.229261550272539e-05,
"loss": 0.0348,
"step": 2860
},
{
"epoch": 0.3619167717528373,
"grad_norm": 0.935126781463623,
"learning_rate": 4.196604500889868e-05,
"loss": 0.0448,
"step": 2870
},
{
"epoch": 0.36317780580075665,
"grad_norm": 1.232252836227417,
"learning_rate": 4.163982594628969e-05,
"loss": 0.0528,
"step": 2880
},
{
"epoch": 0.3644388398486759,
"grad_norm": 0.5925912857055664,
"learning_rate": 4.131397258477702e-05,
"loss": 0.0544,
"step": 2890
},
{
"epoch": 0.3656998738965952,
"grad_norm": 1.0549910068511963,
"learning_rate": 4.0988499178242315e-05,
"loss": 0.0417,
"step": 2900
},
{
"epoch": 0.3669609079445145,
"grad_norm": 0.8058827519416809,
"learning_rate": 4.066341996394678e-05,
"loss": 0.0729,
"step": 2910
},
{
"epoch": 0.3682219419924338,
"grad_norm": 0.5958355665206909,
"learning_rate": 4.033874916190833e-05,
"loss": 0.0491,
"step": 2920
},
{
"epoch": 0.3694829760403531,
"grad_norm": 0.9849727749824524,
"learning_rate": 4.001450097427966e-05,
"loss": 0.0494,
"step": 2930
},
{
"epoch": 0.3707440100882724,
"grad_norm": 0.402420312166214,
"learning_rate": 3.9690689584726894e-05,
"loss": 0.0425,
"step": 2940
},
{
"epoch": 0.3720050441361917,
"grad_norm": 0.6626855731010437,
"learning_rate": 3.936732915780923e-05,
"loss": 0.0494,
"step": 2950
},
{
"epoch": 0.37326607818411095,
"grad_norm": 0.6585484743118286,
"learning_rate": 3.904443383835929e-05,
"loss": 0.036,
"step": 2960
},
{
"epoch": 0.3745271122320303,
"grad_norm": 0.9684002995491028,
"learning_rate": 3.872201775086437e-05,
"loss": 0.0343,
"step": 2970
},
{
"epoch": 0.37578814627994955,
"grad_norm": 0.7882438898086548,
"learning_rate": 3.8400094998848616e-05,
"loss": 0.0461,
"step": 2980
},
{
"epoch": 0.3770491803278688,
"grad_norm": 1.2529492378234863,
"learning_rate": 3.807867966425611e-05,
"loss": 0.0462,
"step": 2990
},
{
"epoch": 0.37831021437578816,
"grad_norm": 0.6608725190162659,
"learning_rate": 3.775778580683481e-05,
"loss": 0.0333,
"step": 3000
},
{
"epoch": 0.37957124842370743,
"grad_norm": 1.0286078453063965,
"learning_rate": 3.743742746352156e-05,
"loss": 0.0397,
"step": 3010
},
{
"epoch": 0.38083228247162676,
"grad_norm": 0.837370753288269,
"learning_rate": 3.711761864782817e-05,
"loss": 0.0419,
"step": 3020
},
{
"epoch": 0.38209331651954603,
"grad_norm": 0.6928431391716003,
"learning_rate": 3.679837334922825e-05,
"loss": 0.0513,
"step": 3030
},
{
"epoch": 0.3833543505674653,
"grad_norm": 1.4095587730407715,
"learning_rate": 3.647970553254538e-05,
"loss": 0.0503,
"step": 3040
},
{
"epoch": 0.38461538461538464,
"grad_norm": 0.8106990456581116,
"learning_rate": 3.61616291373422e-05,
"loss": 0.0313,
"step": 3050
},
{
"epoch": 0.3858764186633039,
"grad_norm": 0.9672452211380005,
"learning_rate": 3.584415807731065e-05,
"loss": 0.0496,
"step": 3060
},
{
"epoch": 0.3871374527112232,
"grad_norm": 1.2098649740219116,
"learning_rate": 3.552730623966337e-05,
"loss": 0.0481,
"step": 3070
},
{
"epoch": 0.3883984867591425,
"grad_norm": 0.6777541041374207,
"learning_rate": 3.521108748452617e-05,
"loss": 0.0358,
"step": 3080
},
{
"epoch": 0.3896595208070618,
"grad_norm": 0.9190433025360107,
"learning_rate": 3.489551564433186e-05,
"loss": 0.0385,
"step": 3090
},
{
"epoch": 0.39092055485498106,
"grad_norm": 1.1785540580749512,
"learning_rate": 3.4580604523215006e-05,
"loss": 0.0415,
"step": 3100
},
{
"epoch": 0.3921815889029004,
"grad_norm": 1.061510443687439,
"learning_rate": 3.4266367896408216e-05,
"loss": 0.0615,
"step": 3110
},
{
"epoch": 0.39344262295081966,
"grad_norm": 0.3887889087200165,
"learning_rate": 3.3952819509639534e-05,
"loss": 0.0354,
"step": 3120
},
{
"epoch": 0.39470365699873894,
"grad_norm": 1.259713053703308,
"learning_rate": 3.3639973078531165e-05,
"loss": 0.0318,
"step": 3130
},
{
"epoch": 0.39596469104665827,
"grad_norm": 0.5290289521217346,
"learning_rate": 3.332784228799947e-05,
"loss": 0.0369,
"step": 3140
},
{
"epoch": 0.39722572509457754,
"grad_norm": 0.4990309774875641,
"learning_rate": 3.301644079165638e-05,
"loss": 0.0326,
"step": 3150
},
{
"epoch": 0.39848675914249687,
"grad_norm": 0.6674959659576416,
"learning_rate": 3.27057822112122e-05,
"loss": 0.042,
"step": 3160
},
{
"epoch": 0.39974779319041615,
"grad_norm": 0.441133975982666,
"learning_rate": 3.239588013587958e-05,
"loss": 0.0435,
"step": 3170
},
{
"epoch": 0.4010088272383354,
"grad_norm": 0.6551958918571472,
"learning_rate": 3.208674812177926e-05,
"loss": 0.0477,
"step": 3180
},
{
"epoch": 0.40226986128625475,
"grad_norm": 0.4257091283798218,
"learning_rate": 3.177839969134698e-05,
"loss": 0.0417,
"step": 3190
},
{
"epoch": 0.403530895334174,
"grad_norm": 0.8351610898971558,
"learning_rate": 3.1470848332742e-05,
"loss": 0.0469,
"step": 3200
},
{
"epoch": 0.4047919293820933,
"grad_norm": 1.344315528869629,
"learning_rate": 3.116410749925708e-05,
"loss": 0.0351,
"step": 3210
},
{
"epoch": 0.4060529634300126,
"grad_norm": 0.8326131105422974,
"learning_rate": 3.085819060872995e-05,
"loss": 0.0273,
"step": 3220
},
{
"epoch": 0.4073139974779319,
"grad_norm": 1.4991629123687744,
"learning_rate": 3.055311104295648e-05,
"loss": 0.0361,
"step": 3230
},
{
"epoch": 0.4085750315258512,
"grad_norm": 0.7908870577812195,
"learning_rate": 3.024888214710517e-05,
"loss": 0.0406,
"step": 3240
},
{
"epoch": 0.4098360655737705,
"grad_norm": 1.4762327671051025,
"learning_rate": 2.994551722913349e-05,
"loss": 0.0586,
"step": 3250
},
{
"epoch": 0.4110970996216898,
"grad_norm": 1.2754584550857544,
"learning_rate": 2.9643029559205727e-05,
"loss": 0.0686,
"step": 3260
},
{
"epoch": 0.4123581336696091,
"grad_norm": 0.5942383408546448,
"learning_rate": 2.934143236911248e-05,
"loss": 0.0395,
"step": 3270
},
{
"epoch": 0.4136191677175284,
"grad_norm": 0.3312892019748688,
"learning_rate": 2.90407388516919e-05,
"loss": 0.0485,
"step": 3280
},
{
"epoch": 0.41488020176544765,
"grad_norm": 1.1495757102966309,
"learning_rate": 2.8740962160252495e-05,
"loss": 0.0475,
"step": 3290
},
{
"epoch": 0.416141235813367,
"grad_norm": 0.6852736473083496,
"learning_rate": 2.844211540799797e-05,
"loss": 0.0393,
"step": 3300
},
{
"epoch": 0.41740226986128626,
"grad_norm": 0.5902966856956482,
"learning_rate": 2.8144211667453368e-05,
"loss": 0.0418,
"step": 3310
},
{
"epoch": 0.41866330390920553,
"grad_norm": 0.3494200110435486,
"learning_rate": 2.7847263969893344e-05,
"loss": 0.0477,
"step": 3320
},
{
"epoch": 0.41992433795712486,
"grad_norm": 1.7985684871673584,
"learning_rate": 2.7551285304772206e-05,
"loss": 0.0345,
"step": 3330
},
{
"epoch": 0.42118537200504413,
"grad_norm": 1.3080884218215942,
"learning_rate": 2.7256288619155567e-05,
"loss": 0.0383,
"step": 3340
},
{
"epoch": 0.4224464060529634,
"grad_norm": 0.7690442800521851,
"learning_rate": 2.6962286817154158e-05,
"loss": 0.0407,
"step": 3350
},
{
"epoch": 0.42370744010088274,
"grad_norm": 1.0104235410690308,
"learning_rate": 2.6669292759359166e-05,
"loss": 0.0522,
"step": 3360
},
{
"epoch": 0.424968474148802,
"grad_norm": 0.8277296423912048,
"learning_rate": 2.637731926227993e-05,
"loss": 0.0336,
"step": 3370
},
{
"epoch": 0.4262295081967213,
"grad_norm": 1.3783977031707764,
"learning_rate": 2.6086379097783033e-05,
"loss": 0.0389,
"step": 3380
},
{
"epoch": 0.4274905422446406,
"grad_norm": 0.7857207655906677,
"learning_rate": 2.579648499253377e-05,
"loss": 0.0355,
"step": 3390
},
{
"epoch": 0.4287515762925599,
"grad_norm": 0.4031466543674469,
"learning_rate": 2.5507649627439466e-05,
"loss": 0.0384,
"step": 3400
},
{
"epoch": 0.4300126103404792,
"grad_norm": 1.1103296279907227,
"learning_rate": 2.5219885637094653e-05,
"loss": 0.0509,
"step": 3410
},
{
"epoch": 0.4312736443883985,
"grad_norm": 0.8391857147216797,
"learning_rate": 2.4933205609228533e-05,
"loss": 0.0572,
"step": 3420
},
{
"epoch": 0.43253467843631777,
"grad_norm": 0.5264548659324646,
"learning_rate": 2.464762208415419e-05,
"loss": 0.0287,
"step": 3430
},
{
"epoch": 0.4337957124842371,
"grad_norm": 1.0185983180999756,
"learning_rate": 2.4363147554220213e-05,
"loss": 0.0364,
"step": 3440
},
{
"epoch": 0.43505674653215637,
"grad_norm": 0.9097549319267273,
"learning_rate": 2.407979446326411e-05,
"loss": 0.0324,
"step": 3450
},
{
"epoch": 0.43631778058007564,
"grad_norm": 1.2580032348632812,
"learning_rate": 2.379757520606799e-05,
"loss": 0.04,
"step": 3460
},
{
"epoch": 0.43757881462799497,
"grad_norm": 0.8343772292137146,
"learning_rate": 2.3516502127816455e-05,
"loss": 0.0575,
"step": 3470
},
{
"epoch": 0.43883984867591425,
"grad_norm": 1.025708556175232,
"learning_rate": 2.323658752355647e-05,
"loss": 0.0254,
"step": 3480
},
{
"epoch": 0.4401008827238335,
"grad_norm": 1.2158018350601196,
"learning_rate": 2.2957843637659654e-05,
"loss": 0.0327,
"step": 3490
},
{
"epoch": 0.44136191677175285,
"grad_norm": 1.1680078506469727,
"learning_rate": 2.2680282663286552e-05,
"loss": 0.0348,
"step": 3500
},
{
"epoch": 0.4426229508196721,
"grad_norm": 1.0319323539733887,
"learning_rate": 2.2403916741853364e-05,
"loss": 0.0387,
"step": 3510
},
{
"epoch": 0.44388398486759145,
"grad_norm": 0.7653447389602661,
"learning_rate": 2.2128757962500817e-05,
"loss": 0.0344,
"step": 3520
},
{
"epoch": 0.4451450189155107,
"grad_norm": 0.45407629013061523,
"learning_rate": 2.1854818361565275e-05,
"loss": 0.0233,
"step": 3530
},
{
"epoch": 0.44640605296343,
"grad_norm": 0.5182017087936401,
"learning_rate": 2.1582109922052364e-05,
"loss": 0.0234,
"step": 3540
},
{
"epoch": 0.44766708701134933,
"grad_norm": 1.374019980430603,
"learning_rate": 2.1310644573112635e-05,
"loss": 0.041,
"step": 3550
},
{
"epoch": 0.4489281210592686,
"grad_norm": 1.2263790369033813,
"learning_rate": 2.1040434189519924e-05,
"loss": 0.0641,
"step": 3560
},
{
"epoch": 0.4501891551071879,
"grad_norm": 0.9699317812919617,
"learning_rate": 2.0771490591151733e-05,
"loss": 0.0323,
"step": 3570
},
{
"epoch": 0.4514501891551072,
"grad_norm": 0.5058130621910095,
"learning_rate": 2.0503825542472317e-05,
"loss": 0.0424,
"step": 3580
},
{
"epoch": 0.4527112232030265,
"grad_norm": 1.0030344724655151,
"learning_rate": 2.023745075201805e-05,
"loss": 0.0357,
"step": 3590
},
{
"epoch": 0.45397225725094575,
"grad_norm": 0.864802360534668,
"learning_rate": 1.9972377871885157e-05,
"loss": 0.0377,
"step": 3600
},
{
"epoch": 0.4552332912988651,
"grad_norm": 0.7574279308319092,
"learning_rate": 1.970861849722017e-05,
"loss": 0.0458,
"step": 3610
},
{
"epoch": 0.45649432534678436,
"grad_norm": 0.2994956970214844,
"learning_rate": 1.9446184165712587e-05,
"loss": 0.0257,
"step": 3620
},
{
"epoch": 0.45775535939470363,
"grad_norm": 1.153183937072754,
"learning_rate": 1.9185086357090214e-05,
"loss": 0.039,
"step": 3630
},
{
"epoch": 0.45901639344262296,
"grad_norm": 0.9249959588050842,
"learning_rate": 1.8925336492617057e-05,
"loss": 0.0427,
"step": 3640
},
{
"epoch": 0.46027742749054223,
"grad_norm": 0.2608698606491089,
"learning_rate": 1.8666945934593666e-05,
"loss": 0.0369,
"step": 3650
},
{
"epoch": 0.46153846153846156,
"grad_norm": 0.8319458365440369,
"learning_rate": 1.8409925985860126e-05,
"loss": 0.0551,
"step": 3660
},
{
"epoch": 0.46279949558638084,
"grad_norm": 0.508533239364624,
"learning_rate": 1.8154287889301603e-05,
"loss": 0.0397,
"step": 3670
},
{
"epoch": 0.4640605296343001,
"grad_norm": 0.6176130175590515,
"learning_rate": 1.7900042827356612e-05,
"loss": 0.0285,
"step": 3680
},
{
"epoch": 0.46532156368221944,
"grad_norm": 0.9162710905075073,
"learning_rate": 1.76472019215278e-05,
"loss": 0.0608,
"step": 3690
},
{
"epoch": 0.4665825977301387,
"grad_norm": 0.5802826881408691,
"learning_rate": 1.739577623189545e-05,
"loss": 0.0382,
"step": 3700
},
{
"epoch": 0.467843631778058,
"grad_norm": 0.5430688858032227,
"learning_rate": 1.7145776756633768e-05,
"loss": 0.0311,
"step": 3710
},
{
"epoch": 0.4691046658259773,
"grad_norm": 0.9320967197418213,
"learning_rate": 1.6897214431529646e-05,
"loss": 0.0612,
"step": 3720
},
{
"epoch": 0.4703656998738966,
"grad_norm": 0.7418308258056641,
"learning_rate": 1.6650100129504475e-05,
"loss": 0.0316,
"step": 3730
},
{
"epoch": 0.47162673392181587,
"grad_norm": 0.7048162221908569,
"learning_rate": 1.6404444660138335e-05,
"loss": 0.0306,
"step": 3740
},
{
"epoch": 0.4728877679697352,
"grad_norm": 0.6491131782531738,
"learning_rate": 1.616025876919725e-05,
"loss": 0.0292,
"step": 3750
},
{
"epoch": 0.47414880201765447,
"grad_norm": 0.8289541602134705,
"learning_rate": 1.5917553138163172e-05,
"loss": 0.0341,
"step": 3760
},
{
"epoch": 0.47540983606557374,
"grad_norm": 0.7368041276931763,
"learning_rate": 1.5676338383766632e-05,
"loss": 0.0451,
"step": 3770
},
{
"epoch": 0.4766708701134931,
"grad_norm": 0.7551342248916626,
"learning_rate": 1.5436625057522447e-05,
"loss": 0.0289,
"step": 3780
},
{
"epoch": 0.47793190416141235,
"grad_norm": 1.065066933631897,
"learning_rate": 1.519842364526804e-05,
"loss": 0.0508,
"step": 3790
},
{
"epoch": 0.4791929382093317,
"grad_norm": 0.2923586964607239,
"learning_rate": 1.4961744566704855e-05,
"loss": 0.0315,
"step": 3800
},
{
"epoch": 0.48045397225725095,
"grad_norm": 0.3932078182697296,
"learning_rate": 1.4726598174942551e-05,
"loss": 0.0334,
"step": 3810
},
{
"epoch": 0.4817150063051702,
"grad_norm": 0.8947867155075073,
"learning_rate": 1.4492994756046035e-05,
"loss": 0.0522,
"step": 3820
},
{
"epoch": 0.48297604035308955,
"grad_norm": 1.2476698160171509,
"learning_rate": 1.4260944528585645e-05,
"loss": 0.0513,
"step": 3830
},
{
"epoch": 0.4842370744010088,
"grad_norm": 0.404548704624176,
"learning_rate": 1.4030457643190048e-05,
"loss": 0.0328,
"step": 3840
},
{
"epoch": 0.4854981084489281,
"grad_norm": 0.8194689750671387,
"learning_rate": 1.3801544182102311e-05,
"loss": 0.034,
"step": 3850
},
{
"epoch": 0.48675914249684743,
"grad_norm": 0.4916388988494873,
"learning_rate": 1.3574214158738763e-05,
"loss": 0.0323,
"step": 3860
},
{
"epoch": 0.4880201765447667,
"grad_norm": 0.7781252861022949,
"learning_rate": 1.3348477517251101e-05,
"loss": 0.032,
"step": 3870
},
{
"epoch": 0.489281210592686,
"grad_norm": 0.7190259099006653,
"learning_rate": 1.312434413209131e-05,
"loss": 0.0474,
"step": 3880
},
{
"epoch": 0.4905422446406053,
"grad_norm": 0.523539662361145,
"learning_rate": 1.2901823807579727e-05,
"loss": 0.0305,
"step": 3890
},
{
"epoch": 0.4918032786885246,
"grad_norm": 0.629836916923523,
"learning_rate": 1.2680926277476245e-05,
"loss": 0.0249,
"step": 3900
},
{
"epoch": 0.4930643127364439,
"grad_norm": 0.9373968243598938,
"learning_rate": 1.2461661204554397e-05,
"loss": 0.0443,
"step": 3910
},
{
"epoch": 0.4943253467843632,
"grad_norm": 0.32277607917785645,
"learning_rate": 1.2244038180178835e-05,
"loss": 0.0467,
"step": 3920
},
{
"epoch": 0.49558638083228246,
"grad_norm": 1.0511391162872314,
"learning_rate": 1.2028066723885612e-05,
"loss": 0.0231,
"step": 3930
},
{
"epoch": 0.4968474148802018,
"grad_norm": 0.8604103922843933,
"learning_rate": 1.1813756282965888e-05,
"loss": 0.0292,
"step": 3940
},
{
"epoch": 0.49810844892812106,
"grad_norm": 0.7116047739982605,
"learning_rate": 1.1601116232052638e-05,
"loss": 0.0463,
"step": 3950
},
{
"epoch": 0.49936948297604034,
"grad_norm": 0.551101565361023,
"learning_rate": 1.1390155872710517e-05,
"loss": 0.0317,
"step": 3960
},
{
"epoch": 0.5006305170239597,
"grad_norm": 1.0197986364364624,
"learning_rate": 1.1180884433029087e-05,
"loss": 0.0225,
"step": 3970
},
{
"epoch": 0.501891551071879,
"grad_norm": 0.8839088678359985,
"learning_rate": 1.097331106721904e-05,
"loss": 0.0291,
"step": 3980
},
{
"epoch": 0.5031525851197982,
"grad_norm": 1.162286400794983,
"learning_rate": 1.0767444855211862e-05,
"loss": 0.0587,
"step": 3990
},
{
"epoch": 0.5044136191677175,
"grad_norm": 0.8182680606842041,
"learning_rate": 1.0563294802262558e-05,
"loss": 0.0216,
"step": 4000
},
{
"epoch": 0.5056746532156369,
"grad_norm": 0.9077721834182739,
"learning_rate": 1.0360869838555809e-05,
"loss": 0.0372,
"step": 4010
},
{
"epoch": 0.5069356872635561,
"grad_norm": 0.7742459177970886,
"learning_rate": 1.0160178818815313e-05,
"loss": 0.0418,
"step": 4020
},
{
"epoch": 0.5081967213114754,
"grad_norm": 0.6729003190994263,
"learning_rate": 9.961230521916387e-06,
"loss": 0.0323,
"step": 4030
},
{
"epoch": 0.5094577553593947,
"grad_norm": 0.735399603843689,
"learning_rate": 9.764033650502074e-06,
"loss": 0.034,
"step": 4040
},
{
"epoch": 0.510718789407314,
"grad_norm": 0.6807470917701721,
"learning_rate": 9.568596830602344e-06,
"loss": 0.0235,
"step": 4050
},
{
"epoch": 0.5119798234552333,
"grad_norm": 1.1667709350585938,
"learning_rate": 9.37492861125681e-06,
"loss": 0.0445,
"step": 4060
},
{
"epoch": 0.5132408575031526,
"grad_norm": 0.9771605730056763,
"learning_rate": 9.183037464140804e-06,
"loss": 0.0308,
"step": 4070
},
{
"epoch": 0.5145018915510718,
"grad_norm": 0.4709910452365875,
"learning_rate": 8.992931783194735e-06,
"loss": 0.0409,
"step": 4080
},
{
"epoch": 0.5157629255989912,
"grad_norm": 0.8958632349967957,
"learning_rate": 8.80461988425696e-06,
"loss": 0.0338,
"step": 4090
},
{
"epoch": 0.5170239596469105,
"grad_norm": 0.9982578754425049,
"learning_rate": 8.618110004699974e-06,
"loss": 0.0409,
"step": 4100
},
{
"epoch": 0.5182849936948297,
"grad_norm": 0.4651269018650055,
"learning_rate": 8.4334103030701e-06,
"loss": 0.0266,
"step": 4110
},
{
"epoch": 0.519546027742749,
"grad_norm": 0.33716320991516113,
"learning_rate": 8.25052885873066e-06,
"loss": 0.0284,
"step": 4120
},
{
"epoch": 0.5208070617906684,
"grad_norm": 0.5822999477386475,
"learning_rate": 8.06947367150846e-06,
"loss": 0.0341,
"step": 4130
},
{
"epoch": 0.5220680958385876,
"grad_norm": 0.6434028148651123,
"learning_rate": 7.890252661343938e-06,
"loss": 0.0206,
"step": 4140
},
{
"epoch": 0.5233291298865069,
"grad_norm": 0.586360514163971,
"learning_rate": 7.712873667944681e-06,
"loss": 0.0231,
"step": 4150
},
{
"epoch": 0.5245901639344263,
"grad_norm": 0.954788088798523,
"learning_rate": 7.537344450442469e-06,
"loss": 0.041,
"step": 4160
},
{
"epoch": 0.5258511979823455,
"grad_norm": 0.5906908512115479,
"learning_rate": 7.36367268705393e-06,
"loss": 0.033,
"step": 4170
},
{
"epoch": 0.5271122320302648,
"grad_norm": 0.8814557790756226,
"learning_rate": 7.1918659747446e-06,
"loss": 0.0438,
"step": 4180
},
{
"epoch": 0.5283732660781841,
"grad_norm": 1.0854028463363647,
"learning_rate": 7.021931828896666e-06,
"loss": 0.0321,
"step": 4190
},
{
"epoch": 0.5296343001261034,
"grad_norm": 0.898576557636261,
"learning_rate": 6.8538776829801584e-06,
"loss": 0.0299,
"step": 4200
},
{
"epoch": 0.5308953341740227,
"grad_norm": 0.9435405135154724,
"learning_rate": 6.687710888227849e-06,
"loss": 0.0392,
"step": 4210
},
{
"epoch": 0.532156368221942,
"grad_norm": 0.4928624927997589,
"learning_rate": 6.5234387133136565e-06,
"loss": 0.0339,
"step": 4220
},
{
"epoch": 0.5334174022698613,
"grad_norm": 1.565149188041687,
"learning_rate": 6.361068344034665e-06,
"loss": 0.0331,
"step": 4230
},
{
"epoch": 0.5346784363177806,
"grad_norm": 0.9430273771286011,
"learning_rate": 6.200606882996846e-06,
"loss": 0.028,
"step": 4240
},
{
"epoch": 0.5359394703656999,
"grad_norm": 0.4182937741279602,
"learning_rate": 6.042061349304312e-06,
"loss": 0.033,
"step": 4250
},
{
"epoch": 0.5372005044136192,
"grad_norm": 0.7507690191268921,
"learning_rate": 5.885438678252342e-06,
"loss": 0.039,
"step": 4260
},
{
"epoch": 0.5384615384615384,
"grad_norm": 0.7741302251815796,
"learning_rate": 5.730745721023939e-06,
"loss": 0.0335,
"step": 4270
},
{
"epoch": 0.5397225725094578,
"grad_norm": 0.5136469006538391,
"learning_rate": 5.577989244390192e-06,
"loss": 0.0194,
"step": 4280
},
{
"epoch": 0.5409836065573771,
"grad_norm": 0.5073527693748474,
"learning_rate": 5.4271759304142635e-06,
"loss": 0.04,
"step": 4290
},
{
"epoch": 0.5422446406052963,
"grad_norm": 0.4620015621185303,
"learning_rate": 5.278312376159051e-06,
"loss": 0.0379,
"step": 4300
},
{
"epoch": 0.5435056746532156,
"grad_norm": 0.6073434948921204,
"learning_rate": 5.1314050933986944e-06,
"loss": 0.0308,
"step": 4310
},
{
"epoch": 0.544766708701135,
"grad_norm": 0.7685037851333618,
"learning_rate": 4.986460508333634e-06,
"loss": 0.0329,
"step": 4320
},
{
"epoch": 0.5460277427490542,
"grad_norm": 0.41539332270622253,
"learning_rate": 4.843484961309597e-06,
"loss": 0.0285,
"step": 4330
},
{
"epoch": 0.5472887767969735,
"grad_norm": 0.44178634881973267,
"learning_rate": 4.702484706540161e-06,
"loss": 0.0191,
"step": 4340
},
{
"epoch": 0.5485498108448928,
"grad_norm": 0.9992635846138,
"learning_rate": 4.563465911833259e-06,
"loss": 0.0444,
"step": 4350
},
{
"epoch": 0.5498108448928121,
"grad_norm": 0.6119778156280518,
"learning_rate": 4.426434658321344e-06,
"loss": 0.031,
"step": 4360
},
{
"epoch": 0.5510718789407314,
"grad_norm": 0.8568145036697388,
"learning_rate": 4.2913969401953466e-06,
"loss": 0.0319,
"step": 4370
},
{
"epoch": 0.5523329129886507,
"grad_norm": 1.4540985822677612,
"learning_rate": 4.15835866444253e-06,
"loss": 0.0274,
"step": 4380
},
{
"epoch": 0.5535939470365699,
"grad_norm": 0.6545904874801636,
"learning_rate": 4.027325650588043e-06,
"loss": 0.0337,
"step": 4390
},
{
"epoch": 0.5548549810844893,
"grad_norm": 0.5760856866836548,
"learning_rate": 3.898303630440419e-06,
"loss": 0.0164,
"step": 4400
},
{
"epoch": 0.5561160151324086,
"grad_norm": 0.5065152645111084,
"learning_rate": 3.7712982478407877e-06,
"loss": 0.0329,
"step": 4410
},
{
"epoch": 0.5573770491803278,
"grad_norm": 0.5135217905044556,
"learning_rate": 3.6463150584160053e-06,
"loss": 0.0245,
"step": 4420
},
{
"epoch": 0.5586380832282472,
"grad_norm": 0.392915278673172,
"learning_rate": 3.5233595293356957e-06,
"loss": 0.0259,
"step": 4430
},
{
"epoch": 0.5598991172761665,
"grad_norm": 0.3183845579624176,
"learning_rate": 3.4024370390730033e-06,
"loss": 0.0246,
"step": 4440
},
{
"epoch": 0.5611601513240857,
"grad_norm": 0.3542439341545105,
"learning_rate": 3.2835528771693992e-06,
"loss": 0.0245,
"step": 4450
},
{
"epoch": 0.562421185372005,
"grad_norm": 0.9768691062927246,
"learning_rate": 3.1667122440032505e-06,
"loss": 0.0418,
"step": 4460
},
{
"epoch": 0.5636822194199244,
"grad_norm": 1.337186574935913,
"learning_rate": 3.051920250562351e-06,
"loss": 0.0282,
"step": 4470
},
{
"epoch": 0.5649432534678437,
"grad_norm": 1.1078072786331177,
"learning_rate": 2.939181918220385e-06,
"loss": 0.0321,
"step": 4480
},
{
"epoch": 0.5662042875157629,
"grad_norm": 0.5455815196037292,
"learning_rate": 2.8285021785172226e-06,
"loss": 0.0269,
"step": 4490
},
{
"epoch": 0.5674653215636822,
"grad_norm": 0.6270581483840942,
"learning_rate": 2.7198858729432288e-06,
"loss": 0.039,
"step": 4500
},
{
"epoch": 0.5687263556116016,
"grad_norm": 0.9356001615524292,
"learning_rate": 2.6133377527274905e-06,
"loss": 0.0291,
"step": 4510
},
{
"epoch": 0.5699873896595208,
"grad_norm": 0.5530498027801514,
"learning_rate": 2.5088624786299366e-06,
"loss": 0.022,
"step": 4520
},
{
"epoch": 0.5712484237074401,
"grad_norm": 0.8288186192512512,
"learning_rate": 2.406464620737531e-06,
"loss": 0.024,
"step": 4530
},
{
"epoch": 0.5725094577553594,
"grad_norm": 0.8630868196487427,
"learning_rate": 2.3061486582642734e-06,
"loss": 0.0394,
"step": 4540
},
{
"epoch": 0.5737704918032787,
"grad_norm": 0.5797224044799805,
"learning_rate": 2.2079189793553667e-06,
"loss": 0.0313,
"step": 4550
},
{
"epoch": 0.575031525851198,
"grad_norm": 1.7385971546173096,
"learning_rate": 2.111779880895165e-06,
"loss": 0.0387,
"step": 4560
},
{
"epoch": 0.5762925598991173,
"grad_norm": 0.6150429844856262,
"learning_rate": 2.01773556831929e-06,
"loss": 0.039,
"step": 4570
},
{
"epoch": 0.5775535939470365,
"grad_norm": 1.4603859186172485,
"learning_rate": 1.9257901554306513e-06,
"loss": 0.0394,
"step": 4580
},
{
"epoch": 0.5788146279949559,
"grad_norm": 0.522121787071228,
"learning_rate": 1.835947664219445e-06,
"loss": 0.0206,
"step": 4590
},
{
"epoch": 0.5800756620428752,
"grad_norm": 0.9636305570602417,
"learning_rate": 1.748212024687307e-06,
"loss": 0.0266,
"step": 4600
},
{
"epoch": 0.5813366960907944,
"grad_norm": 0.87248694896698,
"learning_rate": 1.6625870746753147e-06,
"loss": 0.0327,
"step": 4610
},
{
"epoch": 0.5825977301387137,
"grad_norm": 0.1894032210111618,
"learning_rate": 1.5790765596961853e-06,
"loss": 0.0307,
"step": 4620
},
{
"epoch": 0.5838587641866331,
"grad_norm": 0.8586317896842957,
"learning_rate": 1.4976841327703717e-06,
"loss": 0.0272,
"step": 4630
},
{
"epoch": 0.5851197982345523,
"grad_norm": 0.5026036500930786,
"learning_rate": 1.4184133542663014e-06,
"loss": 0.0213,
"step": 4640
},
{
"epoch": 0.5863808322824716,
"grad_norm": 1.2048367261886597,
"learning_rate": 1.341267691744641e-06,
"loss": 0.0353,
"step": 4650
},
{
"epoch": 0.587641866330391,
"grad_norm": 0.7445193529129028,
"learning_rate": 1.2662505198065666e-06,
"loss": 0.0299,
"step": 4660
},
{
"epoch": 0.5889029003783102,
"grad_norm": 0.47740575671195984,
"learning_rate": 1.193365119946216e-06,
"loss": 0.0329,
"step": 4670
},
{
"epoch": 0.5901639344262295,
"grad_norm": 0.6192237138748169,
"learning_rate": 1.1226146804070859e-06,
"loss": 0.0292,
"step": 4680
},
{
"epoch": 0.5914249684741488,
"grad_norm": 0.522982656955719,
"learning_rate": 1.0540022960426111e-06,
"loss": 0.0335,
"step": 4690
},
{
"epoch": 0.592686002522068,
"grad_norm": 0.5586244463920593,
"learning_rate": 9.875309681807443e-07,
"loss": 0.0303,
"step": 4700
},
{
"epoch": 0.5939470365699874,
"grad_norm": 1.1539770364761353,
"learning_rate": 9.232036044927061e-07,
"loss": 0.0325,
"step": 4710
},
{
"epoch": 0.5952080706179067,
"grad_norm": 0.7540831565856934,
"learning_rate": 8.610230188657919e-07,
"loss": 0.0426,
"step": 4720
},
{
"epoch": 0.5964691046658259,
"grad_norm": 1.028459072113037,
"learning_rate": 8.009919312802372e-07,
"loss": 0.0396,
"step": 4730
},
{
"epoch": 0.5977301387137453,
"grad_norm": 0.4053608775138855,
"learning_rate": 7.431129676902904e-07,
"loss": 0.0245,
"step": 4740
},
{
"epoch": 0.5989911727616646,
"grad_norm": 0.7504554390907288,
"learning_rate": 6.873886599093215e-07,
"loss": 0.0305,
"step": 4750
},
{
"epoch": 0.6002522068095839,
"grad_norm": 0.5518863201141357,
"learning_rate": 6.338214454990776e-07,
"loss": 0.0324,
"step": 4760
},
{
"epoch": 0.6015132408575031,
"grad_norm": 0.3712991774082184,
"learning_rate": 5.82413667663051e-07,
"loss": 0.0423,
"step": 4770
},
{
"epoch": 0.6027742749054225,
"grad_norm": 0.4932425022125244,
"learning_rate": 5.331675751439725e-07,
"loss": 0.0273,
"step": 4780
},
{
"epoch": 0.6040353089533418,
"grad_norm": 1.0675811767578125,
"learning_rate": 4.86085322125479e-07,
"loss": 0.0296,
"step": 4790
},
{
"epoch": 0.605296343001261,
"grad_norm": 0.5761094093322754,
"learning_rate": 4.411689681378284e-07,
"loss": 0.027,
"step": 4800
},
{
"epoch": 0.6065573770491803,
"grad_norm": 0.9229597449302673,
"learning_rate": 3.9842047796786466e-07,
"loss": 0.0307,
"step": 4810
},
{
"epoch": 0.6078184110970997,
"grad_norm": 1.0080735683441162,
"learning_rate": 3.578417215730323e-07,
"loss": 0.0506,
"step": 4820
},
{
"epoch": 0.6090794451450189,
"grad_norm": 0.6860240697860718,
"learning_rate": 3.1943447399958027e-07,
"loss": 0.0497,
"step": 4830
},
{
"epoch": 0.6103404791929382,
"grad_norm": 0.3656100928783417,
"learning_rate": 2.8320041530495724e-07,
"loss": 0.0312,
"step": 4840
},
{
"epoch": 0.6116015132408575,
"grad_norm": 0.9314307570457458,
"learning_rate": 2.491411304842539e-07,
"loss": 0.0276,
"step": 4850
},
{
"epoch": 0.6128625472887768,
"grad_norm": 0.7389268279075623,
"learning_rate": 2.1725810940094183e-07,
"loss": 0.0229,
"step": 4860
},
{
"epoch": 0.6141235813366961,
"grad_norm": 0.6638040542602539,
"learning_rate": 1.8755274672164202e-07,
"loss": 0.0364,
"step": 4870
},
{
"epoch": 0.6153846153846154,
"grad_norm": 0.6338533163070679,
"learning_rate": 1.600263418551573e-07,
"loss": 0.0296,
"step": 4880
},
{
"epoch": 0.6166456494325346,
"grad_norm": 0.6092068552970886,
"learning_rate": 1.346800988955954e-07,
"loss": 0.026,
"step": 4890
},
{
"epoch": 0.617906683480454,
"grad_norm": 0.6075211763381958,
"learning_rate": 1.1151512656975005e-07,
"loss": 0.0262,
"step": 4900
},
{
"epoch": 0.6191677175283733,
"grad_norm": 0.5688278079032898,
"learning_rate": 9.053243818853973e-08,
"loss": 0.0214,
"step": 4910
},
{
"epoch": 0.6204287515762925,
"grad_norm": 0.5925626158714294,
"learning_rate": 7.173295160273763e-08,
"loss": 0.0165,
"step": 4920
},
{
"epoch": 0.6216897856242118,
"grad_norm": 0.7344203591346741,
"learning_rate": 5.511748916279258e-08,
"loss": 0.0375,
"step": 4930
},
{
"epoch": 0.6229508196721312,
"grad_norm": 0.8294381499290466,
"learning_rate": 4.068677768285234e-08,
"loss": 0.0198,
"step": 4940
},
{
"epoch": 0.6242118537200504,
"grad_norm": 1.1469560861587524,
"learning_rate": 2.844144840898344e-08,
"loss": 0.0324,
"step": 4950
},
{
"epoch": 0.6254728877679697,
"grad_norm": 0.9988076686859131,
"learning_rate": 1.8382036991559936e-08,
"loss": 0.0271,
"step": 4960
},
{
"epoch": 0.626733921815889,
"grad_norm": 0.9736055135726929,
"learning_rate": 1.0508983461832156e-08,
"loss": 0.0292,
"step": 4970
},
{
"epoch": 0.6279949558638083,
"grad_norm": 0.8030845522880554,
"learning_rate": 4.822632212653222e-09,
"loss": 0.0269,
"step": 4980
},
{
"epoch": 0.6292559899117276,
"grad_norm": 0.41871246695518494,
"learning_rate": 1.3232319834632912e-09,
"loss": 0.0227,
"step": 4990
},
{
"epoch": 0.6305170239596469,
"grad_norm": 0.7686680555343628,
"learning_rate": 1.0935849353854721e-11,
"loss": 0.024,
"step": 5000
}
],
"logging_steps": 10,
"max_steps": 5000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}