{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 8.0,
"eval_steps": 500,
"global_step": 8256,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.04844961240310078,
"grad_norm": 0.41548049449920654,
"learning_rate": 4.999565439780451e-05,
"loss": 0.9259,
"step": 50
},
{
"epoch": 0.09689922480620156,
"grad_norm": 0.4070381224155426,
"learning_rate": 4.9982262622162004e-05,
"loss": 0.8252,
"step": 100
},
{
"epoch": 0.14534883720930233,
"grad_norm": 0.26511862874031067,
"learning_rate": 4.9959827710691796e-05,
"loss": 0.789,
"step": 150
},
{
"epoch": 0.1937984496124031,
"grad_norm": 0.379682719707489,
"learning_rate": 4.992835778443379e-05,
"loss": 0.8257,
"step": 200
},
{
"epoch": 0.24224806201550386,
"grad_norm": 0.34141114354133606,
"learning_rate": 4.988786423494294e-05,
"loss": 0.8357,
"step": 250
},
{
"epoch": 0.29069767441860467,
"grad_norm": 0.33835941553115845,
"learning_rate": 4.983836172016573e-05,
"loss": 0.8025,
"step": 300
},
{
"epoch": 0.3391472868217054,
"grad_norm": 0.28619295358657837,
"learning_rate": 4.97798681591342e-05,
"loss": 0.7223,
"step": 350
},
{
"epoch": 0.3875968992248062,
"grad_norm": 1.7040114402770996,
"learning_rate": 4.971240472547967e-05,
"loss": 0.7843,
"step": 400
},
{
"epoch": 0.436046511627907,
"grad_norm": 0.4446117877960205,
"learning_rate": 4.963599583976813e-05,
"loss": 0.8503,
"step": 450
},
{
"epoch": 0.4844961240310077,
"grad_norm": 0.285826712846756,
"learning_rate": 4.9550669160660576e-05,
"loss": 0.8192,
"step": 500
},
{
"epoch": 0.5329457364341085,
"grad_norm": 2.5809340476989746,
"learning_rate": 4.945645557490094e-05,
"loss": 0.7493,
"step": 550
},
{
"epoch": 0.5813953488372093,
"grad_norm": 0.31858932971954346,
"learning_rate": 4.935338918613571e-05,
"loss": 0.847,
"step": 600
},
{
"epoch": 0.6298449612403101,
"grad_norm": 0.3342924416065216,
"learning_rate": 4.924150730256899e-05,
"loss": 0.7255,
"step": 650
},
{
"epoch": 0.6782945736434108,
"grad_norm": 0.2791895568370819,
"learning_rate": 4.91208504234576e-05,
"loss": 0.8075,
"step": 700
},
{
"epoch": 0.7267441860465116,
"grad_norm": 0.4308435618877411,
"learning_rate": 4.899146222445105e-05,
"loss": 0.839,
"step": 750
},
{
"epoch": 0.7751937984496124,
"grad_norm": 0.47363415360450745,
"learning_rate": 4.8853389541781726e-05,
"loss": 0.7479,
"step": 800
},
{
"epoch": 0.8236434108527132,
"grad_norm": 0.39904293417930603,
"learning_rate": 4.870668235531106e-05,
"loss": 0.7821,
"step": 850
},
{
"epoch": 0.872093023255814,
"grad_norm": 0.3194170594215393,
"learning_rate": 4.8551393770437626e-05,
"loss": 0.8012,
"step": 900
},
{
"epoch": 0.9205426356589147,
"grad_norm": 0.8215324282646179,
"learning_rate": 4.838757999887399e-05,
"loss": 0.6319,
"step": 950
},
{
"epoch": 0.9689922480620154,
"grad_norm": 0.46082744002342224,
"learning_rate": 4.821530033829902e-05,
"loss": 0.494,
"step": 1000
},
{
"epoch": 1.0174418604651163,
"grad_norm": 0.25258105993270874,
"learning_rate": 4.8034617150893216e-05,
"loss": 0.7785,
"step": 1050
},
{
"epoch": 1.0658914728682172,
"grad_norm": 0.44710302352905273,
"learning_rate": 4.78455958407647e-05,
"loss": 0.573,
"step": 1100
},
{
"epoch": 1.1143410852713178,
"grad_norm": 0.25971588492393494,
"learning_rate": 4.764830483027408e-05,
"loss": 0.6463,
"step": 1150
},
{
"epoch": 1.1627906976744187,
"grad_norm": 0.5416250228881836,
"learning_rate": 4.744281553526676e-05,
"loss": 0.5477,
"step": 1200
},
{
"epoch": 1.2112403100775193,
"grad_norm": 0.6827994585037231,
"learning_rate": 4.7229202339221676e-05,
"loss": 0.6475,
"step": 1250
},
{
"epoch": 1.2596899224806202,
"grad_norm": 0.7923223376274109,
"learning_rate": 4.7007542566325844e-05,
"loss": 0.6066,
"step": 1300
},
{
"epoch": 1.308139534883721,
"grad_norm": 0.30041930079460144,
"learning_rate": 4.6777916453484305e-05,
"loss": 0.7032,
"step": 1350
},
{
"epoch": 1.3565891472868217,
"grad_norm": 0.30808496475219727,
"learning_rate": 4.654040712127581e-05,
"loss": 0.7161,
"step": 1400
},
{
"epoch": 1.4050387596899225,
"grad_norm": 0.2858184278011322,
"learning_rate": 4.629510054386468e-05,
"loss": 0.6813,
"step": 1450
},
{
"epoch": 1.4534883720930232,
"grad_norm": 0.4461047053337097,
"learning_rate": 4.604208551787964e-05,
"loss": 0.6757,
"step": 1500
},
{
"epoch": 1.501937984496124,
"grad_norm": 0.294251024723053,
"learning_rate": 4.578145363027104e-05,
"loss": 0.559,
"step": 1550
},
{
"epoch": 1.550387596899225,
"grad_norm": 0.4968578517436981,
"learning_rate": 4.5513299225158014e-05,
"loss": 0.7046,
"step": 1600
},
{
"epoch": 1.5988372093023255,
"grad_norm": 0.2557980418205261,
"learning_rate": 4.523771936967754e-05,
"loss": 0.6828,
"step": 1650
},
{
"epoch": 1.6472868217054264,
"grad_norm": 0.30303144454956055,
"learning_rate": 4.4954813818847886e-05,
"loss": 0.7159,
"step": 1700
},
{
"epoch": 1.695736434108527,
"grad_norm": 0.1948225349187851,
"learning_rate": 4.466468497945909e-05,
"loss": 0.7042,
"step": 1750
},
{
"epoch": 1.744186046511628,
"grad_norm": 0.39648470282554626,
"learning_rate": 4.436743787300346e-05,
"loss": 0.7765,
"step": 1800
},
{
"epoch": 1.7926356589147288,
"grad_norm": 0.38730746507644653,
"learning_rate": 4.406318009765971e-05,
"loss": 0.7311,
"step": 1850
},
{
"epoch": 1.8410852713178296,
"grad_norm": 0.226152241230011,
"learning_rate": 4.3752021789344286e-05,
"loss": 0.5145,
"step": 1900
},
{
"epoch": 1.8895348837209303,
"grad_norm": 0.6462261080741882,
"learning_rate": 4.343407558184415e-05,
"loss": 0.3022,
"step": 1950
},
{
"epoch": 1.937984496124031,
"grad_norm": 0.5680636763572693,
"learning_rate": 4.310945656604535e-05,
"loss": 0.6364,
"step": 2000
},
{
"epoch": 1.9864341085271318,
"grad_norm": 0.342068076133728,
"learning_rate": 4.277828224827213e-05,
"loss": 0.5432,
"step": 2050
},
{
"epoch": 2.0348837209302326,
"grad_norm": 0.3184409737586975,
"learning_rate": 4.244067250775174e-05,
"loss": 0.5958,
"step": 2100
},
{
"epoch": 2.0833333333333335,
"grad_norm": 0.4073927402496338,
"learning_rate": 4.209674955322029e-05,
"loss": 0.639,
"step": 2150
},
{
"epoch": 2.1317829457364343,
"grad_norm": 1.1842104196548462,
"learning_rate": 4.174663787868537e-05,
"loss": 0.6331,
"step": 2200
},
{
"epoch": 2.1802325581395348,
"grad_norm": 0.8454262614250183,
"learning_rate": 4.139046421836137e-05,
"loss": 0.6085,
"step": 2250
},
{
"epoch": 2.2286821705426356,
"grad_norm": 0.31670165061950684,
"learning_rate": 4.1028357500794024e-05,
"loss": 0.4542,
"step": 2300
},
{
"epoch": 2.2771317829457365,
"grad_norm": 0.3827883005142212,
"learning_rate": 4.06604488021905e-05,
"loss": 0.5486,
"step": 2350
},
{
"epoch": 2.3255813953488373,
"grad_norm": 0.4175989031791687,
"learning_rate": 4.0286871298972154e-05,
"loss": 0.5972,
"step": 2400
},
{
"epoch": 2.374031007751938,
"grad_norm": 0.3741699457168579,
"learning_rate": 3.9907760219567055e-05,
"loss": 0.5973,
"step": 2450
},
{
"epoch": 2.4224806201550386,
"grad_norm": 0.5313114523887634,
"learning_rate": 3.9523252795459676e-05,
"loss": 0.6082,
"step": 2500
},
{
"epoch": 2.4709302325581395,
"grad_norm": 0.3255683183670044,
"learning_rate": 3.91334882115155e-05,
"loss": 0.6618,
"step": 2550
},
{
"epoch": 2.5193798449612403,
"grad_norm": 0.6780579686164856,
"learning_rate": 3.873860755559862e-05,
"loss": 0.6439,
"step": 2600
},
{
"epoch": 2.567829457364341,
"grad_norm": 0.7427515387535095,
"learning_rate": 3.83387537675004e-05,
"loss": 0.4145,
"step": 2650
},
{
"epoch": 2.616279069767442,
"grad_norm": 0.3860374689102173,
"learning_rate": 3.793407158719784e-05,
"loss": 0.6016,
"step": 2700
},
{
"epoch": 2.6647286821705425,
"grad_norm": 0.5163136124610901,
"learning_rate": 3.752470750246023e-05,
"loss": 0.5656,
"step": 2750
},
{
"epoch": 2.7131782945736433,
"grad_norm": 0.31362855434417725,
"learning_rate": 3.711080969582321e-05,
"loss": 0.6171,
"step": 2800
},
{
"epoch": 2.761627906976744,
"grad_norm": 0.5183291435241699,
"learning_rate": 3.66925279909493e-05,
"loss": 0.5906,
"step": 2850
},
{
"epoch": 2.810077519379845,
"grad_norm": 0.6666070818901062,
"learning_rate": 3.6270013798394403e-05,
"loss": 0.489,
"step": 2900
},
{
"epoch": 2.858527131782946,
"grad_norm": 0.32670509815216064,
"learning_rate": 3.5843420060799874e-05,
"loss": 0.6118,
"step": 2950
},
{
"epoch": 2.9069767441860463,
"grad_norm": 0.1764584183692932,
"learning_rate": 3.541290119753e-05,
"loss": 0.1986,
"step": 3000
},
{
"epoch": 2.955426356589147,
"grad_norm": 0.2792399227619171,
"learning_rate": 3.49786130487749e-05,
"loss": 0.5696,
"step": 3050
},
{
"epoch": 3.003875968992248,
"grad_norm": 0.3555202782154083,
"learning_rate": 3.454071281913918e-05,
"loss": 0.48,
"step": 3100
},
{
"epoch": 3.052325581395349,
"grad_norm": 0.5549014806747437,
"learning_rate": 3.4099359020736636e-05,
"loss": 0.5539,
"step": 3150
},
{
"epoch": 3.10077519379845,
"grad_norm": 0.4574868083000183,
"learning_rate": 3.365471141581167e-05,
"loss": 0.5123,
"step": 3200
},
{
"epoch": 3.14922480620155,
"grad_norm": 0.268303781747818,
"learning_rate": 3.320693095890823e-05,
"loss": 0.521,
"step": 3250
},
{
"epoch": 3.197674418604651,
"grad_norm": 0.5150983929634094,
"learning_rate": 3.275617973860707e-05,
"loss": 0.5431,
"step": 3300
},
{
"epoch": 3.246124031007752,
"grad_norm": 0.39754441380500793,
"learning_rate": 3.230262091885261e-05,
"loss": 0.2304,
"step": 3350
},
{
"epoch": 3.294573643410853,
"grad_norm": 0.41833576560020447,
"learning_rate": 3.184641867989045e-05,
"loss": 0.4382,
"step": 3400
},
{
"epoch": 3.3430232558139537,
"grad_norm": 0.45826929807662964,
"learning_rate": 3.138773815883702e-05,
"loss": 0.4938,
"step": 3450
},
{
"epoch": 3.391472868217054,
"grad_norm": 0.2432149201631546,
"learning_rate": 3.092674538990294e-05,
"loss": 0.4546,
"step": 3500
},
{
"epoch": 3.439922480620155,
"grad_norm": 0.3492552638053894,
"learning_rate": 3.04636072442915e-05,
"loss": 0.5489,
"step": 3550
},
{
"epoch": 3.488372093023256,
"grad_norm": 0.4332628548145294,
"learning_rate": 2.9998491369794306e-05,
"loss": 0.5029,
"step": 3600
},
{
"epoch": 3.5368217054263567,
"grad_norm": 0.4654231369495392,
"learning_rate": 2.9531566130105735e-05,
"loss": 0.5832,
"step": 3650
},
{
"epoch": 3.5852713178294575,
"grad_norm": 0.49778714776039124,
"learning_rate": 2.906300054387823e-05,
"loss": 0.4908,
"step": 3700
},
{
"epoch": 3.633720930232558,
"grad_norm": 0.38342729210853577,
"learning_rate": 2.8592964223540608e-05,
"loss": 0.5958,
"step": 3750
},
{
"epoch": 3.682170542635659,
"grad_norm": 0.2664441168308258,
"learning_rate": 2.812162731390133e-05,
"loss": 0.4951,
"step": 3800
},
{
"epoch": 3.7306201550387597,
"grad_norm": 0.3066735565662384,
"learning_rate": 2.7649160430559105e-05,
"loss": 0.5901,
"step": 3850
},
{
"epoch": 3.7790697674418605,
"grad_norm": 0.365209698677063,
"learning_rate": 2.7175734598143088e-05,
"loss": 0.573,
"step": 3900
},
{
"epoch": 3.8275193798449614,
"grad_norm": 0.29753512144088745,
"learning_rate": 2.6701521188404965e-05,
"loss": 0.5557,
"step": 3950
},
{
"epoch": 3.875968992248062,
"grad_norm": 0.4851069152355194,
"learning_rate": 2.6226691858185454e-05,
"loss": 0.3863,
"step": 4000
},
{
"epoch": 3.9244186046511627,
"grad_norm": 0.39727818965911865,
"learning_rate": 2.5751418487277544e-05,
"loss": 0.5019,
"step": 4050
},
{
"epoch": 3.9728682170542635,
"grad_norm": 0.5351327061653137,
"learning_rate": 2.5275873116209104e-05,
"loss": 0.4453,
"step": 4100
},
{
"epoch": 4.021317829457364,
"grad_norm": 0.3918212354183197,
"learning_rate": 2.48002278839672e-05,
"loss": 0.528,
"step": 4150
},
{
"epoch": 4.069767441860465,
"grad_norm": 0.3745068609714508,
"learning_rate": 2.432465496568689e-05,
"loss": 0.5476,
"step": 4200
},
{
"epoch": 4.118217054263566,
"grad_norm": 0.9149877429008484,
"learning_rate": 2.3849326510326875e-05,
"loss": 0.4632,
"step": 4250
},
{
"epoch": 4.166666666666667,
"grad_norm": 0.6120011210441589,
"learning_rate": 2.3374414578354577e-05,
"loss": 0.5309,
"step": 4300
},
{
"epoch": 4.215116279069767,
"grad_norm": 0.3835127651691437,
"learning_rate": 2.290009107946337e-05,
"loss": 0.4484,
"step": 4350
},
{
"epoch": 4.263565891472869,
"grad_norm": 0.2763288617134094,
"learning_rate": 2.2426527710344296e-05,
"loss": 0.2135,
"step": 4400
},
{
"epoch": 4.312015503875969,
"grad_norm": 0.36940285563468933,
"learning_rate": 2.1953895892534886e-05,
"loss": 0.3706,
"step": 4450
},
{
"epoch": 4.3604651162790695,
"grad_norm": 0.3598892092704773,
"learning_rate": 2.148236671036765e-05,
"loss": 0.5161,
"step": 4500
},
{
"epoch": 4.408914728682171,
"grad_norm": 0.41965949535369873,
"learning_rate": 2.1012110849040533e-05,
"loss": 0.5151,
"step": 4550
},
{
"epoch": 4.457364341085271,
"grad_norm": 0.6105883717536926,
"learning_rate": 2.054329853283194e-05,
"loss": 0.4699,
"step": 4600
},
{
"epoch": 4.5058139534883725,
"grad_norm": 1.0464669466018677,
"learning_rate": 2.0076099463482557e-05,
"loss": 0.5253,
"step": 4650
},
{
"epoch": 4.554263565891473,
"grad_norm": 0.7042683959007263,
"learning_rate": 1.9610682758766316e-05,
"loss": 0.5089,
"step": 4700
},
{
"epoch": 4.602713178294573,
"grad_norm": 0.4324445128440857,
"learning_rate": 1.914721689127281e-05,
"loss": 0.5323,
"step": 4750
},
{
"epoch": 4.651162790697675,
"grad_norm": 0.337740957736969,
"learning_rate": 1.868586962742312e-05,
"loss": 0.4622,
"step": 4800
},
{
"epoch": 4.699612403100775,
"grad_norm": 0.30157017707824707,
"learning_rate": 1.8226807966741426e-05,
"loss": 0.2995,
"step": 4850
},
{
"epoch": 4.748062015503876,
"grad_norm": 0.28458529710769653,
"learning_rate": 1.777019808140412e-05,
"loss": 0.4785,
"step": 4900
},
{
"epoch": 4.796511627906977,
"grad_norm": 0.17888276278972626,
"learning_rate": 1.7316205256088396e-05,
"loss": 0.4547,
"step": 4950
},
{
"epoch": 4.844961240310077,
"grad_norm": 0.9857609272003174,
"learning_rate": 1.6864993828142177e-05,
"loss": 0.3773,
"step": 5000
},
{
"epoch": 4.8934108527131785,
"grad_norm": 0.2999161183834076,
"learning_rate": 1.6416727128096866e-05,
"loss": 0.5057,
"step": 5050
},
{
"epoch": 4.941860465116279,
"grad_norm": 0.3053271472454071,
"learning_rate": 1.5971567420544616e-05,
"loss": 0.4571,
"step": 5100
},
{
"epoch": 4.99031007751938,
"grad_norm": 0.2922804057598114,
"learning_rate": 1.5529675845401433e-05,
"loss": 0.3809,
"step": 5150
},
{
"epoch": 5.038759689922481,
"grad_norm": 0.37198856472969055,
"learning_rate": 1.5091212359577361e-05,
"loss": 0.5042,
"step": 5200
},
{
"epoch": 5.087209302325581,
"grad_norm": 0.2848919928073883,
"learning_rate": 1.4656335679074957e-05,
"loss": 0.3718,
"step": 5250
},
{
"epoch": 5.135658914728682,
"grad_norm": 0.4509316384792328,
"learning_rate": 1.422520322153686e-05,
"loss": 0.4388,
"step": 5300
},
{
"epoch": 5.184108527131783,
"grad_norm": 0.5705945491790771,
"learning_rate": 1.3797971049263427e-05,
"loss": 0.3232,
"step": 5350
},
{
"epoch": 5.232558139534884,
"grad_norm": 0.41659048199653625,
"learning_rate": 1.3374793812720998e-05,
"loss": 0.4727,
"step": 5400
},
{
"epoch": 5.2810077519379846,
"grad_norm": 0.44602829217910767,
"learning_rate": 1.2955824694561041e-05,
"loss": 0.4916,
"step": 5450
},
{
"epoch": 5.329457364341085,
"grad_norm": 0.4279162287712097,
"learning_rate": 1.254121535417091e-05,
"loss": 0.4178,
"step": 5500
},
{
"epoch": 5.377906976744186,
"grad_norm": 0.6180667281150818,
"learning_rate": 1.213111587277571e-05,
"loss": 0.429,
"step": 5550
},
{
"epoch": 5.426356589147287,
"grad_norm": 0.38391199707984924,
"learning_rate": 1.1725674699111622e-05,
"loss": 0.3601,
"step": 5600
},
{
"epoch": 5.474806201550388,
"grad_norm": 0.12185677886009216,
"learning_rate": 1.1325038595690074e-05,
"loss": 0.2,
"step": 5650
},
{
"epoch": 5.523255813953488,
"grad_norm": 0.3772415816783905,
"learning_rate": 1.0929352585672316e-05,
"loss": 0.4003,
"step": 5700
},
{
"epoch": 5.571705426356589,
"grad_norm": 0.4929804503917694,
"learning_rate": 1.0538759900373618e-05,
"loss": 0.4523,
"step": 5750
},
{
"epoch": 5.62015503875969,
"grad_norm": 0.24463126063346863,
"learning_rate": 1.0153401927416057e-05,
"loss": 0.373,
"step": 5800
},
{
"epoch": 5.6686046511627906,
"grad_norm": 0.4997948408126831,
"learning_rate": 9.773418159548738e-06,
"loss": 0.5103,
"step": 5850
},
{
"epoch": 5.717054263565892,
"grad_norm": 0.36583212018013,
"learning_rate": 9.398946144153872e-06,
"loss": 0.3983,
"step": 5900
},
{
"epoch": 5.765503875968992,
"grad_norm": 0.5164257287979126,
"learning_rate": 9.030121433457026e-06,
"loss": 0.4945,
"step": 5950
},
{
"epoch": 5.813953488372093,
"grad_norm": 0.4246552288532257,
"learning_rate": 8.667077535459714e-06,
"loss": 0.4677,
"step": 6000
},
{
"epoch": 5.862403100775194,
"grad_norm": 0.5210244655609131,
"learning_rate": 8.309945865611793e-06,
"loss": 0.4652,
"step": 6050
},
{
"epoch": 5.910852713178294,
"grad_norm": 0.43867701292037964,
"learning_rate": 7.958855699241427e-06,
"loss": 0.4393,
"step": 6100
},
{
"epoch": 5.959302325581396,
"grad_norm": 0.3301217257976532,
"learning_rate": 7.613934124759711e-06,
"loss": 0.5171,
"step": 6150
},
{
"epoch": 6.007751937984496,
"grad_norm": 0.5201098918914795,
"learning_rate": 7.27530599765692e-06,
"loss": 0.4175,
"step": 6200
},
{
"epoch": 6.0562015503875966,
"grad_norm": 0.46548280119895935,
"learning_rate": 6.94309389530706e-06,
"loss": 0.3947,
"step": 6250
},
{
"epoch": 6.104651162790698,
"grad_norm": 0.0748455747961998,
"learning_rate": 6.6174180725969624e-06,
"loss": 0.4261,
"step": 6300
},
{
"epoch": 6.153100775193798,
"grad_norm": 0.4305146634578705,
"learning_rate": 6.298396418396249e-06,
"loss": 0.3665,
"step": 6350
},
{
"epoch": 6.2015503875969,
"grad_norm": 0.5521872639656067,
"learning_rate": 5.986144412883582e-06,
"loss": 0.4068,
"step": 6400
},
{
"epoch": 6.25,
"grad_norm": 0.3403307795524597,
"learning_rate": 5.680775085744877e-06,
"loss": 0.4345,
"step": 6450
},
{
"epoch": 6.2984496124031,
"grad_norm": 0.4251650869846344,
"learning_rate": 5.382398975258643e-06,
"loss": 0.4144,
"step": 6500
},
{
"epoch": 6.346899224806202,
"grad_norm": 0.3254774212837219,
"learning_rate": 5.0911240882830085e-06,
"loss": 0.4702,
"step": 6550
},
{
"epoch": 6.395348837209302,
"grad_norm": 0.20938007533550262,
"learning_rate": 4.8070558611591846e-06,
"loss": 0.4185,
"step": 6600
},
{
"epoch": 6.4437984496124034,
"grad_norm": 0.2960624396800995,
"learning_rate": 4.53029712154538e-06,
"loss": 0.4124,
"step": 6650
},
{
"epoch": 6.492248062015504,
"grad_norm": 0.07721071690320969,
"learning_rate": 4.26094805119501e-06,
"loss": 0.1426,
"step": 6700
},
{
"epoch": 6.540697674418604,
"grad_norm": 0.23349162936210632,
"learning_rate": 3.9991061496926965e-06,
"loss": 0.4447,
"step": 6750
},
{
"epoch": 6.589147286821706,
"grad_norm": 0.3078431785106659,
"learning_rate": 3.7448661991611012e-06,
"loss": 0.3905,
"step": 6800
},
{
"epoch": 6.637596899224806,
"grad_norm": 0.5103276371955872,
"learning_rate": 3.4983202299515403e-06,
"loss": 0.4237,
"step": 6850
},
{
"epoch": 6.686046511627907,
"grad_norm": 0.3160223662853241,
"learning_rate": 3.259557487330586e-06,
"loss": 0.3905,
"step": 6900
},
{
"epoch": 6.734496124031008,
"grad_norm": 0.26788458228111267,
"learning_rate": 3.0286643991748742e-06,
"loss": 0.3543,
"step": 6950
},
{
"epoch": 6.782945736434108,
"grad_norm": 0.6616643071174622,
"learning_rate": 2.8057245446857614e-06,
"loss": 0.37,
"step": 7000
},
{
"epoch": 6.8313953488372094,
"grad_norm": 0.7186428308486938,
"learning_rate": 2.5908186241351107e-06,
"loss": 0.443,
"step": 7050
},
{
"epoch": 6.87984496124031,
"grad_norm": 0.5220199823379517,
"learning_rate": 2.3840244296532534e-06,
"loss": 0.4607,
"step": 7100
},
{
"epoch": 6.928294573643411,
"grad_norm": 0.4159252345561981,
"learning_rate": 2.1854168170695967e-06,
"loss": 0.4479,
"step": 7150
},
{
"epoch": 6.976744186046512,
"grad_norm": 0.5149658918380737,
"learning_rate": 1.9950676788161425e-06,
"loss": 0.5063,
"step": 7200
},
{
"epoch": 7.025193798449612,
"grad_norm": 0.44880130887031555,
"learning_rate": 1.813045917903733e-06,
"loss": 0.4645,
"step": 7250
},
{
"epoch": 7.073643410852713,
"grad_norm": 0.2966916263103485,
"learning_rate": 1.6394174229803244e-06,
"loss": 0.4564,
"step": 7300
},
{
"epoch": 7.122093023255814,
"grad_norm": 0.33741477131843567,
"learning_rate": 1.4742450444805296e-06,
"loss": 0.4172,
"step": 7350
},
{
"epoch": 7.170542635658915,
"grad_norm": 0.3214058578014374,
"learning_rate": 1.3175885718748366e-06,
"loss": 0.4658,
"step": 7400
},
{
"epoch": 7.2189922480620154,
"grad_norm": 0.3869069218635559,
"learning_rate": 1.1695047120268892e-06,
"loss": 0.3009,
"step": 7450
},
{
"epoch": 7.267441860465116,
"grad_norm": 0.4083672761917114,
"learning_rate": 1.030047068666612e-06,
"loss": 0.459,
"step": 7500
},
{
"epoch": 7.315891472868217,
"grad_norm": 0.3610597550868988,
"learning_rate": 8.992661229866056e-07,
"loss": 0.3833,
"step": 7550
},
{
"epoch": 7.364341085271318,
"grad_norm": 0.539691150188446,
"learning_rate": 7.772092153688598e-07,
"loss": 0.4663,
"step": 7600
},
{
"epoch": 7.412790697674419,
"grad_norm": 0.2774708569049835,
"learning_rate": 6.639205282483713e-07,
"loss": 0.2524,
"step": 7650
},
{
"epoch": 7.461240310077519,
"grad_norm": 0.4285198748111725,
"learning_rate": 5.59441070119876e-07,
"loss": 0.2562,
"step": 7700
},
{
"epoch": 7.50968992248062,
"grad_norm": 0.19247202575206757,
"learning_rate": 4.638086606935238e-07,
"loss": 0.3872,
"step": 7750
},
{
"epoch": 7.558139534883721,
"grad_norm": 0.3917827606201172,
"learning_rate": 3.770579172047817e-07,
"loss": 0.3324,
"step": 7800
},
{
"epoch": 7.6065891472868215,
"grad_norm": 0.439827024936676,
"learning_rate": 2.9922024188365206e-07,
"loss": 0.4872,
"step": 7850
},
{
"epoch": 7.655038759689923,
"grad_norm": 0.6945509314537048,
"learning_rate": 2.3032381058757658e-07,
"loss": 0.4528,
"step": 7900
},
{
"epoch": 7.703488372093023,
"grad_norm": 0.29122206568717957,
"learning_rate": 1.7039356260231122e-07,
"loss": 0.4036,
"step": 7950
},
{
"epoch": 7.751937984496124,
"grad_norm": 0.3214813768863678,
"learning_rate": 1.1945119161432817e-07,
"loss": 0.3775,
"step": 8000
},
{
"epoch": 7.800387596899225,
"grad_norm": 0.35670900344848633,
"learning_rate": 7.751513785809183e-08,
"loss": 0.388,
"step": 8050
},
{
"epoch": 7.848837209302325,
"grad_norm": 0.6631624698638916,
"learning_rate": 4.460058144102608e-08,
"loss": 0.4559,
"step": 8100
},
{
"epoch": 7.897286821705427,
"grad_norm": 0.3693590462207794,
"learning_rate": 2.071943684856803e-08,
"loss": 0.3987,
"step": 8150
},
{
"epoch": 7.945736434108527,
"grad_norm": 0.37362340092658997,
"learning_rate": 5.880348631362442e-09,
"loss": 0.3428,
"step": 8200
},
{
"epoch": 7.9941860465116275,
"grad_norm": 0.41830864548683167,
"learning_rate": 8.868827605923446e-11,
"loss": 0.4124,
"step": 8250
}
],
"logging_steps": 50,
"max_steps": 8256,
"num_input_tokens_seen": 0,
"num_train_epochs": 8,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 4.682679674077476e+19,
"train_batch_size": 92,
"trial_name": null,
"trial_params": null
}