{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.4895729890764646,
"eval_steps": 500,
"global_step": 4500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.006620324395895399,
"grad_norm": 31.25,
"learning_rate": 3.8000000000000005e-06,
"loss": 1.6298,
"step": 20
},
{
"epoch": 0.013240648791790799,
"grad_norm": 17.125,
"learning_rate": 7.800000000000002e-06,
"loss": 1.1324,
"step": 40
},
{
"epoch": 0.019860973187686197,
"grad_norm": 24.0,
"learning_rate": 9.984979973297732e-06,
"loss": 0.782,
"step": 60
},
{
"epoch": 0.026481297583581597,
"grad_norm": 14.875,
"learning_rate": 9.951602136181577e-06,
"loss": 0.732,
"step": 80
},
{
"epoch": 0.033101621979477,
"grad_norm": 12.375,
"learning_rate": 9.918224299065421e-06,
"loss": 0.7183,
"step": 100
},
{
"epoch": 0.039721946375372394,
"grad_norm": 17.625,
"learning_rate": 9.884846461949267e-06,
"loss": 0.6998,
"step": 120
},
{
"epoch": 0.04634227077126779,
"grad_norm": 17.25,
"learning_rate": 9.851468624833111e-06,
"loss": 0.6805,
"step": 140
},
{
"epoch": 0.052962595167163194,
"grad_norm": 22.125,
"learning_rate": 9.818090787716956e-06,
"loss": 0.6887,
"step": 160
},
{
"epoch": 0.05958291956305859,
"grad_norm": 13.3125,
"learning_rate": 9.784712950600802e-06,
"loss": 0.6688,
"step": 180
},
{
"epoch": 0.066203243958954,
"grad_norm": 14.1875,
"learning_rate": 9.751335113484648e-06,
"loss": 0.6448,
"step": 200
},
{
"epoch": 0.07282356835484939,
"grad_norm": 53.0,
"learning_rate": 9.717957276368492e-06,
"loss": 0.6303,
"step": 220
},
{
"epoch": 0.07944389275074479,
"grad_norm": 21.375,
"learning_rate": 9.684579439252338e-06,
"loss": 0.6824,
"step": 240
},
{
"epoch": 0.08606421714664018,
"grad_norm": 35.0,
"learning_rate": 9.651201602136182e-06,
"loss": 0.6308,
"step": 260
},
{
"epoch": 0.09268454154253558,
"grad_norm": 38.5,
"learning_rate": 9.617823765020028e-06,
"loss": 0.6079,
"step": 280
},
{
"epoch": 0.09930486593843098,
"grad_norm": 10.875,
"learning_rate": 9.584445927903873e-06,
"loss": 0.6296,
"step": 300
},
{
"epoch": 0.10592519033432639,
"grad_norm": 15.6875,
"learning_rate": 9.551068090787717e-06,
"loss": 0.6119,
"step": 320
},
{
"epoch": 0.11254551473022179,
"grad_norm": 33.75,
"learning_rate": 9.517690253671563e-06,
"loss": 0.6681,
"step": 340
},
{
"epoch": 0.11916583912611718,
"grad_norm": 17.25,
"learning_rate": 9.484312416555407e-06,
"loss": 0.6765,
"step": 360
},
{
"epoch": 0.12578616352201258,
"grad_norm": 17.625,
"learning_rate": 9.450934579439253e-06,
"loss": 0.611,
"step": 380
},
{
"epoch": 0.132406487917908,
"grad_norm": 15.75,
"learning_rate": 9.4175567423231e-06,
"loss": 0.623,
"step": 400
},
{
"epoch": 0.13902681231380337,
"grad_norm": 24.5,
"learning_rate": 9.384178905206944e-06,
"loss": 0.6483,
"step": 420
},
{
"epoch": 0.14564713670969878,
"grad_norm": 19.5,
"learning_rate": 9.35080106809079e-06,
"loss": 0.5895,
"step": 440
},
{
"epoch": 0.15226746110559417,
"grad_norm": 20.5,
"learning_rate": 9.317423230974634e-06,
"loss": 0.6381,
"step": 460
},
{
"epoch": 0.15888778550148958,
"grad_norm": 21.875,
"learning_rate": 9.284045393858478e-06,
"loss": 0.6208,
"step": 480
},
{
"epoch": 0.16550810989738496,
"grad_norm": 16.0,
"learning_rate": 9.250667556742324e-06,
"loss": 0.5864,
"step": 500
},
{
"epoch": 0.17212843429328037,
"grad_norm": 15.375,
"learning_rate": 9.217289719626169e-06,
"loss": 0.6343,
"step": 520
},
{
"epoch": 0.17874875868917578,
"grad_norm": 18.75,
"learning_rate": 9.183911882510013e-06,
"loss": 0.6316,
"step": 540
},
{
"epoch": 0.18536908308507116,
"grad_norm": 12.0625,
"learning_rate": 9.150534045393859e-06,
"loss": 0.6067,
"step": 560
},
{
"epoch": 0.19198940748096657,
"grad_norm": 19.375,
"learning_rate": 9.117156208277705e-06,
"loss": 0.6179,
"step": 580
},
{
"epoch": 0.19860973187686196,
"grad_norm": 22.0,
"learning_rate": 9.08377837116155e-06,
"loss": 0.6558,
"step": 600
},
{
"epoch": 0.20523005627275737,
"grad_norm": 16.75,
"learning_rate": 9.050400534045395e-06,
"loss": 0.6367,
"step": 620
},
{
"epoch": 0.21185038066865278,
"grad_norm": 19.75,
"learning_rate": 9.01702269692924e-06,
"loss": 0.6075,
"step": 640
},
{
"epoch": 0.21847070506454816,
"grad_norm": 19.875,
"learning_rate": 8.983644859813084e-06,
"loss": 0.6385,
"step": 660
},
{
"epoch": 0.22509102946044357,
"grad_norm": 31.375,
"learning_rate": 8.95026702269693e-06,
"loss": 0.6369,
"step": 680
},
{
"epoch": 0.23171135385633895,
"grad_norm": 10.9375,
"learning_rate": 8.916889185580774e-06,
"loss": 0.6156,
"step": 700
},
{
"epoch": 0.23833167825223436,
"grad_norm": 30.125,
"learning_rate": 8.88351134846462e-06,
"loss": 0.6265,
"step": 720
},
{
"epoch": 0.24495200264812975,
"grad_norm": 462.0,
"learning_rate": 8.850133511348466e-06,
"loss": 0.5881,
"step": 740
},
{
"epoch": 0.25157232704402516,
"grad_norm": 15.1875,
"learning_rate": 8.81675567423231e-06,
"loss": 0.5912,
"step": 760
},
{
"epoch": 0.25819265143992054,
"grad_norm": 25.25,
"learning_rate": 8.783377837116157e-06,
"loss": 0.6167,
"step": 780
},
{
"epoch": 0.264812975835816,
"grad_norm": 15.75,
"learning_rate": 8.750000000000001e-06,
"loss": 0.6106,
"step": 800
},
{
"epoch": 0.27143330023171136,
"grad_norm": 22.5,
"learning_rate": 8.716622162883845e-06,
"loss": 0.6762,
"step": 820
},
{
"epoch": 0.27805362462760674,
"grad_norm": 25.375,
"learning_rate": 8.683244325767691e-06,
"loss": 0.5455,
"step": 840
},
{
"epoch": 0.2846739490235021,
"grad_norm": 18.5,
"learning_rate": 8.649866488651536e-06,
"loss": 0.6118,
"step": 860
},
{
"epoch": 0.29129427341939756,
"grad_norm": 26.5,
"learning_rate": 8.61648865153538e-06,
"loss": 0.6339,
"step": 880
},
{
"epoch": 0.29791459781529295,
"grad_norm": 8.8125,
"learning_rate": 8.583110814419226e-06,
"loss": 0.5795,
"step": 900
},
{
"epoch": 0.30453492221118833,
"grad_norm": 13.75,
"learning_rate": 8.549732977303072e-06,
"loss": 0.5702,
"step": 920
},
{
"epoch": 0.31115524660708377,
"grad_norm": 13.8125,
"learning_rate": 8.516355140186918e-06,
"loss": 0.5617,
"step": 940
},
{
"epoch": 0.31777557100297915,
"grad_norm": 14.25,
"learning_rate": 8.482977303070762e-06,
"loss": 0.6257,
"step": 960
},
{
"epoch": 0.32439589539887453,
"grad_norm": 15.5,
"learning_rate": 8.449599465954607e-06,
"loss": 0.6205,
"step": 980
},
{
"epoch": 0.3310162197947699,
"grad_norm": 19.375,
"learning_rate": 8.416221628838453e-06,
"loss": 0.5731,
"step": 1000
},
{
"epoch": 0.33763654419066536,
"grad_norm": 46.0,
"learning_rate": 8.382843791722297e-06,
"loss": 0.6116,
"step": 1020
},
{
"epoch": 0.34425686858656074,
"grad_norm": 15.5,
"learning_rate": 8.349465954606141e-06,
"loss": 0.612,
"step": 1040
},
{
"epoch": 0.3508771929824561,
"grad_norm": 36.0,
"learning_rate": 8.316088117489987e-06,
"loss": 0.5932,
"step": 1060
},
{
"epoch": 0.35749751737835156,
"grad_norm": 16.875,
"learning_rate": 8.282710280373832e-06,
"loss": 0.5667,
"step": 1080
},
{
"epoch": 0.36411784177424694,
"grad_norm": 19.25,
"learning_rate": 8.249332443257678e-06,
"loss": 0.5846,
"step": 1100
},
{
"epoch": 0.3707381661701423,
"grad_norm": 26.875,
"learning_rate": 8.215954606141524e-06,
"loss": 0.6549,
"step": 1120
},
{
"epoch": 0.37735849056603776,
"grad_norm": 15.0625,
"learning_rate": 8.182576769025368e-06,
"loss": 0.6129,
"step": 1140
},
{
"epoch": 0.38397881496193315,
"grad_norm": 11.4375,
"learning_rate": 8.149198931909212e-06,
"loss": 0.5143,
"step": 1160
},
{
"epoch": 0.39059913935782853,
"grad_norm": 9.3125,
"learning_rate": 8.115821094793058e-06,
"loss": 0.5842,
"step": 1180
},
{
"epoch": 0.3972194637537239,
"grad_norm": 20.0,
"learning_rate": 8.082443257676903e-06,
"loss": 0.5485,
"step": 1200
},
{
"epoch": 0.40383978814961935,
"grad_norm": 29.625,
"learning_rate": 8.049065420560749e-06,
"loss": 0.5844,
"step": 1220
},
{
"epoch": 0.41046011254551473,
"grad_norm": 16.0,
"learning_rate": 8.015687583444593e-06,
"loss": 0.5959,
"step": 1240
},
{
"epoch": 0.4170804369414101,
"grad_norm": 42.25,
"learning_rate": 7.982309746328439e-06,
"loss": 0.5758,
"step": 1260
},
{
"epoch": 0.42370076133730555,
"grad_norm": 18.25,
"learning_rate": 7.948931909212283e-06,
"loss": 0.5289,
"step": 1280
},
{
"epoch": 0.43032108573320094,
"grad_norm": 26.75,
"learning_rate": 7.91555407209613e-06,
"loss": 0.6503,
"step": 1300
},
{
"epoch": 0.4369414101290963,
"grad_norm": 26.75,
"learning_rate": 7.882176234979974e-06,
"loss": 0.6088,
"step": 1320
},
{
"epoch": 0.4435617345249917,
"grad_norm": 11.4375,
"learning_rate": 7.84879839786382e-06,
"loss": 0.58,
"step": 1340
},
{
"epoch": 0.45018205892088714,
"grad_norm": 14.5625,
"learning_rate": 7.815420560747664e-06,
"loss": 0.6087,
"step": 1360
},
{
"epoch": 0.4568023833167825,
"grad_norm": 21.375,
"learning_rate": 7.782042723631508e-06,
"loss": 0.531,
"step": 1380
},
{
"epoch": 0.4634227077126779,
"grad_norm": 20.5,
"learning_rate": 7.748664886515354e-06,
"loss": 0.57,
"step": 1400
},
{
"epoch": 0.47004303210857334,
"grad_norm": 22.375,
"learning_rate": 7.715287049399199e-06,
"loss": 0.57,
"step": 1420
},
{
"epoch": 0.4766633565044687,
"grad_norm": 24.75,
"learning_rate": 7.681909212283045e-06,
"loss": 0.5944,
"step": 1440
},
{
"epoch": 0.4832836809003641,
"grad_norm": 14.6875,
"learning_rate": 7.64853137516689e-06,
"loss": 0.6433,
"step": 1460
},
{
"epoch": 0.4899040052962595,
"grad_norm": 11.1875,
"learning_rate": 7.615153538050735e-06,
"loss": 0.5787,
"step": 1480
},
{
"epoch": 0.49652432969215493,
"grad_norm": 13.5,
"learning_rate": 7.58177570093458e-06,
"loss": 0.5239,
"step": 1500
},
{
"epoch": 0.5031446540880503,
"grad_norm": 26.875,
"learning_rate": 7.548397863818425e-06,
"loss": 0.5663,
"step": 1520
},
{
"epoch": 0.5097649784839458,
"grad_norm": 11.0625,
"learning_rate": 7.5150200267022696e-06,
"loss": 0.5335,
"step": 1540
},
{
"epoch": 0.5163853028798411,
"grad_norm": 13.6875,
"learning_rate": 7.481642189586116e-06,
"loss": 0.5628,
"step": 1560
},
{
"epoch": 0.5230056272757365,
"grad_norm": 27.625,
"learning_rate": 7.448264352469961e-06,
"loss": 0.6499,
"step": 1580
},
{
"epoch": 0.529625951671632,
"grad_norm": 57.25,
"learning_rate": 7.414886515353805e-06,
"loss": 0.578,
"step": 1600
},
{
"epoch": 0.5362462760675273,
"grad_norm": 13.8125,
"learning_rate": 7.381508678237651e-06,
"loss": 0.5788,
"step": 1620
},
{
"epoch": 0.5428666004634227,
"grad_norm": 10.6875,
"learning_rate": 7.348130841121495e-06,
"loss": 0.541,
"step": 1640
},
{
"epoch": 0.5494869248593182,
"grad_norm": 14.375,
"learning_rate": 7.314753004005341e-06,
"loss": 0.5834,
"step": 1660
},
{
"epoch": 0.5561072492552135,
"grad_norm": 10.8125,
"learning_rate": 7.281375166889187e-06,
"loss": 0.5852,
"step": 1680
},
{
"epoch": 0.5627275736511089,
"grad_norm": 61.0,
"learning_rate": 7.247997329773031e-06,
"loss": 0.6254,
"step": 1700
},
{
"epoch": 0.5693478980470043,
"grad_norm": 14.9375,
"learning_rate": 7.214619492656877e-06,
"loss": 0.5635,
"step": 1720
},
{
"epoch": 0.5759682224428997,
"grad_norm": 14.0,
"learning_rate": 7.181241655540721e-06,
"loss": 0.543,
"step": 1740
},
{
"epoch": 0.5825885468387951,
"grad_norm": 16.0,
"learning_rate": 7.147863818424566e-06,
"loss": 0.6006,
"step": 1760
},
{
"epoch": 0.5892088712346905,
"grad_norm": 26.875,
"learning_rate": 7.114485981308412e-06,
"loss": 0.5916,
"step": 1780
},
{
"epoch": 0.5958291956305859,
"grad_norm": 36.75,
"learning_rate": 7.081108144192257e-06,
"loss": 0.5724,
"step": 1800
},
{
"epoch": 0.6024495200264813,
"grad_norm": 14.6875,
"learning_rate": 7.047730307076102e-06,
"loss": 0.562,
"step": 1820
},
{
"epoch": 0.6090698444223767,
"grad_norm": 15.25,
"learning_rate": 7.014352469959947e-06,
"loss": 0.5978,
"step": 1840
},
{
"epoch": 0.6156901688182721,
"grad_norm": 12.0625,
"learning_rate": 6.980974632843792e-06,
"loss": 0.5607,
"step": 1860
},
{
"epoch": 0.6223104932141675,
"grad_norm": 8.8125,
"learning_rate": 6.9475967957276366e-06,
"loss": 0.5186,
"step": 1880
},
{
"epoch": 0.6289308176100629,
"grad_norm": 23.25,
"learning_rate": 6.9142189586114826e-06,
"loss": 0.5954,
"step": 1900
},
{
"epoch": 0.6355511420059583,
"grad_norm": 24.75,
"learning_rate": 6.880841121495328e-06,
"loss": 0.5794,
"step": 1920
},
{
"epoch": 0.6421714664018537,
"grad_norm": 15.0,
"learning_rate": 6.847463284379173e-06,
"loss": 0.5931,
"step": 1940
},
{
"epoch": 0.6487917907977491,
"grad_norm": 13.875,
"learning_rate": 6.814085447263018e-06,
"loss": 0.6028,
"step": 1960
},
{
"epoch": 0.6554121151936445,
"grad_norm": 23.125,
"learning_rate": 6.780707610146862e-06,
"loss": 0.6216,
"step": 1980
},
{
"epoch": 0.6620324395895398,
"grad_norm": 14.9375,
"learning_rate": 6.747329773030708e-06,
"loss": 0.6056,
"step": 2000
},
{
"epoch": 0.6686527639854353,
"grad_norm": 17.25,
"learning_rate": 6.713951935914554e-06,
"loss": 0.5911,
"step": 2020
},
{
"epoch": 0.6752730883813307,
"grad_norm": 18.0,
"learning_rate": 6.680574098798398e-06,
"loss": 0.5472,
"step": 2040
},
{
"epoch": 0.681893412777226,
"grad_norm": 8.5,
"learning_rate": 6.647196261682244e-06,
"loss": 0.5363,
"step": 2060
},
{
"epoch": 0.6885137371731215,
"grad_norm": 13.5,
"learning_rate": 6.613818424566088e-06,
"loss": 0.5892,
"step": 2080
},
{
"epoch": 0.6951340615690169,
"grad_norm": 140.0,
"learning_rate": 6.580440587449933e-06,
"loss": 0.5527,
"step": 2100
},
{
"epoch": 0.7017543859649122,
"grad_norm": 10.375,
"learning_rate": 6.547062750333779e-06,
"loss": 0.5269,
"step": 2120
},
{
"epoch": 0.7083747103608077,
"grad_norm": 18.375,
"learning_rate": 6.513684913217624e-06,
"loss": 0.5787,
"step": 2140
},
{
"epoch": 0.7149950347567031,
"grad_norm": 91.0,
"learning_rate": 6.48030707610147e-06,
"loss": 0.5397,
"step": 2160
},
{
"epoch": 0.7216153591525984,
"grad_norm": 46.25,
"learning_rate": 6.446929238985314e-06,
"loss": 0.5864,
"step": 2180
},
{
"epoch": 0.7282356835484939,
"grad_norm": 19.0,
"learning_rate": 6.413551401869159e-06,
"loss": 0.6393,
"step": 2200
},
{
"epoch": 0.7348560079443893,
"grad_norm": 16.125,
"learning_rate": 6.380173564753005e-06,
"loss": 0.5432,
"step": 2220
},
{
"epoch": 0.7414763323402846,
"grad_norm": 13.8125,
"learning_rate": 6.3467957276368496e-06,
"loss": 0.5974,
"step": 2240
},
{
"epoch": 0.7480966567361801,
"grad_norm": 18.0,
"learning_rate": 6.313417890520695e-06,
"loss": 0.5572,
"step": 2260
},
{
"epoch": 0.7547169811320755,
"grad_norm": 28.0,
"learning_rate": 6.28004005340454e-06,
"loss": 0.5552,
"step": 2280
},
{
"epoch": 0.7613373055279709,
"grad_norm": 19.0,
"learning_rate": 6.246662216288385e-06,
"loss": 0.5658,
"step": 2300
},
{
"epoch": 0.7679576299238663,
"grad_norm": 13.375,
"learning_rate": 6.213284379172229e-06,
"loss": 0.62,
"step": 2320
},
{
"epoch": 0.7745779543197616,
"grad_norm": 12.0,
"learning_rate": 6.179906542056075e-06,
"loss": 0.5836,
"step": 2340
},
{
"epoch": 0.7811982787156571,
"grad_norm": 15.3125,
"learning_rate": 6.1465287049399206e-06,
"loss": 0.5605,
"step": 2360
},
{
"epoch": 0.7878186031115525,
"grad_norm": 9.125,
"learning_rate": 6.113150867823765e-06,
"loss": 0.5827,
"step": 2380
},
{
"epoch": 0.7944389275074478,
"grad_norm": 16.875,
"learning_rate": 6.079773030707611e-06,
"loss": 0.5932,
"step": 2400
},
{
"epoch": 0.8010592519033433,
"grad_norm": 15.3125,
"learning_rate": 6.046395193591455e-06,
"loss": 0.5565,
"step": 2420
},
{
"epoch": 0.8076795762992387,
"grad_norm": 15.6875,
"learning_rate": 6.013017356475301e-06,
"loss": 0.5594,
"step": 2440
},
{
"epoch": 0.814299900695134,
"grad_norm": 28.75,
"learning_rate": 5.979639519359146e-06,
"loss": 0.5853,
"step": 2460
},
{
"epoch": 0.8209202250910295,
"grad_norm": 28.0,
"learning_rate": 5.946261682242991e-06,
"loss": 0.5193,
"step": 2480
},
{
"epoch": 0.8275405494869249,
"grad_norm": 9.875,
"learning_rate": 5.912883845126837e-06,
"loss": 0.5588,
"step": 2500
},
{
"epoch": 0.8341608738828202,
"grad_norm": 13.25,
"learning_rate": 5.879506008010681e-06,
"loss": 0.5621,
"step": 2520
},
{
"epoch": 0.8407811982787157,
"grad_norm": 48.5,
"learning_rate": 5.846128170894526e-06,
"loss": 0.5788,
"step": 2540
},
{
"epoch": 0.8474015226746111,
"grad_norm": 20.375,
"learning_rate": 5.812750333778372e-06,
"loss": 0.6038,
"step": 2560
},
{
"epoch": 0.8540218470705064,
"grad_norm": 10.0,
"learning_rate": 5.7793724966622166e-06,
"loss": 0.516,
"step": 2580
},
{
"epoch": 0.8606421714664019,
"grad_norm": 15.0625,
"learning_rate": 5.745994659546062e-06,
"loss": 0.5329,
"step": 2600
},
{
"epoch": 0.8672624958622972,
"grad_norm": 33.0,
"learning_rate": 5.712616822429907e-06,
"loss": 0.5484,
"step": 2620
},
{
"epoch": 0.8738828202581926,
"grad_norm": 11.125,
"learning_rate": 5.679238985313752e-06,
"loss": 0.53,
"step": 2640
},
{
"epoch": 0.8805031446540881,
"grad_norm": 11.5625,
"learning_rate": 5.645861148197598e-06,
"loss": 0.6449,
"step": 2660
},
{
"epoch": 0.8871234690499834,
"grad_norm": 17.75,
"learning_rate": 5.612483311081442e-06,
"loss": 0.6072,
"step": 2680
},
{
"epoch": 0.8937437934458788,
"grad_norm": 15.8125,
"learning_rate": 5.5791054739652876e-06,
"loss": 0.5155,
"step": 2700
},
{
"epoch": 0.9003641178417743,
"grad_norm": 13.1875,
"learning_rate": 5.545727636849133e-06,
"loss": 0.5687,
"step": 2720
},
{
"epoch": 0.9069844422376696,
"grad_norm": 14.4375,
"learning_rate": 5.512349799732978e-06,
"loss": 0.5353,
"step": 2740
},
{
"epoch": 0.913604766633565,
"grad_norm": 17.75,
"learning_rate": 5.478971962616822e-06,
"loss": 0.5925,
"step": 2760
},
{
"epoch": 0.9202250910294605,
"grad_norm": 29.125,
"learning_rate": 5.445594125500668e-06,
"loss": 0.5391,
"step": 2780
},
{
"epoch": 0.9268454154253558,
"grad_norm": 48.5,
"learning_rate": 5.412216288384513e-06,
"loss": 0.6109,
"step": 2800
},
{
"epoch": 0.9334657398212513,
"grad_norm": 22.375,
"learning_rate": 5.378838451268358e-06,
"loss": 0.5756,
"step": 2820
},
{
"epoch": 0.9400860642171467,
"grad_norm": 23.375,
"learning_rate": 5.345460614152204e-06,
"loss": 0.6174,
"step": 2840
},
{
"epoch": 0.946706388613042,
"grad_norm": 31.125,
"learning_rate": 5.312082777036048e-06,
"loss": 0.5758,
"step": 2860
},
{
"epoch": 0.9533267130089375,
"grad_norm": 25.125,
"learning_rate": 5.278704939919893e-06,
"loss": 0.5311,
"step": 2880
},
{
"epoch": 0.9599470374048328,
"grad_norm": 14.375,
"learning_rate": 5.245327102803739e-06,
"loss": 0.5638,
"step": 2900
},
{
"epoch": 0.9665673618007282,
"grad_norm": 12.5,
"learning_rate": 5.2119492656875836e-06,
"loss": 0.5714,
"step": 2920
},
{
"epoch": 0.9731876861966237,
"grad_norm": 23.375,
"learning_rate": 5.1785714285714296e-06,
"loss": 0.5827,
"step": 2940
},
{
"epoch": 0.979808010592519,
"grad_norm": 13.4375,
"learning_rate": 5.145193591455274e-06,
"loss": 0.5574,
"step": 2960
},
{
"epoch": 0.9864283349884144,
"grad_norm": 13.25,
"learning_rate": 5.111815754339119e-06,
"loss": 0.5829,
"step": 2980
},
{
"epoch": 0.9930486593843099,
"grad_norm": 18.125,
"learning_rate": 5.078437917222965e-06,
"loss": 0.5748,
"step": 3000
},
{
"epoch": 0.9996689837802052,
"grad_norm": 24.875,
"learning_rate": 5.045060080106809e-06,
"loss": 0.5445,
"step": 3020
},
{
"epoch": 1.0062893081761006,
"grad_norm": 15.75,
"learning_rate": 5.0116822429906546e-06,
"loss": 0.5045,
"step": 3040
},
{
"epoch": 1.012909632571996,
"grad_norm": 30.75,
"learning_rate": 4.9783044058745e-06,
"loss": 0.5523,
"step": 3060
},
{
"epoch": 1.0195299569678915,
"grad_norm": 15.0,
"learning_rate": 4.944926568758345e-06,
"loss": 0.5192,
"step": 3080
},
{
"epoch": 1.0261502813637868,
"grad_norm": 9.625,
"learning_rate": 4.91154873164219e-06,
"loss": 0.6295,
"step": 3100
},
{
"epoch": 1.0327706057596822,
"grad_norm": 15.6875,
"learning_rate": 4.878170894526035e-06,
"loss": 0.5496,
"step": 3120
},
{
"epoch": 1.0393909301555777,
"grad_norm": 10.9375,
"learning_rate": 4.84479305740988e-06,
"loss": 0.4973,
"step": 3140
},
{
"epoch": 1.046011254551473,
"grad_norm": 49.75,
"learning_rate": 4.8114152202937256e-06,
"loss": 0.5615,
"step": 3160
},
{
"epoch": 1.0526315789473684,
"grad_norm": 25.75,
"learning_rate": 4.778037383177571e-06,
"loss": 0.4783,
"step": 3180
},
{
"epoch": 1.059251903343264,
"grad_norm": 13.8125,
"learning_rate": 4.744659546061416e-06,
"loss": 0.5049,
"step": 3200
},
{
"epoch": 1.0658722277391592,
"grad_norm": 15.625,
"learning_rate": 4.71128170894526e-06,
"loss": 0.5896,
"step": 3220
},
{
"epoch": 1.0724925521350546,
"grad_norm": 19.375,
"learning_rate": 4.677903871829106e-06,
"loss": 0.5383,
"step": 3240
},
{
"epoch": 1.0791128765309501,
"grad_norm": 15.0625,
"learning_rate": 4.644526034712951e-06,
"loss": 0.4912,
"step": 3260
},
{
"epoch": 1.0857332009268454,
"grad_norm": 10.875,
"learning_rate": 4.611148197596796e-06,
"loss": 0.4702,
"step": 3280
},
{
"epoch": 1.0923535253227408,
"grad_norm": 51.0,
"learning_rate": 4.577770360480641e-06,
"loss": 0.4749,
"step": 3300
},
{
"epoch": 1.0989738497186363,
"grad_norm": 9.125,
"learning_rate": 4.544392523364486e-06,
"loss": 0.5195,
"step": 3320
},
{
"epoch": 1.1055941741145316,
"grad_norm": 15.875,
"learning_rate": 4.511014686248332e-06,
"loss": 0.5295,
"step": 3340
},
{
"epoch": 1.112214498510427,
"grad_norm": 18.375,
"learning_rate": 4.477636849132176e-06,
"loss": 0.5202,
"step": 3360
},
{
"epoch": 1.1188348229063223,
"grad_norm": 18.75,
"learning_rate": 4.4442590120160216e-06,
"loss": 0.5237,
"step": 3380
},
{
"epoch": 1.1254551473022179,
"grad_norm": 19.625,
"learning_rate": 4.410881174899867e-06,
"loss": 0.475,
"step": 3400
},
{
"epoch": 1.1320754716981132,
"grad_norm": 21.0,
"learning_rate": 4.377503337783712e-06,
"loss": 0.4982,
"step": 3420
},
{
"epoch": 1.1386957960940087,
"grad_norm": 19.75,
"learning_rate": 4.344125500667557e-06,
"loss": 0.5446,
"step": 3440
},
{
"epoch": 1.145316120489904,
"grad_norm": 18.0,
"learning_rate": 4.310747663551402e-06,
"loss": 0.5471,
"step": 3460
},
{
"epoch": 1.1519364448857994,
"grad_norm": 13.375,
"learning_rate": 4.277369826435247e-06,
"loss": 0.4676,
"step": 3480
},
{
"epoch": 1.1585567692816947,
"grad_norm": 17.25,
"learning_rate": 4.2439919893190926e-06,
"loss": 0.5296,
"step": 3500
},
{
"epoch": 1.1651770936775903,
"grad_norm": 19.125,
"learning_rate": 4.210614152202938e-06,
"loss": 0.5449,
"step": 3520
},
{
"epoch": 1.1717974180734856,
"grad_norm": 13.625,
"learning_rate": 4.177236315086783e-06,
"loss": 0.517,
"step": 3540
},
{
"epoch": 1.178417742469381,
"grad_norm": 11.3125,
"learning_rate": 4.143858477970627e-06,
"loss": 0.562,
"step": 3560
},
{
"epoch": 1.1850380668652765,
"grad_norm": 12.8125,
"learning_rate": 4.110480640854473e-06,
"loss": 0.5341,
"step": 3580
},
{
"epoch": 1.1916583912611718,
"grad_norm": 13.9375,
"learning_rate": 4.077102803738318e-06,
"loss": 0.5291,
"step": 3600
},
{
"epoch": 1.1982787156570671,
"grad_norm": 15.6875,
"learning_rate": 4.0437249666221636e-06,
"loss": 0.4853,
"step": 3620
},
{
"epoch": 1.2048990400529627,
"grad_norm": 7.40625,
"learning_rate": 4.010347129506008e-06,
"loss": 0.49,
"step": 3640
},
{
"epoch": 1.211519364448858,
"grad_norm": 17.625,
"learning_rate": 3.976969292389853e-06,
"loss": 0.5523,
"step": 3660
},
{
"epoch": 1.2181396888447533,
"grad_norm": 13.4375,
"learning_rate": 3.943591455273699e-06,
"loss": 0.5599,
"step": 3680
},
{
"epoch": 1.2247600132406489,
"grad_norm": 35.5,
"learning_rate": 3.910213618157544e-06,
"loss": 0.5391,
"step": 3700
},
{
"epoch": 1.2313803376365442,
"grad_norm": 26.75,
"learning_rate": 3.8768357810413885e-06,
"loss": 0.4959,
"step": 3720
},
{
"epoch": 1.2380006620324395,
"grad_norm": 11.375,
"learning_rate": 3.843457943925234e-06,
"loss": 0.5286,
"step": 3740
},
{
"epoch": 1.244620986428335,
"grad_norm": 11.375,
"learning_rate": 3.8100801068090793e-06,
"loss": 0.4926,
"step": 3760
},
{
"epoch": 1.2512413108242304,
"grad_norm": 12.0,
"learning_rate": 3.776702269692924e-06,
"loss": 0.4793,
"step": 3780
},
{
"epoch": 1.2578616352201257,
"grad_norm": 13.125,
"learning_rate": 3.7433244325767692e-06,
"loss": 0.5229,
"step": 3800
},
{
"epoch": 1.2644819596160213,
"grad_norm": 18.25,
"learning_rate": 3.7099465954606144e-06,
"loss": 0.4884,
"step": 3820
},
{
"epoch": 1.2711022840119166,
"grad_norm": 18.75,
"learning_rate": 3.67656875834446e-06,
"loss": 0.4978,
"step": 3840
},
{
"epoch": 1.277722608407812,
"grad_norm": 11.8125,
"learning_rate": 3.6431909212283047e-06,
"loss": 0.5222,
"step": 3860
},
{
"epoch": 1.2843429328037073,
"grad_norm": 11.8125,
"learning_rate": 3.60981308411215e-06,
"loss": 0.4939,
"step": 3880
},
{
"epoch": 1.2909632571996028,
"grad_norm": 13.9375,
"learning_rate": 3.576435246995995e-06,
"loss": 0.5503,
"step": 3900
},
{
"epoch": 1.2975835815954981,
"grad_norm": 18.0,
"learning_rate": 3.54305740987984e-06,
"loss": 0.4838,
"step": 3920
},
{
"epoch": 1.3042039059913937,
"grad_norm": 10.25,
"learning_rate": 3.509679572763685e-06,
"loss": 0.5089,
"step": 3940
},
{
"epoch": 1.310824230387289,
"grad_norm": 10.625,
"learning_rate": 3.4763017356475305e-06,
"loss": 0.5303,
"step": 3960
},
{
"epoch": 1.3174445547831843,
"grad_norm": 26.625,
"learning_rate": 3.4429238985313757e-06,
"loss": 0.5523,
"step": 3980
},
{
"epoch": 1.3240648791790797,
"grad_norm": 15.125,
"learning_rate": 3.4095460614152205e-06,
"loss": 0.4822,
"step": 4000
},
{
"epoch": 1.3306852035749752,
"grad_norm": 15.8125,
"learning_rate": 3.3761682242990656e-06,
"loss": 0.5181,
"step": 4020
},
{
"epoch": 1.3373055279708705,
"grad_norm": 16.125,
"learning_rate": 3.342790387182911e-06,
"loss": 0.4931,
"step": 4040
},
{
"epoch": 1.343925852366766,
"grad_norm": 19.875,
"learning_rate": 3.3094125500667564e-06,
"loss": 0.5432,
"step": 4060
},
{
"epoch": 1.3505461767626614,
"grad_norm": 15.6875,
"learning_rate": 3.276034712950601e-06,
"loss": 0.5477,
"step": 4080
},
{
"epoch": 1.3571665011585567,
"grad_norm": 24.625,
"learning_rate": 3.2426568758344463e-06,
"loss": 0.543,
"step": 4100
},
{
"epoch": 1.363786825554452,
"grad_norm": 15.0625,
"learning_rate": 3.2092790387182915e-06,
"loss": 0.4995,
"step": 4120
},
{
"epoch": 1.3704071499503476,
"grad_norm": 16.875,
"learning_rate": 3.175901201602136e-06,
"loss": 0.5271,
"step": 4140
},
{
"epoch": 1.377027474346243,
"grad_norm": 18.125,
"learning_rate": 3.1425233644859814e-06,
"loss": 0.5472,
"step": 4160
},
{
"epoch": 1.3836477987421385,
"grad_norm": 16.25,
"learning_rate": 3.109145527369827e-06,
"loss": 0.5145,
"step": 4180
},
{
"epoch": 1.3902681231380338,
"grad_norm": 16.625,
"learning_rate": 3.075767690253672e-06,
"loss": 0.5653,
"step": 4200
},
{
"epoch": 1.3968884475339292,
"grad_norm": 33.0,
"learning_rate": 3.042389853137517e-06,
"loss": 0.5437,
"step": 4220
},
{
"epoch": 1.4035087719298245,
"grad_norm": 40.75,
"learning_rate": 3.009012016021362e-06,
"loss": 0.4881,
"step": 4240
},
{
"epoch": 1.41012909632572,
"grad_norm": 15.625,
"learning_rate": 2.975634178905207e-06,
"loss": 0.5415,
"step": 4260
},
{
"epoch": 1.4167494207216154,
"grad_norm": 29.625,
"learning_rate": 2.942256341789052e-06,
"loss": 0.475,
"step": 4280
},
{
"epoch": 1.4233697451175107,
"grad_norm": 9.4375,
"learning_rate": 2.9088785046728975e-06,
"loss": 0.5296,
"step": 4300
},
{
"epoch": 1.4299900695134062,
"grad_norm": 24.5,
"learning_rate": 2.8755006675567427e-06,
"loss": 0.5698,
"step": 4320
},
{
"epoch": 1.4366103939093016,
"grad_norm": 16.5,
"learning_rate": 2.842122830440588e-06,
"loss": 0.4731,
"step": 4340
},
{
"epoch": 1.443230718305197,
"grad_norm": 17.125,
"learning_rate": 2.8087449933244326e-06,
"loss": 0.5652,
"step": 4360
},
{
"epoch": 1.4498510427010922,
"grad_norm": 16.5,
"learning_rate": 2.7753671562082778e-06,
"loss": 0.5158,
"step": 4380
},
{
"epoch": 1.4564713670969878,
"grad_norm": 27.25,
"learning_rate": 2.7419893190921234e-06,
"loss": 0.5035,
"step": 4400
},
{
"epoch": 1.463091691492883,
"grad_norm": 12.25,
"learning_rate": 2.7086114819759677e-06,
"loss": 0.4578,
"step": 4420
},
{
"epoch": 1.4697120158887786,
"grad_norm": 14.6875,
"learning_rate": 2.6752336448598133e-06,
"loss": 0.4649,
"step": 4440
},
{
"epoch": 1.476332340284674,
"grad_norm": 32.5,
"learning_rate": 2.6418558077436585e-06,
"loss": 0.5685,
"step": 4460
},
{
"epoch": 1.4829526646805693,
"grad_norm": 15.25,
"learning_rate": 2.6084779706275036e-06,
"loss": 0.5348,
"step": 4480
},
{
"epoch": 1.4895729890764646,
"grad_norm": 26.75,
"learning_rate": 2.5751001335113484e-06,
"loss": 0.4996,
"step": 4500
}
],
"logging_steps": 20,
"max_steps": 6042,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 1500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.12103856601301e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}