{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 1941,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.015458937198067632,
"grad_norm": 11.5,
"learning_rate": 7.627118644067798e-06,
"loss": 3.6877,
"step": 10
},
{
"epoch": 0.030917874396135265,
"grad_norm": 7.25,
"learning_rate": 1.6101694915254237e-05,
"loss": 3.3724,
"step": 20
},
{
"epoch": 0.0463768115942029,
"grad_norm": 7.125,
"learning_rate": 2.457627118644068e-05,
"loss": 3.143,
"step": 30
},
{
"epoch": 0.06183574879227053,
"grad_norm": 6.15625,
"learning_rate": 3.305084745762712e-05,
"loss": 3.2346,
"step": 40
},
{
"epoch": 0.07729468599033816,
"grad_norm": 6.0,
"learning_rate": 4.152542372881356e-05,
"loss": 2.9242,
"step": 50
},
{
"epoch": 0.0927536231884058,
"grad_norm": 5.5625,
"learning_rate": 5e-05,
"loss": 2.9235,
"step": 60
},
{
"epoch": 0.10821256038647344,
"grad_norm": 4.71875,
"learning_rate": 4.999651694417285e-05,
"loss": 3.0094,
"step": 70
},
{
"epoch": 0.12367149758454106,
"grad_norm": 4.0625,
"learning_rate": 4.9986068747225644e-05,
"loss": 3.0419,
"step": 80
},
{
"epoch": 0.1391304347826087,
"grad_norm": 3.96875,
"learning_rate": 4.9968658320490636e-05,
"loss": 2.8573,
"step": 90
},
{
"epoch": 0.15458937198067632,
"grad_norm": 3.40625,
"learning_rate": 4.994429051528689e-05,
"loss": 2.9456,
"step": 100
},
{
"epoch": 0.17004830917874397,
"grad_norm": 3.5,
"learning_rate": 4.991297212156848e-05,
"loss": 2.9164,
"step": 110
},
{
"epoch": 0.1855072463768116,
"grad_norm": 3.328125,
"learning_rate": 4.9874711866032495e-05,
"loss": 2.9057,
"step": 120
},
{
"epoch": 0.20096618357487922,
"grad_norm": 5.25,
"learning_rate": 4.982952040968744e-05,
"loss": 2.9002,
"step": 130
},
{
"epoch": 0.21642512077294687,
"grad_norm": 3.40625,
"learning_rate": 4.977741034488251e-05,
"loss": 2.6435,
"step": 140
},
{
"epoch": 0.2318840579710145,
"grad_norm": 3.15625,
"learning_rate": 4.971839619179892e-05,
"loss": 2.9411,
"step": 150
},
{
"epoch": 0.24734299516908212,
"grad_norm": 4.28125,
"learning_rate": 4.965249439440384e-05,
"loss": 2.6526,
"step": 160
},
{
"epoch": 0.26280193236714977,
"grad_norm": 7.0,
"learning_rate": 4.957972331586843e-05,
"loss": 2.6599,
"step": 170
},
{
"epoch": 0.2782608695652174,
"grad_norm": 3.296875,
"learning_rate": 4.950010323345103e-05,
"loss": 2.7398,
"step": 180
},
{
"epoch": 0.293719806763285,
"grad_norm": 4.9375,
"learning_rate": 4.941365633284699e-05,
"loss": 2.7388,
"step": 190
},
{
"epoch": 0.30917874396135264,
"grad_norm": 4.125,
"learning_rate": 4.932040670200677e-05,
"loss": 2.8344,
"step": 200
},
{
"epoch": 0.32463768115942027,
"grad_norm": 6.0,
"learning_rate": 4.9220380324424006e-05,
"loss": 2.9072,
"step": 210
},
{
"epoch": 0.34009661835748795,
"grad_norm": 4.40625,
"learning_rate": 4.911360507189526e-05,
"loss": 2.6469,
"step": 220
},
{
"epoch": 0.35555555555555557,
"grad_norm": 4.28125,
"learning_rate": 4.900011069675378e-05,
"loss": 2.6556,
"step": 230
},
{
"epoch": 0.3710144927536232,
"grad_norm": 2.796875,
"learning_rate": 4.8879928823579136e-05,
"loss": 2.5898,
"step": 240
},
{
"epoch": 0.3864734299516908,
"grad_norm": 3.484375,
"learning_rate": 4.875309294038523e-05,
"loss": 2.6991,
"step": 250
},
{
"epoch": 0.40193236714975844,
"grad_norm": 4.25,
"learning_rate": 4.8619638389289026e-05,
"loss": 2.667,
"step": 260
},
{
"epoch": 0.41739130434782606,
"grad_norm": 3.34375,
"learning_rate": 4.8479602356662665e-05,
"loss": 2.7002,
"step": 270
},
{
"epoch": 0.43285024154589374,
"grad_norm": 4.71875,
"learning_rate": 4.833302386277171e-05,
"loss": 2.7914,
"step": 280
},
{
"epoch": 0.44830917874396137,
"grad_norm": 4.15625,
"learning_rate": 4.817994375090233e-05,
"loss": 2.6703,
"step": 290
},
{
"epoch": 0.463768115942029,
"grad_norm": 3.140625,
"learning_rate": 4.80204046759806e-05,
"loss": 2.8089,
"step": 300
},
{
"epoch": 0.4792270531400966,
"grad_norm": 4.90625,
"learning_rate": 4.785445109268687e-05,
"loss": 2.7172,
"step": 310
},
{
"epoch": 0.49468599033816424,
"grad_norm": 2.921875,
"learning_rate": 4.768212924306877e-05,
"loss": 2.617,
"step": 320
},
{
"epoch": 0.5101449275362319,
"grad_norm": 2.96875,
"learning_rate": 4.75034871436561e-05,
"loss": 2.7096,
"step": 330
},
{
"epoch": 0.5256038647342995,
"grad_norm": 4.75,
"learning_rate": 4.7318574572081275e-05,
"loss": 2.6772,
"step": 340
},
{
"epoch": 0.5410628019323671,
"grad_norm": 4.90625,
"learning_rate": 4.712744305320911e-05,
"loss": 2.7253,
"step": 350
},
{
"epoch": 0.5565217391304348,
"grad_norm": 3.34375,
"learning_rate": 4.6930145844779626e-05,
"loss": 2.6389,
"step": 360
},
{
"epoch": 0.5719806763285025,
"grad_norm": 2.84375,
"learning_rate": 4.672673792256816e-05,
"loss": 2.6831,
"step": 370
},
{
"epoch": 0.58743961352657,
"grad_norm": 2.921875,
"learning_rate": 4.651727596506661e-05,
"loss": 2.7029,
"step": 380
},
{
"epoch": 0.6028985507246377,
"grad_norm": 3.765625,
"learning_rate": 4.63018183376903e-05,
"loss": 2.7264,
"step": 390
},
{
"epoch": 0.6183574879227053,
"grad_norm": 4.28125,
"learning_rate": 4.60804250765148e-05,
"loss": 2.6494,
"step": 400
},
{
"epoch": 0.633816425120773,
"grad_norm": 3.0625,
"learning_rate": 4.5853157871547184e-05,
"loss": 2.6504,
"step": 410
},
{
"epoch": 0.6492753623188405,
"grad_norm": 3.515625,
"learning_rate": 4.562008004953644e-05,
"loss": 2.817,
"step": 420
},
{
"epoch": 0.6647342995169082,
"grad_norm": 4.0,
"learning_rate": 4.538125655632789e-05,
"loss": 2.5887,
"step": 430
},
{
"epoch": 0.6801932367149759,
"grad_norm": 2.984375,
"learning_rate": 4.5136753938766286e-05,
"loss": 2.6033,
"step": 440
},
{
"epoch": 0.6956521739130435,
"grad_norm": 3.8125,
"learning_rate": 4.4886640326152985e-05,
"loss": 2.6842,
"step": 450
},
{
"epoch": 0.7111111111111111,
"grad_norm": 4.53125,
"learning_rate": 4.4630985411262054e-05,
"loss": 2.6597,
"step": 460
},
{
"epoch": 0.7265700483091787,
"grad_norm": 3.9375,
"learning_rate": 4.4369860430920776e-05,
"loss": 2.5888,
"step": 470
},
{
"epoch": 0.7420289855072464,
"grad_norm": 3.59375,
"learning_rate": 4.41033381461599e-05,
"loss": 2.6027,
"step": 480
},
{
"epoch": 0.7574879227053141,
"grad_norm": 3.15625,
"learning_rate": 4.383149282193919e-05,
"loss": 2.4755,
"step": 490
},
{
"epoch": 0.7729468599033816,
"grad_norm": 4.53125,
"learning_rate": 4.35544002064539e-05,
"loss": 2.5931,
"step": 500
},
{
"epoch": 0.7884057971014493,
"grad_norm": 4.09375,
"learning_rate": 4.327213751002794e-05,
"loss": 2.6694,
"step": 510
},
{
"epoch": 0.8038647342995169,
"grad_norm": 2.984375,
"learning_rate": 4.298478338359968e-05,
"loss": 2.6839,
"step": 520
},
{
"epoch": 0.8193236714975846,
"grad_norm": 3.1875,
"learning_rate": 4.269241789680629e-05,
"loss": 2.4991,
"step": 530
},
{
"epoch": 0.8347826086956521,
"grad_norm": 3.78125,
"learning_rate": 4.239512251567275e-05,
"loss": 2.5734,
"step": 540
},
{
"epoch": 0.8502415458937198,
"grad_norm": 3.546875,
"learning_rate": 4.2092980079911836e-05,
"loss": 2.5269,
"step": 550
},
{
"epoch": 0.8657004830917875,
"grad_norm": 4.25,
"learning_rate": 4.178607477984128e-05,
"loss": 2.4704,
"step": 560
},
{
"epoch": 0.881159420289855,
"grad_norm": 4.59375,
"learning_rate": 4.147449213292457e-05,
"loss": 2.6069,
"step": 570
},
{
"epoch": 0.8966183574879227,
"grad_norm": 4.03125,
"learning_rate": 4.115831895994203e-05,
"loss": 2.538,
"step": 580
},
{
"epoch": 0.9120772946859903,
"grad_norm": 3.0625,
"learning_rate": 4.0837643360798666e-05,
"loss": 2.6185,
"step": 590
},
{
"epoch": 0.927536231884058,
"grad_norm": 3.78125,
"learning_rate": 4.051255468997562e-05,
"loss": 2.4834,
"step": 600
},
{
"epoch": 0.9429951690821256,
"grad_norm": 3.78125,
"learning_rate": 4.018314353163202e-05,
"loss": 2.504,
"step": 610
},
{
"epoch": 0.9584541062801932,
"grad_norm": 3.59375,
"learning_rate": 3.984950167436423e-05,
"loss": 2.4611,
"step": 620
},
{
"epoch": 0.9739130434782609,
"grad_norm": 2.765625,
"learning_rate": 3.9511722085629485e-05,
"loss": 2.5228,
"step": 630
},
{
"epoch": 0.9893719806763285,
"grad_norm": 2.875,
"learning_rate": 3.916989888584095e-05,
"loss": 2.4755,
"step": 640
},
{
"epoch": 1.0046376811594202,
"grad_norm": 3.015625,
"learning_rate": 3.882412732214167e-05,
"loss": 2.42,
"step": 650
},
{
"epoch": 1.020096618357488,
"grad_norm": 2.890625,
"learning_rate": 3.847450374186441e-05,
"loss": 2.0797,
"step": 660
},
{
"epoch": 1.0355555555555556,
"grad_norm": 3.046875,
"learning_rate": 3.812112556568508e-05,
"loss": 1.949,
"step": 670
},
{
"epoch": 1.0510144927536231,
"grad_norm": 3.0625,
"learning_rate": 3.776409126047692e-05,
"loss": 1.9827,
"step": 680
},
{
"epoch": 1.066473429951691,
"grad_norm": 4.03125,
"learning_rate": 3.740350031187332e-05,
"loss": 2.0228,
"step": 690
},
{
"epoch": 1.0819323671497585,
"grad_norm": 3.0625,
"learning_rate": 3.7039453196546645e-05,
"loss": 1.9758,
"step": 700
},
{
"epoch": 1.097391304347826,
"grad_norm": 4.25,
"learning_rate": 3.6672051354211035e-05,
"loss": 2.1762,
"step": 710
},
{
"epoch": 1.1128502415458936,
"grad_norm": 2.453125,
"learning_rate": 3.6301397159356696e-05,
"loss": 2.0642,
"step": 720
},
{
"epoch": 1.1283091787439614,
"grad_norm": 3.359375,
"learning_rate": 3.592759389272389e-05,
"loss": 2.0444,
"step": 730
},
{
"epoch": 1.143768115942029,
"grad_norm": 3.46875,
"learning_rate": 3.555074571252431e-05,
"loss": 1.8933,
"step": 740
},
{
"epoch": 1.1592270531400966,
"grad_norm": 3.53125,
"learning_rate": 3.517095762541795e-05,
"loss": 2.1575,
"step": 750
},
{
"epoch": 1.1746859903381643,
"grad_norm": 3.25,
"learning_rate": 3.4788335457253595e-05,
"loss": 1.964,
"step": 760
},
{
"epoch": 1.190144927536232,
"grad_norm": 5.09375,
"learning_rate": 3.4402985823581046e-05,
"loss": 1.9813,
"step": 770
},
{
"epoch": 1.2056038647342995,
"grad_norm": 3.78125,
"learning_rate": 3.401501609994326e-05,
"loss": 1.8629,
"step": 780
},
{
"epoch": 1.221062801932367,
"grad_norm": 5.09375,
"learning_rate": 3.362453439195679e-05,
"loss": 2.1078,
"step": 790
},
{
"epoch": 1.2365217391304348,
"grad_norm": 3.078125,
"learning_rate": 3.323164950518868e-05,
"loss": 1.9695,
"step": 800
},
{
"epoch": 1.2519806763285024,
"grad_norm": 4.53125,
"learning_rate": 3.283647091483849e-05,
"loss": 1.9425,
"step": 810
},
{
"epoch": 1.26743961352657,
"grad_norm": 2.78125,
"learning_rate": 3.243910873523356e-05,
"loss": 1.9954,
"step": 820
},
{
"epoch": 1.2828985507246378,
"grad_norm": 4.21875,
"learning_rate": 3.203967368914631e-05,
"loss": 1.8656,
"step": 830
},
{
"epoch": 1.2983574879227053,
"grad_norm": 5.0,
"learning_rate": 3.163827707694193e-05,
"loss": 1.9414,
"step": 840
},
{
"epoch": 1.313816425120773,
"grad_norm": 3.75,
"learning_rate": 3.123503074556513e-05,
"loss": 2.0901,
"step": 850
},
{
"epoch": 1.3292753623188407,
"grad_norm": 3.5625,
"learning_rate": 3.083004705737468e-05,
"loss": 1.9897,
"step": 860
},
{
"epoch": 1.3447342995169083,
"grad_norm": 3.65625,
"learning_rate": 3.0423438858834174e-05,
"loss": 1.996,
"step": 870
},
{
"epoch": 1.3601932367149758,
"grad_norm": 5.09375,
"learning_rate": 3.0015319449068025e-05,
"loss": 2.0757,
"step": 880
},
{
"epoch": 1.3756521739130434,
"grad_norm": 4.1875,
"learning_rate": 2.960580254829131e-05,
"loss": 1.9013,
"step": 890
},
{
"epoch": 1.3911111111111112,
"grad_norm": 3.65625,
"learning_rate": 2.919500226612224e-05,
"loss": 2.0255,
"step": 900
},
{
"epoch": 1.4065700483091788,
"grad_norm": 4.1875,
"learning_rate": 2.8783033069786132e-05,
"loss": 1.8022,
"step": 910
},
{
"epoch": 1.4220289855072463,
"grad_norm": 2.859375,
"learning_rate": 2.8370009752219788e-05,
"loss": 1.8961,
"step": 920
},
{
"epoch": 1.437487922705314,
"grad_norm": 3.8125,
"learning_rate": 2.795604740008504e-05,
"loss": 1.927,
"step": 930
},
{
"epoch": 1.4529468599033817,
"grad_norm": 4.0625,
"learning_rate": 2.7541261361700514e-05,
"loss": 1.9894,
"step": 940
},
{
"epoch": 1.4684057971014493,
"grad_norm": 3.15625,
"learning_rate": 2.7125767214900455e-05,
"loss": 1.9681,
"step": 950
},
{
"epoch": 1.4838647342995168,
"grad_norm": 3.53125,
"learning_rate": 2.67096807348296e-05,
"loss": 2.0076,
"step": 960
},
{
"epoch": 1.4993236714975846,
"grad_norm": 3.890625,
"learning_rate": 2.629311786168306e-05,
"loss": 2.0221,
"step": 970
},
{
"epoch": 1.5147826086956522,
"grad_norm": 3.09375,
"learning_rate": 2.5876194668400256e-05,
"loss": 2.1633,
"step": 980
},
{
"epoch": 1.5302415458937197,
"grad_norm": 3.40625,
"learning_rate": 2.545902732832181e-05,
"loss": 2.1009,
"step": 990
},
{
"epoch": 1.5457004830917875,
"grad_norm": 3.390625,
"learning_rate": 2.5041732082818503e-05,
"loss": 1.9915,
"step": 1000
},
{
"epoch": 1.561159420289855,
"grad_norm": 3.0625,
"learning_rate": 2.462442520890127e-05,
"loss": 2.0244,
"step": 1010
},
{
"epoch": 1.5766183574879227,
"grad_norm": 7.3125,
"learning_rate": 2.4207222986821194e-05,
"loss": 2.0306,
"step": 1020
},
{
"epoch": 1.5920772946859905,
"grad_norm": 3.890625,
"learning_rate": 2.379024166766876e-05,
"loss": 1.986,
"step": 1030
},
{
"epoch": 1.6075362318840578,
"grad_norm": 3.359375,
"learning_rate": 2.3373597440981027e-05,
"loss": 2.0218,
"step": 1040
},
{
"epoch": 1.6229951690821256,
"grad_norm": 4.09375,
"learning_rate": 2.295740640236614e-05,
"loss": 1.843,
"step": 1050
},
{
"epoch": 1.6384541062801934,
"grad_norm": 3.90625,
"learning_rate": 2.2541784521153875e-05,
"loss": 2.0191,
"step": 1060
},
{
"epoch": 1.6539130434782607,
"grad_norm": 4.46875,
"learning_rate": 2.2126847608081434e-05,
"loss": 1.959,
"step": 1070
},
{
"epoch": 1.6693719806763285,
"grad_norm": 4.1875,
"learning_rate": 2.1712711283023482e-05,
"loss": 1.9462,
"step": 1080
},
{
"epoch": 1.684830917874396,
"grad_norm": 3.53125,
"learning_rate": 2.1299490942775213e-05,
"loss": 2.007,
"step": 1090
},
{
"epoch": 1.7002898550724637,
"grad_norm": 4.0625,
"learning_rate": 2.088730172889777e-05,
"loss": 1.9861,
"step": 1100
},
{
"epoch": 1.7157487922705315,
"grad_norm": 3.59375,
"learning_rate": 2.0476258495634594e-05,
"loss": 2.057,
"step": 1110
},
{
"epoch": 1.731207729468599,
"grad_norm": 3.71875,
"learning_rate": 2.0066475777908005e-05,
"loss": 2.0005,
"step": 1120
},
{
"epoch": 1.7466666666666666,
"grad_norm": 4.65625,
"learning_rate": 1.9658067759404625e-05,
"loss": 1.9735,
"step": 1130
},
{
"epoch": 1.7621256038647344,
"grad_norm": 4.15625,
"learning_rate": 1.925114824075876e-05,
"loss": 2.0445,
"step": 1140
},
{
"epoch": 1.777584541062802,
"grad_norm": 3.890625,
"learning_rate": 1.8845830607842438e-05,
"loss": 2.0739,
"step": 1150
},
{
"epoch": 1.7930434782608695,
"grad_norm": 4.0,
"learning_rate": 1.8442227800171125e-05,
"loss": 1.9375,
"step": 1160
},
{
"epoch": 1.8085024154589373,
"grad_norm": 3.921875,
"learning_rate": 1.8040452279433716e-05,
"loss": 1.9623,
"step": 1170
},
{
"epoch": 1.8239613526570049,
"grad_norm": 2.765625,
"learning_rate": 1.7640615998155693e-05,
"loss": 1.9219,
"step": 1180
},
{
"epoch": 1.8394202898550724,
"grad_norm": 4.15625,
"learning_rate": 1.7242830368504227e-05,
"loss": 1.8705,
"step": 1190
},
{
"epoch": 1.8548792270531402,
"grad_norm": 2.984375,
"learning_rate": 1.6847206231243718e-05,
"loss": 1.9085,
"step": 1200
},
{
"epoch": 1.8703381642512076,
"grad_norm": 4.03125,
"learning_rate": 1.6453853824850726e-05,
"loss": 1.8975,
"step": 1210
},
{
"epoch": 1.8857971014492754,
"grad_norm": 3.4375,
"learning_rate": 1.606288275479652e-05,
"loss": 1.905,
"step": 1220
},
{
"epoch": 1.9012560386473432,
"grad_norm": 3.71875,
"learning_rate": 1.5674401963006235e-05,
"loss": 2.1359,
"step": 1230
},
{
"epoch": 1.9167149758454105,
"grad_norm": 4.25,
"learning_rate": 1.5288519697502697e-05,
"loss": 1.9069,
"step": 1240
},
{
"epoch": 1.9321739130434783,
"grad_norm": 5.34375,
"learning_rate": 1.4905343482243775e-05,
"loss": 1.8041,
"step": 1250
},
{
"epoch": 1.9476328502415459,
"grad_norm": 2.96875,
"learning_rate": 1.4524980087161438e-05,
"loss": 2.1233,
"step": 1260
},
{
"epoch": 1.9630917874396134,
"grad_norm": 2.625,
"learning_rate": 1.4147535498410853e-05,
"loss": 1.9942,
"step": 1270
},
{
"epoch": 1.9785507246376812,
"grad_norm": 2.890625,
"learning_rate": 1.3773114888837957e-05,
"loss": 2.0154,
"step": 1280
},
{
"epoch": 1.9940096618357488,
"grad_norm": 3.875,
"learning_rate": 1.3401822588673636e-05,
"loss": 1.9747,
"step": 1290
},
{
"epoch": 2.0092753623188404,
"grad_norm": 3.625,
"learning_rate": 1.3033762056462654e-05,
"loss": 1.8271,
"step": 1300
},
{
"epoch": 2.024734299516908,
"grad_norm": 2.5,
"learning_rate": 1.2669035850235536e-05,
"loss": 1.5612,
"step": 1310
},
{
"epoch": 2.040193236714976,
"grad_norm": 4.3125,
"learning_rate": 1.230774559893131e-05,
"loss": 1.6105,
"step": 1320
},
{
"epoch": 2.0556521739130433,
"grad_norm": 5.1875,
"learning_rate": 1.1949991974079197e-05,
"loss": 1.6631,
"step": 1330
},
{
"epoch": 2.071111111111111,
"grad_norm": 3.84375,
"learning_rate": 1.1595874661746986e-05,
"loss": 1.7435,
"step": 1340
},
{
"epoch": 2.086570048309179,
"grad_norm": 3.484375,
"learning_rate": 1.1245492334764169e-05,
"loss": 1.5578,
"step": 1350
},
{
"epoch": 2.1020289855072463,
"grad_norm": 5.84375,
"learning_rate": 1.0898942625227168e-05,
"loss": 1.7355,
"step": 1360
},
{
"epoch": 2.117487922705314,
"grad_norm": 5.15625,
"learning_rate": 1.0556322097294835e-05,
"loss": 1.6674,
"step": 1370
},
{
"epoch": 2.132946859903382,
"grad_norm": 3.921875,
"learning_rate": 1.0217726220281243e-05,
"loss": 1.6355,
"step": 1380
},
{
"epoch": 2.148405797101449,
"grad_norm": 3.34375,
"learning_rate": 9.883249342053821e-06,
"loss": 1.7309,
"step": 1390
},
{
"epoch": 2.163864734299517,
"grad_norm": 3.71875,
"learning_rate": 9.552984662743716e-06,
"loss": 1.6491,
"step": 1400
},
{
"epoch": 2.1793236714975848,
"grad_norm": 3.6875,
"learning_rate": 9.227024208776224e-06,
"loss": 1.5166,
"step": 1410
},
{
"epoch": 2.194782608695652,
"grad_norm": 4.0,
"learning_rate": 8.905458807228013e-06,
"loss": 1.7866,
"step": 1420
},
{
"epoch": 2.21024154589372,
"grad_norm": 3.25,
"learning_rate": 8.588378060518738e-06,
"loss": 1.6775,
"step": 1430
},
{
"epoch": 2.2257004830917873,
"grad_norm": 4.34375,
"learning_rate": 8.275870321443802e-06,
"loss": 1.5462,
"step": 1440
},
{
"epoch": 2.241159420289855,
"grad_norm": 3.15625,
"learning_rate": 7.968022668555334e-06,
"loss": 1.7529,
"step": 1450
},
{
"epoch": 2.256618357487923,
"grad_norm": 3.796875,
"learning_rate": 7.664920881898236e-06,
"loss": 1.5787,
"step": 1460
},
{
"epoch": 2.27207729468599,
"grad_norm": 3.875,
"learning_rate": 7.3666494191080455e-06,
"loss": 1.5908,
"step": 1470
},
{
"epoch": 2.287536231884058,
"grad_norm": 3.375,
"learning_rate": 7.073291391877288e-06,
"loss": 1.5812,
"step": 1480
},
{
"epoch": 2.3029951690821258,
"grad_norm": 4.46875,
"learning_rate": 6.7849285427968586e-06,
"loss": 1.5513,
"step": 1490
},
{
"epoch": 2.318454106280193,
"grad_norm": 4.75,
"learning_rate": 6.5016412225789e-06,
"loss": 1.6197,
"step": 1500
},
{
"epoch": 2.333913043478261,
"grad_norm": 3.859375,
"learning_rate": 6.223508367667538e-06,
"loss": 1.624,
"step": 1510
},
{
"epoch": 2.3493719806763287,
"grad_norm": 3.796875,
"learning_rate": 5.9506074782436275e-06,
"loss": 1.6575,
"step": 1520
},
{
"epoch": 2.364830917874396,
"grad_norm": 4.03125,
"learning_rate": 5.683014596629857e-06,
"loss": 1.501,
"step": 1530
},
{
"epoch": 2.380289855072464,
"grad_norm": 3.453125,
"learning_rate": 5.420804286101838e-06,
"loss": 1.7582,
"step": 1540
},
{
"epoch": 2.395748792270531,
"grad_norm": 3.96875,
"learning_rate": 5.164049610111607e-06,
"loss": 1.8236,
"step": 1550
},
{
"epoch": 2.411207729468599,
"grad_norm": 2.96875,
"learning_rate": 4.912822111928767e-06,
"loss": 1.7672,
"step": 1560
},
{
"epoch": 2.4266666666666667,
"grad_norm": 5.8125,
"learning_rate": 4.667191794705444e-06,
"loss": 1.6043,
"step": 1570
},
{
"epoch": 2.442125603864734,
"grad_norm": 9.0625,
"learning_rate": 4.427227101970261e-06,
"loss": 1.6339,
"step": 1580
},
{
"epoch": 2.457584541062802,
"grad_norm": 3.90625,
"learning_rate": 4.192994898556921e-06,
"loss": 1.685,
"step": 1590
},
{
"epoch": 2.4730434782608697,
"grad_norm": 3.6875,
"learning_rate": 3.964560451972705e-06,
"loss": 1.6333,
"step": 1600
},
{
"epoch": 2.488502415458937,
"grad_norm": 2.890625,
"learning_rate": 3.7419874142120363e-06,
"loss": 1.661,
"step": 1610
},
{
"epoch": 2.503961352657005,
"grad_norm": 5.1875,
"learning_rate": 3.525337804020212e-06,
"loss": 1.6976,
"step": 1620
},
{
"epoch": 2.5194202898550726,
"grad_norm": 2.953125,
"learning_rate": 3.314671989612195e-06,
"loss": 1.6764,
"step": 1630
},
{
"epoch": 2.53487922705314,
"grad_norm": 5.03125,
"learning_rate": 3.110048671851404e-06,
"loss": 1.658,
"step": 1640
},
{
"epoch": 2.5503381642512077,
"grad_norm": 4.03125,
"learning_rate": 2.911524867892956e-06,
"loss": 1.5693,
"step": 1650
},
{
"epoch": 2.5657971014492755,
"grad_norm": 5.1875,
"learning_rate": 2.719155895296255e-06,
"loss": 1.6833,
"step": 1660
},
{
"epoch": 2.581256038647343,
"grad_norm": 4.40625,
"learning_rate": 2.5329953566109537e-06,
"loss": 1.7026,
"step": 1670
},
{
"epoch": 2.5967149758454107,
"grad_norm": 4.59375,
"learning_rate": 2.353095124440999e-06,
"loss": 1.6888,
"step": 1680
},
{
"epoch": 2.6121739130434785,
"grad_norm": 3.296875,
"learning_rate": 2.17950532699053e-06,
"loss": 1.5656,
"step": 1690
},
{
"epoch": 2.627632850241546,
"grad_norm": 3.78125,
"learning_rate": 2.012274334095998e-06,
"loss": 1.732,
"step": 1700
},
{
"epoch": 2.6430917874396136,
"grad_norm": 3.15625,
"learning_rate": 1.8514487437481436e-06,
"loss": 1.4964,
"step": 1710
},
{
"epoch": 2.6585507246376814,
"grad_norm": 3.640625,
"learning_rate": 1.6970733691077378e-06,
"loss": 1.5922,
"step": 1720
},
{
"epoch": 2.6740096618357487,
"grad_norm": 3.609375,
"learning_rate": 1.549191226018637e-06,
"loss": 1.6481,
"step": 1730
},
{
"epoch": 2.6894685990338165,
"grad_norm": 4.0,
"learning_rate": 1.4078435210216568e-06,
"loss": 1.5963,
"step": 1740
},
{
"epoch": 2.7049275362318843,
"grad_norm": 3.9375,
"learning_rate": 1.273069639872601e-06,
"loss": 1.6844,
"step": 1750
},
{
"epoch": 2.7203864734299517,
"grad_norm": 3.375,
"learning_rate": 1.1449071365676356e-06,
"loss": 1.7353,
"step": 1760
},
{
"epoch": 2.7358454106280194,
"grad_norm": 6.0,
"learning_rate": 1.0233917228790828e-06,
"loss": 1.6287,
"step": 1770
},
{
"epoch": 2.751304347826087,
"grad_norm": 3.46875,
"learning_rate": 9.08557258404516e-07,
"loss": 1.6166,
"step": 1780
},
{
"epoch": 2.7667632850241546,
"grad_norm": 4.21875,
"learning_rate": 8.004357411319857e-07,
"loss": 1.6725,
"step": 1790
},
{
"epoch": 2.7822222222222224,
"grad_norm": 4.6875,
"learning_rate": 6.990572985239612e-07,
"loss": 1.5655,
"step": 1800
},
{
"epoch": 2.7976811594202897,
"grad_norm": 3.703125,
"learning_rate": 6.044501791224539e-07,
"loss": 1.6394,
"step": 1810
},
{
"epoch": 2.8131400966183575,
"grad_norm": 4.21875,
"learning_rate": 5.166407446777572e-07,
"loss": 1.7043,
"step": 1820
},
{
"epoch": 2.828599033816425,
"grad_norm": 6.5,
"learning_rate": 4.3565346280284305e-07,
"loss": 1.5092,
"step": 1830
},
{
"epoch": 2.8440579710144926,
"grad_norm": 4.21875,
"learning_rate": 3.6151090015565103e-07,
"loss": 1.7447,
"step": 1840
},
{
"epoch": 2.8595169082125604,
"grad_norm": 3.984375,
"learning_rate": 2.9423371615096005e-07,
"loss": 1.66,
"step": 1850
},
{
"epoch": 2.874975845410628,
"grad_norm": 3.296875,
"learning_rate": 2.3384065720379878e-07,
"loss": 1.6567,
"step": 1860
},
{
"epoch": 2.8904347826086956,
"grad_norm": 3.71875,
"learning_rate": 1.803485515058323e-07,
"loss": 1.5354,
"step": 1870
},
{
"epoch": 2.9058937198067634,
"grad_norm": 5.28125,
"learning_rate": 1.3377230433630205e-07,
"loss": 1.6183,
"step": 1880
},
{
"epoch": 2.9213526570048307,
"grad_norm": 4.1875,
"learning_rate": 9.412489390873414e-08,
"loss": 1.7047,
"step": 1890
},
{
"epoch": 2.9368115942028985,
"grad_norm": 4.78125,
"learning_rate": 6.141736775464313e-08,
"loss": 1.5235,
"step": 1900
},
{
"epoch": 2.9522705314009663,
"grad_norm": 4.375,
"learning_rate": 3.5658839645194565e-08,
"loss": 1.6394,
"step": 1910
},
{
"epoch": 2.9677294685990336,
"grad_norm": 4.3125,
"learning_rate": 1.6856487051700177e-08,
"loss": 1.5718,
"step": 1920
},
{
"epoch": 2.9831884057971014,
"grad_norm": 3.296875,
"learning_rate": 5.015549145664933e-09,
"loss": 1.6534,
"step": 1930
},
{
"epoch": 2.998647342995169,
"grad_norm": 4.65625,
"learning_rate": 1.393253389103677e-10,
"loss": 1.6622,
"step": 1940
},
{
"epoch": 3.0,
"step": 1941,
"total_flos": 7.76867645696736e+16,
"train_loss": 2.121323910710248,
"train_runtime": 3108.1718,
"train_samples_per_second": 9.99,
"train_steps_per_second": 0.624
}
],
"logging_steps": 10,
"max_steps": 1941,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 7.76867645696736e+16,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}