{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.3910355486862442,
"eval_steps": 500,
"global_step": 18000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0077279752704791345,
"grad_norm": 2.382732391357422,
"learning_rate": 4.987764039155075e-05,
"loss": 2.2319,
"step": 100
},
{
"epoch": 0.015455950540958269,
"grad_norm": 1.1049453020095825,
"learning_rate": 4.974884080370943e-05,
"loss": 1.4633,
"step": 200
},
{
"epoch": 0.023183925811437404,
"grad_norm": 2.012437582015991,
"learning_rate": 4.962004121586811e-05,
"loss": 1.4898,
"step": 300
},
{
"epoch": 0.030911901081916538,
"grad_norm": 2.1583738327026367,
"learning_rate": 4.949124162802679e-05,
"loss": 1.2682,
"step": 400
},
{
"epoch": 0.03863987635239567,
"grad_norm": 1.3249982595443726,
"learning_rate": 4.9362442040185474e-05,
"loss": 1.3189,
"step": 500
},
{
"epoch": 0.04636785162287481,
"grad_norm": 1.0867700576782227,
"learning_rate": 4.923364245234415e-05,
"loss": 1.2482,
"step": 600
},
{
"epoch": 0.05409582689335394,
"grad_norm": 0.9986234307289124,
"learning_rate": 4.910484286450284e-05,
"loss": 1.1299,
"step": 700
},
{
"epoch": 0.061823802163833076,
"grad_norm": 0.8884647488594055,
"learning_rate": 4.897604327666152e-05,
"loss": 1.2575,
"step": 800
},
{
"epoch": 0.0695517774343122,
"grad_norm": 1.6838749647140503,
"learning_rate": 4.8847243688820196e-05,
"loss": 1.1865,
"step": 900
},
{
"epoch": 0.07727975270479134,
"grad_norm": 2.3805487155914307,
"learning_rate": 4.8718444100978875e-05,
"loss": 1.1084,
"step": 1000
},
{
"epoch": 0.08500772797527048,
"grad_norm": 1.653010606765747,
"learning_rate": 4.858964451313756e-05,
"loss": 1.1401,
"step": 1100
},
{
"epoch": 0.09273570324574962,
"grad_norm": 1.6932406425476074,
"learning_rate": 4.846084492529624e-05,
"loss": 1.2395,
"step": 1200
},
{
"epoch": 0.10046367851622875,
"grad_norm": 1.2242937088012695,
"learning_rate": 4.8332045337454925e-05,
"loss": 1.1328,
"step": 1300
},
{
"epoch": 0.10819165378670788,
"grad_norm": 2.8659744262695312,
"learning_rate": 4.8203245749613604e-05,
"loss": 1.1238,
"step": 1400
},
{
"epoch": 0.11591962905718702,
"grad_norm": 2.079432249069214,
"learning_rate": 4.807444616177228e-05,
"loss": 1.0579,
"step": 1500
},
{
"epoch": 0.12364760432766615,
"grad_norm": 0.5808582901954651,
"learning_rate": 4.794564657393096e-05,
"loss": 0.9646,
"step": 1600
},
{
"epoch": 0.13137557959814528,
"grad_norm": 1.380146861076355,
"learning_rate": 4.781684698608965e-05,
"loss": 1.1086,
"step": 1700
},
{
"epoch": 0.1391035548686244,
"grad_norm": 1.4197566509246826,
"learning_rate": 4.7688047398248325e-05,
"loss": 1.1619,
"step": 1800
},
{
"epoch": 0.14683153013910355,
"grad_norm": 1.1287027597427368,
"learning_rate": 4.755924781040701e-05,
"loss": 1.0074,
"step": 1900
},
{
"epoch": 0.1545595054095827,
"grad_norm": 1.1685930490493774,
"learning_rate": 4.743044822256569e-05,
"loss": 1.1573,
"step": 2000
},
{
"epoch": 0.16228748068006182,
"grad_norm": 1.008876919746399,
"learning_rate": 4.730164863472437e-05,
"loss": 1.0858,
"step": 2100
},
{
"epoch": 0.17001545595054096,
"grad_norm": 0.9628033638000488,
"learning_rate": 4.717284904688305e-05,
"loss": 1.1267,
"step": 2200
},
{
"epoch": 0.1777434312210201,
"grad_norm": 1.298677682876587,
"learning_rate": 4.704404945904173e-05,
"loss": 1.0804,
"step": 2300
},
{
"epoch": 0.18547140649149924,
"grad_norm": 1.6227502822875977,
"learning_rate": 4.691524987120041e-05,
"loss": 1.1371,
"step": 2400
},
{
"epoch": 0.19319938176197837,
"grad_norm": 1.6179583072662354,
"learning_rate": 4.67864502833591e-05,
"loss": 1.0538,
"step": 2500
},
{
"epoch": 0.2009273570324575,
"grad_norm": 1.242937445640564,
"learning_rate": 4.6657650695517776e-05,
"loss": 0.9714,
"step": 2600
},
{
"epoch": 0.20865533230293662,
"grad_norm": 1.4579142332077026,
"learning_rate": 4.6528851107676455e-05,
"loss": 1.1136,
"step": 2700
},
{
"epoch": 0.21638330757341576,
"grad_norm": 1.5873304605484009,
"learning_rate": 4.6400051519835134e-05,
"loss": 1.0059,
"step": 2800
},
{
"epoch": 0.2241112828438949,
"grad_norm": 1.6348575353622437,
"learning_rate": 4.627125193199382e-05,
"loss": 1.009,
"step": 2900
},
{
"epoch": 0.23183925811437403,
"grad_norm": 1.373192548751831,
"learning_rate": 4.6142452344152505e-05,
"loss": 1.0069,
"step": 3000
},
{
"epoch": 0.23956723338485317,
"grad_norm": 0.9459308385848999,
"learning_rate": 4.6013652756311184e-05,
"loss": 0.9666,
"step": 3100
},
{
"epoch": 0.2472952086553323,
"grad_norm": 1.5119107961654663,
"learning_rate": 4.588485316846986e-05,
"loss": 1.0447,
"step": 3200
},
{
"epoch": 0.2550231839258114,
"grad_norm": 1.3911887407302856,
"learning_rate": 4.575605358062854e-05,
"loss": 1.0636,
"step": 3300
},
{
"epoch": 0.26275115919629055,
"grad_norm": 1.434175729751587,
"learning_rate": 4.562725399278723e-05,
"loss": 1.0425,
"step": 3400
},
{
"epoch": 0.2704791344667697,
"grad_norm": 1.7139261960983276,
"learning_rate": 4.5498454404945905e-05,
"loss": 1.0795,
"step": 3500
},
{
"epoch": 0.2782071097372488,
"grad_norm": 0.8860535025596619,
"learning_rate": 4.536965481710459e-05,
"loss": 0.9276,
"step": 3600
},
{
"epoch": 0.28593508500772796,
"grad_norm": 1.6701558828353882,
"learning_rate": 4.524085522926327e-05,
"loss": 0.9323,
"step": 3700
},
{
"epoch": 0.2936630602782071,
"grad_norm": 1.3985803127288818,
"learning_rate": 4.511205564142195e-05,
"loss": 0.9605,
"step": 3800
},
{
"epoch": 0.30139103554868624,
"grad_norm": 0.7279998064041138,
"learning_rate": 4.498325605358063e-05,
"loss": 0.9523,
"step": 3900
},
{
"epoch": 0.3091190108191654,
"grad_norm": 0.7744489312171936,
"learning_rate": 4.485445646573931e-05,
"loss": 1.03,
"step": 4000
},
{
"epoch": 0.3168469860896445,
"grad_norm": 1.330718755722046,
"learning_rate": 4.472565687789799e-05,
"loss": 1.0305,
"step": 4100
},
{
"epoch": 0.32457496136012365,
"grad_norm": 1.212430477142334,
"learning_rate": 4.459685729005668e-05,
"loss": 1.055,
"step": 4200
},
{
"epoch": 0.3323029366306028,
"grad_norm": 1.0525803565979004,
"learning_rate": 4.4468057702215356e-05,
"loss": 1.0114,
"step": 4300
},
{
"epoch": 0.3400309119010819,
"grad_norm": 1.147479772567749,
"learning_rate": 4.433925811437404e-05,
"loss": 0.9928,
"step": 4400
},
{
"epoch": 0.34775888717156106,
"grad_norm": 1.313454508781433,
"learning_rate": 4.4210458526532714e-05,
"loss": 0.974,
"step": 4500
},
{
"epoch": 0.3554868624420402,
"grad_norm": 1.4218354225158691,
"learning_rate": 4.40816589386914e-05,
"loss": 0.9413,
"step": 4600
},
{
"epoch": 0.36321483771251933,
"grad_norm": 1.2902942895889282,
"learning_rate": 4.395285935085008e-05,
"loss": 1.002,
"step": 4700
},
{
"epoch": 0.37094281298299847,
"grad_norm": 0.638493001461029,
"learning_rate": 4.3824059763008763e-05,
"loss": 0.9175,
"step": 4800
},
{
"epoch": 0.3786707882534776,
"grad_norm": 1.4392635822296143,
"learning_rate": 4.369526017516744e-05,
"loss": 0.89,
"step": 4900
},
{
"epoch": 0.38639876352395675,
"grad_norm": 2.024583339691162,
"learning_rate": 4.356646058732613e-05,
"loss": 0.9731,
"step": 5000
},
{
"epoch": 0.3941267387944359,
"grad_norm": 1.8554579019546509,
"learning_rate": 4.34376609994848e-05,
"loss": 0.9251,
"step": 5100
},
{
"epoch": 0.401854714064915,
"grad_norm": 0.7937321662902832,
"learning_rate": 4.3308861411643485e-05,
"loss": 0.9758,
"step": 5200
},
{
"epoch": 0.4095826893353941,
"grad_norm": 1.3737086057662964,
"learning_rate": 4.3180061823802164e-05,
"loss": 1.0431,
"step": 5300
},
{
"epoch": 0.41731066460587324,
"grad_norm": 1.1854668855667114,
"learning_rate": 4.305126223596085e-05,
"loss": 0.9832,
"step": 5400
},
{
"epoch": 0.4250386398763524,
"grad_norm": 1.4984533786773682,
"learning_rate": 4.292246264811953e-05,
"loss": 0.9949,
"step": 5500
},
{
"epoch": 0.4327666151468315,
"grad_norm": 0.8790501356124878,
"learning_rate": 4.2793663060278214e-05,
"loss": 1.0039,
"step": 5600
},
{
"epoch": 0.44049459041731065,
"grad_norm": 0.7149144411087036,
"learning_rate": 4.2664863472436886e-05,
"loss": 0.9687,
"step": 5700
},
{
"epoch": 0.4482225656877898,
"grad_norm": 1.146522045135498,
"learning_rate": 4.253606388459557e-05,
"loss": 0.9247,
"step": 5800
},
{
"epoch": 0.4559505409582689,
"grad_norm": 1.8279156684875488,
"learning_rate": 4.240726429675425e-05,
"loss": 0.998,
"step": 5900
},
{
"epoch": 0.46367851622874806,
"grad_norm": 0.974434494972229,
"learning_rate": 4.2278464708912936e-05,
"loss": 1.0235,
"step": 6000
},
{
"epoch": 0.4714064914992272,
"grad_norm": 1.2241114377975464,
"learning_rate": 4.2149665121071615e-05,
"loss": 0.854,
"step": 6100
},
{
"epoch": 0.47913446676970634,
"grad_norm": 1.2750343084335327,
"learning_rate": 4.20208655332303e-05,
"loss": 1.005,
"step": 6200
},
{
"epoch": 0.4868624420401855,
"grad_norm": 1.21708345413208,
"learning_rate": 4.189206594538897e-05,
"loss": 0.9381,
"step": 6300
},
{
"epoch": 0.4945904173106646,
"grad_norm": 1.3617980480194092,
"learning_rate": 4.176326635754766e-05,
"loss": 0.8568,
"step": 6400
},
{
"epoch": 0.5023183925811437,
"grad_norm": 1.8272175788879395,
"learning_rate": 4.163446676970634e-05,
"loss": 0.8578,
"step": 6500
},
{
"epoch": 0.5100463678516228,
"grad_norm": 0.9534468650817871,
"learning_rate": 4.150566718186502e-05,
"loss": 0.9316,
"step": 6600
},
{
"epoch": 0.517774343122102,
"grad_norm": 0.8899122476577759,
"learning_rate": 4.13768675940237e-05,
"loss": 0.8945,
"step": 6700
},
{
"epoch": 0.5255023183925811,
"grad_norm": 0.9442152380943298,
"learning_rate": 4.1248068006182387e-05,
"loss": 0.8847,
"step": 6800
},
{
"epoch": 0.5332302936630603,
"grad_norm": 1.1126829385757446,
"learning_rate": 4.111926841834106e-05,
"loss": 0.9536,
"step": 6900
},
{
"epoch": 0.5409582689335394,
"grad_norm": 1.2574111223220825,
"learning_rate": 4.0990468830499744e-05,
"loss": 0.9258,
"step": 7000
},
{
"epoch": 0.5486862442040186,
"grad_norm": 1.5313997268676758,
"learning_rate": 4.086166924265842e-05,
"loss": 0.914,
"step": 7100
},
{
"epoch": 0.5564142194744977,
"grad_norm": 1.064965009689331,
"learning_rate": 4.073286965481711e-05,
"loss": 0.9365,
"step": 7200
},
{
"epoch": 0.5641421947449768,
"grad_norm": 1.4868042469024658,
"learning_rate": 4.060407006697579e-05,
"loss": 0.9697,
"step": 7300
},
{
"epoch": 0.5718701700154559,
"grad_norm": 0.8288936018943787,
"learning_rate": 4.047527047913447e-05,
"loss": 0.8882,
"step": 7400
},
{
"epoch": 0.5795981452859351,
"grad_norm": 0.5883311033248901,
"learning_rate": 4.0346470891293145e-05,
"loss": 0.9866,
"step": 7500
},
{
"epoch": 0.5873261205564142,
"grad_norm": 0.6529534459114075,
"learning_rate": 4.021767130345183e-05,
"loss": 0.9215,
"step": 7600
},
{
"epoch": 0.5950540958268934,
"grad_norm": 0.7607959508895874,
"learning_rate": 4.008887171561051e-05,
"loss": 0.9043,
"step": 7700
},
{
"epoch": 0.6027820710973725,
"grad_norm": 1.3604129552841187,
"learning_rate": 3.9960072127769195e-05,
"loss": 0.9164,
"step": 7800
},
{
"epoch": 0.6105100463678517,
"grad_norm": 1.1792867183685303,
"learning_rate": 3.9831272539927873e-05,
"loss": 0.9893,
"step": 7900
},
{
"epoch": 0.6182380216383307,
"grad_norm": 0.9397594332695007,
"learning_rate": 3.970247295208656e-05,
"loss": 0.9113,
"step": 8000
},
{
"epoch": 0.6259659969088099,
"grad_norm": 0.5270503759384155,
"learning_rate": 3.957367336424523e-05,
"loss": 0.9062,
"step": 8100
},
{
"epoch": 0.633693972179289,
"grad_norm": 0.9222223162651062,
"learning_rate": 3.944487377640392e-05,
"loss": 0.9085,
"step": 8200
},
{
"epoch": 0.6414219474497682,
"grad_norm": 1.1142535209655762,
"learning_rate": 3.9316074188562595e-05,
"loss": 0.9342,
"step": 8300
},
{
"epoch": 0.6491499227202473,
"grad_norm": 0.9540684223175049,
"learning_rate": 3.918727460072128e-05,
"loss": 0.9318,
"step": 8400
},
{
"epoch": 0.6568778979907264,
"grad_norm": 1.5218793153762817,
"learning_rate": 3.905847501287996e-05,
"loss": 0.952,
"step": 8500
},
{
"epoch": 0.6646058732612056,
"grad_norm": 0.6363744735717773,
"learning_rate": 3.8929675425038645e-05,
"loss": 0.8996,
"step": 8600
},
{
"epoch": 0.6723338485316847,
"grad_norm": 1.2216317653656006,
"learning_rate": 3.880087583719732e-05,
"loss": 0.9585,
"step": 8700
},
{
"epoch": 0.6800618238021638,
"grad_norm": 0.5407654643058777,
"learning_rate": 3.8672076249356e-05,
"loss": 0.9532,
"step": 8800
},
{
"epoch": 0.6877897990726429,
"grad_norm": 0.936690092086792,
"learning_rate": 3.854327666151468e-05,
"loss": 0.8751,
"step": 8900
},
{
"epoch": 0.6955177743431221,
"grad_norm": 1.137471079826355,
"learning_rate": 3.841447707367337e-05,
"loss": 0.8881,
"step": 9000
},
{
"epoch": 0.7032457496136012,
"grad_norm": 0.28785666823387146,
"learning_rate": 3.828567748583205e-05,
"loss": 0.9673,
"step": 9100
},
{
"epoch": 0.7109737248840804,
"grad_norm": 0.8596464991569519,
"learning_rate": 3.815687789799073e-05,
"loss": 0.9919,
"step": 9200
},
{
"epoch": 0.7187017001545595,
"grad_norm": 1.2839220762252808,
"learning_rate": 3.802807831014941e-05,
"loss": 0.9603,
"step": 9300
},
{
"epoch": 0.7264296754250387,
"grad_norm": 1.0451067686080933,
"learning_rate": 3.789927872230809e-05,
"loss": 0.9291,
"step": 9400
},
{
"epoch": 0.7341576506955177,
"grad_norm": 0.7771898508071899,
"learning_rate": 3.7770479134466775e-05,
"loss": 0.9414,
"step": 9500
},
{
"epoch": 0.7418856259659969,
"grad_norm": 0.885811984539032,
"learning_rate": 3.7641679546625453e-05,
"loss": 0.8987,
"step": 9600
},
{
"epoch": 0.749613601236476,
"grad_norm": 1.3268210887908936,
"learning_rate": 3.751287995878414e-05,
"loss": 0.9359,
"step": 9700
},
{
"epoch": 0.7573415765069552,
"grad_norm": 0.6823452711105347,
"learning_rate": 3.738408037094282e-05,
"loss": 0.8704,
"step": 9800
},
{
"epoch": 0.7650695517774343,
"grad_norm": 0.8156200647354126,
"learning_rate": 3.7255280783101497e-05,
"loss": 0.8954,
"step": 9900
},
{
"epoch": 0.7727975270479135,
"grad_norm": 0.8661110997200012,
"learning_rate": 3.7126481195260175e-05,
"loss": 0.8889,
"step": 10000
},
{
"epoch": 0.7805255023183926,
"grad_norm": 0.5882070064544678,
"learning_rate": 3.699768160741886e-05,
"loss": 0.916,
"step": 10100
},
{
"epoch": 0.7882534775888718,
"grad_norm": 0.9691454768180847,
"learning_rate": 3.686888201957754e-05,
"loss": 0.834,
"step": 10200
},
{
"epoch": 0.7959814528593508,
"grad_norm": 1.3009424209594727,
"learning_rate": 3.6740082431736225e-05,
"loss": 0.9496,
"step": 10300
},
{
"epoch": 0.80370942812983,
"grad_norm": 1.1349143981933594,
"learning_rate": 3.6611282843894904e-05,
"loss": 0.8839,
"step": 10400
},
{
"epoch": 0.8114374034003091,
"grad_norm": 0.924774706363678,
"learning_rate": 3.648248325605358e-05,
"loss": 0.9256,
"step": 10500
},
{
"epoch": 0.8191653786707882,
"grad_norm": 0.7730056047439575,
"learning_rate": 3.635368366821226e-05,
"loss": 0.9233,
"step": 10600
},
{
"epoch": 0.8268933539412674,
"grad_norm": 1.2240443229675293,
"learning_rate": 3.622488408037095e-05,
"loss": 0.9311,
"step": 10700
},
{
"epoch": 0.8346213292117465,
"grad_norm": 1.022357702255249,
"learning_rate": 3.6096084492529626e-05,
"loss": 0.8764,
"step": 10800
},
{
"epoch": 0.8423493044822257,
"grad_norm": 0.9591399431228638,
"learning_rate": 3.596728490468831e-05,
"loss": 0.8918,
"step": 10900
},
{
"epoch": 0.8500772797527048,
"grad_norm": 0.5928884744644165,
"learning_rate": 3.583848531684699e-05,
"loss": 0.9446,
"step": 11000
},
{
"epoch": 0.8578052550231839,
"grad_norm": 1.6488679647445679,
"learning_rate": 3.5710973724884085e-05,
"loss": 0.8841,
"step": 11100
},
{
"epoch": 0.865533230293663,
"grad_norm": 1.309877872467041,
"learning_rate": 3.5582174137042763e-05,
"loss": 0.849,
"step": 11200
},
{
"epoch": 0.8732612055641422,
"grad_norm": 1.1240078210830688,
"learning_rate": 3.545337454920145e-05,
"loss": 0.9454,
"step": 11300
},
{
"epoch": 0.8809891808346213,
"grad_norm": 0.9316561222076416,
"learning_rate": 3.532457496136012e-05,
"loss": 0.9605,
"step": 11400
},
{
"epoch": 0.8887171561051005,
"grad_norm": 0.7135329842567444,
"learning_rate": 3.5195775373518807e-05,
"loss": 0.8883,
"step": 11500
},
{
"epoch": 0.8964451313755796,
"grad_norm": 0.9502096772193909,
"learning_rate": 3.5066975785677485e-05,
"loss": 0.8585,
"step": 11600
},
{
"epoch": 0.9041731066460588,
"grad_norm": 1.4689842462539673,
"learning_rate": 3.493817619783617e-05,
"loss": 0.9306,
"step": 11700
},
{
"epoch": 0.9119010819165378,
"grad_norm": 0.41735073924064636,
"learning_rate": 3.480937660999485e-05,
"loss": 0.8678,
"step": 11800
},
{
"epoch": 0.919629057187017,
"grad_norm": 0.6292606592178345,
"learning_rate": 3.4680577022153535e-05,
"loss": 0.8864,
"step": 11900
},
{
"epoch": 0.9273570324574961,
"grad_norm": 0.7318680882453918,
"learning_rate": 3.455177743431221e-05,
"loss": 0.8658,
"step": 12000
},
{
"epoch": 0.9350850077279753,
"grad_norm": 1.8119789361953735,
"learning_rate": 3.442297784647089e-05,
"loss": 0.8236,
"step": 12100
},
{
"epoch": 0.9428129829984544,
"grad_norm": 2.469245195388794,
"learning_rate": 3.429417825862957e-05,
"loss": 0.9092,
"step": 12200
},
{
"epoch": 0.9505409582689336,
"grad_norm": 0.9728156924247742,
"learning_rate": 3.416537867078826e-05,
"loss": 0.9168,
"step": 12300
},
{
"epoch": 0.9582689335394127,
"grad_norm": 1.0292718410491943,
"learning_rate": 3.4036579082946936e-05,
"loss": 0.8535,
"step": 12400
},
{
"epoch": 0.9659969088098919,
"grad_norm": 1.157630443572998,
"learning_rate": 3.390777949510562e-05,
"loss": 0.8482,
"step": 12500
},
{
"epoch": 0.973724884080371,
"grad_norm": 0.944819986820221,
"learning_rate": 3.3778979907264293e-05,
"loss": 0.9321,
"step": 12600
},
{
"epoch": 0.98145285935085,
"grad_norm": 0.8043965697288513,
"learning_rate": 3.365018031942298e-05,
"loss": 0.905,
"step": 12700
},
{
"epoch": 0.9891808346213292,
"grad_norm": 0.81541508436203,
"learning_rate": 3.352138073158166e-05,
"loss": 0.8518,
"step": 12800
},
{
"epoch": 0.9969088098918083,
"grad_norm": 0.9929108619689941,
"learning_rate": 3.339258114374034e-05,
"loss": 0.8439,
"step": 12900
},
{
"epoch": 1.0046367851622875,
"grad_norm": 1.1995909214019775,
"learning_rate": 3.326378155589902e-05,
"loss": 0.8566,
"step": 13000
},
{
"epoch": 1.0123647604327666,
"grad_norm": 0.7053945064544678,
"learning_rate": 3.313498196805771e-05,
"loss": 0.8943,
"step": 13100
},
{
"epoch": 1.0200927357032457,
"grad_norm": 0.7920809984207153,
"learning_rate": 3.300618238021638e-05,
"loss": 0.8741,
"step": 13200
},
{
"epoch": 1.027820710973725,
"grad_norm": 1.0155510902404785,
"learning_rate": 3.2877382792375065e-05,
"loss": 0.8022,
"step": 13300
},
{
"epoch": 1.035548686244204,
"grad_norm": 1.0440579652786255,
"learning_rate": 3.2748583204533744e-05,
"loss": 0.8103,
"step": 13400
},
{
"epoch": 1.0432766615146831,
"grad_norm": 0.9690439105033875,
"learning_rate": 3.261978361669243e-05,
"loss": 0.8003,
"step": 13500
},
{
"epoch": 1.0510046367851622,
"grad_norm": 0.9963751435279846,
"learning_rate": 3.249098402885111e-05,
"loss": 0.8653,
"step": 13600
},
{
"epoch": 1.0587326120556415,
"grad_norm": 0.49158361554145813,
"learning_rate": 3.2362184441009794e-05,
"loss": 0.8642,
"step": 13700
},
{
"epoch": 1.0664605873261206,
"grad_norm": 0.9656476378440857,
"learning_rate": 3.2233384853168466e-05,
"loss": 0.8053,
"step": 13800
},
{
"epoch": 1.0741885625965997,
"grad_norm": 1.2530566453933716,
"learning_rate": 3.210458526532715e-05,
"loss": 0.8381,
"step": 13900
},
{
"epoch": 1.0819165378670788,
"grad_norm": 0.8346812725067139,
"learning_rate": 3.197578567748583e-05,
"loss": 0.8065,
"step": 14000
},
{
"epoch": 1.089644513137558,
"grad_norm": 1.1836985349655151,
"learning_rate": 3.1846986089644516e-05,
"loss": 0.8483,
"step": 14100
},
{
"epoch": 1.0973724884080371,
"grad_norm": 0.9917717576026917,
"learning_rate": 3.1718186501803195e-05,
"loss": 0.8459,
"step": 14200
},
{
"epoch": 1.1051004636785162,
"grad_norm": 1.0433543920516968,
"learning_rate": 3.158938691396188e-05,
"loss": 0.8446,
"step": 14300
},
{
"epoch": 1.1128284389489953,
"grad_norm": 1.1880654096603394,
"learning_rate": 3.146058732612055e-05,
"loss": 0.8122,
"step": 14400
},
{
"epoch": 1.1205564142194744,
"grad_norm": 0.46447160840034485,
"learning_rate": 3.133178773827924e-05,
"loss": 0.7701,
"step": 14500
},
{
"epoch": 1.1282843894899537,
"grad_norm": 1.4256715774536133,
"learning_rate": 3.1202988150437917e-05,
"loss": 0.8635,
"step": 14600
},
{
"epoch": 1.1360123647604328,
"grad_norm": 0.9805368781089783,
"learning_rate": 3.10741885625966e-05,
"loss": 0.8218,
"step": 14700
},
{
"epoch": 1.1437403400309119,
"grad_norm": 1.1528363227844238,
"learning_rate": 3.094538897475529e-05,
"loss": 0.8366,
"step": 14800
},
{
"epoch": 1.1514683153013912,
"grad_norm": 0.9413203001022339,
"learning_rate": 3.0816589386913966e-05,
"loss": 0.8323,
"step": 14900
},
{
"epoch": 1.1591962905718702,
"grad_norm": 0.9010465145111084,
"learning_rate": 3.0687789799072645e-05,
"loss": 0.8863,
"step": 15000
},
{
"epoch": 1.1669242658423493,
"grad_norm": 0.8075920343399048,
"learning_rate": 3.0558990211231324e-05,
"loss": 0.8015,
"step": 15100
},
{
"epoch": 1.1746522411128284,
"grad_norm": 0.7727937698364258,
"learning_rate": 3.043019062339001e-05,
"loss": 0.822,
"step": 15200
},
{
"epoch": 1.1823802163833075,
"grad_norm": 0.8842815160751343,
"learning_rate": 3.030139103554869e-05,
"loss": 0.8069,
"step": 15300
},
{
"epoch": 1.1901081916537868,
"grad_norm": 1.687024474143982,
"learning_rate": 3.017259144770737e-05,
"loss": 0.8723,
"step": 15400
},
{
"epoch": 1.1978361669242659,
"grad_norm": 0.9511739015579224,
"learning_rate": 3.004379185986605e-05,
"loss": 0.7482,
"step": 15500
},
{
"epoch": 1.205564142194745,
"grad_norm": 1.404523253440857,
"learning_rate": 2.9914992272024735e-05,
"loss": 0.8435,
"step": 15600
},
{
"epoch": 1.213292117465224,
"grad_norm": 1.1667975187301636,
"learning_rate": 2.978619268418341e-05,
"loss": 0.8717,
"step": 15700
},
{
"epoch": 1.2210200927357033,
"grad_norm": 1.286392331123352,
"learning_rate": 2.9657393096342096e-05,
"loss": 0.8148,
"step": 15800
},
{
"epoch": 1.2287480680061824,
"grad_norm": 0.915986955165863,
"learning_rate": 2.9528593508500775e-05,
"loss": 0.8285,
"step": 15900
},
{
"epoch": 1.2364760432766615,
"grad_norm": 0.9131502509117126,
"learning_rate": 2.9399793920659457e-05,
"loss": 0.8686,
"step": 16000
},
{
"epoch": 1.2442040185471406,
"grad_norm": 1.6465792655944824,
"learning_rate": 2.9270994332818136e-05,
"loss": 0.7989,
"step": 16100
},
{
"epoch": 1.2519319938176197,
"grad_norm": 0.6225204467773438,
"learning_rate": 2.914219474497682e-05,
"loss": 0.8871,
"step": 16200
},
{
"epoch": 1.259659969088099,
"grad_norm": 1.4327455759048462,
"learning_rate": 2.9013395157135496e-05,
"loss": 0.8531,
"step": 16300
},
{
"epoch": 1.267387944358578,
"grad_norm": 1.1303315162658691,
"learning_rate": 2.8884595569294182e-05,
"loss": 0.8464,
"step": 16400
},
{
"epoch": 1.2751159196290571,
"grad_norm": 0.8437920212745667,
"learning_rate": 2.875579598145286e-05,
"loss": 0.8394,
"step": 16500
},
{
"epoch": 1.2828438948995364,
"grad_norm": 1.1533674001693726,
"learning_rate": 2.8626996393611543e-05,
"loss": 0.8548,
"step": 16600
},
{
"epoch": 1.2905718701700155,
"grad_norm": 0.858174204826355,
"learning_rate": 2.8498196805770222e-05,
"loss": 0.8281,
"step": 16700
},
{
"epoch": 1.2982998454404946,
"grad_norm": 0.8084927797317505,
"learning_rate": 2.8369397217928907e-05,
"loss": 0.8215,
"step": 16800
},
{
"epoch": 1.3060278207109737,
"grad_norm": 1.1206783056259155,
"learning_rate": 2.8240597630087583e-05,
"loss": 0.8648,
"step": 16900
},
{
"epoch": 1.3137557959814528,
"grad_norm": 1.336126685142517,
"learning_rate": 2.8111798042246268e-05,
"loss": 0.8386,
"step": 17000
},
{
"epoch": 1.321483771251932,
"grad_norm": 1.350467324256897,
"learning_rate": 2.798428645028336e-05,
"loss": 0.7863,
"step": 17100
},
{
"epoch": 1.3292117465224111,
"grad_norm": 1.1663061380386353,
"learning_rate": 2.785548686244204e-05,
"loss": 0.8944,
"step": 17200
},
{
"epoch": 1.3369397217928902,
"grad_norm": 1.4521268606185913,
"learning_rate": 2.7727975270479132e-05,
"loss": 0.8633,
"step": 17300
},
{
"epoch": 1.3446676970633695,
"grad_norm": 1.0562511682510376,
"learning_rate": 2.7599175682637818e-05,
"loss": 0.8684,
"step": 17400
},
{
"epoch": 1.3523956723338486,
"grad_norm": 0.7794145941734314,
"learning_rate": 2.7470376094796497e-05,
"loss": 0.8334,
"step": 17500
},
{
"epoch": 1.3601236476043277,
"grad_norm": 1.1900535821914673,
"learning_rate": 2.734157650695518e-05,
"loss": 0.8278,
"step": 17600
},
{
"epoch": 1.3678516228748068,
"grad_norm": 1.2511202096939087,
"learning_rate": 2.7212776919113858e-05,
"loss": 0.8056,
"step": 17700
},
{
"epoch": 1.3755795981452859,
"grad_norm": 0.5958196520805359,
"learning_rate": 2.7083977331272543e-05,
"loss": 0.7903,
"step": 17800
},
{
"epoch": 1.383307573415765,
"grad_norm": 0.9310637712478638,
"learning_rate": 2.695517774343122e-05,
"loss": 0.806,
"step": 17900
},
{
"epoch": 1.3910355486862442,
"grad_norm": 0.8104686737060547,
"learning_rate": 2.6826378155589904e-05,
"loss": 0.8067,
"step": 18000
}
],
"logging_steps": 100,
"max_steps": 38820,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.8813026304e+16,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}