{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 3414,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.005858230814294083,
"grad_norm": 10.993535995483398,
"learning_rate": 1.9978910369068542e-05,
"loss": 3.7814,
"step": 10
},
{
"epoch": 0.011716461628588167,
"grad_norm": 6.300055980682373,
"learning_rate": 1.9955477445811366e-05,
"loss": 3.9016,
"step": 20
},
{
"epoch": 0.01757469244288225,
"grad_norm": 14.147106170654297,
"learning_rate": 1.993204452255419e-05,
"loss": 3.6143,
"step": 30
},
{
"epoch": 0.023432923257176334,
"grad_norm": 9.435710906982422,
"learning_rate": 1.9908611599297014e-05,
"loss": 2.4102,
"step": 40
},
{
"epoch": 0.029291154071470416,
"grad_norm": 10.944840431213379,
"learning_rate": 1.988517867603984e-05,
"loss": 2.179,
"step": 50
},
{
"epoch": 0.0351493848857645,
"grad_norm": 5.027509689331055,
"learning_rate": 1.9861745752782663e-05,
"loss": 2.0129,
"step": 60
},
{
"epoch": 0.041007615700058585,
"grad_norm": 7.124016761779785,
"learning_rate": 1.9838312829525487e-05,
"loss": 1.3423,
"step": 70
},
{
"epoch": 0.04686584651435267,
"grad_norm": 4.878211498260498,
"learning_rate": 1.981487990626831e-05,
"loss": 1.5739,
"step": 80
},
{
"epoch": 0.05272407732864675,
"grad_norm": 2.8620986938476562,
"learning_rate": 1.9791446983011135e-05,
"loss": 1.3344,
"step": 90
},
{
"epoch": 0.05858230814294083,
"grad_norm": 3.641845226287842,
"learning_rate": 1.9768014059753956e-05,
"loss": 1.3627,
"step": 100
},
{
"epoch": 0.06444053895723492,
"grad_norm": 3.619720935821533,
"learning_rate": 1.974458113649678e-05,
"loss": 0.9266,
"step": 110
},
{
"epoch": 0.070298769771529,
"grad_norm": 1.5297939777374268,
"learning_rate": 1.9721148213239604e-05,
"loss": 0.8823,
"step": 120
},
{
"epoch": 0.07615700058582309,
"grad_norm": 1.3193559646606445,
"learning_rate": 1.9697715289982428e-05,
"loss": 0.724,
"step": 130
},
{
"epoch": 0.08201523140011717,
"grad_norm": 3.3446288108825684,
"learning_rate": 1.967428236672525e-05,
"loss": 0.7263,
"step": 140
},
{
"epoch": 0.08787346221441125,
"grad_norm": 4.514213562011719,
"learning_rate": 1.9650849443468073e-05,
"loss": 0.7383,
"step": 150
},
{
"epoch": 0.09373169302870533,
"grad_norm": 1.2520763874053955,
"learning_rate": 1.9627416520210897e-05,
"loss": 0.658,
"step": 160
},
{
"epoch": 0.09958992384299942,
"grad_norm": 1.4183530807495117,
"learning_rate": 1.960398359695372e-05,
"loss": 0.6493,
"step": 170
},
{
"epoch": 0.1054481546572935,
"grad_norm": 2.3520781993865967,
"learning_rate": 1.9580550673696545e-05,
"loss": 0.5405,
"step": 180
},
{
"epoch": 0.11130638547158758,
"grad_norm": 2.6773362159729004,
"learning_rate": 1.955711775043937e-05,
"loss": 0.6707,
"step": 190
},
{
"epoch": 0.11716461628588166,
"grad_norm": 3.6037440299987793,
"learning_rate": 1.9533684827182193e-05,
"loss": 0.5618,
"step": 200
},
{
"epoch": 0.12302284710017575,
"grad_norm": 4.3400702476501465,
"learning_rate": 1.9510251903925017e-05,
"loss": 0.5424,
"step": 210
},
{
"epoch": 0.12888107791446984,
"grad_norm": 1.572972059249878,
"learning_rate": 1.948681898066784e-05,
"loss": 0.5249,
"step": 220
},
{
"epoch": 0.1347393087287639,
"grad_norm": 1.511702537536621,
"learning_rate": 1.9463386057410662e-05,
"loss": 0.5116,
"step": 230
},
{
"epoch": 0.140597539543058,
"grad_norm": 1.3979424238204956,
"learning_rate": 1.9439953134153486e-05,
"loss": 0.5342,
"step": 240
},
{
"epoch": 0.14645577035735208,
"grad_norm": 2.983322858810425,
"learning_rate": 1.941652021089631e-05,
"loss": 0.3911,
"step": 250
},
{
"epoch": 0.15231400117164617,
"grad_norm": 1.372771143913269,
"learning_rate": 1.9393087287639135e-05,
"loss": 0.3779,
"step": 260
},
{
"epoch": 0.15817223198594024,
"grad_norm": 1.926331639289856,
"learning_rate": 1.936965436438196e-05,
"loss": 0.3879,
"step": 270
},
{
"epoch": 0.16403046280023434,
"grad_norm": 1.2269700765609741,
"learning_rate": 1.9346221441124783e-05,
"loss": 0.4142,
"step": 280
},
{
"epoch": 0.1698886936145284,
"grad_norm": 4.581326961517334,
"learning_rate": 1.9322788517867607e-05,
"loss": 0.3587,
"step": 290
},
{
"epoch": 0.1757469244288225,
"grad_norm": 1.6644238233566284,
"learning_rate": 1.9299355594610428e-05,
"loss": 0.2604,
"step": 300
},
{
"epoch": 0.18160515524311657,
"grad_norm": 1.3686414957046509,
"learning_rate": 1.9275922671353252e-05,
"loss": 0.2254,
"step": 310
},
{
"epoch": 0.18746338605741067,
"grad_norm": 1.5756723880767822,
"learning_rate": 1.9252489748096076e-05,
"loss": 0.297,
"step": 320
},
{
"epoch": 0.19332161687170474,
"grad_norm": 1.3246747255325317,
"learning_rate": 1.92290568248389e-05,
"loss": 0.2594,
"step": 330
},
{
"epoch": 0.19917984768599883,
"grad_norm": 2.5869126319885254,
"learning_rate": 1.9205623901581724e-05,
"loss": 0.1844,
"step": 340
},
{
"epoch": 0.2050380785002929,
"grad_norm": 1.37604558467865,
"learning_rate": 1.9182190978324548e-05,
"loss": 0.2867,
"step": 350
},
{
"epoch": 0.210896309314587,
"grad_norm": 2.2903075218200684,
"learning_rate": 1.915875805506737e-05,
"loss": 0.2371,
"step": 360
},
{
"epoch": 0.21675454012888107,
"grad_norm": 0.9721745252609253,
"learning_rate": 1.9135325131810193e-05,
"loss": 0.238,
"step": 370
},
{
"epoch": 0.22261277094317516,
"grad_norm": 0.6926158666610718,
"learning_rate": 1.9111892208553017e-05,
"loss": 0.2084,
"step": 380
},
{
"epoch": 0.22847100175746923,
"grad_norm": 3.8920700550079346,
"learning_rate": 1.908845928529584e-05,
"loss": 0.313,
"step": 390
},
{
"epoch": 0.23432923257176333,
"grad_norm": 1.1392645835876465,
"learning_rate": 1.9065026362038665e-05,
"loss": 0.2973,
"step": 400
},
{
"epoch": 0.2401874633860574,
"grad_norm": 0.7737612128257751,
"learning_rate": 1.904159343878149e-05,
"loss": 0.1771,
"step": 410
},
{
"epoch": 0.2460456942003515,
"grad_norm": 0.5699288845062256,
"learning_rate": 1.9018160515524314e-05,
"loss": 0.1408,
"step": 420
},
{
"epoch": 0.2519039250146456,
"grad_norm": 1.6235454082489014,
"learning_rate": 1.8994727592267138e-05,
"loss": 0.1686,
"step": 430
},
{
"epoch": 0.2577621558289397,
"grad_norm": 3.0851707458496094,
"learning_rate": 1.8971294669009962e-05,
"loss": 0.1298,
"step": 440
},
{
"epoch": 0.26362038664323373,
"grad_norm": 1.0552427768707275,
"learning_rate": 1.8947861745752786e-05,
"loss": 0.1288,
"step": 450
},
{
"epoch": 0.2694786174575278,
"grad_norm": 1.6423840522766113,
"learning_rate": 1.892442882249561e-05,
"loss": 0.1995,
"step": 460
},
{
"epoch": 0.2753368482718219,
"grad_norm": 0.3003721535205841,
"learning_rate": 1.890099589923843e-05,
"loss": 0.1002,
"step": 470
},
{
"epoch": 0.281195079086116,
"grad_norm": 0.5576034784317017,
"learning_rate": 1.8877562975981255e-05,
"loss": 0.1311,
"step": 480
},
{
"epoch": 0.28705330990041006,
"grad_norm": 0.36932089924812317,
"learning_rate": 1.885413005272408e-05,
"loss": 0.2413,
"step": 490
},
{
"epoch": 0.29291154071470415,
"grad_norm": 0.4237634241580963,
"learning_rate": 1.8830697129466903e-05,
"loss": 0.0527,
"step": 500
},
{
"epoch": 0.29876977152899825,
"grad_norm": 3.5395658016204834,
"learning_rate": 1.8807264206209724e-05,
"loss": 0.1513,
"step": 510
},
{
"epoch": 0.30462800234329235,
"grad_norm": 0.28212159872055054,
"learning_rate": 1.8783831282952548e-05,
"loss": 0.0912,
"step": 520
},
{
"epoch": 0.3104862331575864,
"grad_norm": 3.4626212120056152,
"learning_rate": 1.8760398359695372e-05,
"loss": 0.1459,
"step": 530
},
{
"epoch": 0.3163444639718805,
"grad_norm": 1.6932200193405151,
"learning_rate": 1.8736965436438196e-05,
"loss": 0.1388,
"step": 540
},
{
"epoch": 0.3222026947861746,
"grad_norm": 0.14610934257507324,
"learning_rate": 1.871353251318102e-05,
"loss": 0.1426,
"step": 550
},
{
"epoch": 0.3280609256004687,
"grad_norm": 0.2995212972164154,
"learning_rate": 1.8690099589923845e-05,
"loss": 0.1387,
"step": 560
},
{
"epoch": 0.3339191564147627,
"grad_norm": 0.7743491530418396,
"learning_rate": 1.866666666666667e-05,
"loss": 0.0986,
"step": 570
},
{
"epoch": 0.3397773872290568,
"grad_norm": 4.253698825836182,
"learning_rate": 1.8643233743409493e-05,
"loss": 0.1312,
"step": 580
},
{
"epoch": 0.3456356180433509,
"grad_norm": 1.1342084407806396,
"learning_rate": 1.8619800820152317e-05,
"loss": 0.0792,
"step": 590
},
{
"epoch": 0.351493848857645,
"grad_norm": 0.500845730304718,
"learning_rate": 1.859636789689514e-05,
"loss": 0.2464,
"step": 600
},
{
"epoch": 0.35735207967193905,
"grad_norm": 7.179585933685303,
"learning_rate": 1.8572934973637965e-05,
"loss": 0.1881,
"step": 610
},
{
"epoch": 0.36321031048623315,
"grad_norm": 3.4145667552948,
"learning_rate": 1.8549502050380786e-05,
"loss": 0.2294,
"step": 620
},
{
"epoch": 0.36906854130052724,
"grad_norm": 4.951934337615967,
"learning_rate": 1.852606912712361e-05,
"loss": 0.0984,
"step": 630
},
{
"epoch": 0.37492677211482134,
"grad_norm": 3.63204288482666,
"learning_rate": 1.8502636203866434e-05,
"loss": 0.3341,
"step": 640
},
{
"epoch": 0.38078500292911543,
"grad_norm": 3.267408609390259,
"learning_rate": 1.8479203280609258e-05,
"loss": 0.1523,
"step": 650
},
{
"epoch": 0.3866432337434095,
"grad_norm": 3.1641011238098145,
"learning_rate": 1.8455770357352082e-05,
"loss": 0.286,
"step": 660
},
{
"epoch": 0.39250146455770357,
"grad_norm": 1.006550669670105,
"learning_rate": 1.8432337434094903e-05,
"loss": 0.1675,
"step": 670
},
{
"epoch": 0.39835969537199767,
"grad_norm": 0.17614607512950897,
"learning_rate": 1.8408904510837727e-05,
"loss": 0.1466,
"step": 680
},
{
"epoch": 0.40421792618629176,
"grad_norm": 0.142693430185318,
"learning_rate": 1.838547158758055e-05,
"loss": 0.2919,
"step": 690
},
{
"epoch": 0.4100761570005858,
"grad_norm": 3.1687896251678467,
"learning_rate": 1.8362038664323375e-05,
"loss": 0.2602,
"step": 700
},
{
"epoch": 0.4159343878148799,
"grad_norm": 0.22907620668411255,
"learning_rate": 1.83386057410662e-05,
"loss": 0.1003,
"step": 710
},
{
"epoch": 0.421792618629174,
"grad_norm": 0.19047817587852478,
"learning_rate": 1.8315172817809024e-05,
"loss": 0.1336,
"step": 720
},
{
"epoch": 0.4276508494434681,
"grad_norm": 4.431445121765137,
"learning_rate": 1.8291739894551848e-05,
"loss": 0.1076,
"step": 730
},
{
"epoch": 0.43350908025776214,
"grad_norm": 1.916579008102417,
"learning_rate": 1.8268306971294672e-05,
"loss": 0.1822,
"step": 740
},
{
"epoch": 0.43936731107205623,
"grad_norm": 3.521374225616455,
"learning_rate": 1.8244874048037493e-05,
"loss": 0.0826,
"step": 750
},
{
"epoch": 0.44522554188635033,
"grad_norm": 2.703446865081787,
"learning_rate": 1.8221441124780317e-05,
"loss": 0.18,
"step": 760
},
{
"epoch": 0.4510837727006444,
"grad_norm": 0.11092668026685715,
"learning_rate": 1.819800820152314e-05,
"loss": 0.2272,
"step": 770
},
{
"epoch": 0.45694200351493847,
"grad_norm": 0.1550651639699936,
"learning_rate": 1.8174575278265965e-05,
"loss": 0.2156,
"step": 780
},
{
"epoch": 0.46280023432923256,
"grad_norm": 0.18351873755455017,
"learning_rate": 1.815114235500879e-05,
"loss": 0.0512,
"step": 790
},
{
"epoch": 0.46865846514352666,
"grad_norm": 0.20050418376922607,
"learning_rate": 1.8127709431751613e-05,
"loss": 0.0394,
"step": 800
},
{
"epoch": 0.47451669595782076,
"grad_norm": 3.0404205322265625,
"learning_rate": 1.8104276508494437e-05,
"loss": 0.1457,
"step": 810
},
{
"epoch": 0.4803749267721148,
"grad_norm": 0.16257639229297638,
"learning_rate": 1.808084358523726e-05,
"loss": 0.173,
"step": 820
},
{
"epoch": 0.4862331575864089,
"grad_norm": 0.5828198194503784,
"learning_rate": 1.8057410661980082e-05,
"loss": 0.094,
"step": 830
},
{
"epoch": 0.492091388400703,
"grad_norm": 0.1483214646577835,
"learning_rate": 1.8033977738722906e-05,
"loss": 0.0408,
"step": 840
},
{
"epoch": 0.4979496192149971,
"grad_norm": 0.07068105041980743,
"learning_rate": 1.801054481546573e-05,
"loss": 0.0934,
"step": 850
},
{
"epoch": 0.5038078500292912,
"grad_norm": 0.6805324554443359,
"learning_rate": 1.7987111892208554e-05,
"loss": 0.0113,
"step": 860
},
{
"epoch": 0.5096660808435852,
"grad_norm": 0.09847233444452286,
"learning_rate": 1.796367896895138e-05,
"loss": 0.2669,
"step": 870
},
{
"epoch": 0.5155243116578794,
"grad_norm": 0.3944021761417389,
"learning_rate": 1.7940246045694203e-05,
"loss": 0.1015,
"step": 880
},
{
"epoch": 0.5213825424721734,
"grad_norm": 0.17011871933937073,
"learning_rate": 1.7916813122437023e-05,
"loss": 0.1127,
"step": 890
},
{
"epoch": 0.5272407732864675,
"grad_norm": 2.945100784301758,
"learning_rate": 1.7893380199179847e-05,
"loss": 0.1913,
"step": 900
},
{
"epoch": 0.5330990041007616,
"grad_norm": 0.25906553864479065,
"learning_rate": 1.786994727592267e-05,
"loss": 0.0835,
"step": 910
},
{
"epoch": 0.5389572349150556,
"grad_norm": 0.30626562237739563,
"learning_rate": 1.7846514352665496e-05,
"loss": 0.0305,
"step": 920
},
{
"epoch": 0.5448154657293497,
"grad_norm": 0.1143951565027237,
"learning_rate": 1.782308142940832e-05,
"loss": 0.0216,
"step": 930
},
{
"epoch": 0.5506736965436438,
"grad_norm": 0.19745256006717682,
"learning_rate": 1.7799648506151144e-05,
"loss": 0.1507,
"step": 940
},
{
"epoch": 0.5565319273579379,
"grad_norm": 7.011041164398193,
"learning_rate": 1.7776215582893968e-05,
"loss": 0.3032,
"step": 950
},
{
"epoch": 0.562390158172232,
"grad_norm": 3.46498966217041,
"learning_rate": 1.7752782659636792e-05,
"loss": 0.2152,
"step": 960
},
{
"epoch": 0.5682483889865261,
"grad_norm": 0.17841269075870514,
"learning_rate": 1.7729349736379616e-05,
"loss": 0.0435,
"step": 970
},
{
"epoch": 0.5741066198008201,
"grad_norm": 0.3201232850551605,
"learning_rate": 1.770591681312244e-05,
"loss": 0.0959,
"step": 980
},
{
"epoch": 0.5799648506151143,
"grad_norm": 3.194690704345703,
"learning_rate": 1.7682483889865264e-05,
"loss": 0.1778,
"step": 990
},
{
"epoch": 0.5858230814294083,
"grad_norm": 3.6589221954345703,
"learning_rate": 1.7659050966608085e-05,
"loss": 0.1359,
"step": 1000
},
{
"epoch": 0.5916813122437024,
"grad_norm": 8.663952827453613,
"learning_rate": 1.763561804335091e-05,
"loss": 0.1705,
"step": 1010
},
{
"epoch": 0.5975395430579965,
"grad_norm": 0.9029685854911804,
"learning_rate": 1.7612185120093733e-05,
"loss": 0.1045,
"step": 1020
},
{
"epoch": 0.6033977738722905,
"grad_norm": 0.06549205631017685,
"learning_rate": 1.7588752196836558e-05,
"loss": 0.0432,
"step": 1030
},
{
"epoch": 0.6092560046865847,
"grad_norm": 0.2381201982498169,
"learning_rate": 1.7565319273579378e-05,
"loss": 0.0923,
"step": 1040
},
{
"epoch": 0.6151142355008787,
"grad_norm": 0.07783389836549759,
"learning_rate": 1.7541886350322202e-05,
"loss": 0.0385,
"step": 1050
},
{
"epoch": 0.6209724663151728,
"grad_norm": 0.1266435831785202,
"learning_rate": 1.7518453427065027e-05,
"loss": 0.0282,
"step": 1060
},
{
"epoch": 0.6268306971294669,
"grad_norm": 0.20830880105495453,
"learning_rate": 1.749502050380785e-05,
"loss": 0.0758,
"step": 1070
},
{
"epoch": 0.632688927943761,
"grad_norm": 1.412645936012268,
"learning_rate": 1.7471587580550675e-05,
"loss": 0.1091,
"step": 1080
},
{
"epoch": 0.6385471587580551,
"grad_norm": 0.6468214988708496,
"learning_rate": 1.74481546572935e-05,
"loss": 0.1342,
"step": 1090
},
{
"epoch": 0.6444053895723492,
"grad_norm": 0.05657221004366875,
"learning_rate": 1.7424721734036323e-05,
"loss": 0.0898,
"step": 1100
},
{
"epoch": 0.6502636203866432,
"grad_norm": 0.18487469851970673,
"learning_rate": 1.7401288810779147e-05,
"loss": 0.0827,
"step": 1110
},
{
"epoch": 0.6561218512009374,
"grad_norm": 0.06991606205701828,
"learning_rate": 1.737785588752197e-05,
"loss": 0.147,
"step": 1120
},
{
"epoch": 0.6619800820152314,
"grad_norm": 0.3615979254245758,
"learning_rate": 1.7354422964264795e-05,
"loss": 0.0985,
"step": 1130
},
{
"epoch": 0.6678383128295254,
"grad_norm": 16.712085723876953,
"learning_rate": 1.733099004100762e-05,
"loss": 0.1162,
"step": 1140
},
{
"epoch": 0.6736965436438196,
"grad_norm": 0.044244520366191864,
"learning_rate": 1.730755711775044e-05,
"loss": 0.0973,
"step": 1150
},
{
"epoch": 0.6795547744581136,
"grad_norm": 2.611173391342163,
"learning_rate": 1.7284124194493264e-05,
"loss": 0.131,
"step": 1160
},
{
"epoch": 0.6854130052724078,
"grad_norm": 9.249836921691895,
"learning_rate": 1.726069127123609e-05,
"loss": 0.1265,
"step": 1170
},
{
"epoch": 0.6912712360867018,
"grad_norm": 4.1363301277160645,
"learning_rate": 1.7237258347978912e-05,
"loss": 0.1003,
"step": 1180
},
{
"epoch": 0.6971294669009959,
"grad_norm": 3.4619572162628174,
"learning_rate": 1.7213825424721737e-05,
"loss": 0.1756,
"step": 1190
},
{
"epoch": 0.70298769771529,
"grad_norm": 3.285961627960205,
"learning_rate": 1.7190392501464557e-05,
"loss": 0.2226,
"step": 1200
},
{
"epoch": 0.7088459285295841,
"grad_norm": 0.13242599368095398,
"learning_rate": 1.716695957820738e-05,
"loss": 0.1064,
"step": 1210
},
{
"epoch": 0.7147041593438781,
"grad_norm": 0.39351117610931396,
"learning_rate": 1.7143526654950206e-05,
"loss": 0.0712,
"step": 1220
},
{
"epoch": 0.7205623901581723,
"grad_norm": 3.50242018699646,
"learning_rate": 1.712009373169303e-05,
"loss": 0.1174,
"step": 1230
},
{
"epoch": 0.7264206209724663,
"grad_norm": 1.7381882667541504,
"learning_rate": 1.7096660808435854e-05,
"loss": 0.3288,
"step": 1240
},
{
"epoch": 0.7322788517867604,
"grad_norm": 0.6413008570671082,
"learning_rate": 1.7073227885178678e-05,
"loss": 0.2029,
"step": 1250
},
{
"epoch": 0.7381370826010545,
"grad_norm": 11.778146743774414,
"learning_rate": 1.7049794961921502e-05,
"loss": 0.1279,
"step": 1260
},
{
"epoch": 0.7439953134153485,
"grad_norm": 0.32977521419525146,
"learning_rate": 1.7026362038664326e-05,
"loss": 0.1154,
"step": 1270
},
{
"epoch": 0.7498535442296427,
"grad_norm": 0.11577194184064865,
"learning_rate": 1.7002929115407147e-05,
"loss": 0.1317,
"step": 1280
},
{
"epoch": 0.7557117750439367,
"grad_norm": 0.08130369335412979,
"learning_rate": 1.697949619214997e-05,
"loss": 0.0718,
"step": 1290
},
{
"epoch": 0.7615700058582309,
"grad_norm": 4.955254554748535,
"learning_rate": 1.6956063268892795e-05,
"loss": 0.1281,
"step": 1300
},
{
"epoch": 0.7674282366725249,
"grad_norm": 0.06785889714956284,
"learning_rate": 1.693263034563562e-05,
"loss": 0.1004,
"step": 1310
},
{
"epoch": 0.773286467486819,
"grad_norm": 0.1323424130678177,
"learning_rate": 1.6909197422378443e-05,
"loss": 0.1207,
"step": 1320
},
{
"epoch": 0.7791446983011131,
"grad_norm": 8.72974681854248,
"learning_rate": 1.6885764499121267e-05,
"loss": 0.1181,
"step": 1330
},
{
"epoch": 0.7850029291154071,
"grad_norm": 0.08895012736320496,
"learning_rate": 1.686233157586409e-05,
"loss": 0.0098,
"step": 1340
},
{
"epoch": 0.7908611599297012,
"grad_norm": 0.3387869894504547,
"learning_rate": 1.6838898652606916e-05,
"loss": 0.0319,
"step": 1350
},
{
"epoch": 0.7967193907439953,
"grad_norm": 0.06218031048774719,
"learning_rate": 1.681546572934974e-05,
"loss": 0.1521,
"step": 1360
},
{
"epoch": 0.8025776215582894,
"grad_norm": 0.0525616817176342,
"learning_rate": 1.679203280609256e-05,
"loss": 0.032,
"step": 1370
},
{
"epoch": 0.8084358523725835,
"grad_norm": 0.33692577481269836,
"learning_rate": 1.6768599882835385e-05,
"loss": 0.0106,
"step": 1380
},
{
"epoch": 0.8142940831868776,
"grad_norm": 3.903052806854248,
"learning_rate": 1.674516695957821e-05,
"loss": 0.1573,
"step": 1390
},
{
"epoch": 0.8201523140011716,
"grad_norm": 0.07920405268669128,
"learning_rate": 1.6721734036321033e-05,
"loss": 0.0089,
"step": 1400
},
{
"epoch": 0.8260105448154658,
"grad_norm": 2.7212400436401367,
"learning_rate": 1.6698301113063854e-05,
"loss": 0.2825,
"step": 1410
},
{
"epoch": 0.8318687756297598,
"grad_norm": 0.2885132133960724,
"learning_rate": 1.6674868189806678e-05,
"loss": 0.0744,
"step": 1420
},
{
"epoch": 0.8377270064440538,
"grad_norm": 0.04495526850223541,
"learning_rate": 1.6651435266549502e-05,
"loss": 0.0115,
"step": 1430
},
{
"epoch": 0.843585237258348,
"grad_norm": 0.15983568131923676,
"learning_rate": 1.6628002343292326e-05,
"loss": 0.03,
"step": 1440
},
{
"epoch": 0.849443468072642,
"grad_norm": 0.054607000201940536,
"learning_rate": 1.660456942003515e-05,
"loss": 0.0691,
"step": 1450
},
{
"epoch": 0.8553016988869362,
"grad_norm": 0.06109416112303734,
"learning_rate": 1.6581136496777974e-05,
"loss": 0.217,
"step": 1460
},
{
"epoch": 0.8611599297012302,
"grad_norm": 11.838920593261719,
"learning_rate": 1.6557703573520798e-05,
"loss": 0.2354,
"step": 1470
},
{
"epoch": 0.8670181605155243,
"grad_norm": 5.60699462890625,
"learning_rate": 1.6534270650263622e-05,
"loss": 0.1286,
"step": 1480
},
{
"epoch": 0.8728763913298184,
"grad_norm": 4.943994998931885,
"learning_rate": 1.6510837727006446e-05,
"loss": 0.1325,
"step": 1490
},
{
"epoch": 0.8787346221441125,
"grad_norm": 0.08744335919618607,
"learning_rate": 1.648740480374927e-05,
"loss": 0.1518,
"step": 1500
},
{
"epoch": 0.8845928529584065,
"grad_norm": 5.690299987792969,
"learning_rate": 1.6463971880492095e-05,
"loss": 0.1634,
"step": 1510
},
{
"epoch": 0.8904510837727007,
"grad_norm": 0.06222234293818474,
"learning_rate": 1.644053895723492e-05,
"loss": 0.1902,
"step": 1520
},
{
"epoch": 0.8963093145869947,
"grad_norm": 0.13232512772083282,
"learning_rate": 1.641710603397774e-05,
"loss": 0.0997,
"step": 1530
},
{
"epoch": 0.9021675454012889,
"grad_norm": 0.09595705568790436,
"learning_rate": 1.6393673110720564e-05,
"loss": 0.1137,
"step": 1540
},
{
"epoch": 0.9080257762155829,
"grad_norm": 3.798790693283081,
"learning_rate": 1.6370240187463388e-05,
"loss": 0.2348,
"step": 1550
},
{
"epoch": 0.9138840070298769,
"grad_norm": 6.075992107391357,
"learning_rate": 1.6346807264206212e-05,
"loss": 0.083,
"step": 1560
},
{
"epoch": 0.9197422378441711,
"grad_norm": 0.1521161049604416,
"learning_rate": 1.6323374340949033e-05,
"loss": 0.1863,
"step": 1570
},
{
"epoch": 0.9256004686584651,
"grad_norm": 4.295522689819336,
"learning_rate": 1.6299941417691857e-05,
"loss": 0.0441,
"step": 1580
},
{
"epoch": 0.9314586994727593,
"grad_norm": 11.862258911132812,
"learning_rate": 1.627650849443468e-05,
"loss": 0.1057,
"step": 1590
},
{
"epoch": 0.9373169302870533,
"grad_norm": 0.19827328622341156,
"learning_rate": 1.6253075571177505e-05,
"loss": 0.0422,
"step": 1600
},
{
"epoch": 0.9431751611013474,
"grad_norm": 0.05980195105075836,
"learning_rate": 1.622964264792033e-05,
"loss": 0.0119,
"step": 1610
},
{
"epoch": 0.9490333919156415,
"grad_norm": 5.003294944763184,
"learning_rate": 1.6206209724663153e-05,
"loss": 0.0994,
"step": 1620
},
{
"epoch": 0.9548916227299356,
"grad_norm": 2.8840978145599365,
"learning_rate": 1.6182776801405977e-05,
"loss": 0.1385,
"step": 1630
},
{
"epoch": 0.9607498535442296,
"grad_norm": 0.07565687596797943,
"learning_rate": 1.61593438781488e-05,
"loss": 0.0457,
"step": 1640
},
{
"epoch": 0.9666080843585237,
"grad_norm": 16.399011611938477,
"learning_rate": 1.6135910954891626e-05,
"loss": 0.1724,
"step": 1650
},
{
"epoch": 0.9724663151728178,
"grad_norm": 0.08199520409107208,
"learning_rate": 1.611247803163445e-05,
"loss": 0.1122,
"step": 1660
},
{
"epoch": 0.9783245459871119,
"grad_norm": 0.07179383933544159,
"learning_rate": 1.608904510837727e-05,
"loss": 0.1905,
"step": 1670
},
{
"epoch": 0.984182776801406,
"grad_norm": 3.4531142711639404,
"learning_rate": 1.6065612185120094e-05,
"loss": 0.2529,
"step": 1680
},
{
"epoch": 0.9900410076157,
"grad_norm": 3.2887964248657227,
"learning_rate": 1.604217926186292e-05,
"loss": 0.2152,
"step": 1690
},
{
"epoch": 0.9958992384299942,
"grad_norm": 0.08914665877819061,
"learning_rate": 1.6018746338605743e-05,
"loss": 0.0181,
"step": 1700
},
{
"epoch": 1.0017574692442883,
"grad_norm": 0.09250498563051224,
"learning_rate": 1.5995313415348567e-05,
"loss": 0.0127,
"step": 1710
},
{
"epoch": 1.0076157000585824,
"grad_norm": 3.05525541305542,
"learning_rate": 1.597188049209139e-05,
"loss": 0.1035,
"step": 1720
},
{
"epoch": 1.0134739308728764,
"grad_norm": 7.614686489105225,
"learning_rate": 1.594844756883421e-05,
"loss": 0.1496,
"step": 1730
},
{
"epoch": 1.0193321616871704,
"grad_norm": 0.10618708282709122,
"learning_rate": 1.5925014645577036e-05,
"loss": 0.0079,
"step": 1740
},
{
"epoch": 1.0251903925014645,
"grad_norm": 6.923569202423096,
"learning_rate": 1.590158172231986e-05,
"loss": 0.0851,
"step": 1750
},
{
"epoch": 1.0310486233157587,
"grad_norm": 5.463113307952881,
"learning_rate": 1.5878148799062684e-05,
"loss": 0.1242,
"step": 1760
},
{
"epoch": 1.0369068541300528,
"grad_norm": 0.06814823299646378,
"learning_rate": 1.5854715875805508e-05,
"loss": 0.1508,
"step": 1770
},
{
"epoch": 1.0427650849443468,
"grad_norm": 0.18510116636753082,
"learning_rate": 1.5831282952548332e-05,
"loss": 0.1044,
"step": 1780
},
{
"epoch": 1.0486233157586409,
"grad_norm": 7.552770614624023,
"learning_rate": 1.5807850029291156e-05,
"loss": 0.0893,
"step": 1790
},
{
"epoch": 1.054481546572935,
"grad_norm": 0.9201821684837341,
"learning_rate": 1.5784417106033977e-05,
"loss": 0.0783,
"step": 1800
},
{
"epoch": 1.060339777387229,
"grad_norm": 8.554439544677734,
"learning_rate": 1.57609841827768e-05,
"loss": 0.2055,
"step": 1810
},
{
"epoch": 1.0661980082015232,
"grad_norm": 0.0478215292096138,
"learning_rate": 1.5737551259519625e-05,
"loss": 0.051,
"step": 1820
},
{
"epoch": 1.0720562390158173,
"grad_norm": 3.4328463077545166,
"learning_rate": 1.571411833626245e-05,
"loss": 0.1924,
"step": 1830
},
{
"epoch": 1.0779144698301113,
"grad_norm": 0.7175261974334717,
"learning_rate": 1.5690685413005274e-05,
"loss": 0.0065,
"step": 1840
},
{
"epoch": 1.0837727006444053,
"grad_norm": 0.20879748463630676,
"learning_rate": 1.5667252489748098e-05,
"loss": 0.006,
"step": 1850
},
{
"epoch": 1.0896309314586994,
"grad_norm": 0.04610930755734444,
"learning_rate": 1.5643819566490922e-05,
"loss": 0.0084,
"step": 1860
},
{
"epoch": 1.0954891622729936,
"grad_norm": 0.023400362581014633,
"learning_rate": 1.5620386643233746e-05,
"loss": 0.0703,
"step": 1870
},
{
"epoch": 1.1013473930872877,
"grad_norm": 0.007849013432860374,
"learning_rate": 1.559695371997657e-05,
"loss": 0.0054,
"step": 1880
},
{
"epoch": 1.1072056239015817,
"grad_norm": 0.025371093302965164,
"learning_rate": 1.5573520796719394e-05,
"loss": 0.0773,
"step": 1890
},
{
"epoch": 1.1130638547158758,
"grad_norm": 0.06657887995243073,
"learning_rate": 1.5550087873462215e-05,
"loss": 0.0375,
"step": 1900
},
{
"epoch": 1.1189220855301698,
"grad_norm": 2.8749732971191406,
"learning_rate": 1.552665495020504e-05,
"loss": 0.1956,
"step": 1910
},
{
"epoch": 1.124780316344464,
"grad_norm": 0.04028751328587532,
"learning_rate": 1.5503222026947863e-05,
"loss": 0.1195,
"step": 1920
},
{
"epoch": 1.1306385471587581,
"grad_norm": 0.045628052204847336,
"learning_rate": 1.5479789103690687e-05,
"loss": 0.1612,
"step": 1930
},
{
"epoch": 1.1364967779730522,
"grad_norm": 2.970036506652832,
"learning_rate": 1.5456356180433508e-05,
"loss": 0.0281,
"step": 1940
},
{
"epoch": 1.1423550087873462,
"grad_norm": 0.28056764602661133,
"learning_rate": 1.5432923257176332e-05,
"loss": 0.0254,
"step": 1950
},
{
"epoch": 1.1482132396016402,
"grad_norm": 0.044658973813056946,
"learning_rate": 1.5409490333919156e-05,
"loss": 0.1445,
"step": 1960
},
{
"epoch": 1.1540714704159343,
"grad_norm": 0.12443964928388596,
"learning_rate": 1.538605741066198e-05,
"loss": 0.2513,
"step": 1970
},
{
"epoch": 1.1599297012302285,
"grad_norm": 0.06219332292675972,
"learning_rate": 1.5362624487404804e-05,
"loss": 0.0266,
"step": 1980
},
{
"epoch": 1.1657879320445226,
"grad_norm": 0.09316384792327881,
"learning_rate": 1.533919156414763e-05,
"loss": 0.0737,
"step": 1990
},
{
"epoch": 1.1716461628588166,
"grad_norm": 7.367013931274414,
"learning_rate": 1.5315758640890453e-05,
"loss": 0.0688,
"step": 2000
},
{
"epoch": 1.1775043936731107,
"grad_norm": 0.04027366265654564,
"learning_rate": 1.5292325717633277e-05,
"loss": 0.0572,
"step": 2010
},
{
"epoch": 1.1833626244874047,
"grad_norm": 0.36168113350868225,
"learning_rate": 1.52688927943761e-05,
"loss": 0.1153,
"step": 2020
},
{
"epoch": 1.189220855301699,
"grad_norm": 0.10165631026029587,
"learning_rate": 1.5245459871118923e-05,
"loss": 0.0672,
"step": 2030
},
{
"epoch": 1.195079086115993,
"grad_norm": 1.8676950931549072,
"learning_rate": 1.5222026947861747e-05,
"loss": 0.1765,
"step": 2040
},
{
"epoch": 1.200937316930287,
"grad_norm": 0.0685826912522316,
"learning_rate": 1.5198594024604571e-05,
"loss": 0.0532,
"step": 2050
},
{
"epoch": 1.206795547744581,
"grad_norm": 0.09834755212068558,
"learning_rate": 1.5175161101347396e-05,
"loss": 0.0509,
"step": 2060
},
{
"epoch": 1.2126537785588751,
"grad_norm": 0.26971983909606934,
"learning_rate": 1.5151728178090216e-05,
"loss": 0.0384,
"step": 2070
},
{
"epoch": 1.2185120093731694,
"grad_norm": 0.042703777551651,
"learning_rate": 1.512829525483304e-05,
"loss": 0.0825,
"step": 2080
},
{
"epoch": 1.2243702401874634,
"grad_norm": 0.02655109018087387,
"learning_rate": 1.5104862331575865e-05,
"loss": 0.1039,
"step": 2090
},
{
"epoch": 1.2302284710017575,
"grad_norm": 19.35833740234375,
"learning_rate": 1.5081429408318689e-05,
"loss": 0.1339,
"step": 2100
},
{
"epoch": 1.2360867018160515,
"grad_norm": 0.0528317391872406,
"learning_rate": 1.5057996485061513e-05,
"loss": 0.1107,
"step": 2110
},
{
"epoch": 1.2419449326303456,
"grad_norm": 0.06290628761053085,
"learning_rate": 1.5034563561804337e-05,
"loss": 0.0576,
"step": 2120
},
{
"epoch": 1.2478031634446398,
"grad_norm": 0.049718987196683884,
"learning_rate": 1.501113063854716e-05,
"loss": 0.1598,
"step": 2130
},
{
"epoch": 1.2536613942589339,
"grad_norm": 1.1866627931594849,
"learning_rate": 1.4987697715289983e-05,
"loss": 0.2636,
"step": 2140
},
{
"epoch": 1.259519625073228,
"grad_norm": 13.535921096801758,
"learning_rate": 1.4964264792032807e-05,
"loss": 0.0952,
"step": 2150
},
{
"epoch": 1.265377855887522,
"grad_norm": 3.8203511238098145,
"learning_rate": 1.4940831868775632e-05,
"loss": 0.08,
"step": 2160
},
{
"epoch": 1.271236086701816,
"grad_norm": 0.06578367948532104,
"learning_rate": 1.4917398945518456e-05,
"loss": 0.0749,
"step": 2170
},
{
"epoch": 1.2770943175161102,
"grad_norm": 0.025391127914190292,
"learning_rate": 1.489396602226128e-05,
"loss": 0.1329,
"step": 2180
},
{
"epoch": 1.2829525483304043,
"grad_norm": 4.095520973205566,
"learning_rate": 1.4870533099004102e-05,
"loss": 0.116,
"step": 2190
},
{
"epoch": 1.2888107791446983,
"grad_norm": 0.11842747032642365,
"learning_rate": 1.4847100175746925e-05,
"loss": 0.0872,
"step": 2200
},
{
"epoch": 1.2946690099589924,
"grad_norm": 0.24338534474372864,
"learning_rate": 1.4823667252489749e-05,
"loss": 0.0083,
"step": 2210
},
{
"epoch": 1.3005272407732864,
"grad_norm": 0.7529749274253845,
"learning_rate": 1.4800234329232573e-05,
"loss": 0.1073,
"step": 2220
},
{
"epoch": 1.3063854715875807,
"grad_norm": 0.021239090710878372,
"learning_rate": 1.4776801405975395e-05,
"loss": 0.0095,
"step": 2230
},
{
"epoch": 1.3122437024018747,
"grad_norm": 0.21038176119327545,
"learning_rate": 1.475336848271822e-05,
"loss": 0.0802,
"step": 2240
},
{
"epoch": 1.3181019332161688,
"grad_norm": 4.547256946563721,
"learning_rate": 1.4729935559461044e-05,
"loss": 0.0826,
"step": 2250
},
{
"epoch": 1.3239601640304628,
"grad_norm": 0.02918773703277111,
"learning_rate": 1.4706502636203868e-05,
"loss": 0.0776,
"step": 2260
},
{
"epoch": 1.3298183948447568,
"grad_norm": 0.1411774605512619,
"learning_rate": 1.4683069712946692e-05,
"loss": 0.0842,
"step": 2270
},
{
"epoch": 1.335676625659051,
"grad_norm": 6.09588098526001,
"learning_rate": 1.4659636789689516e-05,
"loss": 0.1478,
"step": 2280
},
{
"epoch": 1.341534856473345,
"grad_norm": 0.08673793077468872,
"learning_rate": 1.4636203866432338e-05,
"loss": 0.0364,
"step": 2290
},
{
"epoch": 1.3473930872876392,
"grad_norm": 0.020423686131834984,
"learning_rate": 1.4612770943175162e-05,
"loss": 0.0088,
"step": 2300
},
{
"epoch": 1.3532513181019332,
"grad_norm": 0.07648295164108276,
"learning_rate": 1.4589338019917987e-05,
"loss": 0.0018,
"step": 2310
},
{
"epoch": 1.3591095489162273,
"grad_norm": 0.04958697408437729,
"learning_rate": 1.456590509666081e-05,
"loss": 0.0842,
"step": 2320
},
{
"epoch": 1.3649677797305213,
"grad_norm": 0.1480845808982849,
"learning_rate": 1.4542472173403633e-05,
"loss": 0.1083,
"step": 2330
},
{
"epoch": 1.3708260105448153,
"grad_norm": 4.45640754699707,
"learning_rate": 1.4519039250146455e-05,
"loss": 0.1541,
"step": 2340
},
{
"epoch": 1.3766842413591096,
"grad_norm": 3.787076711654663,
"learning_rate": 1.449560632688928e-05,
"loss": 0.0705,
"step": 2350
},
{
"epoch": 1.3825424721734036,
"grad_norm": 0.051576077938079834,
"learning_rate": 1.4472173403632104e-05,
"loss": 0.0039,
"step": 2360
},
{
"epoch": 1.3884007029876977,
"grad_norm": 0.5413435697555542,
"learning_rate": 1.4448740480374928e-05,
"loss": 0.1665,
"step": 2370
},
{
"epoch": 1.3942589338019917,
"grad_norm": 0.40767887234687805,
"learning_rate": 1.4425307557117752e-05,
"loss": 0.0356,
"step": 2380
},
{
"epoch": 1.4001171646162858,
"grad_norm": 0.08306195586919785,
"learning_rate": 1.4401874633860576e-05,
"loss": 0.1033,
"step": 2390
},
{
"epoch": 1.40597539543058,
"grad_norm": 0.18400411307811737,
"learning_rate": 1.4378441710603398e-05,
"loss": 0.0393,
"step": 2400
},
{
"epoch": 1.411833626244874,
"grad_norm": 0.050293222069740295,
"learning_rate": 1.4355008787346223e-05,
"loss": 0.0709,
"step": 2410
},
{
"epoch": 1.4176918570591681,
"grad_norm": 0.1496008336544037,
"learning_rate": 1.4331575864089047e-05,
"loss": 0.0617,
"step": 2420
},
{
"epoch": 1.4235500878734622,
"grad_norm": 0.05141230672597885,
"learning_rate": 1.430814294083187e-05,
"loss": 0.1249,
"step": 2430
},
{
"epoch": 1.4294083186877562,
"grad_norm": 0.012737499549984932,
"learning_rate": 1.4284710017574695e-05,
"loss": 0.0873,
"step": 2440
},
{
"epoch": 1.4352665495020505,
"grad_norm": 0.03204461559653282,
"learning_rate": 1.4261277094317519e-05,
"loss": 0.0055,
"step": 2450
},
{
"epoch": 1.4411247803163445,
"grad_norm": 0.01898861490190029,
"learning_rate": 1.423784417106034e-05,
"loss": 0.0796,
"step": 2460
},
{
"epoch": 1.4469830111306385,
"grad_norm": 3.666001796722412,
"learning_rate": 1.4214411247803164e-05,
"loss": 0.1088,
"step": 2470
},
{
"epoch": 1.4528412419449326,
"grad_norm": 3.432924270629883,
"learning_rate": 1.4190978324545988e-05,
"loss": 0.1513,
"step": 2480
},
{
"epoch": 1.4586994727592266,
"grad_norm": 0.04218652471899986,
"learning_rate": 1.4167545401288812e-05,
"loss": 0.0185,
"step": 2490
},
{
"epoch": 1.4645577035735209,
"grad_norm": 0.025178318843245506,
"learning_rate": 1.4144112478031635e-05,
"loss": 0.0038,
"step": 2500
},
{
"epoch": 1.470415934387815,
"grad_norm": 4.099980354309082,
"learning_rate": 1.4120679554774459e-05,
"loss": 0.0935,
"step": 2510
},
{
"epoch": 1.476274165202109,
"grad_norm": 5.700379848480225,
"learning_rate": 1.4097246631517283e-05,
"loss": 0.0138,
"step": 2520
},
{
"epoch": 1.482132396016403,
"grad_norm": 12.581140518188477,
"learning_rate": 1.4073813708260107e-05,
"loss": 0.0832,
"step": 2530
},
{
"epoch": 1.487990626830697,
"grad_norm": 0.24273565411567688,
"learning_rate": 1.4050380785002931e-05,
"loss": 0.0639,
"step": 2540
},
{
"epoch": 1.4938488576449913,
"grad_norm": 0.04139144718647003,
"learning_rate": 1.4026947861745755e-05,
"loss": 0.0265,
"step": 2550
},
{
"epoch": 1.4997070884592854,
"grad_norm": 0.05344259366393089,
"learning_rate": 1.4003514938488578e-05,
"loss": 0.1068,
"step": 2560
},
{
"epoch": 1.5055653192735794,
"grad_norm": 0.015608408488333225,
"learning_rate": 1.3980082015231402e-05,
"loss": 0.2022,
"step": 2570
},
{
"epoch": 1.5114235500878734,
"grad_norm": 0.023171085864305496,
"learning_rate": 1.3956649091974226e-05,
"loss": 0.0281,
"step": 2580
},
{
"epoch": 1.5172817809021675,
"grad_norm": 6.981873512268066,
"learning_rate": 1.3933216168717048e-05,
"loss": 0.0813,
"step": 2590
},
{
"epoch": 1.5231400117164617,
"grad_norm": 0.06565181910991669,
"learning_rate": 1.390978324545987e-05,
"loss": 0.0602,
"step": 2600
},
{
"epoch": 1.5289982425307556,
"grad_norm": 0.20862698554992676,
"learning_rate": 1.3886350322202695e-05,
"loss": 0.0693,
"step": 2610
},
{
"epoch": 1.5348564733450498,
"grad_norm": 0.02823842503130436,
"learning_rate": 1.3862917398945519e-05,
"loss": 0.03,
"step": 2620
},
{
"epoch": 1.5407147041593439,
"grad_norm": 0.7352235913276672,
"learning_rate": 1.3839484475688343e-05,
"loss": 0.0442,
"step": 2630
},
{
"epoch": 1.546572934973638,
"grad_norm": 0.02312728948891163,
"learning_rate": 1.3816051552431167e-05,
"loss": 0.0351,
"step": 2640
},
{
"epoch": 1.5524311657879322,
"grad_norm": 0.07476960122585297,
"learning_rate": 1.3792618629173991e-05,
"loss": 0.0387,
"step": 2650
},
{
"epoch": 1.558289396602226,
"grad_norm": 0.14400868117809296,
"learning_rate": 1.3769185705916814e-05,
"loss": 0.0602,
"step": 2660
},
{
"epoch": 1.5641476274165202,
"grad_norm": 0.03687351942062378,
"learning_rate": 1.3745752782659638e-05,
"loss": 0.0235,
"step": 2670
},
{
"epoch": 1.5700058582308143,
"grad_norm": 0.02022375538945198,
"learning_rate": 1.3722319859402462e-05,
"loss": 0.1263,
"step": 2680
},
{
"epoch": 1.5758640890451083,
"grad_norm": 9.280242919921875,
"learning_rate": 1.3698886936145286e-05,
"loss": 0.0292,
"step": 2690
},
{
"epoch": 1.5817223198594026,
"grad_norm": 0.02085457742214203,
"learning_rate": 1.367545401288811e-05,
"loss": 0.0104,
"step": 2700
},
{
"epoch": 1.5875805506736964,
"grad_norm": 5.943541526794434,
"learning_rate": 1.3652021089630934e-05,
"loss": 0.0689,
"step": 2710
},
{
"epoch": 1.5934387814879907,
"grad_norm": 0.07496699690818787,
"learning_rate": 1.3628588166373755e-05,
"loss": 0.0393,
"step": 2720
},
{
"epoch": 1.5992970123022847,
"grad_norm": 0.05785732343792915,
"learning_rate": 1.3605155243116579e-05,
"loss": 0.0791,
"step": 2730
},
{
"epoch": 1.6051552431165788,
"grad_norm": 0.026315512135624886,
"learning_rate": 1.3581722319859403e-05,
"loss": 0.0734,
"step": 2740
},
{
"epoch": 1.611013473930873,
"grad_norm": 10.757952690124512,
"learning_rate": 1.3558289396602227e-05,
"loss": 0.019,
"step": 2750
},
{
"epoch": 1.6168717047451668,
"grad_norm": 0.1766202300786972,
"learning_rate": 1.353485647334505e-05,
"loss": 0.0712,
"step": 2760
},
{
"epoch": 1.622729935559461,
"grad_norm": 0.023655325174331665,
"learning_rate": 1.3511423550087874e-05,
"loss": 0.031,
"step": 2770
},
{
"epoch": 1.6285881663737551,
"grad_norm": 13.143453598022461,
"learning_rate": 1.3487990626830698e-05,
"loss": 0.0238,
"step": 2780
},
{
"epoch": 1.6344463971880492,
"grad_norm": 0.2318425476551056,
"learning_rate": 1.3464557703573522e-05,
"loss": 0.0077,
"step": 2790
},
{
"epoch": 1.6403046280023434,
"grad_norm": 0.032312728464603424,
"learning_rate": 1.3441124780316346e-05,
"loss": 0.1002,
"step": 2800
},
{
"epoch": 1.6461628588166373,
"grad_norm": 0.021946435794234276,
"learning_rate": 1.341769185705917e-05,
"loss": 0.0765,
"step": 2810
},
{
"epoch": 1.6520210896309315,
"grad_norm": 4.089444637298584,
"learning_rate": 1.3394258933801994e-05,
"loss": 0.0796,
"step": 2820
},
{
"epoch": 1.6578793204452256,
"grad_norm": 6.655864715576172,
"learning_rate": 1.3370826010544817e-05,
"loss": 0.1206,
"step": 2830
},
{
"epoch": 1.6637375512595196,
"grad_norm": 1.4402270317077637,
"learning_rate": 1.3347393087287641e-05,
"loss": 0.0079,
"step": 2840
},
{
"epoch": 1.6695957820738139,
"grad_norm": 0.031677696853876114,
"learning_rate": 1.3323960164030465e-05,
"loss": 0.0024,
"step": 2850
},
{
"epoch": 1.6754540128881077,
"grad_norm": 0.21266481280326843,
"learning_rate": 1.3300527240773287e-05,
"loss": 0.1047,
"step": 2860
},
{
"epoch": 1.681312243702402,
"grad_norm": 0.08722619712352753,
"learning_rate": 1.327709431751611e-05,
"loss": 0.1248,
"step": 2870
},
{
"epoch": 1.687170474516696,
"grad_norm": 13.05366039276123,
"learning_rate": 1.3253661394258934e-05,
"loss": 0.0397,
"step": 2880
},
{
"epoch": 1.69302870533099,
"grad_norm": 0.050381001085042953,
"learning_rate": 1.3230228471001758e-05,
"loss": 0.0436,
"step": 2890
},
{
"epoch": 1.698886936145284,
"grad_norm": 0.19227945804595947,
"learning_rate": 1.3206795547744582e-05,
"loss": 0.1769,
"step": 2900
},
{
"epoch": 1.7047451669595781,
"grad_norm": 8.314407348632812,
"learning_rate": 1.3183362624487406e-05,
"loss": 0.0578,
"step": 2910
},
{
"epoch": 1.7106033977738724,
"grad_norm": 4.961170673370361,
"learning_rate": 1.315992970123023e-05,
"loss": 0.0179,
"step": 2920
},
{
"epoch": 1.7164616285881664,
"grad_norm": 0.01738910935819149,
"learning_rate": 1.3136496777973053e-05,
"loss": 0.2876,
"step": 2930
},
{
"epoch": 1.7223198594024605,
"grad_norm": 0.026022635400295258,
"learning_rate": 1.3113063854715877e-05,
"loss": 0.1407,
"step": 2940
},
{
"epoch": 1.7281780902167545,
"grad_norm": 4.809380054473877,
"learning_rate": 1.3089630931458701e-05,
"loss": 0.1144,
"step": 2950
},
{
"epoch": 1.7340363210310485,
"grad_norm": 12.418693542480469,
"learning_rate": 1.3066198008201525e-05,
"loss": 0.0545,
"step": 2960
},
{
"epoch": 1.7398945518453428,
"grad_norm": 0.03380923718214035,
"learning_rate": 1.304276508494435e-05,
"loss": 0.0892,
"step": 2970
},
{
"epoch": 1.7457527826596366,
"grad_norm": 0.022299950942397118,
"learning_rate": 1.3019332161687173e-05,
"loss": 0.2233,
"step": 2980
},
{
"epoch": 1.751611013473931,
"grad_norm": 0.03865174576640129,
"learning_rate": 1.2995899238429994e-05,
"loss": 0.1028,
"step": 2990
},
{
"epoch": 1.757469244288225,
"grad_norm": 6.527011871337891,
"learning_rate": 1.2972466315172818e-05,
"loss": 0.0616,
"step": 3000
},
{
"epoch": 1.763327475102519,
"grad_norm": 2.869323492050171,
"learning_rate": 1.2949033391915642e-05,
"loss": 0.0057,
"step": 3010
},
{
"epoch": 1.7691857059168132,
"grad_norm": 7.389009475708008,
"learning_rate": 1.2925600468658466e-05,
"loss": 0.0454,
"step": 3020
},
{
"epoch": 1.775043936731107,
"grad_norm": 0.01063600555062294,
"learning_rate": 1.2902167545401289e-05,
"loss": 0.0059,
"step": 3030
},
{
"epoch": 1.7809021675454013,
"grad_norm": 0.018307434394955635,
"learning_rate": 1.2878734622144113e-05,
"loss": 0.0535,
"step": 3040
},
{
"epoch": 1.7867603983596954,
"grad_norm": 7.301026344299316,
"learning_rate": 1.2855301698886937e-05,
"loss": 0.1781,
"step": 3050
},
{
"epoch": 1.7926186291739894,
"grad_norm": 0.01443097461014986,
"learning_rate": 1.2831868775629761e-05,
"loss": 0.0498,
"step": 3060
},
{
"epoch": 1.7984768599882837,
"grad_norm": 0.023425087332725525,
"learning_rate": 1.2808435852372585e-05,
"loss": 0.0193,
"step": 3070
},
{
"epoch": 1.8043350908025775,
"grad_norm": 0.014292699284851551,
"learning_rate": 1.278500292911541e-05,
"loss": 0.1091,
"step": 3080
},
{
"epoch": 1.8101933216168717,
"grad_norm": 10.170490264892578,
"learning_rate": 1.2761570005858232e-05,
"loss": 0.0909,
"step": 3090
},
{
"epoch": 1.8160515524311658,
"grad_norm": 0.008421896025538445,
"learning_rate": 1.2738137082601056e-05,
"loss": 0.0414,
"step": 3100
},
{
"epoch": 1.8219097832454598,
"grad_norm": 3.434213638305664,
"learning_rate": 1.271470415934388e-05,
"loss": 0.1271,
"step": 3110
},
{
"epoch": 1.827768014059754,
"grad_norm": 3.279567241668701,
"learning_rate": 1.2691271236086702e-05,
"loss": 0.194,
"step": 3120
},
{
"epoch": 1.833626244874048,
"grad_norm": 0.016901379451155663,
"learning_rate": 1.2667838312829525e-05,
"loss": 0.0084,
"step": 3130
},
{
"epoch": 1.8394844756883422,
"grad_norm": 0.023041723296046257,
"learning_rate": 1.2644405389572349e-05,
"loss": 0.0146,
"step": 3140
},
{
"epoch": 1.8453427065026362,
"grad_norm": 0.013225620612502098,
"learning_rate": 1.2620972466315173e-05,
"loss": 0.0693,
"step": 3150
},
{
"epoch": 1.8512009373169303,
"grad_norm": 0.3961861729621887,
"learning_rate": 1.2597539543057997e-05,
"loss": 0.1117,
"step": 3160
},
{
"epoch": 1.8570591681312245,
"grad_norm": 3.4199657440185547,
"learning_rate": 1.2574106619800821e-05,
"loss": 0.1389,
"step": 3170
},
{
"epoch": 1.8629173989455183,
"grad_norm": 0.012990470044314861,
"learning_rate": 1.2550673696543645e-05,
"loss": 0.0668,
"step": 3180
},
{
"epoch": 1.8687756297598126,
"grad_norm": 0.05180348455905914,
"learning_rate": 1.2527240773286468e-05,
"loss": 0.0596,
"step": 3190
},
{
"epoch": 1.8746338605741066,
"grad_norm": 0.1156950443983078,
"learning_rate": 1.2503807850029292e-05,
"loss": 0.1105,
"step": 3200
},
{
"epoch": 1.8804920913884007,
"grad_norm": 0.14457456767559052,
"learning_rate": 1.2480374926772116e-05,
"loss": 0.0467,
"step": 3210
},
{
"epoch": 1.886350322202695,
"grad_norm": 0.04957001283764839,
"learning_rate": 1.245694200351494e-05,
"loss": 0.0851,
"step": 3220
},
{
"epoch": 1.8922085530169888,
"grad_norm": 0.01281155925244093,
"learning_rate": 1.2433509080257764e-05,
"loss": 0.0945,
"step": 3230
},
{
"epoch": 1.898066783831283,
"grad_norm": 0.02651827037334442,
"learning_rate": 1.2410076157000588e-05,
"loss": 0.0506,
"step": 3240
},
{
"epoch": 1.903925014645577,
"grad_norm": 0.329218327999115,
"learning_rate": 1.238664323374341e-05,
"loss": 0.0122,
"step": 3250
},
{
"epoch": 1.909783245459871,
"grad_norm": 0.02343546599149704,
"learning_rate": 1.2363210310486233e-05,
"loss": 0.0122,
"step": 3260
},
{
"epoch": 1.9156414762741654,
"grad_norm": 0.37970277667045593,
"learning_rate": 1.2339777387229057e-05,
"loss": 0.0808,
"step": 3270
},
{
"epoch": 1.9214997070884592,
"grad_norm": 0.027905110269784927,
"learning_rate": 1.2316344463971882e-05,
"loss": 0.0453,
"step": 3280
},
{
"epoch": 1.9273579379027534,
"grad_norm": 0.021370578557252884,
"learning_rate": 1.2292911540714706e-05,
"loss": 0.1797,
"step": 3290
},
{
"epoch": 1.9332161687170475,
"grad_norm": 0.02419857680797577,
"learning_rate": 1.2269478617457528e-05,
"loss": 0.1422,
"step": 3300
},
{
"epoch": 1.9390743995313415,
"grad_norm": 0.0072282287292182446,
"learning_rate": 1.2246045694200352e-05,
"loss": 0.0327,
"step": 3310
},
{
"epoch": 1.9449326303456356,
"grad_norm": 0.10726092755794525,
"learning_rate": 1.2222612770943176e-05,
"loss": 0.062,
"step": 3320
},
{
"epoch": 1.9507908611599296,
"grad_norm": 0.08492777496576309,
"learning_rate": 1.2199179847686e-05,
"loss": 0.0247,
"step": 3330
},
{
"epoch": 1.9566490919742239,
"grad_norm": 0.03214435651898384,
"learning_rate": 1.2175746924428825e-05,
"loss": 0.0945,
"step": 3340
},
{
"epoch": 1.962507322788518,
"grad_norm": 0.05654756724834442,
"learning_rate": 1.2152314001171649e-05,
"loss": 0.0385,
"step": 3350
},
{
"epoch": 1.968365553602812,
"grad_norm": 0.08512037247419357,
"learning_rate": 1.2128881077914471e-05,
"loss": 0.0031,
"step": 3360
},
{
"epoch": 1.974223784417106,
"grad_norm": 0.040594056248664856,
"learning_rate": 1.2105448154657295e-05,
"loss": 0.0454,
"step": 3370
},
{
"epoch": 1.9800820152314,
"grad_norm": 1.7671189308166504,
"learning_rate": 1.2082015231400118e-05,
"loss": 0.0803,
"step": 3380
},
{
"epoch": 1.9859402460456943,
"grad_norm": 0.11120552569627762,
"learning_rate": 1.2058582308142942e-05,
"loss": 0.0028,
"step": 3390
},
{
"epoch": 1.9917984768599881,
"grad_norm": 1.060610294342041,
"learning_rate": 1.2035149384885764e-05,
"loss": 0.0918,
"step": 3400
},
{
"epoch": 1.9976567076742824,
"grad_norm": 0.03006490133702755,
"learning_rate": 1.2011716461628588e-05,
"loss": 0.0126,
"step": 3410
}
],
"logging_steps": 10,
"max_steps": 8535,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 3691608167424000.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}