{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.013177159590044,
"eval_steps": 500,
"global_step": 684,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.004392386530014641,
"grad_norm": 2.023870796411983,
"learning_rate": 0.0,
"loss": 1.8021,
"step": 1
},
{
"epoch": 0.008784773060029283,
"grad_norm": 2.0723948799697984,
"learning_rate": 2.0000000000000002e-07,
"loss": 1.8796,
"step": 2
},
{
"epoch": 0.013177159590043924,
"grad_norm": 2.0322569672979474,
"learning_rate": 4.0000000000000003e-07,
"loss": 1.855,
"step": 3
},
{
"epoch": 0.017569546120058566,
"grad_norm": 2.111472782868084,
"learning_rate": 6.000000000000001e-07,
"loss": 1.8545,
"step": 4
},
{
"epoch": 0.021961932650073207,
"grad_norm": 2.096607607233168,
"learning_rate": 8.000000000000001e-07,
"loss": 1.807,
"step": 5
},
{
"epoch": 0.02635431918008785,
"grad_norm": 2.0771855831079518,
"learning_rate": 1.0000000000000002e-06,
"loss": 1.8208,
"step": 6
},
{
"epoch": 0.03074670571010249,
"grad_norm": 2.0341840619969567,
"learning_rate": 1.2000000000000002e-06,
"loss": 1.8754,
"step": 7
},
{
"epoch": 0.03513909224011713,
"grad_norm": 2.0564329237094356,
"learning_rate": 1.4000000000000001e-06,
"loss": 1.8882,
"step": 8
},
{
"epoch": 0.03953147877013177,
"grad_norm": 1.9900074702634307,
"learning_rate": 1.6000000000000001e-06,
"loss": 1.8733,
"step": 9
},
{
"epoch": 0.043923865300146414,
"grad_norm": 1.895599415585885,
"learning_rate": 1.8000000000000001e-06,
"loss": 1.8474,
"step": 10
},
{
"epoch": 0.048316251830161056,
"grad_norm": 1.9304209419460994,
"learning_rate": 2.0000000000000003e-06,
"loss": 1.884,
"step": 11
},
{
"epoch": 0.0527086383601757,
"grad_norm": 1.6511439041472846,
"learning_rate": 2.2e-06,
"loss": 1.8452,
"step": 12
},
{
"epoch": 0.05710102489019034,
"grad_norm": 1.6727753891125912,
"learning_rate": 2.4000000000000003e-06,
"loss": 1.8632,
"step": 13
},
{
"epoch": 0.06149341142020498,
"grad_norm": 1.6571693399048197,
"learning_rate": 2.6e-06,
"loss": 1.8419,
"step": 14
},
{
"epoch": 0.06588579795021962,
"grad_norm": 1.5588712723347087,
"learning_rate": 2.8000000000000003e-06,
"loss": 1.8163,
"step": 15
},
{
"epoch": 0.07027818448023426,
"grad_norm": 1.57706405792705,
"learning_rate": 3e-06,
"loss": 1.7808,
"step": 16
},
{
"epoch": 0.0746705710102489,
"grad_norm": 1.4841447475095337,
"learning_rate": 3.2000000000000003e-06,
"loss": 1.7854,
"step": 17
},
{
"epoch": 0.07906295754026355,
"grad_norm": 1.2785291394155553,
"learning_rate": 3.4000000000000005e-06,
"loss": 1.7531,
"step": 18
},
{
"epoch": 0.08345534407027819,
"grad_norm": 1.1759630521885338,
"learning_rate": 3.6000000000000003e-06,
"loss": 1.8185,
"step": 19
},
{
"epoch": 0.08784773060029283,
"grad_norm": 1.1188613513306425,
"learning_rate": 3.8000000000000005e-06,
"loss": 1.849,
"step": 20
},
{
"epoch": 0.09224011713030747,
"grad_norm": 1.008741263333265,
"learning_rate": 4.000000000000001e-06,
"loss": 1.775,
"step": 21
},
{
"epoch": 0.09663250366032211,
"grad_norm": 0.9240991279618896,
"learning_rate": 4.2000000000000004e-06,
"loss": 1.8253,
"step": 22
},
{
"epoch": 0.10102489019033675,
"grad_norm": 0.9015845652170666,
"learning_rate": 4.4e-06,
"loss": 1.7967,
"step": 23
},
{
"epoch": 0.1054172767203514,
"grad_norm": 0.8007281301128234,
"learning_rate": 4.600000000000001e-06,
"loss": 1.7958,
"step": 24
},
{
"epoch": 0.10980966325036604,
"grad_norm": 0.7263390923205475,
"learning_rate": 4.800000000000001e-06,
"loss": 1.7744,
"step": 25
},
{
"epoch": 0.11420204978038068,
"grad_norm": 0.6527771074756341,
"learning_rate": 5e-06,
"loss": 1.7502,
"step": 26
},
{
"epoch": 0.11859443631039532,
"grad_norm": 0.5943189060332841,
"learning_rate": 5.2e-06,
"loss": 1.7378,
"step": 27
},
{
"epoch": 0.12298682284040996,
"grad_norm": 0.5426800655727773,
"learning_rate": 5.400000000000001e-06,
"loss": 1.7372,
"step": 28
},
{
"epoch": 0.1273792093704246,
"grad_norm": 0.46887045607953654,
"learning_rate": 5.600000000000001e-06,
"loss": 1.7141,
"step": 29
},
{
"epoch": 0.13177159590043924,
"grad_norm": 0.4408809706079413,
"learning_rate": 5.8e-06,
"loss": 1.6977,
"step": 30
},
{
"epoch": 0.13616398243045388,
"grad_norm": 0.4867065604810947,
"learning_rate": 6e-06,
"loss": 1.7379,
"step": 31
},
{
"epoch": 0.14055636896046853,
"grad_norm": 0.40882779662256746,
"learning_rate": 6.200000000000001e-06,
"loss": 1.8064,
"step": 32
},
{
"epoch": 0.14494875549048317,
"grad_norm": 0.4363561706655908,
"learning_rate": 6.4000000000000006e-06,
"loss": 1.6835,
"step": 33
},
{
"epoch": 0.1493411420204978,
"grad_norm": 0.4443411222267533,
"learning_rate": 6.600000000000001e-06,
"loss": 1.7464,
"step": 34
},
{
"epoch": 0.15373352855051245,
"grad_norm": 0.4519702758206936,
"learning_rate": 6.800000000000001e-06,
"loss": 1.6891,
"step": 35
},
{
"epoch": 0.1581259150805271,
"grad_norm": 0.4174999907173025,
"learning_rate": 7e-06,
"loss": 1.7262,
"step": 36
},
{
"epoch": 0.16251830161054173,
"grad_norm": 0.46742969885702906,
"learning_rate": 7.2000000000000005e-06,
"loss": 1.7608,
"step": 37
},
{
"epoch": 0.16691068814055637,
"grad_norm": 0.4155692294314683,
"learning_rate": 7.4e-06,
"loss": 1.7211,
"step": 38
},
{
"epoch": 0.17130307467057102,
"grad_norm": 0.4123169421407387,
"learning_rate": 7.600000000000001e-06,
"loss": 1.6956,
"step": 39
},
{
"epoch": 0.17569546120058566,
"grad_norm": 0.3887606381731466,
"learning_rate": 7.800000000000002e-06,
"loss": 1.7391,
"step": 40
},
{
"epoch": 0.1800878477306003,
"grad_norm": 0.37535703391318914,
"learning_rate": 8.000000000000001e-06,
"loss": 1.751,
"step": 41
},
{
"epoch": 0.18448023426061494,
"grad_norm": 0.48621902751310403,
"learning_rate": 8.2e-06,
"loss": 1.7091,
"step": 42
},
{
"epoch": 0.18887262079062958,
"grad_norm": 0.39145172998871297,
"learning_rate": 8.400000000000001e-06,
"loss": 1.7764,
"step": 43
},
{
"epoch": 0.19326500732064422,
"grad_norm": 0.3735488902443606,
"learning_rate": 8.6e-06,
"loss": 1.7058,
"step": 44
},
{
"epoch": 0.19765739385065886,
"grad_norm": 0.33988576479414434,
"learning_rate": 8.8e-06,
"loss": 1.7305,
"step": 45
},
{
"epoch": 0.2020497803806735,
"grad_norm": 0.9733163465798257,
"learning_rate": 9e-06,
"loss": 1.7567,
"step": 46
},
{
"epoch": 0.20644216691068815,
"grad_norm": 0.3344321176956884,
"learning_rate": 9.200000000000002e-06,
"loss": 1.7277,
"step": 47
},
{
"epoch": 0.2108345534407028,
"grad_norm": 0.3647805237752918,
"learning_rate": 9.4e-06,
"loss": 1.7197,
"step": 48
},
{
"epoch": 0.21522693997071743,
"grad_norm": 0.36967036325253694,
"learning_rate": 9.600000000000001e-06,
"loss": 1.6994,
"step": 49
},
{
"epoch": 0.21961932650073207,
"grad_norm": 0.32386928338357696,
"learning_rate": 9.800000000000001e-06,
"loss": 1.6975,
"step": 50
},
{
"epoch": 0.2240117130307467,
"grad_norm": 0.33940794357965215,
"learning_rate": 1e-05,
"loss": 1.7278,
"step": 51
},
{
"epoch": 0.22840409956076135,
"grad_norm": 0.3396058372359069,
"learning_rate": 9.999966483034437e-06,
"loss": 1.6342,
"step": 52
},
{
"epoch": 0.232796486090776,
"grad_norm": 0.3395876900960321,
"learning_rate": 9.999865932587107e-06,
"loss": 1.6846,
"step": 53
},
{
"epoch": 0.23718887262079064,
"grad_norm": 0.3320830942903407,
"learning_rate": 9.999698350006063e-06,
"loss": 1.6744,
"step": 54
},
{
"epoch": 0.24158125915080528,
"grad_norm": 0.318642304519145,
"learning_rate": 9.999463737538054e-06,
"loss": 1.6633,
"step": 55
},
{
"epoch": 0.24597364568081992,
"grad_norm": 0.3302002388842341,
"learning_rate": 9.999162098328475e-06,
"loss": 1.6994,
"step": 56
},
{
"epoch": 0.25036603221083453,
"grad_norm": 0.30428798634415904,
"learning_rate": 9.998793436421342e-06,
"loss": 1.6728,
"step": 57
},
{
"epoch": 0.2547584187408492,
"grad_norm": 0.30662673120989614,
"learning_rate": 9.998357756759223e-06,
"loss": 1.6283,
"step": 58
},
{
"epoch": 0.2591508052708638,
"grad_norm": 0.34305953470241257,
"learning_rate": 9.997855065183185e-06,
"loss": 1.7269,
"step": 59
},
{
"epoch": 0.2635431918008785,
"grad_norm": 0.29647698247102616,
"learning_rate": 9.997285368432704e-06,
"loss": 1.6108,
"step": 60
},
{
"epoch": 0.2679355783308931,
"grad_norm": 0.30904144706299363,
"learning_rate": 9.996648674145584e-06,
"loss": 1.5925,
"step": 61
},
{
"epoch": 0.27232796486090777,
"grad_norm": 0.28294691041282544,
"learning_rate": 9.995944990857848e-06,
"loss": 1.7004,
"step": 62
},
{
"epoch": 0.2767203513909224,
"grad_norm": 0.4537316624659888,
"learning_rate": 9.99517432800363e-06,
"loss": 1.673,
"step": 63
},
{
"epoch": 0.28111273792093705,
"grad_norm": 0.33334142649617554,
"learning_rate": 9.994336695915041e-06,
"loss": 1.7305,
"step": 64
},
{
"epoch": 0.28550512445095166,
"grad_norm": 0.2906595328556503,
"learning_rate": 9.993432105822034e-06,
"loss": 1.6511,
"step": 65
},
{
"epoch": 0.28989751098096633,
"grad_norm": 0.30937353104293036,
"learning_rate": 9.992460569852256e-06,
"loss": 1.7055,
"step": 66
},
{
"epoch": 0.29428989751098095,
"grad_norm": 0.29714918983678074,
"learning_rate": 9.99142210103088e-06,
"loss": 1.6022,
"step": 67
},
{
"epoch": 0.2986822840409956,
"grad_norm": 0.3077061364082642,
"learning_rate": 9.99031671328044e-06,
"loss": 1.6167,
"step": 68
},
{
"epoch": 0.30307467057101023,
"grad_norm": 0.31797999416909106,
"learning_rate": 9.98914442142063e-06,
"loss": 1.6063,
"step": 69
},
{
"epoch": 0.3074670571010249,
"grad_norm": 0.28544810434107143,
"learning_rate": 9.987905241168117e-06,
"loss": 1.673,
"step": 70
},
{
"epoch": 0.3118594436310395,
"grad_norm": 0.3113516293182134,
"learning_rate": 9.986599189136325e-06,
"loss": 1.6759,
"step": 71
},
{
"epoch": 0.3162518301610542,
"grad_norm": 0.32958924621680397,
"learning_rate": 9.985226282835216e-06,
"loss": 1.6894,
"step": 72
},
{
"epoch": 0.3206442166910688,
"grad_norm": 0.3189944736085633,
"learning_rate": 9.983786540671052e-06,
"loss": 1.7762,
"step": 73
},
{
"epoch": 0.32503660322108346,
"grad_norm": 0.30229970638681786,
"learning_rate": 9.982279981946143e-06,
"loss": 1.709,
"step": 74
},
{
"epoch": 0.3294289897510981,
"grad_norm": 0.308085411523194,
"learning_rate": 9.980706626858607e-06,
"loss": 1.6741,
"step": 75
},
{
"epoch": 0.33382137628111275,
"grad_norm": 0.2851270169881767,
"learning_rate": 9.979066496502075e-06,
"loss": 1.656,
"step": 76
},
{
"epoch": 0.33821376281112736,
"grad_norm": 0.3115977673015286,
"learning_rate": 9.977359612865424e-06,
"loss": 1.6633,
"step": 77
},
{
"epoch": 0.34260614934114203,
"grad_norm": 0.2960787226189042,
"learning_rate": 9.97558599883248e-06,
"loss": 1.673,
"step": 78
},
{
"epoch": 0.34699853587115664,
"grad_norm": 0.35701812130442995,
"learning_rate": 9.973745678181705e-06,
"loss": 1.6103,
"step": 79
},
{
"epoch": 0.3513909224011713,
"grad_norm": 0.367868955812032,
"learning_rate": 9.971838675585888e-06,
"loss": 1.6768,
"step": 80
},
{
"epoch": 0.3557833089311859,
"grad_norm": 0.3295428259298103,
"learning_rate": 9.9698650166118e-06,
"loss": 1.6668,
"step": 81
},
{
"epoch": 0.3601756954612006,
"grad_norm": 0.3242883305567894,
"learning_rate": 9.96782472771987e-06,
"loss": 1.7152,
"step": 82
},
{
"epoch": 0.3645680819912152,
"grad_norm": 0.3070622417895325,
"learning_rate": 9.965717836263813e-06,
"loss": 1.5697,
"step": 83
},
{
"epoch": 0.3689604685212299,
"grad_norm": 0.27753629978682975,
"learning_rate": 9.96354437049027e-06,
"loss": 1.6453,
"step": 84
},
{
"epoch": 0.3733528550512445,
"grad_norm": 0.2723902519890988,
"learning_rate": 9.961304359538437e-06,
"loss": 1.7165,
"step": 85
},
{
"epoch": 0.37774524158125916,
"grad_norm": 0.29061586618994084,
"learning_rate": 9.958997833439658e-06,
"loss": 1.7135,
"step": 86
},
{
"epoch": 0.3821376281112738,
"grad_norm": 0.33085661251847487,
"learning_rate": 9.956624823117036e-06,
"loss": 1.6109,
"step": 87
},
{
"epoch": 0.38653001464128844,
"grad_norm": 0.2931612046141165,
"learning_rate": 9.954185360385013e-06,
"loss": 1.7048,
"step": 88
},
{
"epoch": 0.39092240117130306,
"grad_norm": 0.2900600158100116,
"learning_rate": 9.951679477948946e-06,
"loss": 1.6861,
"step": 89
},
{
"epoch": 0.3953147877013177,
"grad_norm": 0.2775954202644971,
"learning_rate": 9.949107209404664e-06,
"loss": 1.5706,
"step": 90
},
{
"epoch": 0.39970717423133234,
"grad_norm": 0.2733537235090247,
"learning_rate": 9.946468589238022e-06,
"loss": 1.7146,
"step": 91
},
{
"epoch": 0.404099560761347,
"grad_norm": 0.2710139549067813,
"learning_rate": 9.943763652824437e-06,
"loss": 1.6488,
"step": 92
},
{
"epoch": 0.4084919472913616,
"grad_norm": 0.27892641103107996,
"learning_rate": 9.94099243642841e-06,
"loss": 1.6002,
"step": 93
},
{
"epoch": 0.4128843338213763,
"grad_norm": 0.4570340740318857,
"learning_rate": 9.93815497720305e-06,
"loss": 1.6686,
"step": 94
},
{
"epoch": 0.4172767203513909,
"grad_norm": 0.2816582371174757,
"learning_rate": 9.935251313189564e-06,
"loss": 1.7531,
"step": 95
},
{
"epoch": 0.4216691068814056,
"grad_norm": 0.2886345479986427,
"learning_rate": 9.932281483316759e-06,
"loss": 1.744,
"step": 96
},
{
"epoch": 0.4260614934114202,
"grad_norm": 0.3178449956639589,
"learning_rate": 9.929245527400504e-06,
"loss": 1.6404,
"step": 97
},
{
"epoch": 0.43045387994143486,
"grad_norm": 0.31466032997182714,
"learning_rate": 9.926143486143216e-06,
"loss": 1.6583,
"step": 98
},
{
"epoch": 0.43484626647144947,
"grad_norm": 0.3406321749653024,
"learning_rate": 9.922975401133292e-06,
"loss": 1.7313,
"step": 99
},
{
"epoch": 0.43923865300146414,
"grad_norm": 0.33050248521276526,
"learning_rate": 9.919741314844577e-06,
"loss": 1.6518,
"step": 100
},
{
"epoch": 0.44363103953147875,
"grad_norm": 0.2886134577231967,
"learning_rate": 9.916441270635772e-06,
"loss": 1.6957,
"step": 101
},
{
"epoch": 0.4480234260614934,
"grad_norm": 0.27520985458354463,
"learning_rate": 9.913075312749867e-06,
"loss": 1.6052,
"step": 102
},
{
"epoch": 0.45241581259150804,
"grad_norm": 0.2649316531653303,
"learning_rate": 9.909643486313533e-06,
"loss": 1.6739,
"step": 103
},
{
"epoch": 0.4568081991215227,
"grad_norm": 0.27557834576379425,
"learning_rate": 9.90614583733654e-06,
"loss": 1.6584,
"step": 104
},
{
"epoch": 0.4612005856515373,
"grad_norm": 0.3243046224133678,
"learning_rate": 9.90258241271112e-06,
"loss": 1.6491,
"step": 105
},
{
"epoch": 0.465592972181552,
"grad_norm": 0.2601090357711318,
"learning_rate": 9.89895326021134e-06,
"loss": 1.6662,
"step": 106
},
{
"epoch": 0.4699853587115666,
"grad_norm": 0.2842151241183655,
"learning_rate": 9.895258428492475e-06,
"loss": 1.6501,
"step": 107
},
{
"epoch": 0.4743777452415813,
"grad_norm": 0.2594313373745908,
"learning_rate": 9.891497967090344e-06,
"loss": 1.6475,
"step": 108
},
{
"epoch": 0.4787701317715959,
"grad_norm": 0.2844945420500575,
"learning_rate": 9.887671926420649e-06,
"loss": 1.7204,
"step": 109
},
{
"epoch": 0.48316251830161056,
"grad_norm": 0.27655451441999607,
"learning_rate": 9.8837803577783e-06,
"loss": 1.6838,
"step": 110
},
{
"epoch": 0.48755490483162517,
"grad_norm": 0.3017150137441088,
"learning_rate": 9.879823313336723e-06,
"loss": 1.6688,
"step": 111
},
{
"epoch": 0.49194729136163984,
"grad_norm": 0.26575250791745125,
"learning_rate": 9.87580084614717e-06,
"loss": 1.6753,
"step": 112
},
{
"epoch": 0.49633967789165445,
"grad_norm": 0.2581989783149807,
"learning_rate": 9.871713010137998e-06,
"loss": 1.6244,
"step": 113
},
{
"epoch": 0.5007320644216691,
"grad_norm": 0.2622294231157268,
"learning_rate": 9.86755986011395e-06,
"loss": 1.6261,
"step": 114
},
{
"epoch": 0.5051244509516838,
"grad_norm": 0.26209513370076304,
"learning_rate": 9.86334145175542e-06,
"loss": 1.6397,
"step": 115
},
{
"epoch": 0.5095168374816984,
"grad_norm": 0.2846184522412747,
"learning_rate": 9.859057841617709e-06,
"loss": 1.7091,
"step": 116
},
{
"epoch": 0.513909224011713,
"grad_norm": 0.2528850826310284,
"learning_rate": 9.854709087130261e-06,
"loss": 1.6831,
"step": 117
},
{
"epoch": 0.5183016105417276,
"grad_norm": 0.32698350420324157,
"learning_rate": 9.850295246595898e-06,
"loss": 1.6431,
"step": 118
},
{
"epoch": 0.5226939970717424,
"grad_norm": 0.35727071053972576,
"learning_rate": 9.845816379190037e-06,
"loss": 1.7088,
"step": 119
},
{
"epoch": 0.527086383601757,
"grad_norm": 0.28971742333278044,
"learning_rate": 9.841272544959892e-06,
"loss": 1.7195,
"step": 120
},
{
"epoch": 0.5314787701317716,
"grad_norm": 0.468869869211724,
"learning_rate": 9.836663804823683e-06,
"loss": 1.6896,
"step": 121
},
{
"epoch": 0.5358711566617862,
"grad_norm": 0.3908944943187778,
"learning_rate": 9.831990220569801e-06,
"loss": 1.7406,
"step": 122
},
{
"epoch": 0.5402635431918009,
"grad_norm": 0.2665924461613152,
"learning_rate": 9.827251854855992e-06,
"loss": 1.6609,
"step": 123
},
{
"epoch": 0.5446559297218155,
"grad_norm": 0.28211372742262864,
"learning_rate": 9.82244877120851e-06,
"loss": 1.6684,
"step": 124
},
{
"epoch": 0.5490483162518301,
"grad_norm": 0.2623738132657017,
"learning_rate": 9.817581034021273e-06,
"loss": 1.6923,
"step": 125
},
{
"epoch": 0.5534407027818448,
"grad_norm": 0.35357676308356323,
"learning_rate": 9.812648708554991e-06,
"loss": 1.6467,
"step": 126
},
{
"epoch": 0.5578330893118595,
"grad_norm": 0.2555639734647961,
"learning_rate": 9.807651860936299e-06,
"loss": 1.6893,
"step": 127
},
{
"epoch": 0.5622254758418741,
"grad_norm": 0.26746261958597517,
"learning_rate": 9.802590558156863e-06,
"loss": 1.6754,
"step": 128
},
{
"epoch": 0.5666178623718887,
"grad_norm": 0.26383031628885084,
"learning_rate": 9.797464868072489e-06,
"loss": 1.6363,
"step": 129
},
{
"epoch": 0.5710102489019033,
"grad_norm": 0.319619389966322,
"learning_rate": 9.792274859402205e-06,
"loss": 1.636,
"step": 130
},
{
"epoch": 0.575402635431918,
"grad_norm": 0.2639308579990406,
"learning_rate": 9.787020601727353e-06,
"loss": 1.6079,
"step": 131
},
{
"epoch": 0.5797950219619327,
"grad_norm": 0.28226380411742236,
"learning_rate": 9.781702165490638e-06,
"loss": 1.657,
"step": 132
},
{
"epoch": 0.5841874084919473,
"grad_norm": 0.27157003543418035,
"learning_rate": 9.7763196219952e-06,
"loss": 1.7009,
"step": 133
},
{
"epoch": 0.5885797950219619,
"grad_norm": 0.28487209604498265,
"learning_rate": 9.770873043403648e-06,
"loss": 1.6236,
"step": 134
},
{
"epoch": 0.5929721815519766,
"grad_norm": 0.3009729998770739,
"learning_rate": 9.765362502737098e-06,
"loss": 1.6812,
"step": 135
},
{
"epoch": 0.5973645680819912,
"grad_norm": 0.2576811873616972,
"learning_rate": 9.759788073874188e-06,
"loss": 1.6885,
"step": 136
},
{
"epoch": 0.6017569546120058,
"grad_norm": 0.3108368566509119,
"learning_rate": 9.754149831550097e-06,
"loss": 1.6086,
"step": 137
},
{
"epoch": 0.6061493411420205,
"grad_norm": 0.3336050806293322,
"learning_rate": 9.748447851355534e-06,
"loss": 1.6773,
"step": 138
},
{
"epoch": 0.6105417276720352,
"grad_norm": 0.28918467659013913,
"learning_rate": 9.742682209735727e-06,
"loss": 1.7141,
"step": 139
},
{
"epoch": 0.6149341142020498,
"grad_norm": 0.25507889874035206,
"learning_rate": 9.736852983989405e-06,
"loss": 1.6335,
"step": 140
},
{
"epoch": 0.6193265007320644,
"grad_norm": 0.2712795343500641,
"learning_rate": 9.730960252267744e-06,
"loss": 1.6186,
"step": 141
},
{
"epoch": 0.623718887262079,
"grad_norm": 0.29326296437100124,
"learning_rate": 9.725004093573343e-06,
"loss": 1.6039,
"step": 142
},
{
"epoch": 0.6281112737920937,
"grad_norm": 0.27412689936526563,
"learning_rate": 9.718984587759148e-06,
"loss": 1.594,
"step": 143
},
{
"epoch": 0.6325036603221084,
"grad_norm": 0.28908560688778157,
"learning_rate": 9.712901815527387e-06,
"loss": 1.6458,
"step": 144
},
{
"epoch": 0.636896046852123,
"grad_norm": 0.26538330998431864,
"learning_rate": 9.706755858428487e-06,
"loss": 1.6546,
"step": 145
},
{
"epoch": 0.6412884333821376,
"grad_norm": 0.26498403141947585,
"learning_rate": 9.70054679885998e-06,
"loss": 1.6224,
"step": 146
},
{
"epoch": 0.6456808199121523,
"grad_norm": 0.25028323667878993,
"learning_rate": 9.6942747200654e-06,
"loss": 1.6554,
"step": 147
},
{
"epoch": 0.6500732064421669,
"grad_norm": 0.3077620507501807,
"learning_rate": 9.687939706133168e-06,
"loss": 1.6751,
"step": 148
},
{
"epoch": 0.6544655929721815,
"grad_norm": 0.2738251492188122,
"learning_rate": 9.68154184199546e-06,
"loss": 1.6139,
"step": 149
},
{
"epoch": 0.6588579795021962,
"grad_norm": 0.300766059001253,
"learning_rate": 9.675081213427076e-06,
"loss": 1.6783,
"step": 150
},
{
"epoch": 0.6632503660322109,
"grad_norm": 0.2671963968809055,
"learning_rate": 9.668557907044278e-06,
"loss": 1.7028,
"step": 151
},
{
"epoch": 0.6676427525622255,
"grad_norm": 0.27596191143138304,
"learning_rate": 9.661972010303641e-06,
"loss": 1.6279,
"step": 152
},
{
"epoch": 0.6720351390922401,
"grad_norm": 0.2719766311854951,
"learning_rate": 9.655323611500876e-06,
"loss": 1.6634,
"step": 153
},
{
"epoch": 0.6764275256222547,
"grad_norm": 0.2697612116557389,
"learning_rate": 9.648612799769644e-06,
"loss": 1.617,
"step": 154
},
{
"epoch": 0.6808199121522694,
"grad_norm": 0.28137116472179047,
"learning_rate": 9.641839665080363e-06,
"loss": 1.6212,
"step": 155
},
{
"epoch": 0.6852122986822841,
"grad_norm": 0.2599526010890989,
"learning_rate": 9.635004298239004e-06,
"loss": 1.6542,
"step": 156
},
{
"epoch": 0.6896046852122987,
"grad_norm": 0.662388825119619,
"learning_rate": 9.628106790885866e-06,
"loss": 1.633,
"step": 157
},
{
"epoch": 0.6939970717423133,
"grad_norm": 0.2913416852335614,
"learning_rate": 9.621147235494357e-06,
"loss": 1.6904,
"step": 158
},
{
"epoch": 0.698389458272328,
"grad_norm": 0.26581120890113497,
"learning_rate": 9.614125725369748e-06,
"loss": 1.6583,
"step": 159
},
{
"epoch": 0.7027818448023426,
"grad_norm": 0.26696083510238133,
"learning_rate": 9.607042354647924e-06,
"loss": 1.6354,
"step": 160
},
{
"epoch": 0.7071742313323572,
"grad_norm": 0.2913753113559095,
"learning_rate": 9.599897218294122e-06,
"loss": 1.5914,
"step": 161
},
{
"epoch": 0.7115666178623719,
"grad_norm": 0.2794317973029827,
"learning_rate": 9.59269041210166e-06,
"loss": 1.5732,
"step": 162
},
{
"epoch": 0.7159590043923866,
"grad_norm": 0.27364398897711945,
"learning_rate": 9.585422032690644e-06,
"loss": 1.6796,
"step": 163
},
{
"epoch": 0.7203513909224012,
"grad_norm": 0.2924137561125512,
"learning_rate": 9.578092177506683e-06,
"loss": 1.6921,
"step": 164
},
{
"epoch": 0.7247437774524158,
"grad_norm": 0.27864643658701665,
"learning_rate": 9.570700944819584e-06,
"loss": 1.6442,
"step": 165
},
{
"epoch": 0.7291361639824304,
"grad_norm": 0.3172135018133218,
"learning_rate": 9.56324843372202e-06,
"loss": 1.7003,
"step": 166
},
{
"epoch": 0.7335285505124451,
"grad_norm": 0.33119379016577744,
"learning_rate": 9.55573474412821e-06,
"loss": 1.6277,
"step": 167
},
{
"epoch": 0.7379209370424598,
"grad_norm": 0.24315818847358012,
"learning_rate": 9.548159976772593e-06,
"loss": 1.621,
"step": 168
},
{
"epoch": 0.7423133235724744,
"grad_norm": 0.28066914391093295,
"learning_rate": 9.540524233208449e-06,
"loss": 1.6169,
"step": 169
},
{
"epoch": 0.746705710102489,
"grad_norm": 0.3096372829142638,
"learning_rate": 9.532827615806561e-06,
"loss": 1.6425,
"step": 170
},
{
"epoch": 0.7510980966325037,
"grad_norm": 0.3050362838238551,
"learning_rate": 9.525070227753835e-06,
"loss": 1.5835,
"step": 171
},
{
"epoch": 0.7554904831625183,
"grad_norm": 0.27558198897100084,
"learning_rate": 9.517252173051912e-06,
"loss": 1.6866,
"step": 172
},
{
"epoch": 0.7598828696925329,
"grad_norm": 0.3415215102331636,
"learning_rate": 9.509373556515781e-06,
"loss": 1.6954,
"step": 173
},
{
"epoch": 0.7642752562225475,
"grad_norm": 0.2833145266859935,
"learning_rate": 9.501434483772371e-06,
"loss": 1.5642,
"step": 174
},
{
"epoch": 0.7686676427525623,
"grad_norm": 0.261080979929468,
"learning_rate": 9.49343506125913e-06,
"loss": 1.6481,
"step": 175
},
{
"epoch": 0.7730600292825769,
"grad_norm": 0.27528584249968596,
"learning_rate": 9.48537539622261e-06,
"loss": 1.6356,
"step": 176
},
{
"epoch": 0.7774524158125915,
"grad_norm": 0.28493642680744213,
"learning_rate": 9.477255596717012e-06,
"loss": 1.617,
"step": 177
},
{
"epoch": 0.7818448023426061,
"grad_norm": 0.250792323656803,
"learning_rate": 9.469075771602756e-06,
"loss": 1.7373,
"step": 178
},
{
"epoch": 0.7862371888726208,
"grad_norm": 0.2815707976300464,
"learning_rate": 9.460836030545007e-06,
"loss": 1.7134,
"step": 179
},
{
"epoch": 0.7906295754026355,
"grad_norm": 0.33965220402585167,
"learning_rate": 9.452536484012212e-06,
"loss": 1.66,
"step": 180
},
{
"epoch": 0.7950219619326501,
"grad_norm": 0.337075115730111,
"learning_rate": 9.444177243274619e-06,
"loss": 1.7237,
"step": 181
},
{
"epoch": 0.7994143484626647,
"grad_norm": 0.2652773355794464,
"learning_rate": 9.43575842040278e-06,
"loss": 1.7238,
"step": 182
},
{
"epoch": 0.8038067349926794,
"grad_norm": 0.29569868187991866,
"learning_rate": 9.427280128266049e-06,
"loss": 1.6484,
"step": 183
},
{
"epoch": 0.808199121522694,
"grad_norm": 0.2647366968721245,
"learning_rate": 9.418742480531086e-06,
"loss": 1.6557,
"step": 184
},
{
"epoch": 0.8125915080527086,
"grad_norm": 0.2624692157687208,
"learning_rate": 9.410145591660301e-06,
"loss": 1.6501,
"step": 185
},
{
"epoch": 0.8169838945827232,
"grad_norm": 0.2604406630602592,
"learning_rate": 9.40148957691035e-06,
"loss": 1.6205,
"step": 186
},
{
"epoch": 0.821376281112738,
"grad_norm": 0.3809448031526879,
"learning_rate": 9.392774552330567e-06,
"loss": 1.6014,
"step": 187
},
{
"epoch": 0.8257686676427526,
"grad_norm": 0.285104899301775,
"learning_rate": 9.384000634761429e-06,
"loss": 1.7119,
"step": 188
},
{
"epoch": 0.8301610541727672,
"grad_norm": 0.3603217046093286,
"learning_rate": 9.375167941832974e-06,
"loss": 1.6129,
"step": 189
},
{
"epoch": 0.8345534407027818,
"grad_norm": 0.26192371812461096,
"learning_rate": 9.366276591963222e-06,
"loss": 1.6648,
"step": 190
},
{
"epoch": 0.8389458272327965,
"grad_norm": 0.27224376529649075,
"learning_rate": 9.357326704356602e-06,
"loss": 1.7368,
"step": 191
},
{
"epoch": 0.8433382137628112,
"grad_norm": 0.267560205737413,
"learning_rate": 9.348318399002347e-06,
"loss": 1.6456,
"step": 192
},
{
"epoch": 0.8477306002928258,
"grad_norm": 0.38772390686795116,
"learning_rate": 9.339251796672878e-06,
"loss": 1.6765,
"step": 193
},
{
"epoch": 0.8521229868228404,
"grad_norm": 0.28624234868841525,
"learning_rate": 9.330127018922195e-06,
"loss": 1.6397,
"step": 194
},
{
"epoch": 0.8565153733528551,
"grad_norm": 0.3517826456437289,
"learning_rate": 9.320944188084241e-06,
"loss": 1.6504,
"step": 195
},
{
"epoch": 0.8609077598828697,
"grad_norm": 0.3054896957644684,
"learning_rate": 9.31170342727127e-06,
"loss": 1.6581,
"step": 196
},
{
"epoch": 0.8653001464128843,
"grad_norm": 0.47217080493290625,
"learning_rate": 9.302404860372185e-06,
"loss": 1.6614,
"step": 197
},
{
"epoch": 0.8696925329428989,
"grad_norm": 0.3744290659075852,
"learning_rate": 9.293048612050883e-06,
"loss": 1.6181,
"step": 198
},
{
"epoch": 0.8740849194729137,
"grad_norm": 0.2928672407192581,
"learning_rate": 9.283634807744586e-06,
"loss": 1.5801,
"step": 199
},
{
"epoch": 0.8784773060029283,
"grad_norm": 0.5500989172351091,
"learning_rate": 9.274163573662158e-06,
"loss": 1.6213,
"step": 200
},
{
"epoch": 0.8828696925329429,
"grad_norm": 0.27844961793523915,
"learning_rate": 9.264635036782406e-06,
"loss": 1.6442,
"step": 201
},
{
"epoch": 0.8872620790629575,
"grad_norm": 0.2764381586339949,
"learning_rate": 9.255049324852388e-06,
"loss": 1.6403,
"step": 202
},
{
"epoch": 0.8916544655929722,
"grad_norm": 0.28692831842946026,
"learning_rate": 9.245406566385698e-06,
"loss": 1.6204,
"step": 203
},
{
"epoch": 0.8960468521229868,
"grad_norm": 0.2854958274623322,
"learning_rate": 9.235706890660735e-06,
"loss": 1.6777,
"step": 204
},
{
"epoch": 0.9004392386530015,
"grad_norm": 0.2884758073862357,
"learning_rate": 9.225950427718974e-06,
"loss": 1.6414,
"step": 205
},
{
"epoch": 0.9048316251830161,
"grad_norm": 0.2774591809427237,
"learning_rate": 9.216137308363235e-06,
"loss": 1.667,
"step": 206
},
{
"epoch": 0.9092240117130308,
"grad_norm": 0.26327408771476135,
"learning_rate": 9.206267664155906e-06,
"loss": 1.6472,
"step": 207
},
{
"epoch": 0.9136163982430454,
"grad_norm": 0.3163508666559799,
"learning_rate": 9.1963416274172e-06,
"loss": 1.6898,
"step": 208
},
{
"epoch": 0.91800878477306,
"grad_norm": 0.2691312569989342,
"learning_rate": 9.18635933122337e-06,
"loss": 1.6788,
"step": 209
},
{
"epoch": 0.9224011713030746,
"grad_norm": 0.329963898141559,
"learning_rate": 9.176320909404925e-06,
"loss": 1.7077,
"step": 210
},
{
"epoch": 0.9267935578330894,
"grad_norm": 0.3497378648580076,
"learning_rate": 9.16622649654484e-06,
"loss": 1.7203,
"step": 211
},
{
"epoch": 0.931185944363104,
"grad_norm": 0.2750435891659104,
"learning_rate": 9.156076227976752e-06,
"loss": 1.6372,
"step": 212
},
{
"epoch": 0.9355783308931186,
"grad_norm": 0.29344217135995226,
"learning_rate": 9.145870239783143e-06,
"loss": 1.6294,
"step": 213
},
{
"epoch": 0.9399707174231332,
"grad_norm": 0.2713649627402965,
"learning_rate": 9.135608668793511e-06,
"loss": 1.6391,
"step": 214
},
{
"epoch": 0.9443631039531479,
"grad_norm": 0.2995344678229402,
"learning_rate": 9.125291652582549e-06,
"loss": 1.6102,
"step": 215
},
{
"epoch": 0.9487554904831625,
"grad_norm": 0.32763546183738235,
"learning_rate": 9.114919329468283e-06,
"loss": 1.6823,
"step": 216
},
{
"epoch": 0.9531478770131772,
"grad_norm": 0.29620265372375704,
"learning_rate": 9.104491838510235e-06,
"loss": 1.6905,
"step": 217
},
{
"epoch": 0.9575402635431918,
"grad_norm": 0.43487598587006066,
"learning_rate": 9.094009319507547e-06,
"loss": 1.6611,
"step": 218
},
{
"epoch": 0.9619326500732065,
"grad_norm": 0.32989859860270077,
"learning_rate": 9.08347191299711e-06,
"loss": 1.634,
"step": 219
},
{
"epoch": 0.9663250366032211,
"grad_norm": 0.26622257061624277,
"learning_rate": 9.07287976025168e-06,
"loss": 1.6075,
"step": 220
},
{
"epoch": 0.9707174231332357,
"grad_norm": 0.28626358026286186,
"learning_rate": 9.062233003277983e-06,
"loss": 1.6045,
"step": 221
},
{
"epoch": 0.9751098096632503,
"grad_norm": 0.5424158794876999,
"learning_rate": 9.051531784814817e-06,
"loss": 1.5825,
"step": 222
},
{
"epoch": 0.9795021961932651,
"grad_norm": 0.2771987597060821,
"learning_rate": 9.040776248331128e-06,
"loss": 1.6083,
"step": 223
},
{
"epoch": 0.9838945827232797,
"grad_norm": 1.5311321947246777,
"learning_rate": 9.029966538024097e-06,
"loss": 1.5973,
"step": 224
},
{
"epoch": 0.9882869692532943,
"grad_norm": 0.3081246423484545,
"learning_rate": 9.019102798817196e-06,
"loss": 1.5915,
"step": 225
},
{
"epoch": 0.9926793557833089,
"grad_norm": 0.3066770476121554,
"learning_rate": 9.008185176358257e-06,
"loss": 1.5551,
"step": 226
},
{
"epoch": 0.9970717423133236,
"grad_norm": 0.29888292004137484,
"learning_rate": 8.997213817017508e-06,
"loss": 1.7461,
"step": 227
},
{
"epoch": 1.0043923865300146,
"grad_norm": 0.5938110979809961,
"learning_rate": 8.986188867885617e-06,
"loss": 3.2509,
"step": 228
},
{
"epoch": 1.0087847730600292,
"grad_norm": 0.2899326916339286,
"learning_rate": 8.975110476771724e-06,
"loss": 1.5874,
"step": 229
},
{
"epoch": 1.0131771595900438,
"grad_norm": 0.2777095345419943,
"learning_rate": 8.96397879220145e-06,
"loss": 1.6706,
"step": 230
},
{
"epoch": 1.0175695461200585,
"grad_norm": 0.26363195429841585,
"learning_rate": 8.952793963414908e-06,
"loss": 1.6317,
"step": 231
},
{
"epoch": 1.0219619326500733,
"grad_norm": 0.2688971491502818,
"learning_rate": 8.941556140364707e-06,
"loss": 1.654,
"step": 232
},
{
"epoch": 1.026354319180088,
"grad_norm": 0.28487309366468816,
"learning_rate": 8.930265473713939e-06,
"loss": 1.6133,
"step": 233
},
{
"epoch": 1.0307467057101025,
"grad_norm": 0.28141744313455036,
"learning_rate": 8.918922114834156e-06,
"loss": 1.589,
"step": 234
},
{
"epoch": 1.0351390922401171,
"grad_norm": 0.40308880749849646,
"learning_rate": 8.907526215803351e-06,
"loss": 1.6499,
"step": 235
},
{
"epoch": 1.0395314787701317,
"grad_norm": 0.2834055854316979,
"learning_rate": 8.896077929403901e-06,
"loss": 1.658,
"step": 236
},
{
"epoch": 1.0439238653001464,
"grad_norm": 0.2887035123274668,
"learning_rate": 8.884577409120535e-06,
"loss": 1.6196,
"step": 237
},
{
"epoch": 1.048316251830161,
"grad_norm": 0.2985213212589398,
"learning_rate": 8.873024809138272e-06,
"loss": 1.677,
"step": 238
},
{
"epoch": 1.0527086383601758,
"grad_norm": 0.2863532205939482,
"learning_rate": 8.861420284340352e-06,
"loss": 1.6682,
"step": 239
},
{
"epoch": 1.0571010248901904,
"grad_norm": 0.29235163398244823,
"learning_rate": 8.849763990306153e-06,
"loss": 1.6009,
"step": 240
},
{
"epoch": 1.061493411420205,
"grad_norm": 0.2899336107020558,
"learning_rate": 8.838056083309118e-06,
"loss": 1.6319,
"step": 241
},
{
"epoch": 1.0658857979502196,
"grad_norm": 0.3186837918366928,
"learning_rate": 8.826296720314658e-06,
"loss": 1.5875,
"step": 242
},
{
"epoch": 1.0702781844802343,
"grad_norm": 0.33938164798671433,
"learning_rate": 8.814486058978035e-06,
"loss": 1.6059,
"step": 243
},
{
"epoch": 1.0746705710102489,
"grad_norm": 0.2991115627129074,
"learning_rate": 8.802624257642262e-06,
"loss": 1.6599,
"step": 244
},
{
"epoch": 1.0790629575402635,
"grad_norm": 0.28608565865487656,
"learning_rate": 8.790711475335971e-06,
"loss": 1.6364,
"step": 245
},
{
"epoch": 1.083455344070278,
"grad_norm": 0.28359019806003394,
"learning_rate": 8.778747871771293e-06,
"loss": 1.6584,
"step": 246
},
{
"epoch": 1.0878477306002927,
"grad_norm": 0.34292617248780644,
"learning_rate": 8.766733607341697e-06,
"loss": 1.6613,
"step": 247
},
{
"epoch": 1.0922401171303076,
"grad_norm": 0.3469366526291968,
"learning_rate": 8.754668843119865e-06,
"loss": 1.6542,
"step": 248
},
{
"epoch": 1.0966325036603222,
"grad_norm": 0.2745752678931537,
"learning_rate": 8.742553740855507e-06,
"loss": 1.6723,
"step": 249
},
{
"epoch": 1.1010248901903368,
"grad_norm": 0.3863733971251328,
"learning_rate": 8.730388462973209e-06,
"loss": 1.6268,
"step": 250
},
{
"epoch": 1.1054172767203514,
"grad_norm": 0.3196278576328997,
"learning_rate": 8.718173172570254e-06,
"loss": 1.6349,
"step": 251
},
{
"epoch": 1.109809663250366,
"grad_norm": 0.27017487934331,
"learning_rate": 8.705908033414426e-06,
"loss": 1.606,
"step": 252
},
{
"epoch": 1.1142020497803806,
"grad_norm": 0.6576768607622604,
"learning_rate": 8.693593209941826e-06,
"loss": 1.5936,
"step": 253
},
{
"epoch": 1.1185944363103952,
"grad_norm": 0.30260640047372234,
"learning_rate": 8.681228867254657e-06,
"loss": 1.5948,
"step": 254
},
{
"epoch": 1.12298682284041,
"grad_norm": 0.3102239482584757,
"learning_rate": 8.66881517111902e-06,
"loss": 1.6928,
"step": 255
},
{
"epoch": 1.1273792093704247,
"grad_norm": 0.2818931187575453,
"learning_rate": 8.656352287962687e-06,
"loss": 1.687,
"step": 256
},
{
"epoch": 1.1317715959004393,
"grad_norm": 0.30007928577375903,
"learning_rate": 8.643840384872865e-06,
"loss": 1.6628,
"step": 257
},
{
"epoch": 1.136163982430454,
"grad_norm": 0.3249258384227434,
"learning_rate": 8.631279629593968e-06,
"loss": 1.6123,
"step": 258
},
{
"epoch": 1.1405563689604685,
"grad_norm": 0.2871446068255736,
"learning_rate": 8.61867019052535e-06,
"loss": 1.6608,
"step": 259
},
{
"epoch": 1.1449487554904831,
"grad_norm": 0.3001183609451796,
"learning_rate": 8.606012236719073e-06,
"loss": 1.6405,
"step": 260
},
{
"epoch": 1.1493411420204978,
"grad_norm": 0.271155672303218,
"learning_rate": 8.593305937877614e-06,
"loss": 1.5819,
"step": 261
},
{
"epoch": 1.1537335285505124,
"grad_norm": 0.30999460151747915,
"learning_rate": 8.580551464351603e-06,
"loss": 1.6378,
"step": 262
},
{
"epoch": 1.158125915080527,
"grad_norm": 0.2979330701414515,
"learning_rate": 8.567748987137544e-06,
"loss": 1.7342,
"step": 263
},
{
"epoch": 1.1625183016105418,
"grad_norm": 0.29037637226651547,
"learning_rate": 8.554898677875509e-06,
"loss": 1.663,
"step": 264
},
{
"epoch": 1.1669106881405564,
"grad_norm": 0.29574080692093535,
"learning_rate": 8.542000708846851e-06,
"loss": 1.6937,
"step": 265
},
{
"epoch": 1.171303074670571,
"grad_norm": 0.2867463319690563,
"learning_rate": 8.52905525297188e-06,
"loss": 1.5812,
"step": 266
},
{
"epoch": 1.1756954612005857,
"grad_norm": 0.3075848284440638,
"learning_rate": 8.516062483807556e-06,
"loss": 1.6421,
"step": 267
},
{
"epoch": 1.1800878477306003,
"grad_norm": 0.2745442718449274,
"learning_rate": 8.503022575545159e-06,
"loss": 1.6592,
"step": 268
},
{
"epoch": 1.1844802342606149,
"grad_norm": 0.4008583979349149,
"learning_rate": 8.48993570300795e-06,
"loss": 1.6052,
"step": 269
},
{
"epoch": 1.1888726207906295,
"grad_norm": 0.5183284694523707,
"learning_rate": 8.476802041648832e-06,
"loss": 1.7352,
"step": 270
},
{
"epoch": 1.1932650073206443,
"grad_norm": 0.30560643793660064,
"learning_rate": 8.463621767547998e-06,
"loss": 1.5592,
"step": 271
},
{
"epoch": 1.197657393850659,
"grad_norm": 0.5272799941494408,
"learning_rate": 8.450395057410561e-06,
"loss": 1.5471,
"step": 272
},
{
"epoch": 1.2020497803806736,
"grad_norm": 0.4281815470371588,
"learning_rate": 8.437122088564197e-06,
"loss": 1.6445,
"step": 273
},
{
"epoch": 1.2064421669106882,
"grad_norm": 0.27224728594527714,
"learning_rate": 8.423803038956763e-06,
"loss": 1.6629,
"step": 274
},
{
"epoch": 1.2108345534407028,
"grad_norm": 0.3269535080116159,
"learning_rate": 8.41043808715391e-06,
"loss": 1.5722,
"step": 275
},
{
"epoch": 1.2152269399707174,
"grad_norm": 0.3054371244574999,
"learning_rate": 8.39702741233669e-06,
"loss": 1.6173,
"step": 276
},
{
"epoch": 1.219619326500732,
"grad_norm": 0.36010851057708126,
"learning_rate": 8.383571194299154e-06,
"loss": 1.596,
"step": 277
},
{
"epoch": 1.2240117130307466,
"grad_norm": 0.3803533141127172,
"learning_rate": 8.37006961344594e-06,
"loss": 1.6229,
"step": 278
},
{
"epoch": 1.2284040995607612,
"grad_norm": 0.28612951080269505,
"learning_rate": 8.356522850789852e-06,
"loss": 1.5549,
"step": 279
},
{
"epoch": 1.232796486090776,
"grad_norm": 0.2855964904759227,
"learning_rate": 8.342931087949446e-06,
"loss": 1.6512,
"step": 280
},
{
"epoch": 1.2371888726207907,
"grad_norm": 0.3054377930088084,
"learning_rate": 8.32929450714658e-06,
"loss": 1.6274,
"step": 281
},
{
"epoch": 1.2415812591508053,
"grad_norm": 0.29343763848165016,
"learning_rate": 8.315613291203977e-06,
"loss": 1.6372,
"step": 282
},
{
"epoch": 1.24597364568082,
"grad_norm": 0.40870431747557984,
"learning_rate": 8.301887623542774e-06,
"loss": 1.6667,
"step": 283
},
{
"epoch": 1.2503660322108345,
"grad_norm": 0.30881819618442474,
"learning_rate": 8.288117688180064e-06,
"loss": 1.5547,
"step": 284
},
{
"epoch": 1.2547584187408491,
"grad_norm": 0.28765815508847925,
"learning_rate": 8.274303669726427e-06,
"loss": 1.6097,
"step": 285
},
{
"epoch": 1.2591508052708638,
"grad_norm": 0.33007103881402516,
"learning_rate": 8.260445753383453e-06,
"loss": 1.7016,
"step": 286
},
{
"epoch": 1.2635431918008786,
"grad_norm": 0.3628853649167158,
"learning_rate": 8.246544124941266e-06,
"loss": 1.6086,
"step": 287
},
{
"epoch": 1.267935578330893,
"grad_norm": 0.40134140387051037,
"learning_rate": 8.232598970776027e-06,
"loss": 1.6789,
"step": 288
},
{
"epoch": 1.2723279648609078,
"grad_norm": 0.2920563442488122,
"learning_rate": 8.218610477847435e-06,
"loss": 1.5915,
"step": 289
},
{
"epoch": 1.2767203513909224,
"grad_norm": 0.28530665366941144,
"learning_rate": 8.204578833696228e-06,
"loss": 1.5949,
"step": 290
},
{
"epoch": 1.281112737920937,
"grad_norm": 0.3155789127902478,
"learning_rate": 8.190504226441654e-06,
"loss": 1.639,
"step": 291
},
{
"epoch": 1.2855051244509517,
"grad_norm": 0.3199350667177758,
"learning_rate": 8.176386844778969e-06,
"loss": 1.6818,
"step": 292
},
{
"epoch": 1.2898975109809663,
"grad_norm": 0.34596043922632713,
"learning_rate": 8.162226877976886e-06,
"loss": 1.6604,
"step": 293
},
{
"epoch": 1.294289897510981,
"grad_norm": 0.32781624288061406,
"learning_rate": 8.148024515875057e-06,
"loss": 1.5971,
"step": 294
},
{
"epoch": 1.2986822840409955,
"grad_norm": 0.5927243317332561,
"learning_rate": 8.133779948881514e-06,
"loss": 1.6574,
"step": 295
},
{
"epoch": 1.3030746705710103,
"grad_norm": 0.296769298222996,
"learning_rate": 8.11949336797012e-06,
"loss": 1.5505,
"step": 296
},
{
"epoch": 1.307467057101025,
"grad_norm": 0.28849112804215943,
"learning_rate": 8.105164964678009e-06,
"loss": 1.6729,
"step": 297
},
{
"epoch": 1.3118594436310396,
"grad_norm": 0.3286168384006033,
"learning_rate": 8.090794931103026e-06,
"loss": 1.5949,
"step": 298
},
{
"epoch": 1.3162518301610542,
"grad_norm": 0.31185844876531665,
"learning_rate": 8.076383459901138e-06,
"loss": 1.6918,
"step": 299
},
{
"epoch": 1.3206442166910688,
"grad_norm": 0.2789921585419944,
"learning_rate": 8.061930744283855e-06,
"loss": 1.6437,
"step": 300
},
{
"epoch": 1.3250366032210834,
"grad_norm": 0.30165508316706874,
"learning_rate": 8.04743697801565e-06,
"loss": 1.6555,
"step": 301
},
{
"epoch": 1.329428989751098,
"grad_norm": 0.2989059936488396,
"learning_rate": 8.032902355411345e-06,
"loss": 1.5141,
"step": 302
},
{
"epoch": 1.3338213762811129,
"grad_norm": 0.29928394703560823,
"learning_rate": 8.018327071333521e-06,
"loss": 1.5859,
"step": 303
},
{
"epoch": 1.3382137628111272,
"grad_norm": 0.29795453570714703,
"learning_rate": 8.003711321189895e-06,
"loss": 1.5864,
"step": 304
},
{
"epoch": 1.342606149341142,
"grad_norm": 0.30765817351768504,
"learning_rate": 7.989055300930704e-06,
"loss": 1.5743,
"step": 305
},
{
"epoch": 1.3469985358711567,
"grad_norm": 0.43955554329120444,
"learning_rate": 7.974359207046081e-06,
"loss": 1.6423,
"step": 306
},
{
"epoch": 1.3513909224011713,
"grad_norm": 0.37865307469865994,
"learning_rate": 7.959623236563412e-06,
"loss": 1.6102,
"step": 307
},
{
"epoch": 1.355783308931186,
"grad_norm": 0.3074090057367007,
"learning_rate": 7.944847587044704e-06,
"loss": 1.6602,
"step": 308
},
{
"epoch": 1.3601756954612005,
"grad_norm": 0.3361817362879421,
"learning_rate": 7.930032456583931e-06,
"loss": 1.6704,
"step": 309
},
{
"epoch": 1.3645680819912152,
"grad_norm": 0.2960444587212274,
"learning_rate": 7.915178043804382e-06,
"loss": 1.5536,
"step": 310
},
{
"epoch": 1.3689604685212298,
"grad_norm": 0.31080078465435335,
"learning_rate": 7.900284547855992e-06,
"loss": 1.659,
"step": 311
},
{
"epoch": 1.3733528550512446,
"grad_norm": 0.29259208311732576,
"learning_rate": 7.885352168412677e-06,
"loss": 1.6324,
"step": 312
},
{
"epoch": 1.3777452415812592,
"grad_norm": 0.40704907971444493,
"learning_rate": 7.870381105669657e-06,
"loss": 1.6954,
"step": 313
},
{
"epoch": 1.3821376281112738,
"grad_norm": 0.2884678272243352,
"learning_rate": 7.85537156034077e-06,
"loss": 1.6911,
"step": 314
},
{
"epoch": 1.3865300146412884,
"grad_norm": 0.3002277679918279,
"learning_rate": 7.84032373365578e-06,
"loss": 1.6384,
"step": 315
},
{
"epoch": 1.390922401171303,
"grad_norm": 0.29747656531257255,
"learning_rate": 7.825237827357684e-06,
"loss": 1.5728,
"step": 316
},
{
"epoch": 1.3953147877013177,
"grad_norm": 0.2922330023166882,
"learning_rate": 7.810114043700002e-06,
"loss": 1.6876,
"step": 317
},
{
"epoch": 1.3997071742313323,
"grad_norm": 0.29216993729442,
"learning_rate": 7.794952585444068e-06,
"loss": 1.7112,
"step": 318
},
{
"epoch": 1.4040995607613471,
"grad_norm": 0.2841430060967501,
"learning_rate": 7.779753655856313e-06,
"loss": 1.6352,
"step": 319
},
{
"epoch": 1.4084919472913615,
"grad_norm": 0.29535174268674796,
"learning_rate": 7.764517458705536e-06,
"loss": 1.638,
"step": 320
},
{
"epoch": 1.4128843338213763,
"grad_norm": 0.3553267662730052,
"learning_rate": 7.749244198260175e-06,
"loss": 1.6262,
"step": 321
},
{
"epoch": 1.417276720351391,
"grad_norm": 0.2977673782348279,
"learning_rate": 7.733934079285569e-06,
"loss": 1.6115,
"step": 322
},
{
"epoch": 1.4216691068814056,
"grad_norm": 0.2822196746941408,
"learning_rate": 7.718587307041209e-06,
"loss": 1.6468,
"step": 323
},
{
"epoch": 1.4260614934114202,
"grad_norm": 0.2837671497162217,
"learning_rate": 7.703204087277989e-06,
"loss": 1.6474,
"step": 324
},
{
"epoch": 1.4304538799414348,
"grad_norm": 0.31664451334685184,
"learning_rate": 7.687784626235448e-06,
"loss": 1.5919,
"step": 325
},
{
"epoch": 1.4348462664714494,
"grad_norm": 0.33195017711571867,
"learning_rate": 7.672329130639007e-06,
"loss": 1.6941,
"step": 326
},
{
"epoch": 1.439238653001464,
"grad_norm": 0.2995108911659512,
"learning_rate": 7.656837807697187e-06,
"loss": 1.6393,
"step": 327
},
{
"epoch": 1.4436310395314789,
"grad_norm": 0.2949122695416032,
"learning_rate": 7.641310865098845e-06,
"loss": 1.5135,
"step": 328
},
{
"epoch": 1.4480234260614935,
"grad_norm": 0.31946384200509315,
"learning_rate": 7.625748511010382e-06,
"loss": 1.5784,
"step": 329
},
{
"epoch": 1.452415812591508,
"grad_norm": 0.29208814812765094,
"learning_rate": 7.610150954072953e-06,
"loss": 1.6794,
"step": 330
},
{
"epoch": 1.4568081991215227,
"grad_norm": 0.34475174012170284,
"learning_rate": 7.594518403399666e-06,
"loss": 1.5773,
"step": 331
},
{
"epoch": 1.4612005856515373,
"grad_norm": 0.2876304119955936,
"learning_rate": 7.578851068572788e-06,
"loss": 1.5613,
"step": 332
},
{
"epoch": 1.465592972181552,
"grad_norm": 0.31542092184200016,
"learning_rate": 7.563149159640929e-06,
"loss": 1.6086,
"step": 333
},
{
"epoch": 1.4699853587115665,
"grad_norm": 0.2964161628058562,
"learning_rate": 7.547412887116224e-06,
"loss": 1.5963,
"step": 334
},
{
"epoch": 1.4743777452415814,
"grad_norm": 0.32832262869958717,
"learning_rate": 7.531642461971515e-06,
"loss": 1.6054,
"step": 335
},
{
"epoch": 1.4787701317715958,
"grad_norm": 0.3209241382730098,
"learning_rate": 7.51583809563752e-06,
"loss": 1.6748,
"step": 336
},
{
"epoch": 1.4831625183016106,
"grad_norm": 0.30060262616835987,
"learning_rate": 7.500000000000001e-06,
"loss": 1.6081,
"step": 337
},
{
"epoch": 1.4875549048316252,
"grad_norm": 0.3500388762321464,
"learning_rate": 7.4841283873969194e-06,
"loss": 1.5906,
"step": 338
},
{
"epoch": 1.4919472913616398,
"grad_norm": 0.28308587203259633,
"learning_rate": 7.468223470615593e-06,
"loss": 1.5879,
"step": 339
},
{
"epoch": 1.4963396778916545,
"grad_norm": 0.2901927981331445,
"learning_rate": 7.452285462889841e-06,
"loss": 1.6242,
"step": 340
},
{
"epoch": 1.500732064421669,
"grad_norm": 0.3314307283598877,
"learning_rate": 7.436314577897126e-06,
"loss": 1.646,
"step": 341
},
{
"epoch": 1.505124450951684,
"grad_norm": 0.3264131109049365,
"learning_rate": 7.420311029755688e-06,
"loss": 1.5855,
"step": 342
},
{
"epoch": 1.5095168374816983,
"grad_norm": 0.3519561244527488,
"learning_rate": 7.404275033021676e-06,
"loss": 1.6415,
"step": 343
},
{
"epoch": 1.5139092240117131,
"grad_norm": 0.3231864119230437,
"learning_rate": 7.388206802686272e-06,
"loss": 1.6038,
"step": 344
},
{
"epoch": 1.5183016105417275,
"grad_norm": 0.4648117736859699,
"learning_rate": 7.372106554172802e-06,
"loss": 1.6327,
"step": 345
},
{
"epoch": 1.5226939970717424,
"grad_norm": 0.30819599956019383,
"learning_rate": 7.355974503333859e-06,
"loss": 1.7179,
"step": 346
},
{
"epoch": 1.527086383601757,
"grad_norm": 0.3425241683440226,
"learning_rate": 7.339810866448398e-06,
"loss": 1.6486,
"step": 347
},
{
"epoch": 1.5314787701317716,
"grad_norm": 0.3229436387251198,
"learning_rate": 7.323615860218844e-06,
"loss": 1.5028,
"step": 348
},
{
"epoch": 1.5358711566617862,
"grad_norm": 0.32420322558759,
"learning_rate": 7.307389701768183e-06,
"loss": 1.517,
"step": 349
},
{
"epoch": 1.5402635431918008,
"grad_norm": 0.33308925785501975,
"learning_rate": 7.291132608637053e-06,
"loss": 1.6278,
"step": 350
},
{
"epoch": 1.5446559297218156,
"grad_norm": 0.33961176568859514,
"learning_rate": 7.274844798780826e-06,
"loss": 1.6651,
"step": 351
},
{
"epoch": 1.54904831625183,
"grad_norm": 0.32173930273995155,
"learning_rate": 7.258526490566687e-06,
"loss": 1.491,
"step": 352
},
{
"epoch": 1.5534407027818449,
"grad_norm": 0.3093139646127846,
"learning_rate": 7.242177902770706e-06,
"loss": 1.5796,
"step": 353
},
{
"epoch": 1.5578330893118595,
"grad_norm": 0.32919345439390313,
"learning_rate": 7.2257992545749045e-06,
"loss": 1.5865,
"step": 354
},
{
"epoch": 1.562225475841874,
"grad_norm": 0.2835010953118983,
"learning_rate": 7.209390765564318e-06,
"loss": 1.6417,
"step": 355
},
{
"epoch": 1.5666178623718887,
"grad_norm": 1.7445414238062138,
"learning_rate": 7.192952655724049e-06,
"loss": 1.6597,
"step": 356
},
{
"epoch": 1.5710102489019033,
"grad_norm": 0.5860196285629298,
"learning_rate": 7.176485145436325e-06,
"loss": 1.5387,
"step": 357
},
{
"epoch": 1.5754026354319182,
"grad_norm": 0.3537384385833807,
"learning_rate": 7.159988455477534e-06,
"loss": 1.6013,
"step": 358
},
{
"epoch": 1.5797950219619326,
"grad_norm": 0.3286829503374976,
"learning_rate": 7.143462807015271e-06,
"loss": 1.6264,
"step": 359
},
{
"epoch": 1.5841874084919474,
"grad_norm": 0.3197218822335203,
"learning_rate": 7.1269084216053756e-06,
"loss": 1.5552,
"step": 360
},
{
"epoch": 1.5885797950219618,
"grad_norm": 0.31209150272027425,
"learning_rate": 7.11032552118895e-06,
"loss": 1.6108,
"step": 361
},
{
"epoch": 1.5929721815519766,
"grad_norm": 0.3227458575298131,
"learning_rate": 7.093714328089398e-06,
"loss": 1.649,
"step": 362
},
{
"epoch": 1.5973645680819912,
"grad_norm": 0.3637374362635078,
"learning_rate": 7.0770750650094335e-06,
"loss": 1.6739,
"step": 363
},
{
"epoch": 1.6017569546120058,
"grad_norm": 0.31236210751966154,
"learning_rate": 7.060407955028098e-06,
"loss": 1.5946,
"step": 364
},
{
"epoch": 1.6061493411420205,
"grad_norm": 0.35651724986710415,
"learning_rate": 7.0437132215977744e-06,
"loss": 1.6337,
"step": 365
},
{
"epoch": 1.610541727672035,
"grad_norm": 0.31143483852268733,
"learning_rate": 7.026991088541184e-06,
"loss": 1.67,
"step": 366
},
{
"epoch": 1.61493411420205,
"grad_norm": 0.33050222076540253,
"learning_rate": 7.01024178004839e-06,
"loss": 1.6204,
"step": 367
},
{
"epoch": 1.6193265007320643,
"grad_norm": 0.3284547022539121,
"learning_rate": 6.99346552067379e-06,
"loss": 1.573,
"step": 368
},
{
"epoch": 1.6237188872620791,
"grad_norm": 0.3603027916184282,
"learning_rate": 6.976662535333107e-06,
"loss": 1.6553,
"step": 369
},
{
"epoch": 1.6281112737920937,
"grad_norm": 0.8725979374632243,
"learning_rate": 6.959833049300376e-06,
"loss": 1.6279,
"step": 370
},
{
"epoch": 1.6325036603221084,
"grad_norm": 0.28597465971107255,
"learning_rate": 6.942977288204915e-06,
"loss": 1.5074,
"step": 371
},
{
"epoch": 1.636896046852123,
"grad_norm": 0.31388416897723714,
"learning_rate": 6.926095478028312e-06,
"loss": 1.6896,
"step": 372
},
{
"epoch": 1.6412884333821376,
"grad_norm": 0.3008107505835411,
"learning_rate": 6.909187845101387e-06,
"loss": 1.5688,
"step": 373
},
{
"epoch": 1.6456808199121524,
"grad_norm": 0.32252769956105753,
"learning_rate": 6.892254616101158e-06,
"loss": 1.7391,
"step": 374
},
{
"epoch": 1.6500732064421668,
"grad_norm": 0.3253332219188239,
"learning_rate": 6.87529601804781e-06,
"loss": 1.5479,
"step": 375
},
{
"epoch": 1.6544655929721817,
"grad_norm": 0.3354516401894849,
"learning_rate": 6.858312278301638e-06,
"loss": 1.5944,
"step": 376
},
{
"epoch": 1.658857979502196,
"grad_norm": 0.3101157670728667,
"learning_rate": 6.841303624560012e-06,
"loss": 1.5979,
"step": 377
},
{
"epoch": 1.6632503660322109,
"grad_norm": 0.3263157294243148,
"learning_rate": 6.8242702848543185e-06,
"loss": 1.6651,
"step": 378
},
{
"epoch": 1.6676427525622255,
"grad_norm": 0.2963170265612591,
"learning_rate": 6.807212487546897e-06,
"loss": 1.6414,
"step": 379
},
{
"epoch": 1.67203513909224,
"grad_norm": 0.3436329546570268,
"learning_rate": 6.790130461327993e-06,
"loss": 1.5237,
"step": 380
},
{
"epoch": 1.6764275256222547,
"grad_norm": 0.30951635226199037,
"learning_rate": 6.773024435212678e-06,
"loss": 1.5608,
"step": 381
},
{
"epoch": 1.6808199121522693,
"grad_norm": 0.3024875350743093,
"learning_rate": 6.755894638537791e-06,
"loss": 1.5595,
"step": 382
},
{
"epoch": 1.6852122986822842,
"grad_norm": 0.30964409063235376,
"learning_rate": 6.73874130095885e-06,
"loss": 1.6166,
"step": 383
},
{
"epoch": 1.6896046852122986,
"grad_norm": 0.31495863587091266,
"learning_rate": 6.721564652446987e-06,
"loss": 1.5279,
"step": 384
},
{
"epoch": 1.6939970717423134,
"grad_norm": 0.3115937718660612,
"learning_rate": 6.704364923285858e-06,
"loss": 1.5694,
"step": 385
},
{
"epoch": 1.698389458272328,
"grad_norm": 0.29768001900234425,
"learning_rate": 6.687142344068552e-06,
"loss": 1.5578,
"step": 386
},
{
"epoch": 1.7027818448023426,
"grad_norm": 0.32146567067323123,
"learning_rate": 6.669897145694507e-06,
"loss": 1.6368,
"step": 387
},
{
"epoch": 1.7071742313323572,
"grad_norm": 0.29522637791127904,
"learning_rate": 6.6526295593664136e-06,
"loss": 1.6121,
"step": 388
},
{
"epoch": 1.7115666178623719,
"grad_norm": 0.30160303491140916,
"learning_rate": 6.635339816587109e-06,
"loss": 1.6238,
"step": 389
},
{
"epoch": 1.7159590043923867,
"grad_norm": 0.4208009773656739,
"learning_rate": 6.618028149156479e-06,
"loss": 1.62,
"step": 390
},
{
"epoch": 1.720351390922401,
"grad_norm": 0.2881943555017053,
"learning_rate": 6.600694789168345e-06,
"loss": 1.6364,
"step": 391
},
{
"epoch": 1.724743777452416,
"grad_norm": 0.3077307710744344,
"learning_rate": 6.583339969007364e-06,
"loss": 1.5677,
"step": 392
},
{
"epoch": 1.7291361639824303,
"grad_norm": 0.35235096673751176,
"learning_rate": 6.565963921345896e-06,
"loss": 1.6163,
"step": 393
},
{
"epoch": 1.7335285505124451,
"grad_norm": 0.3186476335255873,
"learning_rate": 6.548566879140897e-06,
"loss": 1.6289,
"step": 394
},
{
"epoch": 1.7379209370424598,
"grad_norm": 0.31476614898776,
"learning_rate": 6.531149075630796e-06,
"loss": 1.5874,
"step": 395
},
{
"epoch": 1.7423133235724744,
"grad_norm": 0.3469536206500234,
"learning_rate": 6.513710744332361e-06,
"loss": 1.6621,
"step": 396
},
{
"epoch": 1.746705710102489,
"grad_norm": 0.30680438680917854,
"learning_rate": 6.49625211903757e-06,
"loss": 1.595,
"step": 397
},
{
"epoch": 1.7510980966325036,
"grad_norm": 0.34458603376189034,
"learning_rate": 6.478773433810478e-06,
"loss": 1.5886,
"step": 398
},
{
"epoch": 1.7554904831625184,
"grad_norm": 0.34083953962031827,
"learning_rate": 6.461274922984087e-06,
"loss": 1.5705,
"step": 399
},
{
"epoch": 1.7598828696925328,
"grad_norm": 0.3345685083562738,
"learning_rate": 6.4437568211571864e-06,
"loss": 1.6181,
"step": 400
},
{
"epoch": 1.7642752562225477,
"grad_norm": 0.28767837558253834,
"learning_rate": 6.426219363191224e-06,
"loss": 1.6273,
"step": 401
},
{
"epoch": 1.7686676427525623,
"grad_norm": 0.3309056934700509,
"learning_rate": 6.408662784207149e-06,
"loss": 1.5575,
"step": 402
},
{
"epoch": 1.7730600292825769,
"grad_norm": 0.33936552783693097,
"learning_rate": 6.391087319582264e-06,
"loss": 1.5653,
"step": 403
},
{
"epoch": 1.7774524158125915,
"grad_norm": 0.38728001848935395,
"learning_rate": 6.373493204947065e-06,
"loss": 1.5962,
"step": 404
},
{
"epoch": 1.7818448023426061,
"grad_norm": 0.33731817557741206,
"learning_rate": 6.355880676182086e-06,
"loss": 1.5267,
"step": 405
},
{
"epoch": 1.786237188872621,
"grad_norm": 0.3073610397325029,
"learning_rate": 6.3382499694147345e-06,
"loss": 1.5735,
"step": 406
},
{
"epoch": 1.7906295754026353,
"grad_norm": 0.5666929994713482,
"learning_rate": 6.3206013210161285e-06,
"loss": 1.6164,
"step": 407
},
{
"epoch": 1.7950219619326502,
"grad_norm": 0.3423706928777578,
"learning_rate": 6.302934967597922e-06,
"loss": 1.6694,
"step": 408
},
{
"epoch": 1.7994143484626646,
"grad_norm": 0.3100272828530065,
"learning_rate": 6.2852511460091406e-06,
"loss": 1.6322,
"step": 409
},
{
"epoch": 1.8038067349926794,
"grad_norm": 0.3710603928040935,
"learning_rate": 6.267550093333e-06,
"loss": 1.5405,
"step": 410
},
{
"epoch": 1.808199121522694,
"grad_norm": 0.3024234235421662,
"learning_rate": 6.249832046883729e-06,
"loss": 1.6389,
"step": 411
},
{
"epoch": 1.8125915080527086,
"grad_norm": 0.2923050985231585,
"learning_rate": 6.232097244203388e-06,
"loss": 1.563,
"step": 412
},
{
"epoch": 1.8169838945827232,
"grad_norm": 0.37742465142395615,
"learning_rate": 6.214345923058686e-06,
"loss": 1.5605,
"step": 413
},
{
"epoch": 1.8213762811127379,
"grad_norm": 0.2763099605085667,
"learning_rate": 6.1965783214377895e-06,
"loss": 1.5757,
"step": 414
},
{
"epoch": 1.8257686676427527,
"grad_norm": 0.29226253974509314,
"learning_rate": 6.178794677547138e-06,
"loss": 1.6213,
"step": 415
},
{
"epoch": 1.830161054172767,
"grad_norm": 0.3892056822790433,
"learning_rate": 6.16099522980824e-06,
"loss": 1.6471,
"step": 416
},
{
"epoch": 1.834553440702782,
"grad_norm": 0.3547953314773269,
"learning_rate": 6.143180216854488e-06,
"loss": 1.5933,
"step": 417
},
{
"epoch": 1.8389458272327965,
"grad_norm": 0.2876417607221798,
"learning_rate": 6.125349877527952e-06,
"loss": 1.6056,
"step": 418
},
{
"epoch": 1.8433382137628112,
"grad_norm": 0.29632892867662236,
"learning_rate": 6.1075044508761804e-06,
"loss": 1.5798,
"step": 419
},
{
"epoch": 1.8477306002928258,
"grad_norm": 0.3351398469787539,
"learning_rate": 6.0896441761489925e-06,
"loss": 1.5775,
"step": 420
},
{
"epoch": 1.8521229868228404,
"grad_norm": 0.32572738949136165,
"learning_rate": 6.071769292795274e-06,
"loss": 1.5065,
"step": 421
},
{
"epoch": 1.8565153733528552,
"grad_norm": 0.32922431311553646,
"learning_rate": 6.053880040459765e-06,
"loss": 1.5768,
"step": 422
},
{
"epoch": 1.8609077598828696,
"grad_norm": 0.3229086221064645,
"learning_rate": 6.035976658979846e-06,
"loss": 1.6242,
"step": 423
},
{
"epoch": 1.8653001464128844,
"grad_norm": 0.3114352291982662,
"learning_rate": 6.018059388382327e-06,
"loss": 1.6473,
"step": 424
},
{
"epoch": 1.8696925329428988,
"grad_norm": 0.31045445243058734,
"learning_rate": 6.000128468880223e-06,
"loss": 1.535,
"step": 425
},
{
"epoch": 1.8740849194729137,
"grad_norm": 0.33523008129662585,
"learning_rate": 5.982184140869539e-06,
"loss": 1.6344,
"step": 426
},
{
"epoch": 1.8784773060029283,
"grad_norm": 0.3081522900734568,
"learning_rate": 5.964226644926045e-06,
"loss": 1.6386,
"step": 427
},
{
"epoch": 1.882869692532943,
"grad_norm": 0.2900227624443104,
"learning_rate": 5.946256221802052e-06,
"loss": 1.5843,
"step": 428
},
{
"epoch": 1.8872620790629575,
"grad_norm": 0.354099020882711,
"learning_rate": 5.928273112423177e-06,
"loss": 1.5758,
"step": 429
},
{
"epoch": 1.8916544655929721,
"grad_norm": 0.34160211290628684,
"learning_rate": 5.910277557885128e-06,
"loss": 1.5551,
"step": 430
},
{
"epoch": 1.896046852122987,
"grad_norm": 0.3162945077147358,
"learning_rate": 5.892269799450453e-06,
"loss": 1.6204,
"step": 431
},
{
"epoch": 1.9004392386530014,
"grad_norm": 0.3509407679369851,
"learning_rate": 5.8742500785453226e-06,
"loss": 1.6224,
"step": 432
},
{
"epoch": 1.9048316251830162,
"grad_norm": 0.2848983911128527,
"learning_rate": 5.856218636756281e-06,
"loss": 1.6059,
"step": 433
},
{
"epoch": 1.9092240117130308,
"grad_norm": 0.38193324539008777,
"learning_rate": 5.838175715827015e-06,
"loss": 1.5511,
"step": 434
},
{
"epoch": 1.9136163982430454,
"grad_norm": 0.31934508340416906,
"learning_rate": 5.820121557655109e-06,
"loss": 1.7028,
"step": 435
},
{
"epoch": 1.91800878477306,
"grad_norm": 0.33670681289920085,
"learning_rate": 5.8020564042888015e-06,
"loss": 1.5687,
"step": 436
},
{
"epoch": 1.9224011713030746,
"grad_norm": 0.318844847843566,
"learning_rate": 5.783980497923743e-06,
"loss": 1.567,
"step": 437
},
{
"epoch": 1.9267935578330895,
"grad_norm": 0.29610949062035014,
"learning_rate": 5.76589408089974e-06,
"loss": 1.5166,
"step": 438
},
{
"epoch": 1.9311859443631039,
"grad_norm": 0.28728684262143106,
"learning_rate": 5.747797395697525e-06,
"loss": 1.5813,
"step": 439
},
{
"epoch": 1.9355783308931187,
"grad_norm": 0.32309431631814955,
"learning_rate": 5.729690684935487e-06,
"loss": 1.641,
"step": 440
},
{
"epoch": 1.939970717423133,
"grad_norm": 0.2969130322032886,
"learning_rate": 5.711574191366427e-06,
"loss": 1.5608,
"step": 441
},
{
"epoch": 1.944363103953148,
"grad_norm": 0.42872939564364393,
"learning_rate": 5.693448157874299e-06,
"loss": 1.5983,
"step": 442
},
{
"epoch": 1.9487554904831625,
"grad_norm": 0.3798270075015977,
"learning_rate": 5.675312827470959e-06,
"loss": 1.5896,
"step": 443
},
{
"epoch": 1.9531478770131772,
"grad_norm": 0.30174438725527203,
"learning_rate": 5.657168443292909e-06,
"loss": 1.6043,
"step": 444
},
{
"epoch": 1.9575402635431918,
"grad_norm": 0.4119857865917871,
"learning_rate": 5.639015248598025e-06,
"loss": 1.5754,
"step": 445
},
{
"epoch": 1.9619326500732064,
"grad_norm": 0.32221313787015915,
"learning_rate": 5.620853486762307e-06,
"loss": 1.6569,
"step": 446
},
{
"epoch": 1.9663250366032212,
"grad_norm": 0.3547729959901477,
"learning_rate": 5.6026834012766155e-06,
"loss": 1.6228,
"step": 447
},
{
"epoch": 1.9707174231332356,
"grad_norm": 0.28279646337718173,
"learning_rate": 5.584505235743404e-06,
"loss": 1.5874,
"step": 448
},
{
"epoch": 1.9751098096632504,
"grad_norm": 0.3264218725678291,
"learning_rate": 5.566319233873446e-06,
"loss": 1.6313,
"step": 449
},
{
"epoch": 1.979502196193265,
"grad_norm": 0.28955653787826696,
"learning_rate": 5.548125639482587e-06,
"loss": 1.5989,
"step": 450
},
{
"epoch": 1.9838945827232797,
"grad_norm": 0.881097673855719,
"learning_rate": 5.529924696488456e-06,
"loss": 1.5796,
"step": 451
},
{
"epoch": 1.9882869692532943,
"grad_norm": 0.32048202284049887,
"learning_rate": 5.5117166489072014e-06,
"loss": 1.578,
"step": 452
},
{
"epoch": 1.992679355783309,
"grad_norm": 0.2896791795835667,
"learning_rate": 5.493501740850228e-06,
"loss": 1.6146,
"step": 453
},
{
"epoch": 1.9970717423133237,
"grad_norm": 0.35142442191521966,
"learning_rate": 5.475280216520913e-06,
"loss": 1.5522,
"step": 454
},
{
"epoch": 2.004392386530015,
"grad_norm": 0.6164551609296155,
"learning_rate": 5.45705232021134e-06,
"loss": 3.059,
"step": 455
},
{
"epoch": 2.0087847730600292,
"grad_norm": 0.33044889056247584,
"learning_rate": 5.438818296299015e-06,
"loss": 1.5982,
"step": 456
},
{
"epoch": 2.013177159590044,
"grad_norm": 0.31938902514769063,
"learning_rate": 5.4205783892435996e-06,
"loss": 1.6012,
"step": 457
},
{
"epoch": 2.0175695461200585,
"grad_norm": 1.1795222072761016,
"learning_rate": 5.402332843583631e-06,
"loss": 1.588,
"step": 458
},
{
"epoch": 2.0219619326500733,
"grad_norm": 0.31065801099482754,
"learning_rate": 5.384081903933235e-06,
"loss": 1.6023,
"step": 459
},
{
"epoch": 2.0263543191800877,
"grad_norm": 0.29281216005167415,
"learning_rate": 5.365825814978861e-06,
"loss": 1.5848,
"step": 460
},
{
"epoch": 2.0307467057101025,
"grad_norm": 0.2939045229396799,
"learning_rate": 5.34756482147599e-06,
"loss": 1.5619,
"step": 461
},
{
"epoch": 2.035139092240117,
"grad_norm": 0.2763356309694064,
"learning_rate": 5.3292991682458576e-06,
"loss": 1.6325,
"step": 462
},
{
"epoch": 2.0395314787701317,
"grad_norm": 0.32189639380967644,
"learning_rate": 5.311029100172173e-06,
"loss": 1.5714,
"step": 463
},
{
"epoch": 2.0439238653001466,
"grad_norm": 0.2991909339232657,
"learning_rate": 5.292754862197831e-06,
"loss": 1.5574,
"step": 464
},
{
"epoch": 2.048316251830161,
"grad_norm": 0.3129964860572516,
"learning_rate": 5.274476699321638e-06,
"loss": 1.6938,
"step": 465
},
{
"epoch": 2.052708638360176,
"grad_norm": 0.30394033701147166,
"learning_rate": 5.256194856595012e-06,
"loss": 1.5411,
"step": 466
},
{
"epoch": 2.05710102489019,
"grad_norm": 0.34065979728221907,
"learning_rate": 5.237909579118713e-06,
"loss": 1.6064,
"step": 467
},
{
"epoch": 2.061493411420205,
"grad_norm": 0.2826129934198123,
"learning_rate": 5.219621112039545e-06,
"loss": 1.6057,
"step": 468
},
{
"epoch": 2.0658857979502194,
"grad_norm": 0.3195263718137741,
"learning_rate": 5.201329700547077e-06,
"loss": 1.6617,
"step": 469
},
{
"epoch": 2.0702781844802343,
"grad_norm": 0.31086863726157415,
"learning_rate": 5.183035589870353e-06,
"loss": 1.5866,
"step": 470
},
{
"epoch": 2.074670571010249,
"grad_norm": 0.32171527645836867,
"learning_rate": 5.164739025274604e-06,
"loss": 1.5607,
"step": 471
},
{
"epoch": 2.0790629575402635,
"grad_norm": 0.30392336695969924,
"learning_rate": 5.146440252057962e-06,
"loss": 1.5615,
"step": 472
},
{
"epoch": 2.0834553440702783,
"grad_norm": 0.31809057906529703,
"learning_rate": 5.128139515548164e-06,
"loss": 1.5828,
"step": 473
},
{
"epoch": 2.0878477306002927,
"grad_norm": 0.2927979459326381,
"learning_rate": 5.109837061099274e-06,
"loss": 1.6074,
"step": 474
},
{
"epoch": 2.0922401171303076,
"grad_norm": 0.28785248814578307,
"learning_rate": 5.0915331340883875e-06,
"loss": 1.5747,
"step": 475
},
{
"epoch": 2.096632503660322,
"grad_norm": 0.32743369261911104,
"learning_rate": 5.07322797991234e-06,
"loss": 1.6416,
"step": 476
},
{
"epoch": 2.101024890190337,
"grad_norm": 0.3059835406726052,
"learning_rate": 5.0549218439844185e-06,
"loss": 1.5421,
"step": 477
},
{
"epoch": 2.1054172767203516,
"grad_norm": 0.3034377175450521,
"learning_rate": 5.036614971731076e-06,
"loss": 1.5441,
"step": 478
},
{
"epoch": 2.109809663250366,
"grad_norm": 0.4025542458464793,
"learning_rate": 5.018307608588637e-06,
"loss": 1.5459,
"step": 479
},
{
"epoch": 2.114202049780381,
"grad_norm": 0.29644549708934803,
"learning_rate": 5e-06,
"loss": 1.5845,
"step": 480
},
{
"epoch": 2.1185944363103952,
"grad_norm": 0.3821617049266223,
"learning_rate": 4.981692391411366e-06,
"loss": 1.5627,
"step": 481
},
{
"epoch": 2.12298682284041,
"grad_norm": 0.32702317134885744,
"learning_rate": 4.963385028268925e-06,
"loss": 1.6177,
"step": 482
},
{
"epoch": 2.1273792093704245,
"grad_norm": 0.5647033191984581,
"learning_rate": 4.945078156015582e-06,
"loss": 1.5839,
"step": 483
},
{
"epoch": 2.1317715959004393,
"grad_norm": 0.3030955667087971,
"learning_rate": 4.926772020087663e-06,
"loss": 1.6255,
"step": 484
},
{
"epoch": 2.1361639824304537,
"grad_norm": 0.33951362803589097,
"learning_rate": 4.908466865911615e-06,
"loss": 1.6158,
"step": 485
},
{
"epoch": 2.1405563689604685,
"grad_norm": 0.29445544201117774,
"learning_rate": 4.8901629389007274e-06,
"loss": 1.5388,
"step": 486
},
{
"epoch": 2.1449487554904834,
"grad_norm": 0.3590464064008913,
"learning_rate": 4.871860484451838e-06,
"loss": 1.5925,
"step": 487
},
{
"epoch": 2.1493411420204978,
"grad_norm": 0.29393299221073704,
"learning_rate": 4.853559747942041e-06,
"loss": 1.5905,
"step": 488
},
{
"epoch": 2.1537335285505126,
"grad_norm": 0.3045017300873621,
"learning_rate": 4.835260974725397e-06,
"loss": 1.6272,
"step": 489
},
{
"epoch": 2.158125915080527,
"grad_norm": 0.3005274825666973,
"learning_rate": 4.816964410129648e-06,
"loss": 1.5681,
"step": 490
},
{
"epoch": 2.162518301610542,
"grad_norm": 0.3689663925401503,
"learning_rate": 4.798670299452926e-06,
"loss": 1.5749,
"step": 491
},
{
"epoch": 2.166910688140556,
"grad_norm": 0.3613384924795449,
"learning_rate": 4.7803788879604585e-06,
"loss": 1.6029,
"step": 492
},
{
"epoch": 2.171303074670571,
"grad_norm": 0.3237181229811394,
"learning_rate": 4.762090420881289e-06,
"loss": 1.5673,
"step": 493
},
{
"epoch": 2.1756954612005854,
"grad_norm": 0.29104535885079846,
"learning_rate": 4.743805143404989e-06,
"loss": 1.5711,
"step": 494
},
{
"epoch": 2.1800878477306003,
"grad_norm": 0.310319811358895,
"learning_rate": 4.7255233006783626e-06,
"loss": 1.6182,
"step": 495
},
{
"epoch": 2.184480234260615,
"grad_norm": 0.31864865605925,
"learning_rate": 4.707245137802169e-06,
"loss": 1.636,
"step": 496
},
{
"epoch": 2.1888726207906295,
"grad_norm": 0.2889375392125057,
"learning_rate": 4.688970899827828e-06,
"loss": 1.6264,
"step": 497
},
{
"epoch": 2.1932650073206443,
"grad_norm": 0.337058227912909,
"learning_rate": 4.670700831754145e-06,
"loss": 1.5834,
"step": 498
},
{
"epoch": 2.1976573938506587,
"grad_norm": 0.3060498021030112,
"learning_rate": 4.652435178524013e-06,
"loss": 1.5825,
"step": 499
},
{
"epoch": 2.2020497803806736,
"grad_norm": 0.31142401889233506,
"learning_rate": 4.6341741850211406e-06,
"loss": 1.6192,
"step": 500
},
{
"epoch": 2.206442166910688,
"grad_norm": 0.3480563412111249,
"learning_rate": 4.615918096066766e-06,
"loss": 1.5718,
"step": 501
},
{
"epoch": 2.210834553440703,
"grad_norm": 0.2774237140655264,
"learning_rate": 4.597667156416371e-06,
"loss": 1.639,
"step": 502
},
{
"epoch": 2.2152269399707176,
"grad_norm": 0.2804945714593694,
"learning_rate": 4.5794216107564e-06,
"loss": 1.5795,
"step": 503
},
{
"epoch": 2.219619326500732,
"grad_norm": 0.4361446908640307,
"learning_rate": 4.561181703700986e-06,
"loss": 1.5406,
"step": 504
},
{
"epoch": 2.224011713030747,
"grad_norm": 0.3609641609090663,
"learning_rate": 4.542947679788662e-06,
"loss": 1.5926,
"step": 505
},
{
"epoch": 2.2284040995607612,
"grad_norm": 0.29968200375160275,
"learning_rate": 4.524719783479088e-06,
"loss": 1.6309,
"step": 506
},
{
"epoch": 2.232796486090776,
"grad_norm": 0.2902529148130268,
"learning_rate": 4.506498259149774e-06,
"loss": 1.6243,
"step": 507
},
{
"epoch": 2.2371888726207905,
"grad_norm": 0.35171986609259426,
"learning_rate": 4.488283351092799e-06,
"loss": 1.585,
"step": 508
},
{
"epoch": 2.2415812591508053,
"grad_norm": 0.3140743538598277,
"learning_rate": 4.470075303511546e-06,
"loss": 1.5835,
"step": 509
},
{
"epoch": 2.24597364568082,
"grad_norm": 0.743746136107164,
"learning_rate": 4.451874360517413e-06,
"loss": 1.497,
"step": 510
},
{
"epoch": 2.2503660322108345,
"grad_norm": 0.30150754058851265,
"learning_rate": 4.433680766126554e-06,
"loss": 1.6938,
"step": 511
},
{
"epoch": 2.2547584187408494,
"grad_norm": 0.3418824009574996,
"learning_rate": 4.4154947642566e-06,
"loss": 1.5996,
"step": 512
},
{
"epoch": 2.2591508052708638,
"grad_norm": 4.265008272718185,
"learning_rate": 4.397316598723385e-06,
"loss": 1.5282,
"step": 513
},
{
"epoch": 2.2635431918008786,
"grad_norm": 0.3174625288266337,
"learning_rate": 4.379146513237695e-06,
"loss": 1.646,
"step": 514
},
{
"epoch": 2.267935578330893,
"grad_norm": 0.3319690818479321,
"learning_rate": 4.360984751401977e-06,
"loss": 1.5278,
"step": 515
},
{
"epoch": 2.272327964860908,
"grad_norm": 0.30044205636049554,
"learning_rate": 4.342831556707093e-06,
"loss": 1.609,
"step": 516
},
{
"epoch": 2.276720351390922,
"grad_norm": 0.33317854776862893,
"learning_rate": 4.3246871725290414e-06,
"loss": 1.5675,
"step": 517
},
{
"epoch": 2.281112737920937,
"grad_norm": 0.31632328292339285,
"learning_rate": 4.306551842125702e-06,
"loss": 1.5537,
"step": 518
},
{
"epoch": 2.2855051244509514,
"grad_norm": 0.4655555090676632,
"learning_rate": 4.2884258086335755e-06,
"loss": 1.5998,
"step": 519
},
{
"epoch": 2.2898975109809663,
"grad_norm": 0.2769381188179044,
"learning_rate": 4.270309315064514e-06,
"loss": 1.6107,
"step": 520
},
{
"epoch": 2.294289897510981,
"grad_norm": 0.28065221196751095,
"learning_rate": 4.252202604302476e-06,
"loss": 1.6556,
"step": 521
},
{
"epoch": 2.2986822840409955,
"grad_norm": 0.42592024405532036,
"learning_rate": 4.234105919100261e-06,
"loss": 1.5993,
"step": 522
},
{
"epoch": 2.3030746705710103,
"grad_norm": 0.3058126067604197,
"learning_rate": 4.21601950207626e-06,
"loss": 1.6123,
"step": 523
},
{
"epoch": 2.3074670571010247,
"grad_norm": 0.3341616849592129,
"learning_rate": 4.1979435957111984e-06,
"loss": 1.617,
"step": 524
},
{
"epoch": 2.3118594436310396,
"grad_norm": 0.29572720448399287,
"learning_rate": 4.179878442344892e-06,
"loss": 1.6266,
"step": 525
},
{
"epoch": 2.316251830161054,
"grad_norm": 0.31535745712469815,
"learning_rate": 4.161824284172985e-06,
"loss": 1.5354,
"step": 526
},
{
"epoch": 2.320644216691069,
"grad_norm": 0.3933501995617675,
"learning_rate": 4.14378136324372e-06,
"loss": 1.5932,
"step": 527
},
{
"epoch": 2.3250366032210836,
"grad_norm": 0.2955075282035676,
"learning_rate": 4.125749921454679e-06,
"loss": 1.6059,
"step": 528
},
{
"epoch": 2.329428989751098,
"grad_norm": 0.3156864130791791,
"learning_rate": 4.107730200549549e-06,
"loss": 1.5252,
"step": 529
},
{
"epoch": 2.333821376281113,
"grad_norm": 0.31312483297202565,
"learning_rate": 4.089722442114873e-06,
"loss": 1.6255,
"step": 530
},
{
"epoch": 2.3382137628111272,
"grad_norm": 0.3984616100434099,
"learning_rate": 4.071726887576823e-06,
"loss": 1.6074,
"step": 531
},
{
"epoch": 2.342606149341142,
"grad_norm": 0.3245701785753724,
"learning_rate": 4.053743778197951e-06,
"loss": 1.6017,
"step": 532
},
{
"epoch": 2.3469985358711565,
"grad_norm": 0.3109891324799959,
"learning_rate": 4.035773355073956e-06,
"loss": 1.7142,
"step": 533
},
{
"epoch": 2.3513909224011713,
"grad_norm": 0.2812903996182882,
"learning_rate": 4.017815859130462e-06,
"loss": 1.6089,
"step": 534
},
{
"epoch": 2.355783308931186,
"grad_norm": 0.31403362482776737,
"learning_rate": 3.999871531119779e-06,
"loss": 1.5455,
"step": 535
},
{
"epoch": 2.3601756954612005,
"grad_norm": 0.3608937370940864,
"learning_rate": 3.981940611617675e-06,
"loss": 1.5522,
"step": 536
},
{
"epoch": 2.3645680819912154,
"grad_norm": 0.33749233448354743,
"learning_rate": 3.9640233410201555e-06,
"loss": 1.6777,
"step": 537
},
{
"epoch": 2.3689604685212298,
"grad_norm": 0.5153655618339453,
"learning_rate": 3.946119959540235e-06,
"loss": 1.5495,
"step": 538
},
{
"epoch": 2.3733528550512446,
"grad_norm": 0.31504839746477054,
"learning_rate": 3.928230707204729e-06,
"loss": 1.5521,
"step": 539
},
{
"epoch": 2.377745241581259,
"grad_norm": 0.27776484632999904,
"learning_rate": 3.910355823851008e-06,
"loss": 1.5042,
"step": 540
},
{
"epoch": 2.382137628111274,
"grad_norm": 0.2939400235215093,
"learning_rate": 3.892495549123821e-06,
"loss": 1.6054,
"step": 541
},
{
"epoch": 2.3865300146412887,
"grad_norm": 0.3176044597823968,
"learning_rate": 3.874650122472049e-06,
"loss": 1.5915,
"step": 542
},
{
"epoch": 2.390922401171303,
"grad_norm": 0.42146288925638176,
"learning_rate": 3.856819783145514e-06,
"loss": 1.5908,
"step": 543
},
{
"epoch": 2.395314787701318,
"grad_norm": 0.30849439803473244,
"learning_rate": 3.8390047701917625e-06,
"loss": 1.6203,
"step": 544
},
{
"epoch": 2.3997071742313323,
"grad_norm": 0.3104483621519379,
"learning_rate": 3.821205322452863e-06,
"loss": 1.5298,
"step": 545
},
{
"epoch": 2.404099560761347,
"grad_norm": 1.6029942546871143,
"learning_rate": 3.803421678562213e-06,
"loss": 1.4927,
"step": 546
},
{
"epoch": 2.4084919472913615,
"grad_norm": 0.332446513039922,
"learning_rate": 3.785654076941317e-06,
"loss": 1.5578,
"step": 547
},
{
"epoch": 2.4128843338213763,
"grad_norm": 0.35434480211680486,
"learning_rate": 3.7679027557966136e-06,
"loss": 1.5855,
"step": 548
},
{
"epoch": 2.4172767203513907,
"grad_norm": 0.3278075604228065,
"learning_rate": 3.750167953116272e-06,
"loss": 1.6277,
"step": 549
},
{
"epoch": 2.4216691068814056,
"grad_norm": 0.28809005974662427,
"learning_rate": 3.7324499066670008e-06,
"loss": 1.6628,
"step": 550
},
{
"epoch": 2.42606149341142,
"grad_norm": 0.315430224939953,
"learning_rate": 3.71474885399086e-06,
"loss": 1.5712,
"step": 551
},
{
"epoch": 2.430453879941435,
"grad_norm": 0.29541436843467994,
"learning_rate": 3.6970650324020784e-06,
"loss": 1.6554,
"step": 552
},
{
"epoch": 2.4348462664714496,
"grad_norm": 0.31836804528358076,
"learning_rate": 3.6793986789838745e-06,
"loss": 1.5242,
"step": 553
},
{
"epoch": 2.439238653001464,
"grad_norm": 0.3097916118349815,
"learning_rate": 3.6617500305852676e-06,
"loss": 1.5067,
"step": 554
},
{
"epoch": 2.443631039531479,
"grad_norm": 0.30552956835830547,
"learning_rate": 3.6441193238179152e-06,
"loss": 1.5544,
"step": 555
},
{
"epoch": 2.4480234260614933,
"grad_norm": 0.33196026415940266,
"learning_rate": 3.6265067950529363e-06,
"loss": 1.5967,
"step": 556
},
{
"epoch": 2.452415812591508,
"grad_norm": 0.45992827917949797,
"learning_rate": 3.6089126804177373e-06,
"loss": 1.5314,
"step": 557
},
{
"epoch": 2.4568081991215225,
"grad_norm": 0.3850494843555264,
"learning_rate": 3.5913372157928515e-06,
"loss": 1.584,
"step": 558
},
{
"epoch": 2.4612005856515373,
"grad_norm": 0.3130838633878226,
"learning_rate": 3.5737806368087776e-06,
"loss": 1.5472,
"step": 559
},
{
"epoch": 2.465592972181552,
"grad_norm": 0.33165390842885173,
"learning_rate": 3.556243178842816e-06,
"loss": 1.5396,
"step": 560
},
{
"epoch": 2.4699853587115665,
"grad_norm": 0.30410685779778446,
"learning_rate": 3.5387250770159152e-06,
"loss": 1.6436,
"step": 561
},
{
"epoch": 2.4743777452415814,
"grad_norm": 0.43575069016621865,
"learning_rate": 3.521226566189523e-06,
"loss": 1.6752,
"step": 562
},
{
"epoch": 2.4787701317715958,
"grad_norm": 0.3043077774937357,
"learning_rate": 3.5037478809624315e-06,
"loss": 1.5879,
"step": 563
},
{
"epoch": 2.4831625183016106,
"grad_norm": 0.29787633854732515,
"learning_rate": 3.48628925566764e-06,
"loss": 1.6488,
"step": 564
},
{
"epoch": 2.487554904831625,
"grad_norm": 0.33159507739731536,
"learning_rate": 3.4688509243692037e-06,
"loss": 1.5911,
"step": 565
},
{
"epoch": 2.49194729136164,
"grad_norm": 0.2790057356017803,
"learning_rate": 3.4514331208591027e-06,
"loss": 1.6126,
"step": 566
},
{
"epoch": 2.4963396778916547,
"grad_norm": 0.307396122565463,
"learning_rate": 3.4340360786541067e-06,
"loss": 1.6613,
"step": 567
},
{
"epoch": 2.500732064421669,
"grad_norm": 0.2896656371367208,
"learning_rate": 3.416660030992639e-06,
"loss": 1.6264,
"step": 568
},
{
"epoch": 2.505124450951684,
"grad_norm": 0.28661415223621256,
"learning_rate": 3.3993052108316566e-06,
"loss": 1.5758,
"step": 569
},
{
"epoch": 2.5095168374816983,
"grad_norm": 0.41303310304829133,
"learning_rate": 3.381971850843523e-06,
"loss": 1.5584,
"step": 570
},
{
"epoch": 2.513909224011713,
"grad_norm": 0.28374903595837936,
"learning_rate": 3.3646601834128924e-06,
"loss": 1.6081,
"step": 571
},
{
"epoch": 2.5183016105417275,
"grad_norm": 0.3292975740580804,
"learning_rate": 3.3473704406335873e-06,
"loss": 1.5221,
"step": 572
},
{
"epoch": 2.5226939970717424,
"grad_norm": 0.3452782616763211,
"learning_rate": 3.3301028543054935e-06,
"loss": 1.5544,
"step": 573
},
{
"epoch": 2.527086383601757,
"grad_norm": 0.3294649058226655,
"learning_rate": 3.3128576559314507e-06,
"loss": 1.5776,
"step": 574
},
{
"epoch": 2.5314787701317716,
"grad_norm": 0.3605542745485364,
"learning_rate": 3.295635076714144e-06,
"loss": 1.5856,
"step": 575
},
{
"epoch": 2.535871156661786,
"grad_norm": 0.2941353695509803,
"learning_rate": 3.278435347553014e-06,
"loss": 1.576,
"step": 576
},
{
"epoch": 2.540263543191801,
"grad_norm": 0.3182229392679776,
"learning_rate": 3.2612586990411516e-06,
"loss": 1.5215,
"step": 577
},
{
"epoch": 2.5446559297218156,
"grad_norm": 0.3160193285643318,
"learning_rate": 3.2441053614622096e-06,
"loss": 1.6507,
"step": 578
},
{
"epoch": 2.54904831625183,
"grad_norm": 0.3308377767667811,
"learning_rate": 3.226975564787322e-06,
"loss": 1.5665,
"step": 579
},
{
"epoch": 2.553440702781845,
"grad_norm": 0.2976887584358953,
"learning_rate": 3.209869538672008e-06,
"loss": 1.5684,
"step": 580
},
{
"epoch": 2.5578330893118597,
"grad_norm": 0.3562637257648593,
"learning_rate": 3.1927875124531048e-06,
"loss": 1.5817,
"step": 581
},
{
"epoch": 2.562225475841874,
"grad_norm": 0.3864243441965815,
"learning_rate": 3.1757297151456844e-06,
"loss": 1.4897,
"step": 582
},
{
"epoch": 2.5666178623718885,
"grad_norm": 0.3054191855541809,
"learning_rate": 3.158696375439989e-06,
"loss": 1.5817,
"step": 583
},
{
"epoch": 2.5710102489019033,
"grad_norm": 0.40491573034866035,
"learning_rate": 3.141687721698363e-06,
"loss": 1.638,
"step": 584
},
{
"epoch": 2.575402635431918,
"grad_norm": 0.3120082453985608,
"learning_rate": 3.1247039819521907e-06,
"loss": 1.6502,
"step": 585
},
{
"epoch": 2.5797950219619326,
"grad_norm": 0.29842186795454706,
"learning_rate": 3.107745383898841e-06,
"loss": 1.5946,
"step": 586
},
{
"epoch": 2.5841874084919474,
"grad_norm": 0.3199137178402513,
"learning_rate": 3.090812154898614e-06,
"loss": 1.5514,
"step": 587
},
{
"epoch": 2.588579795021962,
"grad_norm": 0.32241669340221557,
"learning_rate": 3.0739045219716888e-06,
"loss": 1.6556,
"step": 588
},
{
"epoch": 2.5929721815519766,
"grad_norm": 0.32944149352991675,
"learning_rate": 3.0570227117950857e-06,
"loss": 1.5659,
"step": 589
},
{
"epoch": 2.597364568081991,
"grad_norm": 0.28766693574428776,
"learning_rate": 3.040166950699626e-06,
"loss": 1.5592,
"step": 590
},
{
"epoch": 2.601756954612006,
"grad_norm": 0.33175991862359117,
"learning_rate": 3.0233374646668935e-06,
"loss": 1.6019,
"step": 591
},
{
"epoch": 2.6061493411420207,
"grad_norm": 0.30897588965031886,
"learning_rate": 3.006534479326211e-06,
"loss": 1.5768,
"step": 592
},
{
"epoch": 2.610541727672035,
"grad_norm": 0.5063934549092376,
"learning_rate": 2.9897582199516105e-06,
"loss": 1.5424,
"step": 593
},
{
"epoch": 2.61493411420205,
"grad_norm": 0.40175797042520484,
"learning_rate": 2.973008911458816e-06,
"loss": 1.6206,
"step": 594
},
{
"epoch": 2.6193265007320643,
"grad_norm": 0.3006477529439128,
"learning_rate": 2.9562867784022264e-06,
"loss": 1.6468,
"step": 595
},
{
"epoch": 2.623718887262079,
"grad_norm": 0.32028780520359634,
"learning_rate": 2.9395920449719027e-06,
"loss": 1.6246,
"step": 596
},
{
"epoch": 2.6281112737920935,
"grad_norm": 0.2894216705891592,
"learning_rate": 2.9229249349905686e-06,
"loss": 1.5008,
"step": 597
},
{
"epoch": 2.6325036603221084,
"grad_norm": 0.30155534431390957,
"learning_rate": 2.9062856719106034e-06,
"loss": 1.5452,
"step": 598
},
{
"epoch": 2.636896046852123,
"grad_norm": 0.3405268172268061,
"learning_rate": 2.8896744788110497e-06,
"loss": 1.5117,
"step": 599
},
{
"epoch": 2.6412884333821376,
"grad_norm": 0.2963355726805553,
"learning_rate": 2.873091578394626e-06,
"loss": 1.5697,
"step": 600
},
{
"epoch": 2.6456808199121524,
"grad_norm": 0.3931854175541597,
"learning_rate": 2.8565371929847286e-06,
"loss": 1.5837,
"step": 601
},
{
"epoch": 2.650073206442167,
"grad_norm": 0.3081629305989419,
"learning_rate": 2.8400115445224676e-06,
"loss": 1.5532,
"step": 602
},
{
"epoch": 2.6544655929721817,
"grad_norm": 0.321923571672651,
"learning_rate": 2.8235148545636776e-06,
"loss": 1.5905,
"step": 603
},
{
"epoch": 2.658857979502196,
"grad_norm": 0.30381368428150873,
"learning_rate": 2.8070473442759517e-06,
"loss": 1.6202,
"step": 604
},
{
"epoch": 2.663250366032211,
"grad_norm": 0.34379437733116675,
"learning_rate": 2.7906092344356827e-06,
"loss": 1.5631,
"step": 605
},
{
"epoch": 2.6676427525622257,
"grad_norm": 0.3251066020024119,
"learning_rate": 2.7742007454250963e-06,
"loss": 1.6055,
"step": 606
},
{
"epoch": 2.67203513909224,
"grad_norm": 0.31178463454292143,
"learning_rate": 2.757822097229294e-06,
"loss": 1.6396,
"step": 607
},
{
"epoch": 2.6764275256222545,
"grad_norm": 0.6034144070140214,
"learning_rate": 2.741473509433314e-06,
"loss": 1.5724,
"step": 608
},
{
"epoch": 2.6808199121522693,
"grad_norm": 0.2879175094559386,
"learning_rate": 2.7251552012191763e-06,
"loss": 1.5744,
"step": 609
},
{
"epoch": 2.685212298682284,
"grad_norm": 0.3519868147832161,
"learning_rate": 2.708867391362948e-06,
"loss": 1.5935,
"step": 610
},
{
"epoch": 2.6896046852122986,
"grad_norm": 0.29308720247015513,
"learning_rate": 2.692610298231819e-06,
"loss": 1.595,
"step": 611
},
{
"epoch": 2.6939970717423134,
"grad_norm": 0.2759532499836008,
"learning_rate": 2.6763841397811576e-06,
"loss": 1.5826,
"step": 612
},
{
"epoch": 2.6983894582723282,
"grad_norm": 0.29629200692084146,
"learning_rate": 2.660189133551603e-06,
"loss": 1.5978,
"step": 613
},
{
"epoch": 2.7027818448023426,
"grad_norm": 0.4877644396819389,
"learning_rate": 2.644025496666143e-06,
"loss": 1.5618,
"step": 614
},
{
"epoch": 2.707174231332357,
"grad_norm": 0.32075291148747,
"learning_rate": 2.6278934458271998e-06,
"loss": 1.6419,
"step": 615
},
{
"epoch": 2.711566617862372,
"grad_norm": 0.31507589414611065,
"learning_rate": 2.6117931973137295e-06,
"loss": 1.5699,
"step": 616
},
{
"epoch": 2.7159590043923867,
"grad_norm": 0.3588085551568672,
"learning_rate": 2.5957249669783257e-06,
"loss": 1.5814,
"step": 617
},
{
"epoch": 2.720351390922401,
"grad_norm": 0.36359707618102943,
"learning_rate": 2.5796889702443127e-06,
"loss": 1.6475,
"step": 618
},
{
"epoch": 2.724743777452416,
"grad_norm": 0.29390296240068264,
"learning_rate": 2.5636854221028764e-06,
"loss": 1.5175,
"step": 619
},
{
"epoch": 2.7291361639824303,
"grad_norm": 0.26592753999493085,
"learning_rate": 2.54771453711016e-06,
"loss": 1.5708,
"step": 620
},
{
"epoch": 2.733528550512445,
"grad_norm": 0.34603630736032,
"learning_rate": 2.531776529384407e-06,
"loss": 1.6302,
"step": 621
},
{
"epoch": 2.7379209370424595,
"grad_norm": 0.28041321457757834,
"learning_rate": 2.5158716126030835e-06,
"loss": 1.5336,
"step": 622
},
{
"epoch": 2.7423133235724744,
"grad_norm": 0.3212436991662462,
"learning_rate": 2.5000000000000015e-06,
"loss": 1.5925,
"step": 623
},
{
"epoch": 2.746705710102489,
"grad_norm": 0.3047611974850631,
"learning_rate": 2.484161904362481e-06,
"loss": 1.6048,
"step": 624
},
{
"epoch": 2.7510980966325036,
"grad_norm": 0.3048539362248767,
"learning_rate": 2.4683575380284873e-06,
"loss": 1.5498,
"step": 625
},
{
"epoch": 2.7554904831625184,
"grad_norm": 0.34208807818895065,
"learning_rate": 2.4525871128837775e-06,
"loss": 1.576,
"step": 626
},
{
"epoch": 2.759882869692533,
"grad_norm": 0.2949849660779719,
"learning_rate": 2.436850840359073e-06,
"loss": 1.6486,
"step": 627
},
{
"epoch": 2.7642752562225477,
"grad_norm": 0.2779672502557031,
"learning_rate": 2.421148931427213e-06,
"loss": 1.5677,
"step": 628
},
{
"epoch": 2.768667642752562,
"grad_norm": 0.2960325820768123,
"learning_rate": 2.4054815966003365e-06,
"loss": 1.5999,
"step": 629
},
{
"epoch": 2.773060029282577,
"grad_norm": 0.30799619640674625,
"learning_rate": 2.389849045927049e-06,
"loss": 1.5423,
"step": 630
},
{
"epoch": 2.7774524158125917,
"grad_norm": 0.689995508540048,
"learning_rate": 2.3742514889896196e-06,
"loss": 1.6127,
"step": 631
},
{
"epoch": 2.781844802342606,
"grad_norm": 0.3022950443793775,
"learning_rate": 2.3586891349011555e-06,
"loss": 1.6556,
"step": 632
},
{
"epoch": 2.786237188872621,
"grad_norm": 0.32506564068047783,
"learning_rate": 2.3431621923028146e-06,
"loss": 1.581,
"step": 633
},
{
"epoch": 2.7906295754026353,
"grad_norm": 0.3659374843681539,
"learning_rate": 2.3276708693609947e-06,
"loss": 1.5245,
"step": 634
},
{
"epoch": 2.79502196193265,
"grad_norm": 0.2983223667818601,
"learning_rate": 2.3122153737645512e-06,
"loss": 1.5934,
"step": 635
},
{
"epoch": 2.7994143484626646,
"grad_norm": 0.3863672711683231,
"learning_rate": 2.296795912722014e-06,
"loss": 1.6243,
"step": 636
},
{
"epoch": 2.8038067349926794,
"grad_norm": 0.30331251914397456,
"learning_rate": 2.281412692958793e-06,
"loss": 1.5746,
"step": 637
},
{
"epoch": 2.8081991215226942,
"grad_norm": 0.3196136845979999,
"learning_rate": 2.266065920714432e-06,
"loss": 1.6012,
"step": 638
},
{
"epoch": 2.8125915080527086,
"grad_norm": 0.27541958919800974,
"learning_rate": 2.2507558017398263e-06,
"loss": 1.6426,
"step": 639
},
{
"epoch": 2.816983894582723,
"grad_norm": 0.28786178032428017,
"learning_rate": 2.2354825412944643e-06,
"loss": 1.5385,
"step": 640
},
{
"epoch": 2.821376281112738,
"grad_norm": 0.32511038200012793,
"learning_rate": 2.2202463441436885e-06,
"loss": 1.7098,
"step": 641
},
{
"epoch": 2.8257686676427527,
"grad_norm": 0.3001315837884633,
"learning_rate": 2.2050474145559326e-06,
"loss": 1.612,
"step": 642
},
{
"epoch": 2.830161054172767,
"grad_norm": 0.47527905160043865,
"learning_rate": 2.1898859563000003e-06,
"loss": 1.6511,
"step": 643
},
{
"epoch": 2.834553440702782,
"grad_norm": 0.3168531896637398,
"learning_rate": 2.174762172642319e-06,
"loss": 1.5376,
"step": 644
},
{
"epoch": 2.8389458272327968,
"grad_norm": 0.4624637579943484,
"learning_rate": 2.159676266344222e-06,
"loss": 1.6121,
"step": 645
},
{
"epoch": 2.843338213762811,
"grad_norm": 0.291816559938584,
"learning_rate": 2.144628439659231e-06,
"loss": 1.4975,
"step": 646
},
{
"epoch": 2.8477306002928255,
"grad_norm": 0.29391820275780933,
"learning_rate": 2.1296188943303446e-06,
"loss": 1.6212,
"step": 647
},
{
"epoch": 2.8521229868228404,
"grad_norm": 0.31828111479061616,
"learning_rate": 2.1146478315873238e-06,
"loss": 1.6776,
"step": 648
},
{
"epoch": 2.856515373352855,
"grad_norm": 0.2976871167454781,
"learning_rate": 2.09971545214401e-06,
"loss": 1.5939,
"step": 649
},
{
"epoch": 2.8609077598828696,
"grad_norm": 0.2929358425075381,
"learning_rate": 2.0848219561956205e-06,
"loss": 1.6026,
"step": 650
},
{
"epoch": 2.8653001464128844,
"grad_norm": 0.28268849637868454,
"learning_rate": 2.06996754341607e-06,
"loss": 1.5525,
"step": 651
},
{
"epoch": 2.869692532942899,
"grad_norm": 0.3205109285976867,
"learning_rate": 2.0551524129552986e-06,
"loss": 1.6212,
"step": 652
},
{
"epoch": 2.8740849194729137,
"grad_norm": 0.3396332248604952,
"learning_rate": 2.04037676343659e-06,
"loss": 1.6332,
"step": 653
},
{
"epoch": 2.878477306002928,
"grad_norm": 0.2893628943703605,
"learning_rate": 2.0256407929539203e-06,
"loss": 1.5796,
"step": 654
},
{
"epoch": 2.882869692532943,
"grad_norm": 0.31138257479839093,
"learning_rate": 2.0109446990692967e-06,
"loss": 1.5832,
"step": 655
},
{
"epoch": 2.8872620790629577,
"grad_norm": 0.30316986773262733,
"learning_rate": 1.996288678810105e-06,
"loss": 1.6454,
"step": 656
},
{
"epoch": 2.891654465592972,
"grad_norm": 0.3616046943167089,
"learning_rate": 1.98167292866648e-06,
"loss": 1.4587,
"step": 657
},
{
"epoch": 2.896046852122987,
"grad_norm": 0.3182467743924217,
"learning_rate": 1.967097644588657e-06,
"loss": 1.6342,
"step": 658
},
{
"epoch": 2.9004392386530014,
"grad_norm": 0.291092108286567,
"learning_rate": 1.9525630219843524e-06,
"loss": 1.5981,
"step": 659
},
{
"epoch": 2.904831625183016,
"grad_norm": 0.3198399401699167,
"learning_rate": 1.9380692557161453e-06,
"loss": 1.5587,
"step": 660
},
{
"epoch": 2.9092240117130306,
"grad_norm": 0.284926327834223,
"learning_rate": 1.923616540098864e-06,
"loss": 1.5807,
"step": 661
},
{
"epoch": 2.9136163982430454,
"grad_norm": 0.34133283690090355,
"learning_rate": 1.9092050688969736e-06,
"loss": 1.6016,
"step": 662
},
{
"epoch": 2.9180087847730602,
"grad_norm": 0.3234782424139794,
"learning_rate": 1.8948350353219913e-06,
"loss": 1.5699,
"step": 663
},
{
"epoch": 2.9224011713030746,
"grad_norm": 0.32527383445723507,
"learning_rate": 1.880506632029883e-06,
"loss": 1.6162,
"step": 664
},
{
"epoch": 2.9267935578330895,
"grad_norm": 0.3194926131288373,
"learning_rate": 1.8662200511184874e-06,
"loss": 1.5822,
"step": 665
},
{
"epoch": 2.931185944363104,
"grad_norm": 0.3436218283010263,
"learning_rate": 1.8519754841249444e-06,
"loss": 1.5979,
"step": 666
},
{
"epoch": 2.9355783308931187,
"grad_norm": 0.3035445625939091,
"learning_rate": 1.8377731220231144e-06,
"loss": 1.547,
"step": 667
},
{
"epoch": 2.939970717423133,
"grad_norm": 0.33552700833979515,
"learning_rate": 1.8236131552210317e-06,
"loss": 1.5559,
"step": 668
},
{
"epoch": 2.944363103953148,
"grad_norm": 0.30372475017209144,
"learning_rate": 1.8094957735583463e-06,
"loss": 1.5642,
"step": 669
},
{
"epoch": 2.9487554904831628,
"grad_norm": 0.31181597688183876,
"learning_rate": 1.795421166303773e-06,
"loss": 1.5574,
"step": 670
},
{
"epoch": 2.953147877013177,
"grad_norm": 0.33729677529922286,
"learning_rate": 1.7813895221525651e-06,
"loss": 1.6204,
"step": 671
},
{
"epoch": 2.9575402635431915,
"grad_norm": 0.4308912143370284,
"learning_rate": 1.7674010292239746e-06,
"loss": 1.5801,
"step": 672
},
{
"epoch": 2.9619326500732064,
"grad_norm": 0.30463101988066804,
"learning_rate": 1.7534558750587345e-06,
"loss": 1.5911,
"step": 673
},
{
"epoch": 2.966325036603221,
"grad_norm": 0.3768568218829568,
"learning_rate": 1.7395542466165488e-06,
"loss": 1.5717,
"step": 674
},
{
"epoch": 2.9707174231332356,
"grad_norm": 0.2999853917153679,
"learning_rate": 1.7256963302735752e-06,
"loss": 1.5727,
"step": 675
},
{
"epoch": 2.9751098096632504,
"grad_norm": 0.3013746219343556,
"learning_rate": 1.7118823118199363e-06,
"loss": 1.6322,
"step": 676
},
{
"epoch": 2.9795021961932653,
"grad_norm": 0.3074935580559331,
"learning_rate": 1.6981123764572272e-06,
"loss": 1.5159,
"step": 677
},
{
"epoch": 2.9838945827232797,
"grad_norm": 0.3333567378869067,
"learning_rate": 1.6843867087960252e-06,
"loss": 1.6671,
"step": 678
},
{
"epoch": 2.988286969253294,
"grad_norm": 0.32409788340370355,
"learning_rate": 1.670705492853421e-06,
"loss": 1.7106,
"step": 679
},
{
"epoch": 2.992679355783309,
"grad_norm": 0.29584784991310104,
"learning_rate": 1.6570689120505557e-06,
"loss": 1.6037,
"step": 680
},
{
"epoch": 2.9970717423133237,
"grad_norm": 0.33480028358751196,
"learning_rate": 1.6434771492101487e-06,
"loss": 1.5156,
"step": 681
},
{
"epoch": 3.004392386530015,
"grad_norm": 0.31677037762304755,
"learning_rate": 1.6299303865540616e-06,
"loss": 3.3042,
"step": 682
},
{
"epoch": 3.0087847730600292,
"grad_norm": 0.3119634571083339,
"learning_rate": 1.6164288057008466e-06,
"loss": 1.6111,
"step": 683
},
{
"epoch": 3.013177159590044,
"grad_norm": 0.3176907934650192,
"learning_rate": 1.6029725876633085e-06,
"loss": 1.5259,
"step": 684
}
],
"logging_steps": 1,
"max_steps": 908,
"num_input_tokens_seen": 0,
"num_train_epochs": 4,
"save_steps": 114,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.8086729312832061e+18,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}