{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 5859,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0008533879501621437,
"grad_norm": 4.798749402792709,
"learning_rate": 6.825938566552902e-07,
"loss": 1.0233,
"num_tokens": 495890.0,
"step": 5
},
{
"epoch": 0.0017067759003242873,
"grad_norm": 4.0425866323185256,
"learning_rate": 1.5358361774744028e-06,
"loss": 1.0272,
"num_tokens": 949600.0,
"step": 10
},
{
"epoch": 0.002560163850486431,
"grad_norm": 3.3086017576382143,
"learning_rate": 2.389078498293516e-06,
"loss": 1.0324,
"num_tokens": 1480979.0,
"step": 15
},
{
"epoch": 0.0034135518006485747,
"grad_norm": 2.635692129663441,
"learning_rate": 3.242320819112628e-06,
"loss": 0.9332,
"num_tokens": 1933925.0,
"step": 20
},
{
"epoch": 0.004266939750810718,
"grad_norm": 2.1861061417641277,
"learning_rate": 4.095563139931741e-06,
"loss": 0.8952,
"num_tokens": 2403607.0,
"step": 25
},
{
"epoch": 0.005120327700972862,
"grad_norm": 1.7489741971256096,
"learning_rate": 4.948805460750854e-06,
"loss": 0.8596,
"num_tokens": 2810063.0,
"step": 30
},
{
"epoch": 0.005973715651135006,
"grad_norm": 1.428222790090035,
"learning_rate": 5.802047781569966e-06,
"loss": 0.8399,
"num_tokens": 3245647.0,
"step": 35
},
{
"epoch": 0.006827103601297149,
"grad_norm": 1.3331284271200252,
"learning_rate": 6.655290102389079e-06,
"loss": 0.818,
"num_tokens": 3759146.0,
"step": 40
},
{
"epoch": 0.007680491551459293,
"grad_norm": 1.6553057262744924,
"learning_rate": 7.508532423208192e-06,
"loss": 0.7976,
"num_tokens": 4202971.0,
"step": 45
},
{
"epoch": 0.008533879501621437,
"grad_norm": 1.516759211785286,
"learning_rate": 8.361774744027304e-06,
"loss": 0.8221,
"num_tokens": 4718430.0,
"step": 50
},
{
"epoch": 0.00938726745178358,
"grad_norm": 1.4828872159472009,
"learning_rate": 9.215017064846417e-06,
"loss": 0.8361,
"num_tokens": 5162362.0,
"step": 55
},
{
"epoch": 0.010240655401945725,
"grad_norm": 1.4796322255718408,
"learning_rate": 1.006825938566553e-05,
"loss": 0.8286,
"num_tokens": 5644524.0,
"step": 60
},
{
"epoch": 0.011094043352107869,
"grad_norm": 1.3342423505815704,
"learning_rate": 1.0921501706484643e-05,
"loss": 0.7944,
"num_tokens": 6151522.0,
"step": 65
},
{
"epoch": 0.011947431302270013,
"grad_norm": 1.3878247508488946,
"learning_rate": 1.1774744027303754e-05,
"loss": 0.8079,
"num_tokens": 6674546.0,
"step": 70
},
{
"epoch": 0.012800819252432157,
"grad_norm": 1.4745878596040554,
"learning_rate": 1.2627986348122867e-05,
"loss": 0.7936,
"num_tokens": 7178713.0,
"step": 75
},
{
"epoch": 0.013654207202594299,
"grad_norm": 1.400943190419203,
"learning_rate": 1.348122866894198e-05,
"loss": 0.7827,
"num_tokens": 7668808.0,
"step": 80
},
{
"epoch": 0.014507595152756443,
"grad_norm": 1.4092175999951435,
"learning_rate": 1.4334470989761092e-05,
"loss": 0.7837,
"num_tokens": 8131699.0,
"step": 85
},
{
"epoch": 0.015360983102918587,
"grad_norm": 1.2931296016661182,
"learning_rate": 1.5187713310580206e-05,
"loss": 0.7488,
"num_tokens": 8560803.0,
"step": 90
},
{
"epoch": 0.01621437105308073,
"grad_norm": 1.5862120531875272,
"learning_rate": 1.604095563139932e-05,
"loss": 0.8045,
"num_tokens": 9030225.0,
"step": 95
},
{
"epoch": 0.017067759003242873,
"grad_norm": 1.510461750449168,
"learning_rate": 1.689419795221843e-05,
"loss": 0.762,
"num_tokens": 9513010.0,
"step": 100
},
{
"epoch": 0.017921146953405017,
"grad_norm": 1.5232874703838686,
"learning_rate": 1.7747440273037545e-05,
"loss": 0.7473,
"num_tokens": 9959461.0,
"step": 105
},
{
"epoch": 0.01877453490356716,
"grad_norm": 1.5288148915863156,
"learning_rate": 1.8600682593856656e-05,
"loss": 0.7436,
"num_tokens": 10421917.0,
"step": 110
},
{
"epoch": 0.019627922853729305,
"grad_norm": 1.729552367155352,
"learning_rate": 1.945392491467577e-05,
"loss": 0.7726,
"num_tokens": 10871710.0,
"step": 115
},
{
"epoch": 0.02048131080389145,
"grad_norm": 1.8434967482782947,
"learning_rate": 2.0307167235494882e-05,
"loss": 0.7253,
"num_tokens": 11368873.0,
"step": 120
},
{
"epoch": 0.021334698754053593,
"grad_norm": 1.8640322213390468,
"learning_rate": 2.1160409556313997e-05,
"loss": 0.7744,
"num_tokens": 11819871.0,
"step": 125
},
{
"epoch": 0.022188086704215737,
"grad_norm": 1.9701041320400614,
"learning_rate": 2.201365187713311e-05,
"loss": 0.7393,
"num_tokens": 12202952.0,
"step": 130
},
{
"epoch": 0.02304147465437788,
"grad_norm": 1.5669191735272756,
"learning_rate": 2.286689419795222e-05,
"loss": 0.7683,
"num_tokens": 12733944.0,
"step": 135
},
{
"epoch": 0.023894862604540025,
"grad_norm": 1.6475291377422423,
"learning_rate": 2.3720136518771334e-05,
"loss": 0.7404,
"num_tokens": 13219424.0,
"step": 140
},
{
"epoch": 0.02474825055470217,
"grad_norm": 1.6445326464409311,
"learning_rate": 2.4573378839590446e-05,
"loss": 0.7317,
"num_tokens": 13674985.0,
"step": 145
},
{
"epoch": 0.025601638504864313,
"grad_norm": 1.4791858001610043,
"learning_rate": 2.5426621160409557e-05,
"loss": 0.7416,
"num_tokens": 14130423.0,
"step": 150
},
{
"epoch": 0.026455026455026454,
"grad_norm": 1.625734240185079,
"learning_rate": 2.627986348122867e-05,
"loss": 0.7464,
"num_tokens": 14639707.0,
"step": 155
},
{
"epoch": 0.027308414405188598,
"grad_norm": 1.663672051577665,
"learning_rate": 2.7133105802047783e-05,
"loss": 0.7506,
"num_tokens": 15118063.0,
"step": 160
},
{
"epoch": 0.02816180235535074,
"grad_norm": 1.729758637917448,
"learning_rate": 2.7986348122866894e-05,
"loss": 0.762,
"num_tokens": 15588155.0,
"step": 165
},
{
"epoch": 0.029015190305512886,
"grad_norm": 1.583147120158948,
"learning_rate": 2.883959044368601e-05,
"loss": 0.773,
"num_tokens": 16120926.0,
"step": 170
},
{
"epoch": 0.02986857825567503,
"grad_norm": 1.650347974549021,
"learning_rate": 2.969283276450512e-05,
"loss": 0.7748,
"num_tokens": 16622154.0,
"step": 175
},
{
"epoch": 0.030721966205837174,
"grad_norm": 1.6026594473880433,
"learning_rate": 3.054607508532423e-05,
"loss": 0.7031,
"num_tokens": 17042190.0,
"step": 180
},
{
"epoch": 0.03157535415599932,
"grad_norm": 1.5107968926974098,
"learning_rate": 3.139931740614335e-05,
"loss": 0.7557,
"num_tokens": 17488690.0,
"step": 185
},
{
"epoch": 0.03242874210616146,
"grad_norm": 1.5595215938461728,
"learning_rate": 3.225255972696246e-05,
"loss": 0.7512,
"num_tokens": 17976522.0,
"step": 190
},
{
"epoch": 0.033282130056323606,
"grad_norm": 1.6617908648933237,
"learning_rate": 3.310580204778157e-05,
"loss": 0.7599,
"num_tokens": 18452237.0,
"step": 195
},
{
"epoch": 0.034135518006485746,
"grad_norm": 1.565404710832716,
"learning_rate": 3.395904436860068e-05,
"loss": 0.7086,
"num_tokens": 18906839.0,
"step": 200
},
{
"epoch": 0.034988905956647894,
"grad_norm": 1.865841075640289,
"learning_rate": 3.48122866894198e-05,
"loss": 0.7623,
"num_tokens": 19413448.0,
"step": 205
},
{
"epoch": 0.035842293906810034,
"grad_norm": 1.5414831434561684,
"learning_rate": 3.5665529010238906e-05,
"loss": 0.745,
"num_tokens": 19889730.0,
"step": 210
},
{
"epoch": 0.03669568185697218,
"grad_norm": 1.5168119568091742,
"learning_rate": 3.6518771331058024e-05,
"loss": 0.7275,
"num_tokens": 20362643.0,
"step": 215
},
{
"epoch": 0.03754906980713432,
"grad_norm": 1.930454368475353,
"learning_rate": 3.7372013651877135e-05,
"loss": 0.6998,
"num_tokens": 20791105.0,
"step": 220
},
{
"epoch": 0.03840245775729647,
"grad_norm": 1.6010073717621283,
"learning_rate": 3.822525597269625e-05,
"loss": 0.7435,
"num_tokens": 21256777.0,
"step": 225
},
{
"epoch": 0.03925584570745861,
"grad_norm": 1.5494732570872156,
"learning_rate": 3.907849829351536e-05,
"loss": 0.7465,
"num_tokens": 21704917.0,
"step": 230
},
{
"epoch": 0.04010923365762076,
"grad_norm": 1.6190828576003704,
"learning_rate": 3.9931740614334476e-05,
"loss": 0.7602,
"num_tokens": 22185554.0,
"step": 235
},
{
"epoch": 0.0409626216077829,
"grad_norm": 1.4786172771037231,
"learning_rate": 4.078498293515359e-05,
"loss": 0.7384,
"num_tokens": 22641755.0,
"step": 240
},
{
"epoch": 0.04181600955794504,
"grad_norm": 1.696545361799666,
"learning_rate": 4.16382252559727e-05,
"loss": 0.7297,
"num_tokens": 23146999.0,
"step": 245
},
{
"epoch": 0.042669397508107186,
"grad_norm": 1.6836150060894002,
"learning_rate": 4.249146757679181e-05,
"loss": 0.7471,
"num_tokens": 23595511.0,
"step": 250
},
{
"epoch": 0.04352278545826933,
"grad_norm": 1.3649462368231138,
"learning_rate": 4.334470989761093e-05,
"loss": 0.7918,
"num_tokens": 24118220.0,
"step": 255
},
{
"epoch": 0.044376173408431474,
"grad_norm": 1.6116458751617535,
"learning_rate": 4.419795221843004e-05,
"loss": 0.7442,
"num_tokens": 24604914.0,
"step": 260
},
{
"epoch": 0.045229561358593615,
"grad_norm": 2.0923322596387215,
"learning_rate": 4.505119453924915e-05,
"loss": 0.7513,
"num_tokens": 25123670.0,
"step": 265
},
{
"epoch": 0.04608294930875576,
"grad_norm": 1.5206213010037783,
"learning_rate": 4.590443686006826e-05,
"loss": 0.7525,
"num_tokens": 25614380.0,
"step": 270
},
{
"epoch": 0.0469363372589179,
"grad_norm": 1.4529405567523968,
"learning_rate": 4.675767918088737e-05,
"loss": 0.7455,
"num_tokens": 26164927.0,
"step": 275
},
{
"epoch": 0.04778972520908005,
"grad_norm": 2.068428332035821,
"learning_rate": 4.7610921501706484e-05,
"loss": 0.7382,
"num_tokens": 26636629.0,
"step": 280
},
{
"epoch": 0.04864311315924219,
"grad_norm": 1.7574119116801188,
"learning_rate": 4.84641638225256e-05,
"loss": 0.6937,
"num_tokens": 27092899.0,
"step": 285
},
{
"epoch": 0.04949650110940434,
"grad_norm": 1.871558963994762,
"learning_rate": 4.931740614334471e-05,
"loss": 0.7411,
"num_tokens": 27513214.0,
"step": 290
},
{
"epoch": 0.05034988905956648,
"grad_norm": 1.4982774488949393,
"learning_rate": 4.999999641601773e-05,
"loss": 0.7004,
"num_tokens": 27982873.0,
"step": 295
},
{
"epoch": 0.051203277009728626,
"grad_norm": 1.4747335125750545,
"learning_rate": 4.999987097675823e-05,
"loss": 0.7196,
"num_tokens": 28441260.0,
"step": 300
},
{
"epoch": 0.05205666495989077,
"grad_norm": 1.9186147707485868,
"learning_rate": 4.99995663395271e-05,
"loss": 0.7463,
"num_tokens": 28958598.0,
"step": 305
},
{
"epoch": 0.05291005291005291,
"grad_norm": 1.5419614690981815,
"learning_rate": 4.999908250675058e-05,
"loss": 0.7287,
"num_tokens": 29484984.0,
"step": 310
},
{
"epoch": 0.053763440860215055,
"grad_norm": 1.545284237235818,
"learning_rate": 4.999841948228211e-05,
"loss": 0.7488,
"num_tokens": 29961915.0,
"step": 315
},
{
"epoch": 0.054616828810377195,
"grad_norm": 1.5650635439857692,
"learning_rate": 4.999757727140229e-05,
"loss": 0.7505,
"num_tokens": 30417967.0,
"step": 320
},
{
"epoch": 0.05547021676053934,
"grad_norm": 1.5662720451827132,
"learning_rate": 4.999655588081883e-05,
"loss": 0.7408,
"num_tokens": 30905676.0,
"step": 325
},
{
"epoch": 0.05632360471070148,
"grad_norm": 1.4790884805746911,
"learning_rate": 4.999535531866646e-05,
"loss": 0.7089,
"num_tokens": 31367061.0,
"step": 330
},
{
"epoch": 0.05717699266086363,
"grad_norm": 1.3765457548452067,
"learning_rate": 4.9993975594506975e-05,
"loss": 0.7347,
"num_tokens": 31862251.0,
"step": 335
},
{
"epoch": 0.05803038061102577,
"grad_norm": 1.3559491046855545,
"learning_rate": 4.999241671932903e-05,
"loss": 0.7641,
"num_tokens": 32351323.0,
"step": 340
},
{
"epoch": 0.05888376856118792,
"grad_norm": 1.4705848919024582,
"learning_rate": 4.999067870554814e-05,
"loss": 0.7501,
"num_tokens": 32837613.0,
"step": 345
},
{
"epoch": 0.05973715651135006,
"grad_norm": 1.4932750683350335,
"learning_rate": 4.9988761567006536e-05,
"loss": 0.7352,
"num_tokens": 33256284.0,
"step": 350
},
{
"epoch": 0.06059054446151221,
"grad_norm": 1.5883669600294514,
"learning_rate": 4.998666531897308e-05,
"loss": 0.7738,
"num_tokens": 33761379.0,
"step": 355
},
{
"epoch": 0.06144393241167435,
"grad_norm": 1.3497135990866862,
"learning_rate": 4.998438997814312e-05,
"loss": 0.7155,
"num_tokens": 34231333.0,
"step": 360
},
{
"epoch": 0.06229732036183649,
"grad_norm": 1.3866569803887845,
"learning_rate": 4.9981935562638395e-05,
"loss": 0.7532,
"num_tokens": 34703212.0,
"step": 365
},
{
"epoch": 0.06315070831199864,
"grad_norm": 1.329023893890578,
"learning_rate": 4.997930209200684e-05,
"loss": 0.7337,
"num_tokens": 35161427.0,
"step": 370
},
{
"epoch": 0.06400409626216078,
"grad_norm": 1.3726123506316625,
"learning_rate": 4.997648958722248e-05,
"loss": 0.728,
"num_tokens": 35595055.0,
"step": 375
},
{
"epoch": 0.06485748421232292,
"grad_norm": 1.3990487623850296,
"learning_rate": 4.997349807068521e-05,
"loss": 0.7252,
"num_tokens": 36068371.0,
"step": 380
},
{
"epoch": 0.06571087216248507,
"grad_norm": 1.2097191821688327,
"learning_rate": 4.997032756622068e-05,
"loss": 0.7592,
"num_tokens": 36530284.0,
"step": 385
},
{
"epoch": 0.06656426011264721,
"grad_norm": 1.3220720351207618,
"learning_rate": 4.996697809908006e-05,
"loss": 0.798,
"num_tokens": 37049317.0,
"step": 390
},
{
"epoch": 0.06741764806280935,
"grad_norm": 1.3047177399220788,
"learning_rate": 4.9963449695939824e-05,
"loss": 0.7175,
"num_tokens": 37518919.0,
"step": 395
},
{
"epoch": 0.06827103601297149,
"grad_norm": 1.3310960648595318,
"learning_rate": 4.995974238490161e-05,
"loss": 0.7165,
"num_tokens": 37961424.0,
"step": 400
},
{
"epoch": 0.06912442396313365,
"grad_norm": 1.2227488039808938,
"learning_rate": 4.9955856195491904e-05,
"loss": 0.7512,
"num_tokens": 38449337.0,
"step": 405
},
{
"epoch": 0.06997781191329579,
"grad_norm": 1.310648114420623,
"learning_rate": 4.995179115866189e-05,
"loss": 0.7444,
"num_tokens": 38917825.0,
"step": 410
},
{
"epoch": 0.07083119986345793,
"grad_norm": 1.957817056349623,
"learning_rate": 4.994754730678713e-05,
"loss": 0.7321,
"num_tokens": 39359207.0,
"step": 415
},
{
"epoch": 0.07168458781362007,
"grad_norm": 1.306911496638819,
"learning_rate": 4.994312467366738e-05,
"loss": 0.7609,
"num_tokens": 39846738.0,
"step": 420
},
{
"epoch": 0.07253797576378221,
"grad_norm": 1.2794594670813513,
"learning_rate": 4.9938523294526243e-05,
"loss": 0.7203,
"num_tokens": 40311353.0,
"step": 425
},
{
"epoch": 0.07339136371394436,
"grad_norm": 1.4448318443084942,
"learning_rate": 4.993374320601095e-05,
"loss": 0.7343,
"num_tokens": 40804304.0,
"step": 430
},
{
"epoch": 0.0742447516641065,
"grad_norm": 1.5087684442212936,
"learning_rate": 4.992878444619203e-05,
"loss": 0.7456,
"num_tokens": 41244260.0,
"step": 435
},
{
"epoch": 0.07509813961426864,
"grad_norm": 1.7043643961943444,
"learning_rate": 4.992364705456304e-05,
"loss": 0.7613,
"num_tokens": 41826860.0,
"step": 440
},
{
"epoch": 0.07595152756443078,
"grad_norm": 1.4073577965500255,
"learning_rate": 4.991833107204022e-05,
"loss": 0.7162,
"num_tokens": 42279795.0,
"step": 445
},
{
"epoch": 0.07680491551459294,
"grad_norm": 1.2103323507083124,
"learning_rate": 4.9912836540962165e-05,
"loss": 0.7176,
"num_tokens": 42737662.0,
"step": 450
},
{
"epoch": 0.07765830346475508,
"grad_norm": 1.228293757492738,
"learning_rate": 4.9907163505089535e-05,
"loss": 0.7289,
"num_tokens": 43200466.0,
"step": 455
},
{
"epoch": 0.07851169141491722,
"grad_norm": 1.2950510394136565,
"learning_rate": 4.9901312009604665e-05,
"loss": 0.782,
"num_tokens": 43675049.0,
"step": 460
},
{
"epoch": 0.07936507936507936,
"grad_norm": 1.4178723133579891,
"learning_rate": 4.989528210111117e-05,
"loss": 0.756,
"num_tokens": 44122925.0,
"step": 465
},
{
"epoch": 0.08021846731524152,
"grad_norm": 1.2829343514459521,
"learning_rate": 4.98890738276337e-05,
"loss": 0.6999,
"num_tokens": 44601987.0,
"step": 470
},
{
"epoch": 0.08107185526540366,
"grad_norm": 1.4374504524826346,
"learning_rate": 4.988268723861739e-05,
"loss": 0.6932,
"num_tokens": 45053630.0,
"step": 475
},
{
"epoch": 0.0819252432155658,
"grad_norm": 1.3512486308390388,
"learning_rate": 4.9876122384927606e-05,
"loss": 0.7527,
"num_tokens": 45543403.0,
"step": 480
},
{
"epoch": 0.08277863116572794,
"grad_norm": 1.4860548613695417,
"learning_rate": 4.9869379318849456e-05,
"loss": 0.7689,
"num_tokens": 45978851.0,
"step": 485
},
{
"epoch": 0.08363201911589008,
"grad_norm": 1.312504852785056,
"learning_rate": 4.9862458094087435e-05,
"loss": 0.7236,
"num_tokens": 46425608.0,
"step": 490
},
{
"epoch": 0.08448540706605223,
"grad_norm": 1.2303271888413203,
"learning_rate": 4.985535876576493e-05,
"loss": 0.7189,
"num_tokens": 46939676.0,
"step": 495
},
{
"epoch": 0.08533879501621437,
"grad_norm": 1.3194117498740927,
"learning_rate": 4.984808139042385e-05,
"loss": 0.7435,
"num_tokens": 47466657.0,
"step": 500
},
{
"epoch": 0.08619218296637651,
"grad_norm": 1.1193590719992939,
"learning_rate": 4.9840626026024094e-05,
"loss": 0.7237,
"num_tokens": 47942013.0,
"step": 505
},
{
"epoch": 0.08704557091653865,
"grad_norm": 1.4253925033083865,
"learning_rate": 4.983299273194318e-05,
"loss": 0.7362,
"num_tokens": 48417309.0,
"step": 510
},
{
"epoch": 0.08789895886670081,
"grad_norm": 1.1947452604617999,
"learning_rate": 4.982518156897573e-05,
"loss": 0.7235,
"num_tokens": 48882486.0,
"step": 515
},
{
"epoch": 0.08875234681686295,
"grad_norm": 1.3561034274556405,
"learning_rate": 4.981719259933295e-05,
"loss": 0.7308,
"num_tokens": 49369486.0,
"step": 520
},
{
"epoch": 0.08960573476702509,
"grad_norm": 1.4421474252237962,
"learning_rate": 4.980902588664219e-05,
"loss": 0.7568,
"num_tokens": 49823574.0,
"step": 525
},
{
"epoch": 0.09045912271718723,
"grad_norm": 1.4711373305504778,
"learning_rate": 4.9800681495946424e-05,
"loss": 0.6981,
"num_tokens": 50289138.0,
"step": 530
},
{
"epoch": 0.09131251066734938,
"grad_norm": 1.4720189999012312,
"learning_rate": 4.979215949370372e-05,
"loss": 0.691,
"num_tokens": 50721951.0,
"step": 535
},
{
"epoch": 0.09216589861751152,
"grad_norm": 1.4042362668914812,
"learning_rate": 4.9783459947786706e-05,
"loss": 0.7536,
"num_tokens": 51193029.0,
"step": 540
},
{
"epoch": 0.09301928656767366,
"grad_norm": 1.3883834114302933,
"learning_rate": 4.977458292748204e-05,
"loss": 0.7222,
"num_tokens": 51676036.0,
"step": 545
},
{
"epoch": 0.0938726745178358,
"grad_norm": 1.5198834286996736,
"learning_rate": 4.9765528503489875e-05,
"loss": 0.6972,
"num_tokens": 52126399.0,
"step": 550
},
{
"epoch": 0.09472606246799795,
"grad_norm": 1.6818137310699364,
"learning_rate": 4.975629674792326e-05,
"loss": 0.746,
"num_tokens": 52565059.0,
"step": 555
},
{
"epoch": 0.0955794504181601,
"grad_norm": 1.2941381353196966,
"learning_rate": 4.974688773430759e-05,
"loss": 0.7239,
"num_tokens": 53010874.0,
"step": 560
},
{
"epoch": 0.09643283836832224,
"grad_norm": 1.115367136570152,
"learning_rate": 4.973730153758e-05,
"loss": 0.6817,
"num_tokens": 53502621.0,
"step": 565
},
{
"epoch": 0.09728622631848438,
"grad_norm": 1.2236644265034253,
"learning_rate": 4.972753823408882e-05,
"loss": 0.7467,
"num_tokens": 54012592.0,
"step": 570
},
{
"epoch": 0.09813961426864652,
"grad_norm": 1.3614539179632443,
"learning_rate": 4.9717597901592886e-05,
"loss": 0.695,
"num_tokens": 54452602.0,
"step": 575
},
{
"epoch": 0.09899300221880868,
"grad_norm": 1.4195040928837384,
"learning_rate": 4.970748061926097e-05,
"loss": 0.7252,
"num_tokens": 54919625.0,
"step": 580
},
{
"epoch": 0.09984639016897082,
"grad_norm": 1.4702094997469366,
"learning_rate": 4.9697186467671194e-05,
"loss": 0.6918,
"num_tokens": 55377389.0,
"step": 585
},
{
"epoch": 0.10069977811913296,
"grad_norm": 1.1828673990749254,
"learning_rate": 4.968671552881026e-05,
"loss": 0.7398,
"num_tokens": 55892431.0,
"step": 590
},
{
"epoch": 0.1015531660692951,
"grad_norm": 1.3726766976603892,
"learning_rate": 4.967606788607292e-05,
"loss": 0.7646,
"num_tokens": 56408483.0,
"step": 595
},
{
"epoch": 0.10240655401945725,
"grad_norm": 1.4496405231472012,
"learning_rate": 4.966524362426128e-05,
"loss": 0.7378,
"num_tokens": 56909889.0,
"step": 600
},
{
"epoch": 0.10325994196961939,
"grad_norm": 1.232834277723852,
"learning_rate": 4.965424282958407e-05,
"loss": 0.6929,
"num_tokens": 57369675.0,
"step": 605
},
{
"epoch": 0.10411332991978153,
"grad_norm": 1.2396125326108323,
"learning_rate": 4.964306558965604e-05,
"loss": 0.676,
"num_tokens": 57850087.0,
"step": 610
},
{
"epoch": 0.10496671786994367,
"grad_norm": 1.3528031434293055,
"learning_rate": 4.963171199349718e-05,
"loss": 0.7179,
"num_tokens": 58331874.0,
"step": 615
},
{
"epoch": 0.10582010582010581,
"grad_norm": 1.3381471923806956,
"learning_rate": 4.9620182131532074e-05,
"loss": 0.7139,
"num_tokens": 58759721.0,
"step": 620
},
{
"epoch": 0.10667349377026797,
"grad_norm": 1.2470605568646416,
"learning_rate": 4.960847609558916e-05,
"loss": 0.7068,
"num_tokens": 59248661.0,
"step": 625
},
{
"epoch": 0.10752688172043011,
"grad_norm": 1.2526599749042204,
"learning_rate": 4.959659397889998e-05,
"loss": 0.6864,
"num_tokens": 59676292.0,
"step": 630
},
{
"epoch": 0.10838026967059225,
"grad_norm": 1.2713776223670887,
"learning_rate": 4.958453587609848e-05,
"loss": 0.7325,
"num_tokens": 60199916.0,
"step": 635
},
{
"epoch": 0.10923365762075439,
"grad_norm": 1.260529146049976,
"learning_rate": 4.9572301883220196e-05,
"loss": 0.7068,
"num_tokens": 60650570.0,
"step": 640
},
{
"epoch": 0.11008704557091654,
"grad_norm": 1.3188706335664067,
"learning_rate": 4.955989209770155e-05,
"loss": 0.6759,
"num_tokens": 61094637.0,
"step": 645
},
{
"epoch": 0.11094043352107869,
"grad_norm": 1.2746918780007785,
"learning_rate": 4.954730661837904e-05,
"loss": 0.7157,
"num_tokens": 61630713.0,
"step": 650
},
{
"epoch": 0.11179382147124083,
"grad_norm": 1.3647999091533176,
"learning_rate": 4.9534545545488454e-05,
"loss": 0.6628,
"num_tokens": 62149084.0,
"step": 655
},
{
"epoch": 0.11264720942140297,
"grad_norm": 1.1251187771811682,
"learning_rate": 4.952160898066408e-05,
"loss": 0.6955,
"num_tokens": 62662896.0,
"step": 660
},
{
"epoch": 0.1135005973715651,
"grad_norm": 1.1296326633503708,
"learning_rate": 4.950849702693789e-05,
"loss": 0.696,
"num_tokens": 63159787.0,
"step": 665
},
{
"epoch": 0.11435398532172726,
"grad_norm": 1.1820195544892163,
"learning_rate": 4.949520978873874e-05,
"loss": 0.7212,
"num_tokens": 63644240.0,
"step": 670
},
{
"epoch": 0.1152073732718894,
"grad_norm": 1.0907000254670258,
"learning_rate": 4.9481747371891495e-05,
"loss": 0.671,
"num_tokens": 64108252.0,
"step": 675
},
{
"epoch": 0.11606076122205154,
"grad_norm": 1.2697067765385264,
"learning_rate": 4.946810988361623e-05,
"loss": 0.7178,
"num_tokens": 64564660.0,
"step": 680
},
{
"epoch": 0.11691414917221368,
"grad_norm": 1.162053282714903,
"learning_rate": 4.945429743252737e-05,
"loss": 0.71,
"num_tokens": 65057882.0,
"step": 685
},
{
"epoch": 0.11776753712237584,
"grad_norm": 1.1299555887132948,
"learning_rate": 4.9440310128632784e-05,
"loss": 0.7068,
"num_tokens": 65561717.0,
"step": 690
},
{
"epoch": 0.11862092507253798,
"grad_norm": 1.1824136631290918,
"learning_rate": 4.942614808333296e-05,
"loss": 0.7188,
"num_tokens": 65991955.0,
"step": 695
},
{
"epoch": 0.11947431302270012,
"grad_norm": 1.1229803744745124,
"learning_rate": 4.9411811409420094e-05,
"loss": 0.703,
"num_tokens": 66485190.0,
"step": 700
},
{
"epoch": 0.12032770097286226,
"grad_norm": 1.1747259463736386,
"learning_rate": 4.9397300221077194e-05,
"loss": 0.7358,
"num_tokens": 66953512.0,
"step": 705
},
{
"epoch": 0.12118108892302441,
"grad_norm": 1.2548061942186421,
"learning_rate": 4.9382614633877156e-05,
"loss": 0.7503,
"num_tokens": 67466205.0,
"step": 710
},
{
"epoch": 0.12203447687318655,
"grad_norm": 1.1195140238315797,
"learning_rate": 4.936775476478187e-05,
"loss": 0.7031,
"num_tokens": 67951050.0,
"step": 715
},
{
"epoch": 0.1228878648233487,
"grad_norm": 1.2386873813403512,
"learning_rate": 4.93527207321413e-05,
"loss": 0.7079,
"num_tokens": 68426882.0,
"step": 720
},
{
"epoch": 0.12374125277351083,
"grad_norm": 1.1091542877845337,
"learning_rate": 4.933751265569247e-05,
"loss": 0.7485,
"num_tokens": 68930147.0,
"step": 725
},
{
"epoch": 0.12459464072367298,
"grad_norm": 1.0745869384498508,
"learning_rate": 4.9322130656558604e-05,
"loss": 0.6681,
"num_tokens": 69444477.0,
"step": 730
},
{
"epoch": 0.12544802867383512,
"grad_norm": 1.2380251283369912,
"learning_rate": 4.9306574857248065e-05,
"loss": 0.7349,
"num_tokens": 69913688.0,
"step": 735
},
{
"epoch": 0.12630141662399727,
"grad_norm": 1.3475066813612873,
"learning_rate": 4.929084538165349e-05,
"loss": 0.7064,
"num_tokens": 70332714.0,
"step": 740
},
{
"epoch": 0.12715480457415942,
"grad_norm": 1.4400683759689517,
"learning_rate": 4.9274942355050705e-05,
"loss": 0.7243,
"num_tokens": 70813408.0,
"step": 745
},
{
"epoch": 0.12800819252432155,
"grad_norm": 1.2444145807438363,
"learning_rate": 4.9258865904097775e-05,
"loss": 0.7396,
"num_tokens": 71299972.0,
"step": 750
},
{
"epoch": 0.1288615804744837,
"grad_norm": 1.0242414181042974,
"learning_rate": 4.924261615683398e-05,
"loss": 0.7108,
"num_tokens": 71759515.0,
"step": 755
},
{
"epoch": 0.12971496842464583,
"grad_norm": 1.188493882753878,
"learning_rate": 4.922619324267881e-05,
"loss": 0.6651,
"num_tokens": 72252819.0,
"step": 760
},
{
"epoch": 0.130568356374808,
"grad_norm": 1.3126324479191385,
"learning_rate": 4.920959729243091e-05,
"loss": 0.6698,
"num_tokens": 72771440.0,
"step": 765
},
{
"epoch": 0.13142174432497014,
"grad_norm": 1.1411508728365074,
"learning_rate": 4.919282843826709e-05,
"loss": 0.6685,
"num_tokens": 73242194.0,
"step": 770
},
{
"epoch": 0.13227513227513227,
"grad_norm": 1.307936016764042,
"learning_rate": 4.91758868137412e-05,
"loss": 0.7338,
"num_tokens": 73786207.0,
"step": 775
},
{
"epoch": 0.13312852022529442,
"grad_norm": 1.1613529320310767,
"learning_rate": 4.9158772553783105e-05,
"loss": 0.6517,
"num_tokens": 74221358.0,
"step": 780
},
{
"epoch": 0.13398190817545655,
"grad_norm": 1.1510539792968009,
"learning_rate": 4.914148579469763e-05,
"loss": 0.6938,
"num_tokens": 74742975.0,
"step": 785
},
{
"epoch": 0.1348352961256187,
"grad_norm": 1.3850463951205576,
"learning_rate": 4.912402667416344e-05,
"loss": 0.7233,
"num_tokens": 75204120.0,
"step": 790
},
{
"epoch": 0.13568868407578086,
"grad_norm": 1.0345122553008024,
"learning_rate": 4.910639533123193e-05,
"loss": 0.7333,
"num_tokens": 75684137.0,
"step": 795
},
{
"epoch": 0.13654207202594298,
"grad_norm": 1.1993794512248417,
"learning_rate": 4.90885919063262e-05,
"loss": 0.6847,
"num_tokens": 76160914.0,
"step": 800
},
{
"epoch": 0.13739545997610514,
"grad_norm": 1.0772949797690683,
"learning_rate": 4.907061654123982e-05,
"loss": 0.6537,
"num_tokens": 76666991.0,
"step": 805
},
{
"epoch": 0.1382488479262673,
"grad_norm": 1.363751747556385,
"learning_rate": 4.9052469379135796e-05,
"loss": 0.7245,
"num_tokens": 77124379.0,
"step": 810
},
{
"epoch": 0.13910223587642942,
"grad_norm": 1.1901636342780861,
"learning_rate": 4.903415056454539e-05,
"loss": 0.7103,
"num_tokens": 77613980.0,
"step": 815
},
{
"epoch": 0.13995562382659157,
"grad_norm": 1.1910421162849438,
"learning_rate": 4.901566024336696e-05,
"loss": 0.691,
"num_tokens": 78170639.0,
"step": 820
},
{
"epoch": 0.1408090117767537,
"grad_norm": 1.129358397876882,
"learning_rate": 4.899699856286484e-05,
"loss": 0.7184,
"num_tokens": 78667993.0,
"step": 825
},
{
"epoch": 0.14166239972691586,
"grad_norm": 1.184336348998445,
"learning_rate": 4.8978165671668086e-05,
"loss": 0.7035,
"num_tokens": 79150600.0,
"step": 830
},
{
"epoch": 0.142515787677078,
"grad_norm": 1.2229940526291831,
"learning_rate": 4.8959161719769395e-05,
"loss": 0.657,
"num_tokens": 79643316.0,
"step": 835
},
{
"epoch": 0.14336917562724014,
"grad_norm": 1.181968986229247,
"learning_rate": 4.893998685852385e-05,
"loss": 0.7237,
"num_tokens": 80152955.0,
"step": 840
},
{
"epoch": 0.1442225635774023,
"grad_norm": 1.2870971641864188,
"learning_rate": 4.892064124064768e-05,
"loss": 0.6794,
"num_tokens": 80600797.0,
"step": 845
},
{
"epoch": 0.14507595152756442,
"grad_norm": 1.1431233390574438,
"learning_rate": 4.8901125020217165e-05,
"loss": 0.6922,
"num_tokens": 81108472.0,
"step": 850
},
{
"epoch": 0.14592933947772657,
"grad_norm": 1.1722412394244004,
"learning_rate": 4.888143835266726e-05,
"loss": 0.7057,
"num_tokens": 81535402.0,
"step": 855
},
{
"epoch": 0.14678272742788873,
"grad_norm": 1.3721276457948681,
"learning_rate": 4.8861581394790484e-05,
"loss": 0.7128,
"num_tokens": 81973764.0,
"step": 860
},
{
"epoch": 0.14763611537805085,
"grad_norm": 1.0428988961171062,
"learning_rate": 4.884155430473557e-05,
"loss": 0.7359,
"num_tokens": 82469903.0,
"step": 865
},
{
"epoch": 0.148489503328213,
"grad_norm": 1.1967509236053502,
"learning_rate": 4.882135724200628e-05,
"loss": 0.7002,
"num_tokens": 82980445.0,
"step": 870
},
{
"epoch": 0.14934289127837516,
"grad_norm": 1.2240077286805202,
"learning_rate": 4.8800990367460106e-05,
"loss": 0.7242,
"num_tokens": 83441946.0,
"step": 875
},
{
"epoch": 0.1501962792285373,
"grad_norm": 1.3039003245323004,
"learning_rate": 4.878045384330698e-05,
"loss": 0.6752,
"num_tokens": 83901702.0,
"step": 880
},
{
"epoch": 0.15104966717869944,
"grad_norm": 1.1382200264159172,
"learning_rate": 4.875974783310799e-05,
"loss": 0.6533,
"num_tokens": 84319720.0,
"step": 885
},
{
"epoch": 0.15190305512886157,
"grad_norm": 1.1777300254187848,
"learning_rate": 4.873887250177408e-05,
"loss": 0.7036,
"num_tokens": 84741165.0,
"step": 890
},
{
"epoch": 0.15275644307902372,
"grad_norm": 1.2494679499905845,
"learning_rate": 4.871782801556476e-05,
"loss": 0.677,
"num_tokens": 85228531.0,
"step": 895
},
{
"epoch": 0.15360983102918588,
"grad_norm": 1.019491634265827,
"learning_rate": 4.869661454208671e-05,
"loss": 0.6778,
"num_tokens": 85675897.0,
"step": 900
},
{
"epoch": 0.154463218979348,
"grad_norm": 1.2130254412309611,
"learning_rate": 4.867523225029253e-05,
"loss": 0.7389,
"num_tokens": 86192179.0,
"step": 905
},
{
"epoch": 0.15531660692951016,
"grad_norm": 1.1180346575861824,
"learning_rate": 4.865368131047933e-05,
"loss": 0.7269,
"num_tokens": 86722595.0,
"step": 910
},
{
"epoch": 0.15616999487967229,
"grad_norm": 1.0908623944975362,
"learning_rate": 4.8631961894287436e-05,
"loss": 0.7318,
"num_tokens": 87175045.0,
"step": 915
},
{
"epoch": 0.15702338282983444,
"grad_norm": 1.1815996060870517,
"learning_rate": 4.861007417469895e-05,
"loss": 0.6944,
"num_tokens": 87691018.0,
"step": 920
},
{
"epoch": 0.1578767707799966,
"grad_norm": 1.189743371344953,
"learning_rate": 4.858801832603643e-05,
"loss": 0.679,
"num_tokens": 88141621.0,
"step": 925
},
{
"epoch": 0.15873015873015872,
"grad_norm": 1.1579008757807039,
"learning_rate": 4.856579452396148e-05,
"loss": 0.6856,
"num_tokens": 88637962.0,
"step": 930
},
{
"epoch": 0.15958354668032088,
"grad_norm": 1.05143374186143,
"learning_rate": 4.854340294547334e-05,
"loss": 0.6718,
"num_tokens": 89133287.0,
"step": 935
},
{
"epoch": 0.16043693463048303,
"grad_norm": 1.255304539583812,
"learning_rate": 4.85208437689075e-05,
"loss": 0.7068,
"num_tokens": 89579835.0,
"step": 940
},
{
"epoch": 0.16129032258064516,
"grad_norm": 1.140267352892183,
"learning_rate": 4.8498117173934274e-05,
"loss": 0.7009,
"num_tokens": 90060334.0,
"step": 945
},
{
"epoch": 0.1621437105308073,
"grad_norm": 1.1687127700233666,
"learning_rate": 4.847522334155734e-05,
"loss": 0.6597,
"num_tokens": 90579891.0,
"step": 950
},
{
"epoch": 0.16299709848096944,
"grad_norm": 1.1682858279061235,
"learning_rate": 4.845216245411234e-05,
"loss": 0.7037,
"num_tokens": 91038959.0,
"step": 955
},
{
"epoch": 0.1638504864311316,
"grad_norm": 1.0192382594480243,
"learning_rate": 4.842893469526542e-05,
"loss": 0.6798,
"num_tokens": 91542428.0,
"step": 960
},
{
"epoch": 0.16470387438129375,
"grad_norm": 1.3088556304371173,
"learning_rate": 4.840554025001172e-05,
"loss": 0.6759,
"num_tokens": 92005035.0,
"step": 965
},
{
"epoch": 0.16555726233145587,
"grad_norm": 1.120110093470671,
"learning_rate": 4.838197930467397e-05,
"loss": 0.7155,
"num_tokens": 92505502.0,
"step": 970
},
{
"epoch": 0.16641065028161803,
"grad_norm": 1.0045477139885044,
"learning_rate": 4.835825204690096e-05,
"loss": 0.6813,
"num_tokens": 93031983.0,
"step": 975
},
{
"epoch": 0.16726403823178015,
"grad_norm": 1.037997321014646,
"learning_rate": 4.833435866566607e-05,
"loss": 0.6795,
"num_tokens": 93492833.0,
"step": 980
},
{
"epoch": 0.1681174261819423,
"grad_norm": 1.070891638236124,
"learning_rate": 4.831029935126572e-05,
"loss": 0.6678,
"num_tokens": 93977963.0,
"step": 985
},
{
"epoch": 0.16897081413210446,
"grad_norm": 1.1011467509105966,
"learning_rate": 4.828607429531795e-05,
"loss": 0.7554,
"num_tokens": 94474291.0,
"step": 990
},
{
"epoch": 0.1698242020822666,
"grad_norm": 1.2308698141252665,
"learning_rate": 4.826168369076076e-05,
"loss": 0.6906,
"num_tokens": 94951484.0,
"step": 995
},
{
"epoch": 0.17067759003242874,
"grad_norm": 1.1337629675512162,
"learning_rate": 4.82371277318507e-05,
"loss": 0.6905,
"num_tokens": 95438170.0,
"step": 1000
},
{
"epoch": 0.1715309779825909,
"grad_norm": 1.2104358520634946,
"learning_rate": 4.8212406614161244e-05,
"loss": 0.6794,
"num_tokens": 421288.0,
"step": 1005
},
{
"epoch": 0.17238436593275303,
"grad_norm": 1.2353406855810571,
"learning_rate": 4.818752053458126e-05,
"loss": 0.7033,
"num_tokens": 879235.0,
"step": 1010
},
{
"epoch": 0.17323775388291518,
"grad_norm": 1.0518126800684762,
"learning_rate": 4.816246969131342e-05,
"loss": 0.6864,
"num_tokens": 1334650.0,
"step": 1015
},
{
"epoch": 0.1740911418330773,
"grad_norm": 1.1609349480835343,
"learning_rate": 4.8137254283872696e-05,
"loss": 0.6996,
"num_tokens": 1864974.0,
"step": 1020
},
{
"epoch": 0.17494452978323946,
"grad_norm": 1.2355298722821886,
"learning_rate": 4.8111874513084656e-05,
"loss": 0.7045,
"num_tokens": 2399861.0,
"step": 1025
},
{
"epoch": 0.17579791773340162,
"grad_norm": 1.0394044113377778,
"learning_rate": 4.808633058108395e-05,
"loss": 0.696,
"num_tokens": 2920092.0,
"step": 1030
},
{
"epoch": 0.17665130568356374,
"grad_norm": 1.1557297341582518,
"learning_rate": 4.806062269131267e-05,
"loss": 0.655,
"num_tokens": 3390315.0,
"step": 1035
},
{
"epoch": 0.1775046936337259,
"grad_norm": 1.0701341821683303,
"learning_rate": 4.803475104851872e-05,
"loss": 0.6751,
"num_tokens": 3945522.0,
"step": 1040
},
{
"epoch": 0.17835808158388802,
"grad_norm": 1.2210516256173702,
"learning_rate": 4.800871585875424e-05,
"loss": 0.7172,
"num_tokens": 4434383.0,
"step": 1045
},
{
"epoch": 0.17921146953405018,
"grad_norm": 1.0817386299007326,
"learning_rate": 4.798251732937387e-05,
"loss": 0.6945,
"num_tokens": 4965050.0,
"step": 1050
},
{
"epoch": 0.18006485748421233,
"grad_norm": 1.0999316550186282,
"learning_rate": 4.795615566903318e-05,
"loss": 0.6711,
"num_tokens": 5453897.0,
"step": 1055
},
{
"epoch": 0.18091824543437446,
"grad_norm": 1.083934913872008,
"learning_rate": 4.792963108768698e-05,
"loss": 0.6775,
"num_tokens": 5919042.0,
"step": 1060
},
{
"epoch": 0.1817716333845366,
"grad_norm": 1.1472880921199,
"learning_rate": 4.7902943796587645e-05,
"loss": 0.6866,
"num_tokens": 6415784.0,
"step": 1065
},
{
"epoch": 0.18262502133469877,
"grad_norm": 1.0826593628805092,
"learning_rate": 4.787609400828343e-05,
"loss": 0.6851,
"num_tokens": 6876666.0,
"step": 1070
},
{
"epoch": 0.1834784092848609,
"grad_norm": 1.0224289865025395,
"learning_rate": 4.78490819366168e-05,
"loss": 0.6485,
"num_tokens": 7357276.0,
"step": 1075
},
{
"epoch": 0.18433179723502305,
"grad_norm": 1.333695422545476,
"learning_rate": 4.782190779672269e-05,
"loss": 0.6996,
"num_tokens": 7785367.0,
"step": 1080
},
{
"epoch": 0.18518518518518517,
"grad_norm": 1.1213413512803674,
"learning_rate": 4.779457180502682e-05,
"loss": 0.7119,
"num_tokens": 8254378.0,
"step": 1085
},
{
"epoch": 0.18603857313534733,
"grad_norm": 1.4925743105514908,
"learning_rate": 4.7767074179243957e-05,
"loss": 0.7025,
"num_tokens": 8707303.0,
"step": 1090
},
{
"epoch": 0.18689196108550948,
"grad_norm": 1.1048073112660228,
"learning_rate": 4.77394151383762e-05,
"loss": 0.6978,
"num_tokens": 9151397.0,
"step": 1095
},
{
"epoch": 0.1877453490356716,
"grad_norm": 1.0780037201458406,
"learning_rate": 4.771159490271121e-05,
"loss": 0.6762,
"num_tokens": 9606447.0,
"step": 1100
},
{
"epoch": 0.18859873698583376,
"grad_norm": 0.9849852514233166,
"learning_rate": 4.768361369382046e-05,
"loss": 0.732,
"num_tokens": 10046430.0,
"step": 1105
},
{
"epoch": 0.1894521249359959,
"grad_norm": 1.1071881064820523,
"learning_rate": 4.765547173455751e-05,
"loss": 0.6644,
"num_tokens": 10464352.0,
"step": 1110
},
{
"epoch": 0.19030551288615805,
"grad_norm": 1.03281339435922,
"learning_rate": 4.762716924905615e-05,
"loss": 0.694,
"num_tokens": 10939418.0,
"step": 1115
},
{
"epoch": 0.1911589008363202,
"grad_norm": 1.0457272637108797,
"learning_rate": 4.7598706462728724e-05,
"loss": 0.7351,
"num_tokens": 11463517.0,
"step": 1120
},
{
"epoch": 0.19201228878648233,
"grad_norm": 1.102053251146349,
"learning_rate": 4.757008360226423e-05,
"loss": 0.6274,
"num_tokens": 11941810.0,
"step": 1125
},
{
"epoch": 0.19286567673664448,
"grad_norm": 0.9964910402343398,
"learning_rate": 4.754130089562658e-05,
"loss": 0.6472,
"num_tokens": 12391040.0,
"step": 1130
},
{
"epoch": 0.19371906468680664,
"grad_norm": 1.0404504289704928,
"learning_rate": 4.751235857205277e-05,
"loss": 0.694,
"num_tokens": 12884964.0,
"step": 1135
},
{
"epoch": 0.19457245263696876,
"grad_norm": 1.1174967829219296,
"learning_rate": 4.748325686205103e-05,
"loss": 0.64,
"num_tokens": 13363123.0,
"step": 1140
},
{
"epoch": 0.19542584058713092,
"grad_norm": 1.0038620952250452,
"learning_rate": 4.7453995997399025e-05,
"loss": 0.6947,
"num_tokens": 13900418.0,
"step": 1145
},
{
"epoch": 0.19627922853729304,
"grad_norm": 1.0216529119957618,
"learning_rate": 4.742457621114198e-05,
"loss": 0.663,
"num_tokens": 14389010.0,
"step": 1150
},
{
"epoch": 0.1971326164874552,
"grad_norm": 1.27036867063678,
"learning_rate": 4.739499773759084e-05,
"loss": 0.692,
"num_tokens": 14884709.0,
"step": 1155
},
{
"epoch": 0.19798600443761735,
"grad_norm": 1.0489710663459084,
"learning_rate": 4.7365260812320395e-05,
"loss": 0.6778,
"num_tokens": 15402588.0,
"step": 1160
},
{
"epoch": 0.19883939238777948,
"grad_norm": 1.0811664736154534,
"learning_rate": 4.733536567216742e-05,
"loss": 0.6461,
"num_tokens": 15814538.0,
"step": 1165
},
{
"epoch": 0.19969278033794163,
"grad_norm": 1.0711433690647847,
"learning_rate": 4.7305312555228764e-05,
"loss": 0.7362,
"num_tokens": 16331982.0,
"step": 1170
},
{
"epoch": 0.20054616828810376,
"grad_norm": 1.0874721305575152,
"learning_rate": 4.7275101700859476e-05,
"loss": 0.6543,
"num_tokens": 16784047.0,
"step": 1175
},
{
"epoch": 0.20139955623826591,
"grad_norm": 0.9190334431115768,
"learning_rate": 4.724473334967087e-05,
"loss": 0.6998,
"num_tokens": 17275371.0,
"step": 1180
},
{
"epoch": 0.20225294418842807,
"grad_norm": 1.176443033005418,
"learning_rate": 4.721420774352866e-05,
"loss": 0.705,
"num_tokens": 17732176.0,
"step": 1185
},
{
"epoch": 0.2031063321385902,
"grad_norm": 0.9879245433547331,
"learning_rate": 4.7183525125550965e-05,
"loss": 0.6588,
"num_tokens": 18192140.0,
"step": 1190
},
{
"epoch": 0.20395972008875235,
"grad_norm": 1.2696399236895504,
"learning_rate": 4.715268574010644e-05,
"loss": 0.6869,
"num_tokens": 18655895.0,
"step": 1195
},
{
"epoch": 0.2048131080389145,
"grad_norm": 1.1183962733988029,
"learning_rate": 4.712168983281228e-05,
"loss": 0.6926,
"num_tokens": 19083334.0,
"step": 1200
},
{
"epoch": 0.20566649598907663,
"grad_norm": 1.1432216535816588,
"learning_rate": 4.709053765053228e-05,
"loss": 0.6894,
"num_tokens": 19593423.0,
"step": 1205
},
{
"epoch": 0.20651988393923879,
"grad_norm": 1.1410144790578511,
"learning_rate": 4.7059229441374894e-05,
"loss": 0.6772,
"num_tokens": 20023796.0,
"step": 1210
},
{
"epoch": 0.2073732718894009,
"grad_norm": 1.0803016455976846,
"learning_rate": 4.7027765454691204e-05,
"loss": 0.6341,
"num_tokens": 20497719.0,
"step": 1215
},
{
"epoch": 0.20822665983956307,
"grad_norm": 0.8975736502997854,
"learning_rate": 4.6996145941073003e-05,
"loss": 0.6698,
"num_tokens": 21006857.0,
"step": 1220
},
{
"epoch": 0.20908004778972522,
"grad_norm": 1.1231707628107581,
"learning_rate": 4.6964371152350735e-05,
"loss": 0.6787,
"num_tokens": 21477891.0,
"step": 1225
},
{
"epoch": 0.20993343573988735,
"grad_norm": 1.026237227187416,
"learning_rate": 4.693244134159153e-05,
"loss": 0.6631,
"num_tokens": 21955868.0,
"step": 1230
},
{
"epoch": 0.2107868236900495,
"grad_norm": 1.1140790482607805,
"learning_rate": 4.690035676309716e-05,
"loss": 0.6814,
"num_tokens": 22520364.0,
"step": 1235
},
{
"epoch": 0.21164021164021163,
"grad_norm": 1.1246414543571086,
"learning_rate": 4.686811767240206e-05,
"loss": 0.6437,
"num_tokens": 23007589.0,
"step": 1240
},
{
"epoch": 0.21249359959037378,
"grad_norm": 1.105284668255734,
"learning_rate": 4.683572432627124e-05,
"loss": 0.7045,
"num_tokens": 23523518.0,
"step": 1245
},
{
"epoch": 0.21334698754053594,
"grad_norm": 1.1324649088135432,
"learning_rate": 4.6803176982698244e-05,
"loss": 0.6566,
"num_tokens": 23910445.0,
"step": 1250
},
{
"epoch": 0.21420037549069806,
"grad_norm": 1.1487613686834988,
"learning_rate": 4.677047590090315e-05,
"loss": 0.6342,
"num_tokens": 24373538.0,
"step": 1255
},
{
"epoch": 0.21505376344086022,
"grad_norm": 1.2715099668807717,
"learning_rate": 4.6737621341330454e-05,
"loss": 0.6977,
"num_tokens": 24865513.0,
"step": 1260
},
{
"epoch": 0.21590715139102234,
"grad_norm": 1.1514001156275855,
"learning_rate": 4.6704613565647005e-05,
"loss": 0.7177,
"num_tokens": 25386703.0,
"step": 1265
},
{
"epoch": 0.2167605393411845,
"grad_norm": 0.9774936460308985,
"learning_rate": 4.667145283673993e-05,
"loss": 0.6766,
"num_tokens": 25877951.0,
"step": 1270
},
{
"epoch": 0.21761392729134665,
"grad_norm": 1.2440248436188435,
"learning_rate": 4.663813941871454e-05,
"loss": 0.663,
"num_tokens": 26407535.0,
"step": 1275
},
{
"epoch": 0.21846731524150878,
"grad_norm": 1.0641868684389462,
"learning_rate": 4.6604673576892216e-05,
"loss": 0.6613,
"num_tokens": 26873523.0,
"step": 1280
},
{
"epoch": 0.21932070319167093,
"grad_norm": 0.9882397960743491,
"learning_rate": 4.657105557780831e-05,
"loss": 0.683,
"num_tokens": 27345312.0,
"step": 1285
},
{
"epoch": 0.2201740911418331,
"grad_norm": 0.96889308591211,
"learning_rate": 4.653728568921001e-05,
"loss": 0.6703,
"num_tokens": 27818123.0,
"step": 1290
},
{
"epoch": 0.22102747909199522,
"grad_norm": 1.03389444953015,
"learning_rate": 4.650336418005423e-05,
"loss": 0.6913,
"num_tokens": 28283920.0,
"step": 1295
},
{
"epoch": 0.22188086704215737,
"grad_norm": 0.992222197396554,
"learning_rate": 4.6469291320505423e-05,
"loss": 0.6861,
"num_tokens": 28802356.0,
"step": 1300
},
{
"epoch": 0.2227342549923195,
"grad_norm": 1.0960140838999854,
"learning_rate": 4.643506738193346e-05,
"loss": 0.7201,
"num_tokens": 29231839.0,
"step": 1305
},
{
"epoch": 0.22358764294248165,
"grad_norm": 1.244536050952739,
"learning_rate": 4.64006926369115e-05,
"loss": 0.6644,
"num_tokens": 29719816.0,
"step": 1310
},
{
"epoch": 0.2244410308926438,
"grad_norm": 1.0506600574878056,
"learning_rate": 4.6366167359213744e-05,
"loss": 0.6563,
"num_tokens": 30171452.0,
"step": 1315
},
{
"epoch": 0.22529441884280593,
"grad_norm": 1.0321133449697444,
"learning_rate": 4.6331491823813325e-05,
"loss": 0.6675,
"num_tokens": 30656354.0,
"step": 1320
},
{
"epoch": 0.2261478067929681,
"grad_norm": 0.905068136544555,
"learning_rate": 4.629666630688006e-05,
"loss": 0.6644,
"num_tokens": 31143082.0,
"step": 1325
},
{
"epoch": 0.2270011947431302,
"grad_norm": 0.9569551554854419,
"learning_rate": 4.6261691085778315e-05,
"loss": 0.6478,
"num_tokens": 31664428.0,
"step": 1330
},
{
"epoch": 0.22785458269329237,
"grad_norm": 0.9719534189212212,
"learning_rate": 4.622656643906472e-05,
"loss": 0.6515,
"num_tokens": 32152562.0,
"step": 1335
},
{
"epoch": 0.22870797064345452,
"grad_norm": 0.9550619554021147,
"learning_rate": 4.619129264648602e-05,
"loss": 0.6569,
"num_tokens": 32637004.0,
"step": 1340
},
{
"epoch": 0.22956135859361665,
"grad_norm": 1.0541566817823893,
"learning_rate": 4.615586998897681e-05,
"loss": 0.6805,
"num_tokens": 33091371.0,
"step": 1345
},
{
"epoch": 0.2304147465437788,
"grad_norm": 0.9447239325405086,
"learning_rate": 4.6120298748657295e-05,
"loss": 0.6662,
"num_tokens": 33611317.0,
"step": 1350
},
{
"epoch": 0.23126813449394096,
"grad_norm": 1.3796632522640218,
"learning_rate": 4.6084579208831066e-05,
"loss": 0.6974,
"num_tokens": 34103804.0,
"step": 1355
},
{
"epoch": 0.23212152244410308,
"grad_norm": 1.0266712169012189,
"learning_rate": 4.604871165398282e-05,
"loss": 0.6952,
"num_tokens": 34532886.0,
"step": 1360
},
{
"epoch": 0.23297491039426524,
"grad_norm": 0.9833688702297602,
"learning_rate": 4.601269636977611e-05,
"loss": 0.6767,
"num_tokens": 35015944.0,
"step": 1365
},
{
"epoch": 0.23382829834442737,
"grad_norm": 1.0726665743497261,
"learning_rate": 4.5976533643051076e-05,
"loss": 0.6559,
"num_tokens": 35457611.0,
"step": 1370
},
{
"epoch": 0.23468168629458952,
"grad_norm": 1.0076220154429152,
"learning_rate": 4.594022376182212e-05,
"loss": 0.6934,
"num_tokens": 35915425.0,
"step": 1375
},
{
"epoch": 0.23553507424475167,
"grad_norm": 1.0848385881255227,
"learning_rate": 4.590376701527566e-05,
"loss": 0.7094,
"num_tokens": 36409135.0,
"step": 1380
},
{
"epoch": 0.2363884621949138,
"grad_norm": 1.07913510618759,
"learning_rate": 4.586716369376782e-05,
"loss": 0.7151,
"num_tokens": 36931490.0,
"step": 1385
},
{
"epoch": 0.23724185014507596,
"grad_norm": 1.0318989559802685,
"learning_rate": 4.5830414088822097e-05,
"loss": 0.6608,
"num_tokens": 37411796.0,
"step": 1390
},
{
"epoch": 0.23809523809523808,
"grad_norm": 0.9603277138511171,
"learning_rate": 4.579351849312703e-05,
"loss": 0.6751,
"num_tokens": 37874802.0,
"step": 1395
},
{
"epoch": 0.23894862604540024,
"grad_norm": 1.1478993387134349,
"learning_rate": 4.575647720053389e-05,
"loss": 0.7149,
"num_tokens": 38327812.0,
"step": 1400
},
{
"epoch": 0.2398020139955624,
"grad_norm": 1.0990531300486552,
"learning_rate": 4.5719290506054366e-05,
"loss": 0.6453,
"num_tokens": 421060.0,
"step": 1405
},
{
"epoch": 0.24065540194572452,
"grad_norm": 0.9921453796582193,
"learning_rate": 4.5681958705858155e-05,
"loss": 0.6657,
"num_tokens": 877105.0,
"step": 1410
},
{
"epoch": 0.24150878989588667,
"grad_norm": 1.028795003501946,
"learning_rate": 4.564448209727066e-05,
"loss": 0.6523,
"num_tokens": 1339698.0,
"step": 1415
},
{
"epoch": 0.24236217784604883,
"grad_norm": 1.2134426086619026,
"learning_rate": 4.5606860978770554e-05,
"loss": 0.6807,
"num_tokens": 1775407.0,
"step": 1420
},
{
"epoch": 0.24321556579621095,
"grad_norm": 0.9964638495357863,
"learning_rate": 4.55690956499875e-05,
"loss": 0.6524,
"num_tokens": 2215634.0,
"step": 1425
},
{
"epoch": 0.2440689537463731,
"grad_norm": 1.1990942172437682,
"learning_rate": 4.553118641169967e-05,
"loss": 0.6732,
"num_tokens": 2741804.0,
"step": 1430
},
{
"epoch": 0.24492234169653523,
"grad_norm": 1.1330285046336883,
"learning_rate": 4.5493133565831395e-05,
"loss": 0.656,
"num_tokens": 3266679.0,
"step": 1435
},
{
"epoch": 0.2457757296466974,
"grad_norm": 1.0388065194387546,
"learning_rate": 4.5454937415450774e-05,
"loss": 0.6909,
"num_tokens": 3756299.0,
"step": 1440
},
{
"epoch": 0.24662911759685954,
"grad_norm": 0.9196704665381511,
"learning_rate": 4.54165982647672e-05,
"loss": 0.6585,
"num_tokens": 4261608.0,
"step": 1445
},
{
"epoch": 0.24748250554702167,
"grad_norm": 1.104949625525001,
"learning_rate": 4.5378116419129035e-05,
"loss": 0.6712,
"num_tokens": 4687015.0,
"step": 1450
},
{
"epoch": 0.24833589349718382,
"grad_norm": 0.9995929020146768,
"learning_rate": 4.5339492185021066e-05,
"loss": 0.6367,
"num_tokens": 5154135.0,
"step": 1455
},
{
"epoch": 0.24918928144734595,
"grad_norm": 1.1234154727606218,
"learning_rate": 4.5300725870062153e-05,
"loss": 0.6432,
"num_tokens": 5646859.0,
"step": 1460
},
{
"epoch": 0.25004266939750813,
"grad_norm": 1.1513733815654665,
"learning_rate": 4.5261817783002726e-05,
"loss": 0.6696,
"num_tokens": 6122696.0,
"step": 1465
},
{
"epoch": 0.25089605734767023,
"grad_norm": 1.0606906202691488,
"learning_rate": 4.522276823372236e-05,
"loss": 0.651,
"num_tokens": 6525181.0,
"step": 1470
},
{
"epoch": 0.2517494452978324,
"grad_norm": 0.9055146266707058,
"learning_rate": 4.518357753322728e-05,
"loss": 0.6346,
"num_tokens": 6990704.0,
"step": 1475
},
{
"epoch": 0.25260283324799454,
"grad_norm": 1.1044997308379876,
"learning_rate": 4.5144245993647896e-05,
"loss": 0.6767,
"num_tokens": 7430102.0,
"step": 1480
},
{
"epoch": 0.2534562211981567,
"grad_norm": 0.9035531176190219,
"learning_rate": 4.5104773928236324e-05,
"loss": 0.6372,
"num_tokens": 8010997.0,
"step": 1485
},
{
"epoch": 0.25430960914831885,
"grad_norm": 1.0509691108933237,
"learning_rate": 4.506516165136388e-05,
"loss": 0.6853,
"num_tokens": 8514125.0,
"step": 1490
},
{
"epoch": 0.25516299709848095,
"grad_norm": 0.9387925154248077,
"learning_rate": 4.502540947851859e-05,
"loss": 0.7308,
"num_tokens": 9023552.0,
"step": 1495
},
{
"epoch": 0.2560163850486431,
"grad_norm": 0.9831579527984785,
"learning_rate": 4.498551772630264e-05,
"loss": 0.6966,
"num_tokens": 9511775.0,
"step": 1500
},
{
"epoch": 0.25686977299880526,
"grad_norm": 1.0854175039973468,
"learning_rate": 4.494548671242991e-05,
"loss": 0.6905,
"num_tokens": 9998350.0,
"step": 1505
},
{
"epoch": 0.2577231609489674,
"grad_norm": 1.1084538715121595,
"learning_rate": 4.490531675572341e-05,
"loss": 0.6185,
"num_tokens": 10442163.0,
"step": 1510
},
{
"epoch": 0.25857654889912957,
"grad_norm": 1.0374246201385566,
"learning_rate": 4.486500817611273e-05,
"loss": 0.6565,
"num_tokens": 10874287.0,
"step": 1515
},
{
"epoch": 0.25942993684929166,
"grad_norm": 1.0288126747538786,
"learning_rate": 4.482456129463153e-05,
"loss": 0.6731,
"num_tokens": 11370722.0,
"step": 1520
},
{
"epoch": 0.2602833247994538,
"grad_norm": 1.0219221093241808,
"learning_rate": 4.478397643341495e-05,
"loss": 0.6421,
"num_tokens": 11837926.0,
"step": 1525
},
{
"epoch": 0.261136712749616,
"grad_norm": 0.9861358682693042,
"learning_rate": 4.474325391569706e-05,
"loss": 0.6584,
"num_tokens": 12331519.0,
"step": 1530
},
{
"epoch": 0.26199010069977813,
"grad_norm": 0.973697367336259,
"learning_rate": 4.470239406580827e-05,
"loss": 0.6459,
"num_tokens": 12836674.0,
"step": 1535
},
{
"epoch": 0.2628434886499403,
"grad_norm": 1.0550005993382137,
"learning_rate": 4.466139720917277e-05,
"loss": 0.6312,
"num_tokens": 13241414.0,
"step": 1540
},
{
"epoch": 0.2636968766001024,
"grad_norm": 0.9104618916882102,
"learning_rate": 4.4620263672305916e-05,
"loss": 0.6637,
"num_tokens": 13703806.0,
"step": 1545
},
{
"epoch": 0.26455026455026454,
"grad_norm": 1.0295282137438284,
"learning_rate": 4.457899378281167e-05,
"loss": 0.636,
"num_tokens": 14190906.0,
"step": 1550
},
{
"epoch": 0.2654036525004267,
"grad_norm": 0.895888271171603,
"learning_rate": 4.453758786937992e-05,
"loss": 0.6742,
"num_tokens": 14719491.0,
"step": 1555
},
{
"epoch": 0.26625704045058884,
"grad_norm": 0.918926913872113,
"learning_rate": 4.449604626178393e-05,
"loss": 0.6785,
"num_tokens": 15174140.0,
"step": 1560
},
{
"epoch": 0.267110428400751,
"grad_norm": 0.9505309720082539,
"learning_rate": 4.445436929087767e-05,
"loss": 0.7024,
"num_tokens": 15688067.0,
"step": 1565
},
{
"epoch": 0.2679638163509131,
"grad_norm": 0.9369802432268316,
"learning_rate": 4.441255728859321e-05,
"loss": 0.6402,
"num_tokens": 16217563.0,
"step": 1570
},
{
"epoch": 0.26881720430107525,
"grad_norm": 1.0494626842513919,
"learning_rate": 4.437061058793806e-05,
"loss": 0.6755,
"num_tokens": 16704728.0,
"step": 1575
},
{
"epoch": 0.2696705922512374,
"grad_norm": 1.0331486147107156,
"learning_rate": 4.432852952299252e-05,
"loss": 0.6699,
"num_tokens": 17146472.0,
"step": 1580
},
{
"epoch": 0.27052398020139956,
"grad_norm": 1.1119123215431965,
"learning_rate": 4.428631442890702e-05,
"loss": 0.6374,
"num_tokens": 17589690.0,
"step": 1585
},
{
"epoch": 0.2713773681515617,
"grad_norm": 1.1833713096668448,
"learning_rate": 4.424396564189947e-05,
"loss": 0.6965,
"num_tokens": 18069122.0,
"step": 1590
},
{
"epoch": 0.27223075610172387,
"grad_norm": 1.0755796928146804,
"learning_rate": 4.420148349925252e-05,
"loss": 0.6645,
"num_tokens": 18595086.0,
"step": 1595
},
{
"epoch": 0.27308414405188597,
"grad_norm": 1.1333750082764271,
"learning_rate": 4.415886833931097e-05,
"loss": 0.6428,
"num_tokens": 19043696.0,
"step": 1600
},
{
"epoch": 0.2739375320020481,
"grad_norm": 1.043025277361079,
"learning_rate": 4.411612050147899e-05,
"loss": 0.619,
"num_tokens": 19518397.0,
"step": 1605
},
{
"epoch": 0.2747909199522103,
"grad_norm": 0.9822809038945924,
"learning_rate": 4.4073240326217446e-05,
"loss": 0.6365,
"num_tokens": 19970492.0,
"step": 1610
},
{
"epoch": 0.27564430790237243,
"grad_norm": 0.9891307971026575,
"learning_rate": 4.403022815504122e-05,
"loss": 0.6459,
"num_tokens": 20461858.0,
"step": 1615
},
{
"epoch": 0.2764976958525346,
"grad_norm": 1.0566611352935433,
"learning_rate": 4.398708433051645e-05,
"loss": 0.6543,
"num_tokens": 20947870.0,
"step": 1620
},
{
"epoch": 0.2773510838026967,
"grad_norm": 1.0064327049389676,
"learning_rate": 4.3943809196257794e-05,
"loss": 0.7042,
"num_tokens": 21466994.0,
"step": 1625
},
{
"epoch": 0.27820447175285884,
"grad_norm": 0.998464431867688,
"learning_rate": 4.390040309692574e-05,
"loss": 0.6652,
"num_tokens": 21934487.0,
"step": 1630
},
{
"epoch": 0.279057859703021,
"grad_norm": 1.1034198382023583,
"learning_rate": 4.385686637822382e-05,
"loss": 0.7008,
"num_tokens": 22428073.0,
"step": 1635
},
{
"epoch": 0.27991124765318315,
"grad_norm": 1.1452495006729742,
"learning_rate": 4.381319938689588e-05,
"loss": 0.6417,
"num_tokens": 22917591.0,
"step": 1640
},
{
"epoch": 0.2807646356033453,
"grad_norm": 1.1134914421972593,
"learning_rate": 4.376940247072331e-05,
"loss": 0.6593,
"num_tokens": 23362074.0,
"step": 1645
},
{
"epoch": 0.2816180235535074,
"grad_norm": 0.9360103567579704,
"learning_rate": 4.372547597852225e-05,
"loss": 0.6506,
"num_tokens": 23813090.0,
"step": 1650
},
{
"epoch": 0.28247141150366956,
"grad_norm": 0.9486301910092465,
"learning_rate": 4.368142026014086e-05,
"loss": 0.6631,
"num_tokens": 24277894.0,
"step": 1655
},
{
"epoch": 0.2833247994538317,
"grad_norm": 0.9431082592173952,
"learning_rate": 4.3637235666456506e-05,
"loss": 0.6129,
"num_tokens": 24749220.0,
"step": 1660
},
{
"epoch": 0.28417818740399386,
"grad_norm": 1.1286583774589005,
"learning_rate": 4.359292254937296e-05,
"loss": 0.6812,
"num_tokens": 25227258.0,
"step": 1665
},
{
"epoch": 0.285031575354156,
"grad_norm": 0.9400669821950434,
"learning_rate": 4.354848126181762e-05,
"loss": 0.6652,
"num_tokens": 25733368.0,
"step": 1670
},
{
"epoch": 0.2858849633043181,
"grad_norm": 1.0759732617628444,
"learning_rate": 4.350391215773867e-05,
"loss": 0.6667,
"num_tokens": 26193707.0,
"step": 1675
},
{
"epoch": 0.2867383512544803,
"grad_norm": 1.0322942760702165,
"learning_rate": 4.345921559210227e-05,
"loss": 0.6621,
"num_tokens": 26667344.0,
"step": 1680
},
{
"epoch": 0.2875917392046424,
"grad_norm": 0.9563146317074364,
"learning_rate": 4.341439192088976e-05,
"loss": 0.638,
"num_tokens": 27107554.0,
"step": 1685
},
{
"epoch": 0.2884451271548046,
"grad_norm": 1.1135033435484845,
"learning_rate": 4.336944150109478e-05,
"loss": 0.6513,
"num_tokens": 27590348.0,
"step": 1690
},
{
"epoch": 0.28929851510496674,
"grad_norm": 1.1239383737366093,
"learning_rate": 4.332436469072044e-05,
"loss": 0.6513,
"num_tokens": 28111682.0,
"step": 1695
},
{
"epoch": 0.29015190305512883,
"grad_norm": 1.0086351161064906,
"learning_rate": 4.327916184877652e-05,
"loss": 0.666,
"num_tokens": 28565548.0,
"step": 1700
},
{
"epoch": 0.291005291005291,
"grad_norm": 0.9803052286266831,
"learning_rate": 4.3233833335276494e-05,
"loss": 0.6848,
"num_tokens": 29069662.0,
"step": 1705
},
{
"epoch": 0.29185867895545314,
"grad_norm": 1.2970656590944643,
"learning_rate": 4.31883795112348e-05,
"loss": 0.6702,
"num_tokens": 29540008.0,
"step": 1710
},
{
"epoch": 0.2927120669056153,
"grad_norm": 1.002197523474358,
"learning_rate": 4.314280073866386e-05,
"loss": 0.6677,
"num_tokens": 30069043.0,
"step": 1715
},
{
"epoch": 0.29356545485577745,
"grad_norm": 1.158257731716908,
"learning_rate": 4.3097097380571256e-05,
"loss": 0.6536,
"num_tokens": 30608334.0,
"step": 1720
},
{
"epoch": 0.2944188428059396,
"grad_norm": 1.1565617780482107,
"learning_rate": 4.305126980095681e-05,
"loss": 0.627,
"num_tokens": 31077481.0,
"step": 1725
},
{
"epoch": 0.2952722307561017,
"grad_norm": 1.0631530173310553,
"learning_rate": 4.300531836480968e-05,
"loss": 0.6164,
"num_tokens": 31523212.0,
"step": 1730
},
{
"epoch": 0.29612561870626386,
"grad_norm": 1.057549223421548,
"learning_rate": 4.295924343810551e-05,
"loss": 0.6402,
"num_tokens": 32009162.0,
"step": 1735
},
{
"epoch": 0.296979006656426,
"grad_norm": 0.9450402858117135,
"learning_rate": 4.291304538780343e-05,
"loss": 0.6341,
"num_tokens": 32483562.0,
"step": 1740
},
{
"epoch": 0.29783239460658817,
"grad_norm": 0.983929424012367,
"learning_rate": 4.286672458184319e-05,
"loss": 0.6368,
"num_tokens": 33004902.0,
"step": 1745
},
{
"epoch": 0.2986857825567503,
"grad_norm": 0.9137933823639911,
"learning_rate": 4.282028138914221e-05,
"loss": 0.6215,
"num_tokens": 33504512.0,
"step": 1750
},
{
"epoch": 0.2995391705069124,
"grad_norm": 0.9871617316090521,
"learning_rate": 4.2773716179592666e-05,
"loss": 0.6432,
"num_tokens": 33951768.0,
"step": 1755
},
{
"epoch": 0.3003925584570746,
"grad_norm": 1.160100489576663,
"learning_rate": 4.27270293240585e-05,
"loss": 0.6645,
"num_tokens": 34457317.0,
"step": 1760
},
{
"epoch": 0.30124594640723673,
"grad_norm": 0.8685501113426534,
"learning_rate": 4.26802211943725e-05,
"loss": 0.661,
"num_tokens": 34964422.0,
"step": 1765
},
{
"epoch": 0.3020993343573989,
"grad_norm": 0.9480039454372314,
"learning_rate": 4.263329216333335e-05,
"loss": 0.6468,
"num_tokens": 35451730.0,
"step": 1770
},
{
"epoch": 0.30295272230756104,
"grad_norm": 1.034557261853204,
"learning_rate": 4.258624260470262e-05,
"loss": 0.6767,
"num_tokens": 35975531.0,
"step": 1775
},
{
"epoch": 0.30380611025772314,
"grad_norm": 1.0583649410054492,
"learning_rate": 4.253907289320179e-05,
"loss": 0.6699,
"num_tokens": 36423440.0,
"step": 1780
},
{
"epoch": 0.3046594982078853,
"grad_norm": 0.9244887110033405,
"learning_rate": 4.249178340450933e-05,
"loss": 0.6536,
"num_tokens": 36907135.0,
"step": 1785
},
{
"epoch": 0.30551288615804745,
"grad_norm": 1.0956310368630402,
"learning_rate": 4.244437451525764e-05,
"loss": 0.6119,
"num_tokens": 37349185.0,
"step": 1790
},
{
"epoch": 0.3063662741082096,
"grad_norm": 0.8894609101942197,
"learning_rate": 4.239684660303006e-05,
"loss": 0.6206,
"num_tokens": 37857975.0,
"step": 1795
},
{
"epoch": 0.30721966205837176,
"grad_norm": 1.0075648632984608,
"learning_rate": 4.234920004635792e-05,
"loss": 0.6587,
"num_tokens": 38366018.0,
"step": 1800
},
{
"epoch": 0.30807305000853386,
"grad_norm": 0.9178640617606731,
"learning_rate": 4.230143522471743e-05,
"loss": 0.6422,
"num_tokens": 38885944.0,
"step": 1805
},
{
"epoch": 0.308926437958696,
"grad_norm": 0.9916540849655225,
"learning_rate": 4.225355251852675e-05,
"loss": 0.6407,
"num_tokens": 39400675.0,
"step": 1810
},
{
"epoch": 0.30977982590885816,
"grad_norm": 0.9767548190932287,
"learning_rate": 4.2205552309142885e-05,
"loss": 0.6349,
"num_tokens": 39850819.0,
"step": 1815
},
{
"epoch": 0.3106332138590203,
"grad_norm": 0.9626467489189492,
"learning_rate": 4.215743497885873e-05,
"loss": 0.6825,
"num_tokens": 40317411.0,
"step": 1820
},
{
"epoch": 0.3114866018091825,
"grad_norm": 0.9835758110325883,
"learning_rate": 4.2109200910899916e-05,
"loss": 0.6379,
"num_tokens": 40821787.0,
"step": 1825
},
{
"epoch": 0.31233998975934457,
"grad_norm": 0.8761549604162322,
"learning_rate": 4.206085048942187e-05,
"loss": 0.6712,
"num_tokens": 41357145.0,
"step": 1830
},
{
"epoch": 0.3131933777095067,
"grad_norm": 1.1816568369777558,
"learning_rate": 4.2012384099506694e-05,
"loss": 0.626,
"num_tokens": 41867236.0,
"step": 1835
},
{
"epoch": 0.3140467656596689,
"grad_norm": 1.0101399172107357,
"learning_rate": 4.196380212716008e-05,
"loss": 0.6177,
"num_tokens": 42320349.0,
"step": 1840
},
{
"epoch": 0.31490015360983103,
"grad_norm": 1.1136485632953617,
"learning_rate": 4.191510495930831e-05,
"loss": 0.6361,
"num_tokens": 42827624.0,
"step": 1845
},
{
"epoch": 0.3157535415599932,
"grad_norm": 0.9957428721397101,
"learning_rate": 4.1866292983795084e-05,
"loss": 0.6191,
"num_tokens": 43289900.0,
"step": 1850
},
{
"epoch": 0.31660692951015534,
"grad_norm": 0.8747769330269698,
"learning_rate": 4.1817366589378526e-05,
"loss": 0.612,
"num_tokens": 43788954.0,
"step": 1855
},
{
"epoch": 0.31746031746031744,
"grad_norm": 0.8584924919689573,
"learning_rate": 4.1768326165727975e-05,
"loss": 0.6061,
"num_tokens": 44253905.0,
"step": 1860
},
{
"epoch": 0.3183137054104796,
"grad_norm": 1.062043835867046,
"learning_rate": 4.171917210342101e-05,
"loss": 0.6599,
"num_tokens": 44729983.0,
"step": 1865
},
{
"epoch": 0.31916709336064175,
"grad_norm": 1.0217914781820183,
"learning_rate": 4.166990479394023e-05,
"loss": 0.648,
"num_tokens": 45195098.0,
"step": 1870
},
{
"epoch": 0.3200204813108039,
"grad_norm": 0.9201344018326302,
"learning_rate": 4.1620524629670196e-05,
"loss": 0.6671,
"num_tokens": 45672842.0,
"step": 1875
},
{
"epoch": 0.32087386926096606,
"grad_norm": 1.1397216517807938,
"learning_rate": 4.157103200389428e-05,
"loss": 0.6322,
"num_tokens": 46151658.0,
"step": 1880
},
{
"epoch": 0.32172725721112816,
"grad_norm": 0.9990337165224222,
"learning_rate": 4.152142731079155e-05,
"loss": 0.6761,
"num_tokens": 46666876.0,
"step": 1885
},
{
"epoch": 0.3225806451612903,
"grad_norm": 1.208502511536086,
"learning_rate": 4.147171094543363e-05,
"loss": 0.6406,
"num_tokens": 47099591.0,
"step": 1890
},
{
"epoch": 0.32343403311145247,
"grad_norm": 0.9304907451089872,
"learning_rate": 4.142188330378155e-05,
"loss": 0.6854,
"num_tokens": 47632239.0,
"step": 1895
},
{
"epoch": 0.3242874210616146,
"grad_norm": 1.0436402924297088,
"learning_rate": 4.1371944782682536e-05,
"loss": 0.6202,
"num_tokens": 48095443.0,
"step": 1900
},
{
"epoch": 0.3251408090117768,
"grad_norm": 0.9644270813331612,
"learning_rate": 4.132189577986699e-05,
"loss": 0.6237,
"num_tokens": 48554427.0,
"step": 1905
},
{
"epoch": 0.3259941969619389,
"grad_norm": 1.1748268554914663,
"learning_rate": 4.127173669394516e-05,
"loss": 0.6729,
"num_tokens": 49033431.0,
"step": 1910
},
{
"epoch": 0.32684758491210103,
"grad_norm": 1.0380245238600094,
"learning_rate": 4.1221467924404075e-05,
"loss": 0.6324,
"num_tokens": 49483384.0,
"step": 1915
},
{
"epoch": 0.3277009728622632,
"grad_norm": 0.9648072557402981,
"learning_rate": 4.117108987160432e-05,
"loss": 0.6481,
"num_tokens": 49980147.0,
"step": 1920
},
{
"epoch": 0.32855436081242534,
"grad_norm": 1.0206985745749346,
"learning_rate": 4.112060293677687e-05,
"loss": 0.6373,
"num_tokens": 50417382.0,
"step": 1925
},
{
"epoch": 0.3294077487625875,
"grad_norm": 0.8984622561385278,
"learning_rate": 4.107000752201984e-05,
"loss": 0.6332,
"num_tokens": 50943097.0,
"step": 1930
},
{
"epoch": 0.3302611367127496,
"grad_norm": 1.0000056966065096,
"learning_rate": 4.101930403029538e-05,
"loss": 0.6896,
"num_tokens": 51447513.0,
"step": 1935
},
{
"epoch": 0.33111452466291175,
"grad_norm": 1.3267825088425038,
"learning_rate": 4.0968492865426367e-05,
"loss": 0.6939,
"num_tokens": 51924577.0,
"step": 1940
},
{
"epoch": 0.3319679126130739,
"grad_norm": 1.0364145972348338,
"learning_rate": 4.091757443209322e-05,
"loss": 0.6587,
"num_tokens": 52461321.0,
"step": 1945
},
{
"epoch": 0.33282130056323606,
"grad_norm": 0.9473197857926919,
"learning_rate": 4.0866549135830745e-05,
"loss": 0.6492,
"num_tokens": 52942461.0,
"step": 1950
},
{
"epoch": 0.3336746885133982,
"grad_norm": 1.027596288413729,
"learning_rate": 4.081541738302478e-05,
"loss": 0.6149,
"num_tokens": 53444941.0,
"step": 1955
},
{
"epoch": 0.3345280764635603,
"grad_norm": 1.0567423158418994,
"learning_rate": 4.076417958090906e-05,
"loss": 0.7059,
"num_tokens": 53933388.0,
"step": 1960
},
{
"epoch": 0.33538146441372246,
"grad_norm": 0.9817599310427655,
"learning_rate": 4.071283613756196e-05,
"loss": 0.6472,
"num_tokens": 54391687.0,
"step": 1965
},
{
"epoch": 0.3362348523638846,
"grad_norm": 0.995280470621319,
"learning_rate": 4.0661387461903154e-05,
"loss": 0.6344,
"num_tokens": 54855386.0,
"step": 1970
},
{
"epoch": 0.33708824031404677,
"grad_norm": 0.9154397814757621,
"learning_rate": 4.060983396369051e-05,
"loss": 0.6384,
"num_tokens": 55368447.0,
"step": 1975
},
{
"epoch": 0.3379416282642089,
"grad_norm": 0.9448740168747737,
"learning_rate": 4.055817605351669e-05,
"loss": 0.6411,
"num_tokens": 55814026.0,
"step": 1980
},
{
"epoch": 0.3387950162143711,
"grad_norm": 1.094004234955502,
"learning_rate": 4.050641414280597e-05,
"loss": 0.614,
"num_tokens": 56255121.0,
"step": 1985
},
{
"epoch": 0.3396484041645332,
"grad_norm": 0.7742679468622998,
"learning_rate": 4.045454864381088e-05,
"loss": 0.6796,
"num_tokens": 56796489.0,
"step": 1990
},
{
"epoch": 0.34050179211469533,
"grad_norm": 0.9621627980405628,
"learning_rate": 4.0402579969609024e-05,
"loss": 0.6284,
"num_tokens": 57279668.0,
"step": 1995
},
{
"epoch": 0.3413551800648575,
"grad_norm": 0.9077184248917183,
"learning_rate": 4.035050853409969e-05,
"loss": 0.6386,
"num_tokens": 57746160.0,
"step": 2000
},
{
"epoch": 0.34220856801501964,
"grad_norm": 0.8295552770492037,
"learning_rate": 4.0298334752000634e-05,
"loss": 0.6142,
"num_tokens": 58261166.0,
"step": 2005
},
{
"epoch": 0.3430619559651818,
"grad_norm": 0.9468242453122664,
"learning_rate": 4.0246059038844716e-05,
"loss": 0.6364,
"num_tokens": 58738443.0,
"step": 2010
},
{
"epoch": 0.3439153439153439,
"grad_norm": 1.0397478691275794,
"learning_rate": 4.019368181097663e-05,
"loss": 0.6293,
"num_tokens": 59205817.0,
"step": 2015
},
{
"epoch": 0.34476873186550605,
"grad_norm": 1.0350500134093694,
"learning_rate": 4.0141203485549564e-05,
"loss": 0.6505,
"num_tokens": 59653617.0,
"step": 2020
},
{
"epoch": 0.3456221198156682,
"grad_norm": 0.9043465643179709,
"learning_rate": 4.008862448052188e-05,
"loss": 0.6542,
"num_tokens": 60145641.0,
"step": 2025
},
{
"epoch": 0.34647550776583036,
"grad_norm": 0.9539526130039545,
"learning_rate": 4.003594521465381e-05,
"loss": 0.651,
"num_tokens": 60593934.0,
"step": 2030
},
{
"epoch": 0.3473288957159925,
"grad_norm": 0.9659486932550566,
"learning_rate": 3.9983166107504114e-05,
"loss": 0.6528,
"num_tokens": 61088175.0,
"step": 2035
},
{
"epoch": 0.3481822836661546,
"grad_norm": 0.964763472219998,
"learning_rate": 3.99302875794267e-05,
"loss": 0.6141,
"num_tokens": 61601767.0,
"step": 2040
},
{
"epoch": 0.34903567161631677,
"grad_norm": 0.9675579189227672,
"learning_rate": 3.987731005156731e-05,
"loss": 0.6199,
"num_tokens": 62100317.0,
"step": 2045
},
{
"epoch": 0.3498890595664789,
"grad_norm": 0.9032844144807181,
"learning_rate": 3.9824233945860165e-05,
"loss": 0.6304,
"num_tokens": 62565169.0,
"step": 2050
},
{
"epoch": 0.3507424475166411,
"grad_norm": 0.98765077203059,
"learning_rate": 3.977105968502461e-05,
"loss": 0.5942,
"num_tokens": 63032634.0,
"step": 2055
},
{
"epoch": 0.35159583546680323,
"grad_norm": 1.0669138250049846,
"learning_rate": 3.971778769256172e-05,
"loss": 0.6312,
"num_tokens": 63491002.0,
"step": 2060
},
{
"epoch": 0.35244922341696533,
"grad_norm": 0.9971058529881554,
"learning_rate": 3.966441839275095e-05,
"loss": 0.6788,
"num_tokens": 63993961.0,
"step": 2065
},
{
"epoch": 0.3533026113671275,
"grad_norm": 0.9440611528418567,
"learning_rate": 3.9610952210646746e-05,
"loss": 0.6253,
"num_tokens": 64465071.0,
"step": 2070
},
{
"epoch": 0.35415599931728964,
"grad_norm": 0.9864109612249565,
"learning_rate": 3.955738957207517e-05,
"loss": 0.5995,
"num_tokens": 64924863.0,
"step": 2075
},
{
"epoch": 0.3550093872674518,
"grad_norm": 0.8734114010653501,
"learning_rate": 3.95037309036305e-05,
"loss": 0.6147,
"num_tokens": 65449389.0,
"step": 2080
},
{
"epoch": 0.35586277521761395,
"grad_norm": 0.9323878220474048,
"learning_rate": 3.944997663267183e-05,
"loss": 0.6499,
"num_tokens": 65933344.0,
"step": 2085
},
{
"epoch": 0.35671616316777605,
"grad_norm": 1.2029027862535284,
"learning_rate": 3.939612718731968e-05,
"loss": 0.6352,
"num_tokens": 66383890.0,
"step": 2090
},
{
"epoch": 0.3575695511179382,
"grad_norm": 1.062980629087297,
"learning_rate": 3.934218299645256e-05,
"loss": 0.6787,
"num_tokens": 66852966.0,
"step": 2095
},
{
"epoch": 0.35842293906810035,
"grad_norm": 1.068461942262625,
"learning_rate": 3.9288144489703595e-05,
"loss": 0.688,
"num_tokens": 67336145.0,
"step": 2100
},
{
"epoch": 0.3592763270182625,
"grad_norm": 1.1070164272184257,
"learning_rate": 3.923401209745705e-05,
"loss": 0.6366,
"num_tokens": 67785161.0,
"step": 2105
},
{
"epoch": 0.36012971496842466,
"grad_norm": 1.0589262427093307,
"learning_rate": 3.917978625084497e-05,
"loss": 0.6272,
"num_tokens": 68235334.0,
"step": 2110
},
{
"epoch": 0.36098310291858676,
"grad_norm": 0.9562551264817776,
"learning_rate": 3.912546738174367e-05,
"loss": 0.6758,
"num_tokens": 68709817.0,
"step": 2115
},
{
"epoch": 0.3618364908687489,
"grad_norm": 0.9656995006466078,
"learning_rate": 3.907105592277035e-05,
"loss": 0.5975,
"num_tokens": 69137066.0,
"step": 2120
},
{
"epoch": 0.36268987881891107,
"grad_norm": 1.194718586831029,
"learning_rate": 3.901655230727964e-05,
"loss": 0.622,
"num_tokens": 69534328.0,
"step": 2125
},
{
"epoch": 0.3635432667690732,
"grad_norm": 0.9072228311900601,
"learning_rate": 3.896195696936012e-05,
"loss": 0.6298,
"num_tokens": 70009467.0,
"step": 2130
},
{
"epoch": 0.3643966547192354,
"grad_norm": 0.979540589242795,
"learning_rate": 3.890727034383092e-05,
"loss": 0.6555,
"num_tokens": 70495279.0,
"step": 2135
},
{
"epoch": 0.36525004266939753,
"grad_norm": 1.0006563543000302,
"learning_rate": 3.885249286623816e-05,
"loss": 0.6323,
"num_tokens": 71003407.0,
"step": 2140
},
{
"epoch": 0.36610343061955963,
"grad_norm": 0.9867551345086583,
"learning_rate": 3.879762497285162e-05,
"loss": 0.6493,
"num_tokens": 71504190.0,
"step": 2145
},
{
"epoch": 0.3669568185697218,
"grad_norm": 1.0604473548798987,
"learning_rate": 3.874266710066113e-05,
"loss": 0.6375,
"num_tokens": 71964614.0,
"step": 2150
},
{
"epoch": 0.36781020651988394,
"grad_norm": 1.0541953933655328,
"learning_rate": 3.868761968737318e-05,
"loss": 0.6261,
"num_tokens": 72517880.0,
"step": 2155
},
{
"epoch": 0.3686635944700461,
"grad_norm": 0.9282749054885615,
"learning_rate": 3.863248317140736e-05,
"loss": 0.6267,
"num_tokens": 72995231.0,
"step": 2160
},
{
"epoch": 0.36951698242020825,
"grad_norm": 0.8559509162708646,
"learning_rate": 3.857725799189296e-05,
"loss": 0.6473,
"num_tokens": 73496130.0,
"step": 2165
},
{
"epoch": 0.37037037037037035,
"grad_norm": 0.9466112720742375,
"learning_rate": 3.852194458866538e-05,
"loss": 0.6232,
"num_tokens": 73976169.0,
"step": 2170
},
{
"epoch": 0.3712237583205325,
"grad_norm": 0.916022914049245,
"learning_rate": 3.846654340226271e-05,
"loss": 0.6342,
"num_tokens": 74473862.0,
"step": 2175
},
{
"epoch": 0.37207714627069466,
"grad_norm": 0.9594372023699115,
"learning_rate": 3.841105487392213e-05,
"loss": 0.6201,
"num_tokens": 75006419.0,
"step": 2180
},
{
"epoch": 0.3729305342208568,
"grad_norm": 1.1199332662310155,
"learning_rate": 3.835547944557648e-05,
"loss": 0.6321,
"num_tokens": 75460453.0,
"step": 2185
},
{
"epoch": 0.37378392217101897,
"grad_norm": 1.0298123419544616,
"learning_rate": 3.829981755985072e-05,
"loss": 0.6206,
"num_tokens": 75905545.0,
"step": 2190
},
{
"epoch": 0.37463731012118107,
"grad_norm": 1.0059371478730523,
"learning_rate": 3.824406966005835e-05,
"loss": 0.6316,
"num_tokens": 76325746.0,
"step": 2195
},
{
"epoch": 0.3754906980713432,
"grad_norm": 0.8565948465056196,
"learning_rate": 3.818823619019795e-05,
"loss": 0.63,
"num_tokens": 76819650.0,
"step": 2200
},
{
"epoch": 0.3763440860215054,
"grad_norm": 0.971718735543094,
"learning_rate": 3.8132317594949593e-05,
"loss": 0.6303,
"num_tokens": 77267120.0,
"step": 2205
},
{
"epoch": 0.37719747397166753,
"grad_norm": 0.964877825336049,
"learning_rate": 3.807631431967135e-05,
"loss": 0.6599,
"num_tokens": 77718204.0,
"step": 2210
},
{
"epoch": 0.3780508619218297,
"grad_norm": 0.9131715426906987,
"learning_rate": 3.8020226810395706e-05,
"loss": 0.6357,
"num_tokens": 78201205.0,
"step": 2215
},
{
"epoch": 0.3789042498719918,
"grad_norm": 0.9681729255888176,
"learning_rate": 3.796405551382602e-05,
"loss": 0.6362,
"num_tokens": 78680163.0,
"step": 2220
},
{
"epoch": 0.37975763782215394,
"grad_norm": 1.024261468471383,
"learning_rate": 3.7907800877332945e-05,
"loss": 0.6262,
"num_tokens": 79147537.0,
"step": 2225
},
{
"epoch": 0.3806110257723161,
"grad_norm": 0.8968073214866014,
"learning_rate": 3.785146334895093e-05,
"loss": 0.613,
"num_tokens": 79600159.0,
"step": 2230
},
{
"epoch": 0.38146441372247825,
"grad_norm": 0.948257919338274,
"learning_rate": 3.779504337737456e-05,
"loss": 0.6318,
"num_tokens": 80042192.0,
"step": 2235
},
{
"epoch": 0.3823178016726404,
"grad_norm": 0.9629472763837074,
"learning_rate": 3.7738541411955074e-05,
"loss": 0.6534,
"num_tokens": 80495756.0,
"step": 2240
},
{
"epoch": 0.3831711896228025,
"grad_norm": 0.877217572957814,
"learning_rate": 3.768195790269672e-05,
"loss": 0.6071,
"num_tokens": 81028627.0,
"step": 2245
},
{
"epoch": 0.38402457757296465,
"grad_norm": 0.8818900363189004,
"learning_rate": 3.762529330025319e-05,
"loss": 0.6812,
"num_tokens": 81505213.0,
"step": 2250
},
{
"epoch": 0.3848779655231268,
"grad_norm": 0.8225263179706899,
"learning_rate": 3.756854805592405e-05,
"loss": 0.6587,
"num_tokens": 81971160.0,
"step": 2255
},
{
"epoch": 0.38573135347328896,
"grad_norm": 0.9901818767360601,
"learning_rate": 3.7511722621651116e-05,
"loss": 0.6401,
"num_tokens": 82465162.0,
"step": 2260
},
{
"epoch": 0.3865847414234511,
"grad_norm": 1.0000087207676795,
"learning_rate": 3.745481745001488e-05,
"loss": 0.6431,
"num_tokens": 82959331.0,
"step": 2265
},
{
"epoch": 0.38743812937361327,
"grad_norm": 0.8801367506244648,
"learning_rate": 3.7397832994230886e-05,
"loss": 0.6524,
"num_tokens": 83442507.0,
"step": 2270
},
{
"epoch": 0.38829151732377537,
"grad_norm": 0.8357184360328325,
"learning_rate": 3.734076970814613e-05,
"loss": 0.6123,
"num_tokens": 83886573.0,
"step": 2275
},
{
"epoch": 0.3891449052739375,
"grad_norm": 0.9819493221246193,
"learning_rate": 3.728362804623544e-05,
"loss": 0.642,
"num_tokens": 84378042.0,
"step": 2280
},
{
"epoch": 0.3899982932240997,
"grad_norm": 1.2011907738377074,
"learning_rate": 3.7226408463597885e-05,
"loss": 0.6725,
"num_tokens": 84863488.0,
"step": 2285
},
{
"epoch": 0.39085168117426183,
"grad_norm": 0.8281172195899965,
"learning_rate": 3.716911141595309e-05,
"loss": 0.6141,
"num_tokens": 85415205.0,
"step": 2290
},
{
"epoch": 0.391705069124424,
"grad_norm": 0.9770574138656928,
"learning_rate": 3.711173735963767e-05,
"loss": 0.6286,
"num_tokens": 85874153.0,
"step": 2295
},
{
"epoch": 0.3925584570745861,
"grad_norm": 0.9169044783146749,
"learning_rate": 3.705428675160154e-05,
"loss": 0.6239,
"num_tokens": 86342702.0,
"step": 2300
},
{
"epoch": 0.39341184502474824,
"grad_norm": 0.8589301929082652,
"learning_rate": 3.6996760049404344e-05,
"loss": 0.6022,
"num_tokens": 86838157.0,
"step": 2305
},
{
"epoch": 0.3942652329749104,
"grad_norm": 1.0061321674465398,
"learning_rate": 3.693915771121173e-05,
"loss": 0.6518,
"num_tokens": 87329550.0,
"step": 2310
},
{
"epoch": 0.39511862092507255,
"grad_norm": 1.0648969069589578,
"learning_rate": 3.688148019579177e-05,
"loss": 0.6574,
"num_tokens": 87806338.0,
"step": 2315
},
{
"epoch": 0.3959720088752347,
"grad_norm": 0.9160782426371946,
"learning_rate": 3.682372796251125e-05,
"loss": 0.6356,
"num_tokens": 88291313.0,
"step": 2320
},
{
"epoch": 0.3968253968253968,
"grad_norm": 1.0359966560447595,
"learning_rate": 3.676590147133206e-05,
"loss": 0.6411,
"num_tokens": 88782067.0,
"step": 2325
},
{
"epoch": 0.39767878477555896,
"grad_norm": 1.013568304434419,
"learning_rate": 3.670800118280753e-05,
"loss": 0.6383,
"num_tokens": 89251725.0,
"step": 2330
},
{
"epoch": 0.3985321727257211,
"grad_norm": 0.8483728475918468,
"learning_rate": 3.665002755807868e-05,
"loss": 0.6106,
"num_tokens": 89677245.0,
"step": 2335
},
{
"epoch": 0.39938556067588327,
"grad_norm": 0.9049641479903795,
"learning_rate": 3.6591981058870666e-05,
"loss": 0.5812,
"num_tokens": 90126369.0,
"step": 2340
},
{
"epoch": 0.4002389486260454,
"grad_norm": 0.8707887587181531,
"learning_rate": 3.653386214748902e-05,
"loss": 0.6756,
"num_tokens": 90629081.0,
"step": 2345
},
{
"epoch": 0.4010923365762075,
"grad_norm": 0.9654943475549265,
"learning_rate": 3.647567128681598e-05,
"loss": 0.5985,
"num_tokens": 91062791.0,
"step": 2350
},
{
"epoch": 0.4019457245263697,
"grad_norm": 1.434236971759778,
"learning_rate": 3.641740894030687e-05,
"loss": 0.6202,
"num_tokens": 91544042.0,
"step": 2355
},
{
"epoch": 0.40279911247653183,
"grad_norm": 0.891703650231472,
"learning_rate": 3.635907557198629e-05,
"loss": 0.6246,
"num_tokens": 92062591.0,
"step": 2360
},
{
"epoch": 0.403652500426694,
"grad_norm": 0.9924195803456223,
"learning_rate": 3.630067164644453e-05,
"loss": 0.5949,
"num_tokens": 92465083.0,
"step": 2365
},
{
"epoch": 0.40450588837685614,
"grad_norm": 0.9934691293711319,
"learning_rate": 3.624219762883381e-05,
"loss": 0.6093,
"num_tokens": 92946072.0,
"step": 2370
},
{
"epoch": 0.40535927632701824,
"grad_norm": 0.9356359151969718,
"learning_rate": 3.61836539848646e-05,
"loss": 0.6722,
"num_tokens": 93480913.0,
"step": 2375
},
{
"epoch": 0.4062126642771804,
"grad_norm": 0.8475881209916387,
"learning_rate": 3.6125041180801876e-05,
"loss": 0.6104,
"num_tokens": 93982593.0,
"step": 2380
},
{
"epoch": 0.40706605222734255,
"grad_norm": 0.876694958055183,
"learning_rate": 3.606635968346148e-05,
"loss": 0.6004,
"num_tokens": 94448263.0,
"step": 2385
},
{
"epoch": 0.4079194401775047,
"grad_norm": 0.8457478713791695,
"learning_rate": 3.6007609960206316e-05,
"loss": 0.6372,
"num_tokens": 94960061.0,
"step": 2390
},
{
"epoch": 0.40877282812766685,
"grad_norm": 0.9818501941975467,
"learning_rate": 3.5948792478942666e-05,
"loss": 0.6157,
"num_tokens": 95470351.0,
"step": 2395
},
{
"epoch": 0.409626216077829,
"grad_norm": 0.9205272547654763,
"learning_rate": 3.588990770811649e-05,
"loss": 0.6256,
"num_tokens": 95944505.0,
"step": 2400
},
{
"epoch": 0.4104796040279911,
"grad_norm": 1.0164677508459452,
"learning_rate": 3.583095611670965e-05,
"loss": 0.6084,
"num_tokens": 482939.0,
"step": 2405
},
{
"epoch": 0.41133299197815326,
"grad_norm": 0.9091799199639967,
"learning_rate": 3.57719381742362e-05,
"loss": 0.6317,
"num_tokens": 989777.0,
"step": 2410
},
{
"epoch": 0.4121863799283154,
"grad_norm": 0.9187327421482128,
"learning_rate": 3.571285435073865e-05,
"loss": 0.6409,
"num_tokens": 1451952.0,
"step": 2415
},
{
"epoch": 0.41303976787847757,
"grad_norm": 0.8268322159131443,
"learning_rate": 3.5653705116784174e-05,
"loss": 0.6317,
"num_tokens": 1967639.0,
"step": 2420
},
{
"epoch": 0.4138931558286397,
"grad_norm": 0.8917435541473175,
"learning_rate": 3.559449094346096e-05,
"loss": 0.648,
"num_tokens": 2432780.0,
"step": 2425
},
{
"epoch": 0.4147465437788018,
"grad_norm": 0.823719365284321,
"learning_rate": 3.5535212302374334e-05,
"loss": 0.6364,
"num_tokens": 2949439.0,
"step": 2430
},
{
"epoch": 0.415599931728964,
"grad_norm": 1.0803913143852297,
"learning_rate": 3.547586966564314e-05,
"loss": 0.6751,
"num_tokens": 3411629.0,
"step": 2435
},
{
"epoch": 0.41645331967912613,
"grad_norm": 0.9749590498819914,
"learning_rate": 3.5416463505895836e-05,
"loss": 0.6443,
"num_tokens": 3907038.0,
"step": 2440
},
{
"epoch": 0.4173067076292883,
"grad_norm": 0.8505045179117103,
"learning_rate": 3.5356994296266874e-05,
"loss": 0.6228,
"num_tokens": 4398651.0,
"step": 2445
},
{
"epoch": 0.41816009557945044,
"grad_norm": 0.9912020364637751,
"learning_rate": 3.5297462510392796e-05,
"loss": 0.6325,
"num_tokens": 4847281.0,
"step": 2450
},
{
"epoch": 0.41901348352961254,
"grad_norm": 0.9095743217389802,
"learning_rate": 3.5237868622408574e-05,
"loss": 0.6658,
"num_tokens": 5318809.0,
"step": 2455
},
{
"epoch": 0.4198668714797747,
"grad_norm": 0.8293429203316864,
"learning_rate": 3.5178213106943754e-05,
"loss": 0.6082,
"num_tokens": 5753540.0,
"step": 2460
},
{
"epoch": 0.42072025942993685,
"grad_norm": 1.0446630574566,
"learning_rate": 3.5118496439118734e-05,
"loss": 0.6353,
"num_tokens": 6226147.0,
"step": 2465
},
{
"epoch": 0.421573647380099,
"grad_norm": 0.8769207386177866,
"learning_rate": 3.505871909454093e-05,
"loss": 0.6529,
"num_tokens": 6725686.0,
"step": 2470
},
{
"epoch": 0.42242703533026116,
"grad_norm": 0.9508154637486791,
"learning_rate": 3.4998881549301025e-05,
"loss": 0.6222,
"num_tokens": 7265229.0,
"step": 2475
},
{
"epoch": 0.42328042328042326,
"grad_norm": 0.9566300180169934,
"learning_rate": 3.493898427996917e-05,
"loss": 0.6558,
"num_tokens": 7683984.0,
"step": 2480
},
{
"epoch": 0.4241338112305854,
"grad_norm": 1.0927270546087369,
"learning_rate": 3.487902776359118e-05,
"loss": 0.6249,
"num_tokens": 8156250.0,
"step": 2485
},
{
"epoch": 0.42498719918074757,
"grad_norm": 0.9928807292515285,
"learning_rate": 3.481901247768471e-05,
"loss": 0.5972,
"num_tokens": 8628772.0,
"step": 2490
},
{
"epoch": 0.4258405871309097,
"grad_norm": 0.9449942264532448,
"learning_rate": 3.4758938900235514e-05,
"loss": 0.6578,
"num_tokens": 9120921.0,
"step": 2495
},
{
"epoch": 0.4266939750810719,
"grad_norm": 1.0607940692442015,
"learning_rate": 3.46988075096936e-05,
"loss": 0.6304,
"num_tokens": 9560658.0,
"step": 2500
},
{
"epoch": 0.427547363031234,
"grad_norm": 0.9152490107525716,
"learning_rate": 3.463861878496939e-05,
"loss": 0.6212,
"num_tokens": 10061014.0,
"step": 2505
},
{
"epoch": 0.42840075098139613,
"grad_norm": 1.0829777365198388,
"learning_rate": 3.457837320542998e-05,
"loss": 0.6402,
"num_tokens": 10515660.0,
"step": 2510
},
{
"epoch": 0.4292541389315583,
"grad_norm": 0.9597202991590822,
"learning_rate": 3.451807125089525e-05,
"loss": 0.6428,
"num_tokens": 10976284.0,
"step": 2515
},
{
"epoch": 0.43010752688172044,
"grad_norm": 1.0855106871299507,
"learning_rate": 3.445771340163409e-05,
"loss": 0.6245,
"num_tokens": 11403774.0,
"step": 2520
},
{
"epoch": 0.4309609148318826,
"grad_norm": 1.130452291984405,
"learning_rate": 3.4397300138360565e-05,
"loss": 0.6442,
"num_tokens": 11874868.0,
"step": 2525
},
{
"epoch": 0.4318143027820447,
"grad_norm": 0.8901582972402161,
"learning_rate": 3.433683194223004e-05,
"loss": 0.6598,
"num_tokens": 12323802.0,
"step": 2530
},
{
"epoch": 0.43266769073220684,
"grad_norm": 1.0163374881517988,
"learning_rate": 3.427630929483543e-05,
"loss": 0.6568,
"num_tokens": 12821783.0,
"step": 2535
},
{
"epoch": 0.433521078682369,
"grad_norm": 1.2078170030289623,
"learning_rate": 3.42157326782033e-05,
"loss": 0.6282,
"num_tokens": 13282255.0,
"step": 2540
},
{
"epoch": 0.43437446663253115,
"grad_norm": 0.9781762323975279,
"learning_rate": 3.415510257479008e-05,
"loss": 0.6611,
"num_tokens": 13799399.0,
"step": 2545
},
{
"epoch": 0.4352278545826933,
"grad_norm": 1.0308563805242064,
"learning_rate": 3.409441946747813e-05,
"loss": 0.6139,
"num_tokens": 14301887.0,
"step": 2550
},
{
"epoch": 0.43608124253285546,
"grad_norm": 0.9289960246789825,
"learning_rate": 3.403368383957199e-05,
"loss": 0.5933,
"num_tokens": 14745656.0,
"step": 2555
},
{
"epoch": 0.43693463048301756,
"grad_norm": 0.8577337044308939,
"learning_rate": 3.39728961747945e-05,
"loss": 0.5892,
"num_tokens": 15257500.0,
"step": 2560
},
{
"epoch": 0.4377880184331797,
"grad_norm": 0.9491205699096495,
"learning_rate": 3.391205695728295e-05,
"loss": 0.643,
"num_tokens": 15815630.0,
"step": 2565
},
{
"epoch": 0.43864140638334187,
"grad_norm": 0.819417628826071,
"learning_rate": 3.3851166671585176e-05,
"loss": 0.633,
"num_tokens": 16325393.0,
"step": 2570
},
{
"epoch": 0.439494794333504,
"grad_norm": 0.9084966436728551,
"learning_rate": 3.3790225802655796e-05,
"loss": 0.7004,
"num_tokens": 16832508.0,
"step": 2575
},
{
"epoch": 0.4403481822836662,
"grad_norm": 0.9355719385094893,
"learning_rate": 3.3729234835852236e-05,
"loss": 0.6495,
"num_tokens": 17265900.0,
"step": 2580
},
{
"epoch": 0.4412015702338283,
"grad_norm": 0.9198866510461836,
"learning_rate": 3.3668194256930966e-05,
"loss": 0.6203,
"num_tokens": 17758587.0,
"step": 2585
},
{
"epoch": 0.44205495818399043,
"grad_norm": 0.9864299064702182,
"learning_rate": 3.360710455204357e-05,
"loss": 0.5901,
"num_tokens": 18162019.0,
"step": 2590
},
{
"epoch": 0.4429083461341526,
"grad_norm": 0.8533788630772795,
"learning_rate": 3.354596620773288e-05,
"loss": 0.6233,
"num_tokens": 18623614.0,
"step": 2595
},
{
"epoch": 0.44376173408431474,
"grad_norm": 0.897565045339347,
"learning_rate": 3.348477971092914e-05,
"loss": 0.6353,
"num_tokens": 19040608.0,
"step": 2600
},
{
"epoch": 0.4446151220344769,
"grad_norm": 0.8517543561030608,
"learning_rate": 3.3423545548946074e-05,
"loss": 0.6051,
"num_tokens": 19548853.0,
"step": 2605
},
{
"epoch": 0.445468509984639,
"grad_norm": 0.9520584025010822,
"learning_rate": 3.336226420947704e-05,
"loss": 0.648,
"num_tokens": 19984697.0,
"step": 2610
},
{
"epoch": 0.44632189793480115,
"grad_norm": 0.8190289095396062,
"learning_rate": 3.330093618059114e-05,
"loss": 0.6475,
"num_tokens": 20478274.0,
"step": 2615
},
{
"epoch": 0.4471752858849633,
"grad_norm": 0.9202078769277446,
"learning_rate": 3.323956195072932e-05,
"loss": 0.6154,
"num_tokens": 20961369.0,
"step": 2620
},
{
"epoch": 0.44802867383512546,
"grad_norm": 0.9572401045682531,
"learning_rate": 3.3178142008700494e-05,
"loss": 0.6179,
"num_tokens": 21399865.0,
"step": 2625
},
{
"epoch": 0.4488820617852876,
"grad_norm": 1.0143431696499436,
"learning_rate": 3.311667684367765e-05,
"loss": 0.6418,
"num_tokens": 21883446.0,
"step": 2630
},
{
"epoch": 0.4497354497354497,
"grad_norm": 0.9286975643218015,
"learning_rate": 3.3055166945193944e-05,
"loss": 0.6044,
"num_tokens": 22380147.0,
"step": 2635
},
{
"epoch": 0.45058883768561186,
"grad_norm": 0.8826200601623698,
"learning_rate": 3.299361280313881e-05,
"loss": 0.5855,
"num_tokens": 22896778.0,
"step": 2640
},
{
"epoch": 0.451442225635774,
"grad_norm": 0.9121057769453851,
"learning_rate": 3.293201490775406e-05,
"loss": 0.6272,
"num_tokens": 23387347.0,
"step": 2645
},
{
"epoch": 0.4522956135859362,
"grad_norm": 1.021199041775351,
"learning_rate": 3.2870373749629954e-05,
"loss": 0.6673,
"num_tokens": 23866169.0,
"step": 2650
},
{
"epoch": 0.45314900153609833,
"grad_norm": 0.7934600813626838,
"learning_rate": 3.280868981970134e-05,
"loss": 0.6036,
"num_tokens": 24294707.0,
"step": 2655
},
{
"epoch": 0.4540023894862604,
"grad_norm": 1.0668312744043429,
"learning_rate": 3.2746963609243716e-05,
"loss": 0.6176,
"num_tokens": 24786772.0,
"step": 2660
},
{
"epoch": 0.4548557774364226,
"grad_norm": 0.8327511404831184,
"learning_rate": 3.26851956098693e-05,
"loss": 0.6254,
"num_tokens": 25263421.0,
"step": 2665
},
{
"epoch": 0.45570916538658474,
"grad_norm": 0.8948559186273163,
"learning_rate": 3.2623386313523145e-05,
"loss": 0.6201,
"num_tokens": 25723061.0,
"step": 2670
},
{
"epoch": 0.4565625533367469,
"grad_norm": 0.8282682651999119,
"learning_rate": 3.256153621247921e-05,
"loss": 0.6495,
"num_tokens": 26259692.0,
"step": 2675
},
{
"epoch": 0.45741594128690904,
"grad_norm": 0.9165264208822277,
"learning_rate": 3.249964579933644e-05,
"loss": 0.6151,
"num_tokens": 26707422.0,
"step": 2680
},
{
"epoch": 0.4582693292370712,
"grad_norm": 0.9824352883104055,
"learning_rate": 3.2437715567014836e-05,
"loss": 0.5971,
"num_tokens": 27173821.0,
"step": 2685
},
{
"epoch": 0.4591227171872333,
"grad_norm": 1.0074666109427157,
"learning_rate": 3.237574600875154e-05,
"loss": 0.615,
"num_tokens": 27631439.0,
"step": 2690
},
{
"epoch": 0.45997610513739545,
"grad_norm": 0.858111222643122,
"learning_rate": 3.231373761809689e-05,
"loss": 0.6305,
"num_tokens": 28197458.0,
"step": 2695
},
{
"epoch": 0.4608294930875576,
"grad_norm": 0.9212213548939474,
"learning_rate": 3.2251690888910535e-05,
"loss": 0.5823,
"num_tokens": 28686498.0,
"step": 2700
},
{
"epoch": 0.46168288103771976,
"grad_norm": 1.0348751550407185,
"learning_rate": 3.218960631535742e-05,
"loss": 0.622,
"num_tokens": 29144972.0,
"step": 2705
},
{
"epoch": 0.4625362689878819,
"grad_norm": 1.0248437192672697,
"learning_rate": 3.212748439190392e-05,
"loss": 0.6212,
"num_tokens": 29666782.0,
"step": 2710
},
{
"epoch": 0.463389656938044,
"grad_norm": 0.84228312481392,
"learning_rate": 3.20653256133139e-05,
"loss": 0.6194,
"num_tokens": 30192754.0,
"step": 2715
},
{
"epoch": 0.46424304488820617,
"grad_norm": 0.818769118662692,
"learning_rate": 3.200313047464471e-05,
"loss": 0.6678,
"num_tokens": 30745942.0,
"step": 2720
},
{
"epoch": 0.4650964328383683,
"grad_norm": 1.0823879075880105,
"learning_rate": 3.194089947124333e-05,
"loss": 0.5908,
"num_tokens": 31152396.0,
"step": 2725
},
{
"epoch": 0.4659498207885305,
"grad_norm": 0.8659299087412192,
"learning_rate": 3.1878633098742344e-05,
"loss": 0.6113,
"num_tokens": 31689199.0,
"step": 2730
},
{
"epoch": 0.46680320873869263,
"grad_norm": 0.8851915995943411,
"learning_rate": 3.1816331853056063e-05,
"loss": 0.6078,
"num_tokens": 32128480.0,
"step": 2735
},
{
"epoch": 0.46765659668885473,
"grad_norm": 0.7702876245179221,
"learning_rate": 3.175399623037652e-05,
"loss": 0.6129,
"num_tokens": 32674508.0,
"step": 2740
},
{
"epoch": 0.4685099846390169,
"grad_norm": 0.8836130988239285,
"learning_rate": 3.169162672716954e-05,
"loss": 0.5919,
"num_tokens": 33081921.0,
"step": 2745
},
{
"epoch": 0.46936337258917904,
"grad_norm": 0.8231620704033741,
"learning_rate": 3.162922384017081e-05,
"loss": 0.5991,
"num_tokens": 33556906.0,
"step": 2750
},
{
"epoch": 0.4702167605393412,
"grad_norm": 0.952339994710836,
"learning_rate": 3.156678806638186e-05,
"loss": 0.6042,
"num_tokens": 34031283.0,
"step": 2755
},
{
"epoch": 0.47107014848950335,
"grad_norm": 0.9622665131906707,
"learning_rate": 3.150431990306617e-05,
"loss": 0.6557,
"num_tokens": 34577418.0,
"step": 2760
},
{
"epoch": 0.47192353643966545,
"grad_norm": 0.8311985943600112,
"learning_rate": 3.1441819847745186e-05,
"loss": 0.6042,
"num_tokens": 35007918.0,
"step": 2765
},
{
"epoch": 0.4727769243898276,
"grad_norm": 0.947220685474545,
"learning_rate": 3.137928839819434e-05,
"loss": 0.638,
"num_tokens": 35516081.0,
"step": 2770
},
{
"epoch": 0.47363031233998976,
"grad_norm": 0.9708185654838827,
"learning_rate": 3.131672605243911e-05,
"loss": 0.6151,
"num_tokens": 35972013.0,
"step": 2775
},
{
"epoch": 0.4744837002901519,
"grad_norm": 0.9665397926545222,
"learning_rate": 3.125413330875104e-05,
"loss": 0.6332,
"num_tokens": 36487961.0,
"step": 2780
},
{
"epoch": 0.47533708824031407,
"grad_norm": 0.825937896752512,
"learning_rate": 3.119151066564378e-05,
"loss": 0.6508,
"num_tokens": 36954084.0,
"step": 2785
},
{
"epoch": 0.47619047619047616,
"grad_norm": 0.8814205249866639,
"learning_rate": 3.1128858621869084e-05,
"loss": 0.5928,
"num_tokens": 37436748.0,
"step": 2790
},
{
"epoch": 0.4770438641406383,
"grad_norm": 0.8560968867180144,
"learning_rate": 3.106617767641291e-05,
"loss": 0.6041,
"num_tokens": 37908735.0,
"step": 2795
},
{
"epoch": 0.4778972520908005,
"grad_norm": 0.8960390256733356,
"learning_rate": 3.100346832849137e-05,
"loss": 0.633,
"num_tokens": 38388303.0,
"step": 2800
},
{
"epoch": 0.4787506400409626,
"grad_norm": 0.8289004343409846,
"learning_rate": 3.0940731077546806e-05,
"loss": 0.6412,
"num_tokens": 38876259.0,
"step": 2805
},
{
"epoch": 0.4796040279911248,
"grad_norm": 0.9023913726126142,
"learning_rate": 3.087796642324376e-05,
"loss": 0.6347,
"num_tokens": 39361858.0,
"step": 2810
},
{
"epoch": 0.48045741594128694,
"grad_norm": 0.8817899043399531,
"learning_rate": 3.0815174865465046e-05,
"loss": 0.6335,
"num_tokens": 39877714.0,
"step": 2815
},
{
"epoch": 0.48131080389144903,
"grad_norm": 0.9020909342984744,
"learning_rate": 3.075235690430775e-05,
"loss": 0.6206,
"num_tokens": 40323502.0,
"step": 2820
},
{
"epoch": 0.4821641918416112,
"grad_norm": 0.9191982487626535,
"learning_rate": 3.0689513040079235e-05,
"loss": 0.6072,
"num_tokens": 40800904.0,
"step": 2825
},
{
"epoch": 0.48301757979177334,
"grad_norm": 0.9142822978216951,
"learning_rate": 3.062664377329317e-05,
"loss": 0.6488,
"num_tokens": 41310389.0,
"step": 2830
},
{
"epoch": 0.4838709677419355,
"grad_norm": 0.9574374642953535,
"learning_rate": 3.0563749604665556e-05,
"loss": 0.6544,
"num_tokens": 41801610.0,
"step": 2835
},
{
"epoch": 0.48472435569209765,
"grad_norm": 1.021826726972745,
"learning_rate": 3.0500831035110677e-05,
"loss": 0.6066,
"num_tokens": 42263578.0,
"step": 2840
},
{
"epoch": 0.48557774364225975,
"grad_norm": 0.8711698710249252,
"learning_rate": 3.0437888565737215e-05,
"loss": 0.6565,
"num_tokens": 42794652.0,
"step": 2845
},
{
"epoch": 0.4864311315924219,
"grad_norm": 0.9515362101152438,
"learning_rate": 3.0374922697844167e-05,
"loss": 0.5999,
"num_tokens": 43295831.0,
"step": 2850
},
{
"epoch": 0.48728451954258406,
"grad_norm": 0.7767880334427985,
"learning_rate": 3.0311933932916874e-05,
"loss": 0.6604,
"num_tokens": 43762546.0,
"step": 2855
},
{
"epoch": 0.4881379074927462,
"grad_norm": 0.999247983508965,
"learning_rate": 3.0248922772623066e-05,
"loss": 0.635,
"num_tokens": 44271674.0,
"step": 2860
},
{
"epoch": 0.48899129544290837,
"grad_norm": 0.9808245624992524,
"learning_rate": 3.0185889718808813e-05,
"loss": 0.6324,
"num_tokens": 44767111.0,
"step": 2865
},
{
"epoch": 0.48984468339307047,
"grad_norm": 0.9794159238802201,
"learning_rate": 3.012283527349458e-05,
"loss": 0.592,
"num_tokens": 45220967.0,
"step": 2870
},
{
"epoch": 0.4906980713432326,
"grad_norm": 0.9730204959795733,
"learning_rate": 3.0059759938871194e-05,
"loss": 0.6593,
"num_tokens": 45692929.0,
"step": 2875
},
{
"epoch": 0.4915514592933948,
"grad_norm": 0.999538493061937,
"learning_rate": 2.9996664217295832e-05,
"loss": 0.6325,
"num_tokens": 46141558.0,
"step": 2880
},
{
"epoch": 0.49240484724355693,
"grad_norm": 0.9461090327652082,
"learning_rate": 2.9933548611288064e-05,
"loss": 0.5998,
"num_tokens": 46591176.0,
"step": 2885
},
{
"epoch": 0.4932582351937191,
"grad_norm": 0.990050853768675,
"learning_rate": 2.987041362352581e-05,
"loss": 0.5853,
"num_tokens": 47011064.0,
"step": 2890
},
{
"epoch": 0.4941116231438812,
"grad_norm": 0.8903704890612268,
"learning_rate": 2.9807259756841383e-05,
"loss": 0.5927,
"num_tokens": 47456082.0,
"step": 2895
},
{
"epoch": 0.49496501109404334,
"grad_norm": 0.8895395920757792,
"learning_rate": 2.974408751421743e-05,
"loss": 0.6047,
"num_tokens": 47977344.0,
"step": 2900
},
{
"epoch": 0.4958183990442055,
"grad_norm": 1.0762715239860545,
"learning_rate": 2.9680897398782946e-05,
"loss": 0.6443,
"num_tokens": 48413819.0,
"step": 2905
},
{
"epoch": 0.49667178699436765,
"grad_norm": 1.0093778915297285,
"learning_rate": 2.9617689913809304e-05,
"loss": 0.6147,
"num_tokens": 48898798.0,
"step": 2910
},
{
"epoch": 0.4975251749445298,
"grad_norm": 0.9519508557055824,
"learning_rate": 2.955446556270618e-05,
"loss": 0.6226,
"num_tokens": 49346202.0,
"step": 2915
},
{
"epoch": 0.4983785628946919,
"grad_norm": 0.945004577120924,
"learning_rate": 2.9491224849017602e-05,
"loss": 0.6104,
"num_tokens": 49793997.0,
"step": 2920
},
{
"epoch": 0.49923195084485406,
"grad_norm": 0.9129913648493336,
"learning_rate": 2.94279682764179e-05,
"loss": 0.605,
"num_tokens": 50284460.0,
"step": 2925
},
{
"epoch": 0.5000853387950163,
"grad_norm": 0.8409386583627643,
"learning_rate": 2.9364696348707726e-05,
"loss": 0.6135,
"num_tokens": 50836014.0,
"step": 2930
},
{
"epoch": 0.5009387267451784,
"grad_norm": 0.990670854143676,
"learning_rate": 2.930140956981002e-05,
"loss": 0.5999,
"num_tokens": 51250570.0,
"step": 2935
},
{
"epoch": 0.5017921146953405,
"grad_norm": 0.9459574916127076,
"learning_rate": 2.9238108443765988e-05,
"loss": 0.5934,
"num_tokens": 51745806.0,
"step": 2940
},
{
"epoch": 0.5026455026455027,
"grad_norm": 0.8678525092738497,
"learning_rate": 2.9174793474731133e-05,
"loss": 0.6233,
"num_tokens": 52250600.0,
"step": 2945
},
{
"epoch": 0.5034988905956648,
"grad_norm": 0.9384958904208902,
"learning_rate": 2.911146516697118e-05,
"loss": 0.6299,
"num_tokens": 52797693.0,
"step": 2950
},
{
"epoch": 0.504352278545827,
"grad_norm": 0.9868110919179371,
"learning_rate": 2.904812402485811e-05,
"loss": 0.6446,
"num_tokens": 53261423.0,
"step": 2955
},
{
"epoch": 0.5052056664959891,
"grad_norm": 1.0091887647980669,
"learning_rate": 2.8984770552866108e-05,
"loss": 0.5639,
"num_tokens": 53751059.0,
"step": 2960
},
{
"epoch": 0.5060590544461512,
"grad_norm": 0.8342160700122503,
"learning_rate": 2.8921405255567578e-05,
"loss": 0.5679,
"num_tokens": 54223040.0,
"step": 2965
},
{
"epoch": 0.5069124423963134,
"grad_norm": 0.8986532584280877,
"learning_rate": 2.8858028637629063e-05,
"loss": 0.607,
"num_tokens": 54673453.0,
"step": 2970
},
{
"epoch": 0.5077658303464755,
"grad_norm": 0.8925992400801118,
"learning_rate": 2.8794641203807314e-05,
"loss": 0.6218,
"num_tokens": 55202493.0,
"step": 2975
},
{
"epoch": 0.5086192182966377,
"grad_norm": 0.7890714280668591,
"learning_rate": 2.873124345894521e-05,
"loss": 0.638,
"num_tokens": 55722817.0,
"step": 2980
},
{
"epoch": 0.5094726062467998,
"grad_norm": 0.9064838230559494,
"learning_rate": 2.8667835907967748e-05,
"loss": 0.6314,
"num_tokens": 56227280.0,
"step": 2985
},
{
"epoch": 0.5103259941969619,
"grad_norm": 0.998743125755284,
"learning_rate": 2.8604419055878017e-05,
"loss": 0.5891,
"num_tokens": 56669120.0,
"step": 2990
},
{
"epoch": 0.5111793821471241,
"grad_norm": 0.940747148658803,
"learning_rate": 2.854099340775319e-05,
"loss": 0.6301,
"num_tokens": 57122362.0,
"step": 2995
},
{
"epoch": 0.5120327700972862,
"grad_norm": 0.9437400600614468,
"learning_rate": 2.8477559468740506e-05,
"loss": 0.6555,
"num_tokens": 57661431.0,
"step": 3000
},
{
"epoch": 0.5128861580474484,
"grad_norm": 0.8394567709397782,
"learning_rate": 2.8414117744053225e-05,
"loss": 0.6056,
"num_tokens": 58131273.0,
"step": 3005
},
{
"epoch": 0.5137395459976105,
"grad_norm": 0.9489257789311477,
"learning_rate": 2.8350668738966612e-05,
"loss": 0.5858,
"num_tokens": 58585451.0,
"step": 3010
},
{
"epoch": 0.5145929339477726,
"grad_norm": 0.8727065090879019,
"learning_rate": 2.8287212958813926e-05,
"loss": 0.5646,
"num_tokens": 59037920.0,
"step": 3015
},
{
"epoch": 0.5154463218979348,
"grad_norm": 0.9682582828974684,
"learning_rate": 2.8223750908982378e-05,
"loss": 0.6208,
"num_tokens": 59548752.0,
"step": 3020
},
{
"epoch": 0.5162997098480969,
"grad_norm": 0.9339447380894269,
"learning_rate": 2.8160283094909105e-05,
"loss": 0.6118,
"num_tokens": 60023348.0,
"step": 3025
},
{
"epoch": 0.5171530977982591,
"grad_norm": 0.9091766179100768,
"learning_rate": 2.8096810022077184e-05,
"loss": 0.6063,
"num_tokens": 60478370.0,
"step": 3030
},
{
"epoch": 0.5180064857484212,
"grad_norm": 0.8832027277397922,
"learning_rate": 2.8033332196011548e-05,
"loss": 0.6184,
"num_tokens": 60896095.0,
"step": 3035
},
{
"epoch": 0.5188598736985833,
"grad_norm": 1.0271689932750887,
"learning_rate": 2.7969850122274977e-05,
"loss": 0.6155,
"num_tokens": 61347251.0,
"step": 3040
},
{
"epoch": 0.5197132616487455,
"grad_norm": 0.9743044480912783,
"learning_rate": 2.7906364306464116e-05,
"loss": 0.6648,
"num_tokens": 61804678.0,
"step": 3045
},
{
"epoch": 0.5205666495989076,
"grad_norm": 0.9109542005139061,
"learning_rate": 2.784287525420538e-05,
"loss": 0.6142,
"num_tokens": 62331454.0,
"step": 3050
},
{
"epoch": 0.5214200375490698,
"grad_norm": 0.8849038752588272,
"learning_rate": 2.777938347115098e-05,
"loss": 0.5899,
"num_tokens": 62817171.0,
"step": 3055
},
{
"epoch": 0.522273425499232,
"grad_norm": 0.8074591324273821,
"learning_rate": 2.771588946297488e-05,
"loss": 0.6066,
"num_tokens": 63324176.0,
"step": 3060
},
{
"epoch": 0.523126813449394,
"grad_norm": 0.8686242811447925,
"learning_rate": 2.7652393735368747e-05,
"loss": 0.61,
"num_tokens": 63793104.0,
"step": 3065
},
{
"epoch": 0.5239802013995563,
"grad_norm": 0.9096641571043528,
"learning_rate": 2.758889679403795e-05,
"loss": 0.6088,
"num_tokens": 64198275.0,
"step": 3070
},
{
"epoch": 0.5248335893497184,
"grad_norm": 0.9775542616996967,
"learning_rate": 2.7525399144697534e-05,
"loss": 0.6071,
"num_tokens": 64704063.0,
"step": 3075
},
{
"epoch": 0.5256869772998806,
"grad_norm": 0.94132553062766,
"learning_rate": 2.746190129306816e-05,
"loss": 0.605,
"num_tokens": 65171476.0,
"step": 3080
},
{
"epoch": 0.5265403652500427,
"grad_norm": 0.9605299925817627,
"learning_rate": 2.739840374487214e-05,
"loss": 0.6351,
"num_tokens": 65672579.0,
"step": 3085
},
{
"epoch": 0.5273937532002048,
"grad_norm": 0.8545901170879364,
"learning_rate": 2.733490700582932e-05,
"loss": 0.6194,
"num_tokens": 66139328.0,
"step": 3090
},
{
"epoch": 0.528247141150367,
"grad_norm": 0.8486195663590458,
"learning_rate": 2.7271411581653145e-05,
"loss": 0.5836,
"num_tokens": 66594619.0,
"step": 3095
},
{
"epoch": 0.5291005291005291,
"grad_norm": 0.9274528003638315,
"learning_rate": 2.720791797804656e-05,
"loss": 0.6368,
"num_tokens": 67083533.0,
"step": 3100
},
{
"epoch": 0.5299539170506913,
"grad_norm": 0.8969308802413379,
"learning_rate": 2.7144426700698012e-05,
"loss": 0.6316,
"num_tokens": 67618627.0,
"step": 3105
},
{
"epoch": 0.5308073050008534,
"grad_norm": 0.8416577756233091,
"learning_rate": 2.708093825527745e-05,
"loss": 0.6116,
"num_tokens": 68115381.0,
"step": 3110
},
{
"epoch": 0.5316606929510155,
"grad_norm": 0.8131996556741273,
"learning_rate": 2.7017453147432225e-05,
"loss": 0.5828,
"num_tokens": 68538403.0,
"step": 3115
},
{
"epoch": 0.5325140809011777,
"grad_norm": 1.0454639098669911,
"learning_rate": 2.6953971882783142e-05,
"loss": 0.6265,
"num_tokens": 68967408.0,
"step": 3120
},
{
"epoch": 0.5333674688513398,
"grad_norm": 0.9549895357737828,
"learning_rate": 2.689049496692037e-05,
"loss": 0.6101,
"num_tokens": 69472052.0,
"step": 3125
},
{
"epoch": 0.534220856801502,
"grad_norm": 0.8308698422547768,
"learning_rate": 2.6827022905399456e-05,
"loss": 0.6269,
"num_tokens": 69964036.0,
"step": 3130
},
{
"epoch": 0.5350742447516641,
"grad_norm": 0.9102679595896443,
"learning_rate": 2.676355620373731e-05,
"loss": 0.6088,
"num_tokens": 70445245.0,
"step": 3135
},
{
"epoch": 0.5359276327018262,
"grad_norm": 0.9313136439436694,
"learning_rate": 2.67000953674081e-05,
"loss": 0.5689,
"num_tokens": 70900222.0,
"step": 3140
},
{
"epoch": 0.5367810206519884,
"grad_norm": 0.9388407450010738,
"learning_rate": 2.663664090183932e-05,
"loss": 0.6126,
"num_tokens": 71305878.0,
"step": 3145
},
{
"epoch": 0.5376344086021505,
"grad_norm": 0.914581426947238,
"learning_rate": 2.657319331240771e-05,
"loss": 0.5944,
"num_tokens": 71729509.0,
"step": 3150
},
{
"epoch": 0.5384877965523127,
"grad_norm": 0.83701319276453,
"learning_rate": 2.650975310443525e-05,
"loss": 0.5664,
"num_tokens": 72238073.0,
"step": 3155
},
{
"epoch": 0.5393411845024748,
"grad_norm": 0.9171217186457761,
"learning_rate": 2.644632078318513e-05,
"loss": 0.5879,
"num_tokens": 72677498.0,
"step": 3160
},
{
"epoch": 0.540194572452637,
"grad_norm": 0.8098622964637798,
"learning_rate": 2.6382896853857736e-05,
"loss": 0.6153,
"num_tokens": 73179076.0,
"step": 3165
},
{
"epoch": 0.5410479604027991,
"grad_norm": 0.9000788802277151,
"learning_rate": 2.63194818215866e-05,
"loss": 0.6381,
"num_tokens": 73703391.0,
"step": 3170
},
{
"epoch": 0.5419013483529612,
"grad_norm": 0.9158524475261981,
"learning_rate": 2.625607619143439e-05,
"loss": 0.6276,
"num_tokens": 74152776.0,
"step": 3175
},
{
"epoch": 0.5427547363031234,
"grad_norm": 0.8909922680173544,
"learning_rate": 2.619268046838893e-05,
"loss": 0.5935,
"num_tokens": 74582980.0,
"step": 3180
},
{
"epoch": 0.5436081242532855,
"grad_norm": 0.9278472564551875,
"learning_rate": 2.61292951573591e-05,
"loss": 0.6297,
"num_tokens": 75055808.0,
"step": 3185
},
{
"epoch": 0.5444615122034477,
"grad_norm": 0.8845869821677456,
"learning_rate": 2.606592076317087e-05,
"loss": 0.6279,
"num_tokens": 75509960.0,
"step": 3190
},
{
"epoch": 0.5453149001536098,
"grad_norm": 0.7587670201893357,
"learning_rate": 2.6002557790563276e-05,
"loss": 0.6239,
"num_tokens": 76023431.0,
"step": 3195
},
{
"epoch": 0.5461682881037719,
"grad_norm": 0.9336917428778075,
"learning_rate": 2.5939206744184354e-05,
"loss": 0.5938,
"num_tokens": 76420106.0,
"step": 3200
},
{
"epoch": 0.5470216760539341,
"grad_norm": 0.8806170933713402,
"learning_rate": 2.5875868128587177e-05,
"loss": 0.6088,
"num_tokens": 76908186.0,
"step": 3205
},
{
"epoch": 0.5478750640040962,
"grad_norm": 1.0263401501278369,
"learning_rate": 2.5812542448225836e-05,
"loss": 0.6792,
"num_tokens": 77391407.0,
"step": 3210
},
{
"epoch": 0.5487284519542585,
"grad_norm": 0.8644207041767296,
"learning_rate": 2.574923020745135e-05,
"loss": 0.6314,
"num_tokens": 77845333.0,
"step": 3215
},
{
"epoch": 0.5495818399044206,
"grad_norm": 0.8712907469129566,
"learning_rate": 2.5685931910507756e-05,
"loss": 0.5854,
"num_tokens": 78314883.0,
"step": 3220
},
{
"epoch": 0.5504352278545827,
"grad_norm": 0.7917989133845237,
"learning_rate": 2.562264806152798e-05,
"loss": 0.6453,
"num_tokens": 78792697.0,
"step": 3225
},
{
"epoch": 0.5512886158047449,
"grad_norm": 1.2698451028714428,
"learning_rate": 2.5559379164529916e-05,
"loss": 0.6055,
"num_tokens": 79268449.0,
"step": 3230
},
{
"epoch": 0.552142003754907,
"grad_norm": 0.8698617334948361,
"learning_rate": 2.5496125723412378e-05,
"loss": 0.5981,
"num_tokens": 79777685.0,
"step": 3235
},
{
"epoch": 0.5529953917050692,
"grad_norm": 1.1367372216369092,
"learning_rate": 2.5432888241951047e-05,
"loss": 0.6072,
"num_tokens": 80241521.0,
"step": 3240
},
{
"epoch": 0.5538487796552313,
"grad_norm": 1.1000070125764803,
"learning_rate": 2.5369667223794546e-05,
"loss": 0.5812,
"num_tokens": 80731872.0,
"step": 3245
},
{
"epoch": 0.5547021676053934,
"grad_norm": 0.9833667568222638,
"learning_rate": 2.5306463172460327e-05,
"loss": 0.5954,
"num_tokens": 81185238.0,
"step": 3250
},
{
"epoch": 0.5555555555555556,
"grad_norm": 0.894923840538592,
"learning_rate": 2.524327659133075e-05,
"loss": 0.6241,
"num_tokens": 81670790.0,
"step": 3255
},
{
"epoch": 0.5564089435057177,
"grad_norm": 0.8738246405469254,
"learning_rate": 2.5180107983649e-05,
"loss": 0.585,
"num_tokens": 82146300.0,
"step": 3260
},
{
"epoch": 0.5572623314558799,
"grad_norm": 0.9163256478577108,
"learning_rate": 2.5116957852515144e-05,
"loss": 0.6172,
"num_tokens": 82649931.0,
"step": 3265
},
{
"epoch": 0.558115719406042,
"grad_norm": 0.9132249310168558,
"learning_rate": 2.50538267008821e-05,
"loss": 0.6112,
"num_tokens": 83046925.0,
"step": 3270
},
{
"epoch": 0.5589691073562041,
"grad_norm": 1.0076436822777552,
"learning_rate": 2.4990715031551576e-05,
"loss": 0.5812,
"num_tokens": 83483265.0,
"step": 3275
},
{
"epoch": 0.5598224953063663,
"grad_norm": 0.8149595056663635,
"learning_rate": 2.4927623347170187e-05,
"loss": 0.6116,
"num_tokens": 84015002.0,
"step": 3280
},
{
"epoch": 0.5606758832565284,
"grad_norm": 0.8659160058321907,
"learning_rate": 2.4864552150225313e-05,
"loss": 0.601,
"num_tokens": 84530159.0,
"step": 3285
},
{
"epoch": 0.5615292712066906,
"grad_norm": 1.6488676893470589,
"learning_rate": 2.4801501943041207e-05,
"loss": 0.6287,
"num_tokens": 84976777.0,
"step": 3290
},
{
"epoch": 0.5623826591568527,
"grad_norm": 0.903284657679167,
"learning_rate": 2.473847322777494e-05,
"loss": 0.5746,
"num_tokens": 85475390.0,
"step": 3295
},
{
"epoch": 0.5632360471070148,
"grad_norm": 0.9168478565461777,
"learning_rate": 2.467546650641239e-05,
"loss": 0.6107,
"num_tokens": 85979990.0,
"step": 3300
},
{
"epoch": 0.564089435057177,
"grad_norm": 0.9421153163859376,
"learning_rate": 2.461248228076431e-05,
"loss": 0.6217,
"num_tokens": 86427182.0,
"step": 3305
},
{
"epoch": 0.5649428230073391,
"grad_norm": 0.9418335674217378,
"learning_rate": 2.454952105246225e-05,
"loss": 0.6229,
"num_tokens": 86974407.0,
"step": 3310
},
{
"epoch": 0.5657962109575013,
"grad_norm": 0.9616597438867678,
"learning_rate": 2.4486583322954615e-05,
"loss": 0.5953,
"num_tokens": 87406506.0,
"step": 3315
},
{
"epoch": 0.5666495989076634,
"grad_norm": 0.9001557558355506,
"learning_rate": 2.4423669593502674e-05,
"loss": 0.6028,
"num_tokens": 87814661.0,
"step": 3320
},
{
"epoch": 0.5675029868578255,
"grad_norm": 0.9282668273832819,
"learning_rate": 2.43607803651765e-05,
"loss": 0.6505,
"num_tokens": 88286707.0,
"step": 3325
},
{
"epoch": 0.5683563748079877,
"grad_norm": 0.8061451148141976,
"learning_rate": 2.429791613885109e-05,
"loss": 0.5717,
"num_tokens": 88814762.0,
"step": 3330
},
{
"epoch": 0.5692097627581498,
"grad_norm": 0.8372348822992951,
"learning_rate": 2.4235077415202267e-05,
"loss": 0.6131,
"num_tokens": 89333477.0,
"step": 3335
},
{
"epoch": 0.570063150708312,
"grad_norm": 1.074881795157168,
"learning_rate": 2.4172264694702766e-05,
"loss": 0.5847,
"num_tokens": 89794835.0,
"step": 3340
},
{
"epoch": 0.5709165386584741,
"grad_norm": 1.0051952346241788,
"learning_rate": 2.4109478477618226e-05,
"loss": 0.6132,
"num_tokens": 90209243.0,
"step": 3345
},
{
"epoch": 0.5717699266086362,
"grad_norm": 0.9160834252199135,
"learning_rate": 2.404671926400317e-05,
"loss": 0.5956,
"num_tokens": 90748582.0,
"step": 3350
},
{
"epoch": 0.5726233145587984,
"grad_norm": 0.8831271838249432,
"learning_rate": 2.39839875536971e-05,
"loss": 0.628,
"num_tokens": 91203963.0,
"step": 3355
},
{
"epoch": 0.5734767025089605,
"grad_norm": 1.0215192824643375,
"learning_rate": 2.3921283846320434e-05,
"loss": 0.5969,
"num_tokens": 91699614.0,
"step": 3360
},
{
"epoch": 0.5743300904591228,
"grad_norm": 0.8198005107193644,
"learning_rate": 2.385860864127057e-05,
"loss": 0.5841,
"num_tokens": 92169991.0,
"step": 3365
},
{
"epoch": 0.5751834784092849,
"grad_norm": 0.8904708685605999,
"learning_rate": 2.3795962437717933e-05,
"loss": 0.5831,
"num_tokens": 92616892.0,
"step": 3370
},
{
"epoch": 0.576036866359447,
"grad_norm": 0.8480544139333845,
"learning_rate": 2.3733345734601926e-05,
"loss": 0.6098,
"num_tokens": 93089897.0,
"step": 3375
},
{
"epoch": 0.5768902543096092,
"grad_norm": 0.9175224967317364,
"learning_rate": 2.3670759030627026e-05,
"loss": 0.6125,
"num_tokens": 93591669.0,
"step": 3380
},
{
"epoch": 0.5777436422597713,
"grad_norm": 0.8457756162517169,
"learning_rate": 2.3608202824258756e-05,
"loss": 0.5802,
"num_tokens": 94048412.0,
"step": 3385
},
{
"epoch": 0.5785970302099335,
"grad_norm": 0.8657800668830039,
"learning_rate": 2.3545677613719796e-05,
"loss": 0.638,
"num_tokens": 94512997.0,
"step": 3390
},
{
"epoch": 0.5794504181600956,
"grad_norm": 0.9449477364366197,
"learning_rate": 2.3483183896985905e-05,
"loss": 0.6279,
"num_tokens": 94989943.0,
"step": 3395
},
{
"epoch": 0.5803038061102577,
"grad_norm": 0.89955796478803,
"learning_rate": 2.3420722171782044e-05,
"loss": 0.559,
"num_tokens": 95391791.0,
"step": 3400
},
{
"epoch": 0.5811571940604199,
"grad_norm": 0.929574266645046,
"learning_rate": 2.335829293557839e-05,
"loss": 0.562,
"num_tokens": 95831622.0,
"step": 3405
},
{
"epoch": 0.582010582010582,
"grad_norm": 0.8975741522316928,
"learning_rate": 2.3295896685586327e-05,
"loss": 0.5849,
"num_tokens": 96232233.0,
"step": 3410
},
{
"epoch": 0.5828639699607442,
"grad_norm": 0.9836672146613759,
"learning_rate": 2.3233533918754568e-05,
"loss": 0.629,
"num_tokens": 96733338.0,
"step": 3415
},
{
"epoch": 0.5837173579109063,
"grad_norm": 0.9309413627535794,
"learning_rate": 2.3171205131765106e-05,
"loss": 0.6573,
"num_tokens": 97192618.0,
"step": 3420
},
{
"epoch": 0.5845707458610685,
"grad_norm": 0.912682856813927,
"learning_rate": 2.3108910821029357e-05,
"loss": 0.6005,
"num_tokens": 97721791.0,
"step": 3425
},
{
"epoch": 0.5854241338112306,
"grad_norm": 1.0215174631608348,
"learning_rate": 2.304665148268411e-05,
"loss": 0.6355,
"num_tokens": 98220639.0,
"step": 3430
},
{
"epoch": 0.5862775217613927,
"grad_norm": 0.842703755218707,
"learning_rate": 2.2984427612587638e-05,
"loss": 0.5832,
"num_tokens": 98701613.0,
"step": 3435
},
{
"epoch": 0.5871309097115549,
"grad_norm": 0.9052396390768784,
"learning_rate": 2.2922239706315745e-05,
"loss": 0.5967,
"num_tokens": 99120584.0,
"step": 3440
},
{
"epoch": 0.587984297661717,
"grad_norm": 0.8641185188951044,
"learning_rate": 2.2860088259157776e-05,
"loss": 0.6187,
"num_tokens": 99620033.0,
"step": 3445
},
{
"epoch": 0.5888376856118792,
"grad_norm": 0.8898937101798601,
"learning_rate": 2.2797973766112702e-05,
"loss": 0.6015,
"num_tokens": 100158385.0,
"step": 3450
},
{
"epoch": 0.5896910735620413,
"grad_norm": 0.9808515333821658,
"learning_rate": 2.2735896721885218e-05,
"loss": 0.5857,
"num_tokens": 100605881.0,
"step": 3455
},
{
"epoch": 0.5905444615122034,
"grad_norm": 0.9118752503463046,
"learning_rate": 2.2673857620881712e-05,
"loss": 0.5949,
"num_tokens": 101113231.0,
"step": 3460
},
{
"epoch": 0.5913978494623656,
"grad_norm": 0.851226680431884,
"learning_rate": 2.2611856957206413e-05,
"loss": 0.5625,
"num_tokens": 101623625.0,
"step": 3465
},
{
"epoch": 0.5922512374125277,
"grad_norm": 0.9845603139612755,
"learning_rate": 2.2549895224657392e-05,
"loss": 0.567,
"num_tokens": 102084661.0,
"step": 3470
},
{
"epoch": 0.5931046253626899,
"grad_norm": 1.081339352649647,
"learning_rate": 2.248797291672267e-05,
"loss": 0.5548,
"num_tokens": 102541320.0,
"step": 3475
},
{
"epoch": 0.593958013312852,
"grad_norm": 1.0080529824324478,
"learning_rate": 2.2426090526576288e-05,
"loss": 0.6448,
"num_tokens": 103056557.0,
"step": 3480
},
{
"epoch": 0.5948114012630141,
"grad_norm": 0.932290425878926,
"learning_rate": 2.2364248547074335e-05,
"loss": 0.5876,
"num_tokens": 103525382.0,
"step": 3485
},
{
"epoch": 0.5956647892131763,
"grad_norm": 0.9350679504132159,
"learning_rate": 2.2302447470751087e-05,
"loss": 0.6405,
"num_tokens": 104029345.0,
"step": 3490
},
{
"epoch": 0.5965181771633384,
"grad_norm": 0.9496853296189571,
"learning_rate": 2.224068778981501e-05,
"loss": 0.5794,
"num_tokens": 104507487.0,
"step": 3495
},
{
"epoch": 0.5973715651135006,
"grad_norm": 0.9723799452134716,
"learning_rate": 2.2178969996144933e-05,
"loss": 0.6131,
"num_tokens": 105032823.0,
"step": 3500
},
{
"epoch": 0.5982249530636627,
"grad_norm": 0.9189931555598423,
"learning_rate": 2.211729458128603e-05,
"loss": 0.5845,
"num_tokens": 105477154.0,
"step": 3505
},
{
"epoch": 0.5990783410138248,
"grad_norm": 0.9239578729799689,
"learning_rate": 2.205566203644598e-05,
"loss": 0.5822,
"num_tokens": 105921473.0,
"step": 3510
},
{
"epoch": 0.599931728963987,
"grad_norm": 0.8956713854458557,
"learning_rate": 2.1994072852491028e-05,
"loss": 0.5684,
"num_tokens": 106425660.0,
"step": 3515
},
{
"epoch": 0.6007851169141492,
"grad_norm": 0.8483775695612638,
"learning_rate": 2.1932527519942048e-05,
"loss": 0.5502,
"num_tokens": 106885151.0,
"step": 3520
},
{
"epoch": 0.6016385048643114,
"grad_norm": 0.8076921584034902,
"learning_rate": 2.1871026528970706e-05,
"loss": 0.5755,
"num_tokens": 107317240.0,
"step": 3525
},
{
"epoch": 0.6024918928144735,
"grad_norm": 0.8309185583056886,
"learning_rate": 2.1809570369395476e-05,
"loss": 0.5729,
"num_tokens": 107753016.0,
"step": 3530
},
{
"epoch": 0.6033452807646356,
"grad_norm": 0.9182402156829599,
"learning_rate": 2.1748159530677808e-05,
"loss": 0.6363,
"num_tokens": 108236529.0,
"step": 3535
},
{
"epoch": 0.6041986687147978,
"grad_norm": 0.9215609061374522,
"learning_rate": 2.1686794501918183e-05,
"loss": 0.6056,
"num_tokens": 108671786.0,
"step": 3540
},
{
"epoch": 0.6050520566649599,
"grad_norm": 0.9245542050305942,
"learning_rate": 2.1625475771852217e-05,
"loss": 0.6313,
"num_tokens": 109188437.0,
"step": 3545
},
{
"epoch": 0.6059054446151221,
"grad_norm": 0.7511501718290673,
"learning_rate": 2.156420382884682e-05,
"loss": 0.6066,
"num_tokens": 109739863.0,
"step": 3550
},
{
"epoch": 0.6067588325652842,
"grad_norm": 0.9465637379583386,
"learning_rate": 2.1502979160896243e-05,
"loss": 0.5925,
"num_tokens": 110107306.0,
"step": 3555
},
{
"epoch": 0.6076122205154463,
"grad_norm": 0.8693114102932104,
"learning_rate": 2.1441802255618227e-05,
"loss": 0.602,
"num_tokens": 110648438.0,
"step": 3560
},
{
"epoch": 0.6084656084656085,
"grad_norm": 1.013343208376697,
"learning_rate": 2.138067360025012e-05,
"loss": 0.6372,
"num_tokens": 111128095.0,
"step": 3565
},
{
"epoch": 0.6093189964157706,
"grad_norm": 1.0456050971697508,
"learning_rate": 2.1319593681644983e-05,
"loss": 0.6223,
"num_tokens": 111581630.0,
"step": 3570
},
{
"epoch": 0.6101723843659328,
"grad_norm": 0.9777795049451665,
"learning_rate": 2.125856298626772e-05,
"loss": 0.6072,
"num_tokens": 112053972.0,
"step": 3575
},
{
"epoch": 0.6110257723160949,
"grad_norm": 0.9366594738407755,
"learning_rate": 2.1197582000191195e-05,
"loss": 0.5691,
"num_tokens": 112477812.0,
"step": 3580
},
{
"epoch": 0.611879160266257,
"grad_norm": 0.8631576042358379,
"learning_rate": 2.1136651209092366e-05,
"loss": 0.578,
"num_tokens": 112954742.0,
"step": 3585
},
{
"epoch": 0.6127325482164192,
"grad_norm": 0.9229121201174906,
"learning_rate": 2.1075771098248435e-05,
"loss": 0.5906,
"num_tokens": 113477895.0,
"step": 3590
},
{
"epoch": 0.6135859361665813,
"grad_norm": 0.8747642696537712,
"learning_rate": 2.101494215253295e-05,
"loss": 0.598,
"num_tokens": 113996433.0,
"step": 3595
},
{
"epoch": 0.6144393241167435,
"grad_norm": 0.8226734099514453,
"learning_rate": 2.095416485641197e-05,
"loss": 0.6226,
"num_tokens": 114528571.0,
"step": 3600
},
{
"epoch": 0.6152927120669056,
"grad_norm": 0.8561260036624176,
"learning_rate": 2.0893439693940164e-05,
"loss": 0.5939,
"num_tokens": 114979015.0,
"step": 3605
},
{
"epoch": 0.6161461000170677,
"grad_norm": 0.7185593819396288,
"learning_rate": 2.083276714875704e-05,
"loss": 0.6138,
"num_tokens": 115545543.0,
"step": 3610
},
{
"epoch": 0.6169994879672299,
"grad_norm": 0.8727068314836006,
"learning_rate": 2.0772147704083018e-05,
"loss": 0.6458,
"num_tokens": 116033061.0,
"step": 3615
},
{
"epoch": 0.617852875917392,
"grad_norm": 1.0114732688960872,
"learning_rate": 2.071158184271558e-05,
"loss": 0.5838,
"num_tokens": 116495965.0,
"step": 3620
},
{
"epoch": 0.6187062638675542,
"grad_norm": 0.8448415729964601,
"learning_rate": 2.0651070047025495e-05,
"loss": 0.6449,
"num_tokens": 116996015.0,
"step": 3625
},
{
"epoch": 0.6195596518177163,
"grad_norm": 0.9318187070912285,
"learning_rate": 2.059061279895288e-05,
"loss": 0.5673,
"num_tokens": 117451785.0,
"step": 3630
},
{
"epoch": 0.6204130397678784,
"grad_norm": 0.900817894542971,
"learning_rate": 2.0530210580003462e-05,
"loss": 0.6137,
"num_tokens": 117898073.0,
"step": 3635
},
{
"epoch": 0.6212664277180406,
"grad_norm": 0.9353646155780253,
"learning_rate": 2.0469863871244653e-05,
"loss": 0.586,
"num_tokens": 118370098.0,
"step": 3640
},
{
"epoch": 0.6221198156682027,
"grad_norm": 0.9320717548727218,
"learning_rate": 2.040957315330179e-05,
"loss": 0.6319,
"num_tokens": 118836147.0,
"step": 3645
},
{
"epoch": 0.622973203618365,
"grad_norm": 0.9657934461409301,
"learning_rate": 2.0349338906354265e-05,
"loss": 0.6149,
"num_tokens": 119296261.0,
"step": 3650
},
{
"epoch": 0.623826591568527,
"grad_norm": 0.8123400658647655,
"learning_rate": 2.028916161013171e-05,
"loss": 0.5993,
"num_tokens": 119753329.0,
"step": 3655
},
{
"epoch": 0.6246799795186891,
"grad_norm": 0.8641657920470441,
"learning_rate": 2.0229041743910177e-05,
"loss": 0.6031,
"num_tokens": 120219689.0,
"step": 3660
},
{
"epoch": 0.6255333674688514,
"grad_norm": 0.8271950377447332,
"learning_rate": 2.016897978650833e-05,
"loss": 0.5861,
"num_tokens": 120695369.0,
"step": 3665
},
{
"epoch": 0.6263867554190135,
"grad_norm": 0.8151322459613584,
"learning_rate": 2.010897621628362e-05,
"loss": 0.6138,
"num_tokens": 121195424.0,
"step": 3670
},
{
"epoch": 0.6272401433691757,
"grad_norm": 0.9177483702936727,
"learning_rate": 2.0049031511128485e-05,
"loss": 0.588,
"num_tokens": 121647191.0,
"step": 3675
},
{
"epoch": 0.6280935313193378,
"grad_norm": 0.9121002779909388,
"learning_rate": 1.998914614846652e-05,
"loss": 0.5563,
"num_tokens": 122107208.0,
"step": 3680
},
{
"epoch": 0.6289469192694999,
"grad_norm": 0.8173540767440151,
"learning_rate": 1.9929320605248724e-05,
"loss": 0.564,
"num_tokens": 122573529.0,
"step": 3685
},
{
"epoch": 0.6298003072196621,
"grad_norm": 1.043731044651841,
"learning_rate": 1.9869555357949632e-05,
"loss": 0.6045,
"num_tokens": 123065555.0,
"step": 3690
},
{
"epoch": 0.6306536951698242,
"grad_norm": 1.0739948934639034,
"learning_rate": 1.980985088256358e-05,
"loss": 0.5956,
"num_tokens": 123541397.0,
"step": 3695
},
{
"epoch": 0.6315070831199864,
"grad_norm": 0.9196803826879293,
"learning_rate": 1.975020765460091e-05,
"loss": 0.5922,
"num_tokens": 124027333.0,
"step": 3700
},
{
"epoch": 0.6323604710701485,
"grad_norm": 0.850161249395921,
"learning_rate": 1.9690626149084123e-05,
"loss": 0.6113,
"num_tokens": 124511073.0,
"step": 3705
},
{
"epoch": 0.6332138590203107,
"grad_norm": 0.8586604654257546,
"learning_rate": 1.9631106840544172e-05,
"loss": 0.5594,
"num_tokens": 124983891.0,
"step": 3710
},
{
"epoch": 0.6340672469704728,
"grad_norm": 0.8071502796590858,
"learning_rate": 1.9571650203016617e-05,
"loss": 0.6022,
"num_tokens": 125450183.0,
"step": 3715
},
{
"epoch": 0.6349206349206349,
"grad_norm": 0.8369926469871681,
"learning_rate": 1.9512256710037917e-05,
"loss": 0.5924,
"num_tokens": 125928696.0,
"step": 3720
},
{
"epoch": 0.6357740228707971,
"grad_norm": 0.8487805091950206,
"learning_rate": 1.9452926834641617e-05,
"loss": 0.5845,
"num_tokens": 126461448.0,
"step": 3725
},
{
"epoch": 0.6366274108209592,
"grad_norm": 0.7988342822329317,
"learning_rate": 1.939366104935455e-05,
"loss": 0.599,
"num_tokens": 126938768.0,
"step": 3730
},
{
"epoch": 0.6374807987711214,
"grad_norm": 0.926173531550069,
"learning_rate": 1.9334459826193145e-05,
"loss": 0.6254,
"num_tokens": 127319352.0,
"step": 3735
},
{
"epoch": 0.6383341867212835,
"grad_norm": 0.837561318846321,
"learning_rate": 1.927532363665962e-05,
"loss": 0.653,
"num_tokens": 127838140.0,
"step": 3740
},
{
"epoch": 0.6391875746714456,
"grad_norm": 0.8466881218592522,
"learning_rate": 1.921625295173824e-05,
"loss": 0.5964,
"num_tokens": 128310279.0,
"step": 3745
},
{
"epoch": 0.6400409626216078,
"grad_norm": 0.7976790416425076,
"learning_rate": 1.9157248241891574e-05,
"loss": 0.6267,
"num_tokens": 128780236.0,
"step": 3750
},
{
"epoch": 0.6408943505717699,
"grad_norm": 0.9541677508158435,
"learning_rate": 1.9098309977056717e-05,
"loss": 0.5881,
"num_tokens": 129199502.0,
"step": 3755
},
{
"epoch": 0.6417477385219321,
"grad_norm": 1.0530842262619469,
"learning_rate": 1.9039438626641598e-05,
"loss": 0.6341,
"num_tokens": 129727094.0,
"step": 3760
},
{
"epoch": 0.6426011264720942,
"grad_norm": 0.8739037742597267,
"learning_rate": 1.8980634659521183e-05,
"loss": 0.5494,
"num_tokens": 130229389.0,
"step": 3765
},
{
"epoch": 0.6434545144222563,
"grad_norm": 0.8423251381628554,
"learning_rate": 1.89218985440338e-05,
"loss": 0.5699,
"num_tokens": 130679646.0,
"step": 3770
},
{
"epoch": 0.6443079023724185,
"grad_norm": 0.9485543731171855,
"learning_rate": 1.886323074797736e-05,
"loss": 0.5918,
"num_tokens": 131161913.0,
"step": 3775
},
{
"epoch": 0.6451612903225806,
"grad_norm": 0.8516489909370533,
"learning_rate": 1.880463173860565e-05,
"loss": 0.6067,
"num_tokens": 131619026.0,
"step": 3780
},
{
"epoch": 0.6460146782727428,
"grad_norm": 0.851808651800723,
"learning_rate": 1.8746101982624632e-05,
"loss": 0.5866,
"num_tokens": 132075908.0,
"step": 3785
},
{
"epoch": 0.6468680662229049,
"grad_norm": 0.8435590362188361,
"learning_rate": 1.8687641946188673e-05,
"loss": 0.5547,
"num_tokens": 132528956.0,
"step": 3790
},
{
"epoch": 0.647721454173067,
"grad_norm": 0.9957301124037699,
"learning_rate": 1.8629252094896903e-05,
"loss": 0.6261,
"num_tokens": 132985693.0,
"step": 3795
},
{
"epoch": 0.6485748421232292,
"grad_norm": 0.8507050967745332,
"learning_rate": 1.8570932893789443e-05,
"loss": 0.5779,
"num_tokens": 133479496.0,
"step": 3800
},
{
"epoch": 0.6494282300733913,
"grad_norm": 0.918721141542831,
"learning_rate": 1.8512684807343734e-05,
"loss": 0.5992,
"num_tokens": 133904688.0,
"step": 3805
},
{
"epoch": 0.6502816180235536,
"grad_norm": 0.8982922900971878,
"learning_rate": 1.8454508299470846e-05,
"loss": 0.6113,
"num_tokens": 134399643.0,
"step": 3810
},
{
"epoch": 0.6511350059737157,
"grad_norm": 1.0182409968166741,
"learning_rate": 1.8396403833511744e-05,
"loss": 0.6079,
"num_tokens": 134876960.0,
"step": 3815
},
{
"epoch": 0.6519883939238778,
"grad_norm": 0.8980802950652549,
"learning_rate": 1.8338371872233646e-05,
"loss": 0.5456,
"num_tokens": 135314218.0,
"step": 3820
},
{
"epoch": 0.65284178187404,
"grad_norm": 0.8141386565121531,
"learning_rate": 1.828041287782628e-05,
"loss": 0.596,
"num_tokens": 135803609.0,
"step": 3825
},
{
"epoch": 0.6536951698242021,
"grad_norm": 0.7990864081217415,
"learning_rate": 1.8222527311898274e-05,
"loss": 0.5935,
"num_tokens": 136226687.0,
"step": 3830
},
{
"epoch": 0.6545485577743643,
"grad_norm": 0.9144391970153878,
"learning_rate": 1.8164715635473438e-05,
"loss": 0.5617,
"num_tokens": 136652839.0,
"step": 3835
},
{
"epoch": 0.6554019457245264,
"grad_norm": 0.9096006554057322,
"learning_rate": 1.8106978308987076e-05,
"loss": 0.5756,
"num_tokens": 137152722.0,
"step": 3840
},
{
"epoch": 0.6562553336746885,
"grad_norm": 0.8047183013047816,
"learning_rate": 1.8049315792282345e-05,
"loss": 0.5913,
"num_tokens": 137684409.0,
"step": 3845
},
{
"epoch": 0.6571087216248507,
"grad_norm": 0.8641039631825128,
"learning_rate": 1.799172854460659e-05,
"loss": 0.6168,
"num_tokens": 138187589.0,
"step": 3850
},
{
"epoch": 0.6579621095750128,
"grad_norm": 0.8900431344536037,
"learning_rate": 1.793421702460769e-05,
"loss": 0.5878,
"num_tokens": 138652113.0,
"step": 3855
},
{
"epoch": 0.658815497525175,
"grad_norm": 0.9005201091983084,
"learning_rate": 1.78767816903304e-05,
"loss": 0.6507,
"num_tokens": 139126560.0,
"step": 3860
},
{
"epoch": 0.6596688854753371,
"grad_norm": 0.901548092005396,
"learning_rate": 1.7819422999212677e-05,
"loss": 0.5812,
"num_tokens": 139561418.0,
"step": 3865
},
{
"epoch": 0.6605222734254992,
"grad_norm": 0.9380903683320696,
"learning_rate": 1.7762141408082096e-05,
"loss": 0.6025,
"num_tokens": 140037433.0,
"step": 3870
},
{
"epoch": 0.6613756613756614,
"grad_norm": 0.953322480284107,
"learning_rate": 1.7704937373152147e-05,
"loss": 0.5809,
"num_tokens": 140523109.0,
"step": 3875
},
{
"epoch": 0.6622290493258235,
"grad_norm": 0.8874432089428697,
"learning_rate": 1.7647811350018646e-05,
"loss": 0.6044,
"num_tokens": 141015373.0,
"step": 3880
},
{
"epoch": 0.6630824372759857,
"grad_norm": 0.8957954095580423,
"learning_rate": 1.7590763793656095e-05,
"loss": 0.5643,
"num_tokens": 141446951.0,
"step": 3885
},
{
"epoch": 0.6639358252261478,
"grad_norm": 0.855096578289474,
"learning_rate": 1.753379515841404e-05,
"loss": 0.564,
"num_tokens": 141886136.0,
"step": 3890
},
{
"epoch": 0.6647892131763099,
"grad_norm": 0.8271763713417735,
"learning_rate": 1.7476905898013494e-05,
"loss": 0.5805,
"num_tokens": 142352308.0,
"step": 3895
},
{
"epoch": 0.6656426011264721,
"grad_norm": 0.7605289501292125,
"learning_rate": 1.7420096465543262e-05,
"loss": 0.5847,
"num_tokens": 142858814.0,
"step": 3900
},
{
"epoch": 0.6664959890766342,
"grad_norm": 0.9062476293435004,
"learning_rate": 1.73633673134564e-05,
"loss": 0.5945,
"num_tokens": 143326743.0,
"step": 3905
},
{
"epoch": 0.6673493770267964,
"grad_norm": 0.8738648590116904,
"learning_rate": 1.7306718893566556e-05,
"loss": 0.6153,
"num_tokens": 143878702.0,
"step": 3910
},
{
"epoch": 0.6682027649769585,
"grad_norm": 0.9899124493608162,
"learning_rate": 1.725015165704441e-05,
"loss": 0.598,
"num_tokens": 144328462.0,
"step": 3915
},
{
"epoch": 0.6690561529271206,
"grad_norm": 0.8292704517142464,
"learning_rate": 1.7193666054414062e-05,
"loss": 0.5899,
"num_tokens": 144856028.0,
"step": 3920
},
{
"epoch": 0.6699095408772828,
"grad_norm": 0.8783635732800318,
"learning_rate": 1.7137262535549424e-05,
"loss": 0.5837,
"num_tokens": 145319975.0,
"step": 3925
},
{
"epoch": 0.6707629288274449,
"grad_norm": 0.9691306002384159,
"learning_rate": 1.7080941549670704e-05,
"loss": 0.5947,
"num_tokens": 145801559.0,
"step": 3930
},
{
"epoch": 0.6716163167776071,
"grad_norm": 0.8355517780744063,
"learning_rate": 1.7024703545340738e-05,
"loss": 0.5661,
"num_tokens": 146254549.0,
"step": 3935
},
{
"epoch": 0.6724697047277692,
"grad_norm": 0.7902115733088367,
"learning_rate": 1.6968548970461497e-05,
"loss": 0.5626,
"num_tokens": 146721482.0,
"step": 3940
},
{
"epoch": 0.6733230926779313,
"grad_norm": 0.8524753464674293,
"learning_rate": 1.691247827227049e-05,
"loss": 0.5552,
"num_tokens": 147214936.0,
"step": 3945
},
{
"epoch": 0.6741764806280935,
"grad_norm": 0.8097399865281222,
"learning_rate": 1.6856491897337152e-05,
"loss": 0.5702,
"num_tokens": 147707141.0,
"step": 3950
},
{
"epoch": 0.6750298685782556,
"grad_norm": 0.8749505840231333,
"learning_rate": 1.6800590291559395e-05,
"loss": 0.5799,
"num_tokens": 148175414.0,
"step": 3955
},
{
"epoch": 0.6758832565284179,
"grad_norm": 0.9825297053765945,
"learning_rate": 1.6744773900159954e-05,
"loss": 0.5953,
"num_tokens": 148620002.0,
"step": 3960
},
{
"epoch": 0.67673664447858,
"grad_norm": 0.8965602948755153,
"learning_rate": 1.6689043167682884e-05,
"loss": 0.5825,
"num_tokens": 149121519.0,
"step": 3965
},
{
"epoch": 0.6775900324287422,
"grad_norm": 0.8623362168580441,
"learning_rate": 1.663339853799005e-05,
"loss": 0.5578,
"num_tokens": 149555256.0,
"step": 3970
},
{
"epoch": 0.6784434203789043,
"grad_norm": 1.0250814917423345,
"learning_rate": 1.657784045425752e-05,
"loss": 0.5544,
"num_tokens": 150051458.0,
"step": 3975
},
{
"epoch": 0.6792968083290664,
"grad_norm": 0.9016034723303509,
"learning_rate": 1.6522369358972107e-05,
"loss": 0.5731,
"num_tokens": 150535405.0,
"step": 3980
},
{
"epoch": 0.6801501962792286,
"grad_norm": 1.0801481065719016,
"learning_rate": 1.646698569392779e-05,
"loss": 0.609,
"num_tokens": 150994090.0,
"step": 3985
},
{
"epoch": 0.6810035842293907,
"grad_norm": 0.9026329662433026,
"learning_rate": 1.6411689900222233e-05,
"loss": 0.589,
"num_tokens": 151437507.0,
"step": 3990
},
{
"epoch": 0.6818569721795529,
"grad_norm": 0.8305286612610906,
"learning_rate": 1.6356482418253264e-05,
"loss": 0.6108,
"num_tokens": 151897755.0,
"step": 3995
},
{
"epoch": 0.682710360129715,
"grad_norm": 0.8759053799322227,
"learning_rate": 1.630136368771534e-05,
"loss": 0.5727,
"num_tokens": 152392981.0,
"step": 4000
},
{
"epoch": 0.6835637480798771,
"grad_norm": 0.8498248549941118,
"learning_rate": 1.624633414759608e-05,
"loss": 0.628,
"num_tokens": 152945921.0,
"step": 4005
},
{
"epoch": 0.6844171360300393,
"grad_norm": 0.9577467020674911,
"learning_rate": 1.619139423617274e-05,
"loss": 0.5955,
"num_tokens": 153405452.0,
"step": 4010
},
{
"epoch": 0.6852705239802014,
"grad_norm": 0.8677137719544256,
"learning_rate": 1.6136544391008766e-05,
"loss": 0.6011,
"num_tokens": 153866503.0,
"step": 4015
},
{
"epoch": 0.6861239119303636,
"grad_norm": 0.8762759941080992,
"learning_rate": 1.608178504895025e-05,
"loss": 0.6003,
"num_tokens": 154301458.0,
"step": 4020
},
{
"epoch": 0.6869772998805257,
"grad_norm": 0.9447545948904823,
"learning_rate": 1.6027116646122497e-05,
"loss": 0.5676,
"num_tokens": 154747758.0,
"step": 4025
},
{
"epoch": 0.6878306878306878,
"grad_norm": 0.8154066594371561,
"learning_rate": 1.5972539617926547e-05,
"loss": 0.5603,
"num_tokens": 155226031.0,
"step": 4030
},
{
"epoch": 0.68868407578085,
"grad_norm": 0.8283393416577676,
"learning_rate": 1.5918054399035656e-05,
"loss": 0.5773,
"num_tokens": 155700124.0,
"step": 4035
},
{
"epoch": 0.6895374637310121,
"grad_norm": 0.9745180674537747,
"learning_rate": 1.5863661423391924e-05,
"loss": 0.6037,
"num_tokens": 156126810.0,
"step": 4040
},
{
"epoch": 0.6903908516811743,
"grad_norm": 0.8064566786044969,
"learning_rate": 1.580936112420275e-05,
"loss": 0.5807,
"num_tokens": 156601011.0,
"step": 4045
},
{
"epoch": 0.6912442396313364,
"grad_norm": 0.8001959729849549,
"learning_rate": 1.5755153933937433e-05,
"loss": 0.6164,
"num_tokens": 157095880.0,
"step": 4050
},
{
"epoch": 0.6920976275814985,
"grad_norm": 0.942834039862139,
"learning_rate": 1.5701040284323733e-05,
"loss": 0.5941,
"num_tokens": 157586499.0,
"step": 4055
},
{
"epoch": 0.6929510155316607,
"grad_norm": 0.9715453174727748,
"learning_rate": 1.5647020606344374e-05,
"loss": 0.5795,
"num_tokens": 158008275.0,
"step": 4060
},
{
"epoch": 0.6938044034818228,
"grad_norm": 0.9026555991717418,
"learning_rate": 1.5593095330233702e-05,
"loss": 0.5771,
"num_tokens": 158452236.0,
"step": 4065
},
{
"epoch": 0.694657791431985,
"grad_norm": 1.0117298958493552,
"learning_rate": 1.553926488547417e-05,
"loss": 0.6012,
"num_tokens": 158941412.0,
"step": 4070
},
{
"epoch": 0.6955111793821471,
"grad_norm": 0.8584768240163265,
"learning_rate": 1.5485529700792972e-05,
"loss": 0.5709,
"num_tokens": 159418312.0,
"step": 4075
},
{
"epoch": 0.6963645673323092,
"grad_norm": 0.796633996689525,
"learning_rate": 1.5431890204158623e-05,
"loss": 0.5733,
"num_tokens": 159876193.0,
"step": 4080
},
{
"epoch": 0.6972179552824714,
"grad_norm": 0.8652610369119335,
"learning_rate": 1.5378346822777506e-05,
"loss": 0.5653,
"num_tokens": 160337567.0,
"step": 4085
},
{
"epoch": 0.6980713432326335,
"grad_norm": 0.883756733604466,
"learning_rate": 1.5324899983090552e-05,
"loss": 0.5973,
"num_tokens": 160836081.0,
"step": 4090
},
{
"epoch": 0.6989247311827957,
"grad_norm": 0.8346242397306459,
"learning_rate": 1.5271550110769756e-05,
"loss": 0.6197,
"num_tokens": 161374383.0,
"step": 4095
},
{
"epoch": 0.6997781191329578,
"grad_norm": 0.8332005290816415,
"learning_rate": 1.5218297630714829e-05,
"loss": 0.6205,
"num_tokens": 161857305.0,
"step": 4100
},
{
"epoch": 0.7006315070831199,
"grad_norm": 0.8712175844197076,
"learning_rate": 1.516514296704984e-05,
"loss": 0.5773,
"num_tokens": 162348361.0,
"step": 4105
},
{
"epoch": 0.7014848950332822,
"grad_norm": 0.9058422726334173,
"learning_rate": 1.511208654311977e-05,
"loss": 0.5879,
"num_tokens": 162851803.0,
"step": 4110
},
{
"epoch": 0.7023382829834443,
"grad_norm": 0.9044790346726684,
"learning_rate": 1.5059128781487225e-05,
"loss": 0.5829,
"num_tokens": 163287986.0,
"step": 4115
},
{
"epoch": 0.7031916709336065,
"grad_norm": 0.8648827055391948,
"learning_rate": 1.5006270103928976e-05,
"loss": 0.5817,
"num_tokens": 163718273.0,
"step": 4120
},
{
"epoch": 0.7040450588837686,
"grad_norm": 0.8076594039591006,
"learning_rate": 1.4953510931432685e-05,
"loss": 0.5985,
"num_tokens": 164166532.0,
"step": 4125
},
{
"epoch": 0.7048984468339307,
"grad_norm": 0.815303572116947,
"learning_rate": 1.4900851684193512e-05,
"loss": 0.6236,
"num_tokens": 164596080.0,
"step": 4130
},
{
"epoch": 0.7057518347840929,
"grad_norm": 0.8302985383503674,
"learning_rate": 1.4848292781610751e-05,
"loss": 0.597,
"num_tokens": 165078423.0,
"step": 4135
},
{
"epoch": 0.706605222734255,
"grad_norm": 0.8719511002078489,
"learning_rate": 1.4795834642284528e-05,
"loss": 0.5958,
"num_tokens": 165553538.0,
"step": 4140
},
{
"epoch": 0.7074586106844172,
"grad_norm": 0.8963125498569914,
"learning_rate": 1.4743477684012438e-05,
"loss": 0.5988,
"num_tokens": 166052376.0,
"step": 4145
},
{
"epoch": 0.7083119986345793,
"grad_norm": 0.9500910001284955,
"learning_rate": 1.4691222323786253e-05,
"loss": 0.5795,
"num_tokens": 166509198.0,
"step": 4150
},
{
"epoch": 0.7091653865847414,
"grad_norm": 0.9576655914462456,
"learning_rate": 1.4639068977788542e-05,
"loss": 0.6329,
"num_tokens": 167013563.0,
"step": 4155
},
{
"epoch": 0.7100187745349036,
"grad_norm": 0.8549981482176169,
"learning_rate": 1.4587018061389426e-05,
"loss": 0.63,
"num_tokens": 167448213.0,
"step": 4160
},
{
"epoch": 0.7108721624850657,
"grad_norm": 0.8226809221364069,
"learning_rate": 1.453506998914323e-05,
"loss": 0.5785,
"num_tokens": 167971432.0,
"step": 4165
},
{
"epoch": 0.7117255504352279,
"grad_norm": 0.8872256261929765,
"learning_rate": 1.448322517478516e-05,
"loss": 0.5908,
"num_tokens": 168433995.0,
"step": 4170
},
{
"epoch": 0.71257893838539,
"grad_norm": 0.8360410226445361,
"learning_rate": 1.4431484031228069e-05,
"loss": 0.5594,
"num_tokens": 168904923.0,
"step": 4175
},
{
"epoch": 0.7134323263355521,
"grad_norm": 0.8987026195932659,
"learning_rate": 1.4379846970559113e-05,
"loss": 0.5793,
"num_tokens": 169381861.0,
"step": 4180
},
{
"epoch": 0.7142857142857143,
"grad_norm": 0.9097929846772739,
"learning_rate": 1.4328314404036486e-05,
"loss": 0.5907,
"num_tokens": 169808194.0,
"step": 4185
},
{
"epoch": 0.7151391022358764,
"grad_norm": 1.0974482399269831,
"learning_rate": 1.4276886742086175e-05,
"loss": 0.5923,
"num_tokens": 170298702.0,
"step": 4190
},
{
"epoch": 0.7159924901860386,
"grad_norm": 0.9360284389299645,
"learning_rate": 1.4225564394298641e-05,
"loss": 0.6333,
"num_tokens": 170759502.0,
"step": 4195
},
{
"epoch": 0.7168458781362007,
"grad_norm": 0.9152888746195195,
"learning_rate": 1.4174347769425594e-05,
"loss": 0.5702,
"num_tokens": 171213451.0,
"step": 4200
},
{
"epoch": 0.7176992660863628,
"grad_norm": 0.886847518370069,
"learning_rate": 1.412323727537671e-05,
"loss": 0.6102,
"num_tokens": 171667948.0,
"step": 4205
},
{
"epoch": 0.718552654036525,
"grad_norm": 0.791848385672325,
"learning_rate": 1.407223331921641e-05,
"loss": 0.5422,
"num_tokens": 172135689.0,
"step": 4210
},
{
"epoch": 0.7194060419866871,
"grad_norm": 0.8410700768427187,
"learning_rate": 1.4021336307160612e-05,
"loss": 0.5762,
"num_tokens": 172566490.0,
"step": 4215
},
{
"epoch": 0.7202594299368493,
"grad_norm": 0.8585840404859496,
"learning_rate": 1.3970546644573467e-05,
"loss": 0.5619,
"num_tokens": 173084261.0,
"step": 4220
},
{
"epoch": 0.7211128178870114,
"grad_norm": 0.9115552103460094,
"learning_rate": 1.3919864735964167e-05,
"loss": 0.6028,
"num_tokens": 173525169.0,
"step": 4225
},
{
"epoch": 0.7219662058371735,
"grad_norm": 0.8649833459661458,
"learning_rate": 1.3869290984983685e-05,
"loss": 0.5774,
"num_tokens": 173986927.0,
"step": 4230
},
{
"epoch": 0.7228195937873357,
"grad_norm": 0.8467791476139611,
"learning_rate": 1.3818825794421619e-05,
"loss": 0.5741,
"num_tokens": 174485026.0,
"step": 4235
},
{
"epoch": 0.7236729817374978,
"grad_norm": 0.8692688687345984,
"learning_rate": 1.376846956620293e-05,
"loss": 0.5916,
"num_tokens": 174975168.0,
"step": 4240
},
{
"epoch": 0.72452636968766,
"grad_norm": 0.7864597052391264,
"learning_rate": 1.3718222701384757e-05,
"loss": 0.5736,
"num_tokens": 175474304.0,
"step": 4245
},
{
"epoch": 0.7253797576378221,
"grad_norm": 0.8607415619129325,
"learning_rate": 1.3668085600153232e-05,
"loss": 0.5864,
"num_tokens": 175914177.0,
"step": 4250
},
{
"epoch": 0.7262331455879844,
"grad_norm": 0.9509781922334833,
"learning_rate": 1.3618058661820277e-05,
"loss": 0.5675,
"num_tokens": 176354413.0,
"step": 4255
},
{
"epoch": 0.7270865335381465,
"grad_norm": 0.8824134882346422,
"learning_rate": 1.3568142284820442e-05,
"loss": 0.6194,
"num_tokens": 176903257.0,
"step": 4260
},
{
"epoch": 0.7279399214883086,
"grad_norm": 0.8709398836701243,
"learning_rate": 1.3518336866707723e-05,
"loss": 0.5883,
"num_tokens": 177361648.0,
"step": 4265
},
{
"epoch": 0.7287933094384708,
"grad_norm": 0.880944482403639,
"learning_rate": 1.3468642804152374e-05,
"loss": 0.5893,
"num_tokens": 177905691.0,
"step": 4270
},
{
"epoch": 0.7296466973886329,
"grad_norm": 0.8470029010342051,
"learning_rate": 1.3419060492937802e-05,
"loss": 0.5644,
"num_tokens": 178398025.0,
"step": 4275
},
{
"epoch": 0.7305000853387951,
"grad_norm": 0.9227327888215903,
"learning_rate": 1.3369590327957348e-05,
"loss": 0.5975,
"num_tokens": 178872312.0,
"step": 4280
},
{
"epoch": 0.7313534732889572,
"grad_norm": 0.9366754745843368,
"learning_rate": 1.3320232703211214e-05,
"loss": 0.5911,
"num_tokens": 179323634.0,
"step": 4285
},
{
"epoch": 0.7322068612391193,
"grad_norm": 0.8849252887229547,
"learning_rate": 1.3270988011803243e-05,
"loss": 0.6062,
"num_tokens": 179771488.0,
"step": 4290
},
{
"epoch": 0.7330602491892815,
"grad_norm": 0.7892258484928227,
"learning_rate": 1.3221856645937868e-05,
"loss": 0.5679,
"num_tokens": 180256627.0,
"step": 4295
},
{
"epoch": 0.7339136371394436,
"grad_norm": 0.9137667870343035,
"learning_rate": 1.3172838996916936e-05,
"loss": 0.61,
"num_tokens": 180732439.0,
"step": 4300
},
{
"epoch": 0.7347670250896058,
"grad_norm": 0.9631346489471143,
"learning_rate": 1.3123935455136599e-05,
"loss": 0.6022,
"num_tokens": 181226259.0,
"step": 4305
},
{
"epoch": 0.7356204130397679,
"grad_norm": 0.8968709358166163,
"learning_rate": 1.307514641008424e-05,
"loss": 0.5965,
"num_tokens": 181714583.0,
"step": 4310
},
{
"epoch": 0.73647380098993,
"grad_norm": 0.9166240270197292,
"learning_rate": 1.302647225033532e-05,
"loss": 0.5887,
"num_tokens": 182146534.0,
"step": 4315
},
{
"epoch": 0.7373271889400922,
"grad_norm": 0.8160802329739986,
"learning_rate": 1.2977913363550304e-05,
"loss": 0.5769,
"num_tokens": 182675522.0,
"step": 4320
},
{
"epoch": 0.7381805768902543,
"grad_norm": 0.8562280243416345,
"learning_rate": 1.2929470136471607e-05,
"loss": 0.6258,
"num_tokens": 183187657.0,
"step": 4325
},
{
"epoch": 0.7390339648404165,
"grad_norm": 0.8987806086881184,
"learning_rate": 1.288114295492045e-05,
"loss": 0.5997,
"num_tokens": 183642551.0,
"step": 4330
},
{
"epoch": 0.7398873527905786,
"grad_norm": 0.7328617713394691,
"learning_rate": 1.2832932203793848e-05,
"loss": 0.5957,
"num_tokens": 184184444.0,
"step": 4335
},
{
"epoch": 0.7407407407407407,
"grad_norm": 0.7980388669261456,
"learning_rate": 1.2784838267061491e-05,
"loss": 0.5857,
"num_tokens": 184646945.0,
"step": 4340
},
{
"epoch": 0.7415941286909029,
"grad_norm": 0.8859400994034398,
"learning_rate": 1.273686152776274e-05,
"loss": 0.6085,
"num_tokens": 185108387.0,
"step": 4345
},
{
"epoch": 0.742447516641065,
"grad_norm": 0.7854784674766948,
"learning_rate": 1.2689002368003539e-05,
"loss": 0.535,
"num_tokens": 185565586.0,
"step": 4350
},
{
"epoch": 0.7433009045912272,
"grad_norm": 0.7570419777611505,
"learning_rate": 1.2641261168953366e-05,
"loss": 0.6244,
"num_tokens": 186141614.0,
"step": 4355
},
{
"epoch": 0.7441542925413893,
"grad_norm": 0.7757142596947014,
"learning_rate": 1.2593638310842235e-05,
"loss": 0.581,
"num_tokens": 186661552.0,
"step": 4360
},
{
"epoch": 0.7450076804915514,
"grad_norm": 0.8839499200264586,
"learning_rate": 1.2546134172957619e-05,
"loss": 0.6213,
"num_tokens": 187144035.0,
"step": 4365
},
{
"epoch": 0.7458610684417136,
"grad_norm": 0.816312832091104,
"learning_rate": 1.2498749133641489e-05,
"loss": 0.5561,
"num_tokens": 187581120.0,
"step": 4370
},
{
"epoch": 0.7467144563918757,
"grad_norm": 0.8213980561955142,
"learning_rate": 1.245148357028725e-05,
"loss": 0.5857,
"num_tokens": 188075796.0,
"step": 4375
},
{
"epoch": 0.7475678443420379,
"grad_norm": 0.9380415013983583,
"learning_rate": 1.2404337859336743e-05,
"loss": 0.5675,
"num_tokens": 188555486.0,
"step": 4380
},
{
"epoch": 0.7484212322922,
"grad_norm": 0.9725183192014755,
"learning_rate": 1.2357312376277279e-05,
"loss": 0.5502,
"num_tokens": 188977941.0,
"step": 4385
},
{
"epoch": 0.7492746202423621,
"grad_norm": 0.7992605376572952,
"learning_rate": 1.2310407495638599e-05,
"loss": 0.5645,
"num_tokens": 189416481.0,
"step": 4390
},
{
"epoch": 0.7501280081925243,
"grad_norm": 0.9374154424761773,
"learning_rate": 1.226362359098995e-05,
"loss": 0.5715,
"num_tokens": 189868560.0,
"step": 4395
},
{
"epoch": 0.7509813961426864,
"grad_norm": 0.891667619403936,
"learning_rate": 1.2216961034937048e-05,
"loss": 0.593,
"num_tokens": 190341488.0,
"step": 4400
},
{
"epoch": 0.7518347840928487,
"grad_norm": 0.8840376687918124,
"learning_rate": 1.2170420199119151e-05,
"loss": 0.5809,
"num_tokens": 190774240.0,
"step": 4405
},
{
"epoch": 0.7526881720430108,
"grad_norm": 0.8190274046973514,
"learning_rate": 1.2124001454206102e-05,
"loss": 0.5845,
"num_tokens": 191282701.0,
"step": 4410
},
{
"epoch": 0.7535415599931728,
"grad_norm": 0.9123518049084535,
"learning_rate": 1.2077705169895338e-05,
"loss": 0.5477,
"num_tokens": 191713963.0,
"step": 4415
},
{
"epoch": 0.7543949479433351,
"grad_norm": 0.8188073549864183,
"learning_rate": 1.2031531714908997e-05,
"loss": 0.5755,
"num_tokens": 192195436.0,
"step": 4420
},
{
"epoch": 0.7552483358934972,
"grad_norm": 0.7994958336328707,
"learning_rate": 1.1985481456990928e-05,
"loss": 0.6002,
"num_tokens": 192609004.0,
"step": 4425
},
{
"epoch": 0.7561017238436594,
"grad_norm": 0.9659539232854811,
"learning_rate": 1.1939554762903813e-05,
"loss": 0.5689,
"num_tokens": 193040643.0,
"step": 4430
},
{
"epoch": 0.7569551117938215,
"grad_norm": 0.8994805121484224,
"learning_rate": 1.189375199842622e-05,
"loss": 0.5746,
"num_tokens": 193509769.0,
"step": 4435
},
{
"epoch": 0.7578084997439836,
"grad_norm": 0.879957189035845,
"learning_rate": 1.1848073528349676e-05,
"loss": 0.5944,
"num_tokens": 193975222.0,
"step": 4440
},
{
"epoch": 0.7586618876941458,
"grad_norm": 0.7874543956440837,
"learning_rate": 1.1802519716475786e-05,
"loss": 0.5826,
"num_tokens": 194477436.0,
"step": 4445
},
{
"epoch": 0.7595152756443079,
"grad_norm": 0.7963664649172572,
"learning_rate": 1.1757090925613323e-05,
"loss": 0.5891,
"num_tokens": 194908296.0,
"step": 4450
},
{
"epoch": 0.7603686635944701,
"grad_norm": 0.7927650818238564,
"learning_rate": 1.171178751757535e-05,
"loss": 0.5368,
"num_tokens": 195384337.0,
"step": 4455
},
{
"epoch": 0.7612220515446322,
"grad_norm": 0.8590390896353224,
"learning_rate": 1.1666609853176342e-05,
"loss": 0.5844,
"num_tokens": 195804956.0,
"step": 4460
},
{
"epoch": 0.7620754394947943,
"grad_norm": 0.8261475530402624,
"learning_rate": 1.1621558292229268e-05,
"loss": 0.5832,
"num_tokens": 196260094.0,
"step": 4465
},
{
"epoch": 0.7629288274449565,
"grad_norm": 0.7838091102369364,
"learning_rate": 1.1576633193542797e-05,
"loss": 0.5663,
"num_tokens": 196766634.0,
"step": 4470
},
{
"epoch": 0.7637822153951186,
"grad_norm": 0.8734185248220019,
"learning_rate": 1.1531834914918365e-05,
"loss": 0.5917,
"num_tokens": 197264377.0,
"step": 4475
},
{
"epoch": 0.7646356033452808,
"grad_norm": 0.7423950377840982,
"learning_rate": 1.14871638131474e-05,
"loss": 0.5446,
"num_tokens": 197695417.0,
"step": 4480
},
{
"epoch": 0.7654889912954429,
"grad_norm": 0.8525858260701448,
"learning_rate": 1.1442620244008409e-05,
"loss": 0.5807,
"num_tokens": 198119324.0,
"step": 4485
},
{
"epoch": 0.766342379245605,
"grad_norm": 0.75437978397281,
"learning_rate": 1.1398204562264192e-05,
"loss": 0.5745,
"num_tokens": 198624663.0,
"step": 4490
},
{
"epoch": 0.7671957671957672,
"grad_norm": 0.7874050783277934,
"learning_rate": 1.1353917121659017e-05,
"loss": 0.5672,
"num_tokens": 199080528.0,
"step": 4495
},
{
"epoch": 0.7680491551459293,
"grad_norm": 0.8607808840435229,
"learning_rate": 1.1309758274915756e-05,
"loss": 0.5774,
"num_tokens": 199619482.0,
"step": 4500
},
{
"epoch": 0.7689025430960915,
"grad_norm": 0.7816525166605554,
"learning_rate": 1.1265728373733138e-05,
"loss": 0.5837,
"num_tokens": 200153610.0,
"step": 4505
},
{
"epoch": 0.7697559310462536,
"grad_norm": 0.8119337918394042,
"learning_rate": 1.1221827768782909e-05,
"loss": 0.5649,
"num_tokens": 200611222.0,
"step": 4510
},
{
"epoch": 0.7706093189964157,
"grad_norm": 0.8798648885674402,
"learning_rate": 1.1178056809707035e-05,
"loss": 0.5703,
"num_tokens": 201102129.0,
"step": 4515
},
{
"epoch": 0.7714627069465779,
"grad_norm": 0.7970258353455149,
"learning_rate": 1.1134415845114954e-05,
"loss": 0.5595,
"num_tokens": 201577891.0,
"step": 4520
},
{
"epoch": 0.77231609489674,
"grad_norm": 0.8186504328550432,
"learning_rate": 1.1090905222580756e-05,
"loss": 0.5512,
"num_tokens": 202031474.0,
"step": 4525
},
{
"epoch": 0.7731694828469022,
"grad_norm": 0.8906791572152828,
"learning_rate": 1.104752528864044e-05,
"loss": 0.6207,
"num_tokens": 202513129.0,
"step": 4530
},
{
"epoch": 0.7740228707970643,
"grad_norm": 0.783806411579549,
"learning_rate": 1.1004276388789146e-05,
"loss": 0.5761,
"num_tokens": 203027497.0,
"step": 4535
},
{
"epoch": 0.7748762587472265,
"grad_norm": 0.773588946716583,
"learning_rate": 1.096115886747842e-05,
"loss": 0.5752,
"num_tokens": 203502830.0,
"step": 4540
},
{
"epoch": 0.7757296466973886,
"grad_norm": 0.8894811028102954,
"learning_rate": 1.0918173068113446e-05,
"loss": 0.5971,
"num_tokens": 203985185.0,
"step": 4545
},
{
"epoch": 0.7765830346475507,
"grad_norm": 0.8716160687877994,
"learning_rate": 1.0875319333050315e-05,
"loss": 0.572,
"num_tokens": 204502494.0,
"step": 4550
},
{
"epoch": 0.777436422597713,
"grad_norm": 0.8108686527858917,
"learning_rate": 1.0832598003593325e-05,
"loss": 0.555,
"num_tokens": 204918625.0,
"step": 4555
},
{
"epoch": 0.778289810547875,
"grad_norm": 0.8765591223692875,
"learning_rate": 1.079000941999222e-05,
"loss": 0.5606,
"num_tokens": 205367508.0,
"step": 4560
},
{
"epoch": 0.7791431984980373,
"grad_norm": 0.7740345225147104,
"learning_rate": 1.0747553921439515e-05,
"loss": 0.5662,
"num_tokens": 205882344.0,
"step": 4565
},
{
"epoch": 0.7799965864481994,
"grad_norm": 0.8585554136024073,
"learning_rate": 1.0705231846067792e-05,
"loss": 0.5968,
"num_tokens": 206375735.0,
"step": 4570
},
{
"epoch": 0.7808499743983615,
"grad_norm": 0.7706847829265646,
"learning_rate": 1.0663043530946979e-05,
"loss": 0.5787,
"num_tokens": 206887531.0,
"step": 4575
},
{
"epoch": 0.7817033623485237,
"grad_norm": 0.8490061354179466,
"learning_rate": 1.0620989312081695e-05,
"loss": 0.5658,
"num_tokens": 207352866.0,
"step": 4580
},
{
"epoch": 0.7825567502986858,
"grad_norm": 0.8906414696480907,
"learning_rate": 1.0579069524408547e-05,
"loss": 0.5704,
"num_tokens": 207800778.0,
"step": 4585
},
{
"epoch": 0.783410138248848,
"grad_norm": 0.8075876336778882,
"learning_rate": 1.0537284501793502e-05,
"loss": 0.542,
"num_tokens": 208278578.0,
"step": 4590
},
{
"epoch": 0.7842635261990101,
"grad_norm": 0.8861436743675016,
"learning_rate": 1.0495634577029192e-05,
"loss": 0.5987,
"num_tokens": 208767476.0,
"step": 4595
},
{
"epoch": 0.7851169141491722,
"grad_norm": 0.9121941445826955,
"learning_rate": 1.045412008183227e-05,
"loss": 0.5826,
"num_tokens": 209296377.0,
"step": 4600
},
{
"epoch": 0.7859703020993344,
"grad_norm": 0.8336097249773415,
"learning_rate": 1.0412741346840793e-05,
"loss": 0.5885,
"num_tokens": 209881821.0,
"step": 4605
},
{
"epoch": 0.7868236900494965,
"grad_norm": 0.853499272795292,
"learning_rate": 1.037149870161154e-05,
"loss": 0.6218,
"num_tokens": 210371052.0,
"step": 4610
},
{
"epoch": 0.7876770779996587,
"grad_norm": 0.78391018928751,
"learning_rate": 1.0330392474617448e-05,
"loss": 0.5802,
"num_tokens": 210913400.0,
"step": 4615
},
{
"epoch": 0.7885304659498208,
"grad_norm": 0.8886541706025143,
"learning_rate": 1.0289422993244942e-05,
"loss": 0.5863,
"num_tokens": 211412557.0,
"step": 4620
},
{
"epoch": 0.7893838538999829,
"grad_norm": 0.8510065551834175,
"learning_rate": 1.0248590583791355e-05,
"loss": 0.5917,
"num_tokens": 211919254.0,
"step": 4625
},
{
"epoch": 0.7902372418501451,
"grad_norm": 0.782054120335503,
"learning_rate": 1.0207895571462337e-05,
"loss": 0.5897,
"num_tokens": 212411021.0,
"step": 4630
},
{
"epoch": 0.7910906298003072,
"grad_norm": 0.9616075507807224,
"learning_rate": 1.0167338280369233e-05,
"loss": 0.5961,
"num_tokens": 212845993.0,
"step": 4635
},
{
"epoch": 0.7919440177504694,
"grad_norm": 0.8511972746806536,
"learning_rate": 1.0126919033526536e-05,
"loss": 0.5938,
"num_tokens": 213335201.0,
"step": 4640
},
{
"epoch": 0.7927974057006315,
"grad_norm": 0.8778820941483984,
"learning_rate": 1.0086638152849298e-05,
"loss": 0.585,
"num_tokens": 213824239.0,
"step": 4645
},
{
"epoch": 0.7936507936507936,
"grad_norm": 0.9536384542083954,
"learning_rate": 1.0046495959150554e-05,
"loss": 0.5723,
"num_tokens": 214265326.0,
"step": 4650
},
{
"epoch": 0.7945041816009558,
"grad_norm": 0.8088021837609581,
"learning_rate": 1.0006492772138798e-05,
"loss": 0.5743,
"num_tokens": 214758689.0,
"step": 4655
},
{
"epoch": 0.7953575695511179,
"grad_norm": 1.0221419584239615,
"learning_rate": 9.966628910415413e-06,
"loss": 0.5904,
"num_tokens": 215164364.0,
"step": 4660
},
{
"epoch": 0.7962109575012801,
"grad_norm": 0.7525718089356042,
"learning_rate": 9.926904691472134e-06,
"loss": 0.5867,
"num_tokens": 215687696.0,
"step": 4665
},
{
"epoch": 0.7970643454514422,
"grad_norm": 0.8226639021102814,
"learning_rate": 9.887320431688521e-06,
"loss": 0.5931,
"num_tokens": 216209019.0,
"step": 4670
},
{
"epoch": 0.7979177334016043,
"grad_norm": 0.8644761017616382,
"learning_rate": 9.847876446329457e-06,
"loss": 0.5637,
"num_tokens": 216694502.0,
"step": 4675
},
{
"epoch": 0.7987711213517665,
"grad_norm": 0.8286164858966968,
"learning_rate": 9.808573049542627e-06,
"loss": 0.5649,
"num_tokens": 217141531.0,
"step": 4680
},
{
"epoch": 0.7996245093019286,
"grad_norm": 0.778990278771165,
"learning_rate": 9.76941055435599e-06,
"loss": 0.5836,
"num_tokens": 217585422.0,
"step": 4685
},
{
"epoch": 0.8004778972520908,
"grad_norm": 0.8636912543908317,
"learning_rate": 9.730389272675331e-06,
"loss": 0.5756,
"num_tokens": 218014891.0,
"step": 4690
},
{
"epoch": 0.8013312852022529,
"grad_norm": 0.8904461215069976,
"learning_rate": 9.691509515281738e-06,
"loss": 0.6071,
"num_tokens": 218492528.0,
"step": 4695
},
{
"epoch": 0.802184673152415,
"grad_norm": 0.8795915282441338,
"learning_rate": 9.652771591829156e-06,
"loss": 0.5754,
"num_tokens": 218954717.0,
"step": 4700
},
{
"epoch": 0.8030380611025772,
"grad_norm": 0.9801298504817358,
"learning_rate": 9.614175810841896e-06,
"loss": 0.5862,
"num_tokens": 219452296.0,
"step": 4705
},
{
"epoch": 0.8038914490527393,
"grad_norm": 0.9097635974714854,
"learning_rate": 9.57572247971219e-06,
"loss": 0.616,
"num_tokens": 219967343.0,
"step": 4710
},
{
"epoch": 0.8047448370029016,
"grad_norm": 0.9048513040998128,
"learning_rate": 9.53741190469776e-06,
"loss": 0.619,
"num_tokens": 220415584.0,
"step": 4715
},
{
"epoch": 0.8055982249530637,
"grad_norm": 0.8720022920338458,
"learning_rate": 9.499244390919335e-06,
"loss": 0.5949,
"num_tokens": 220976800.0,
"step": 4720
},
{
"epoch": 0.8064516129032258,
"grad_norm": 0.874631764508416,
"learning_rate": 9.461220242358268e-06,
"loss": 0.5847,
"num_tokens": 221488239.0,
"step": 4725
},
{
"epoch": 0.807305000853388,
"grad_norm": 0.9258057592837162,
"learning_rate": 9.42333976185409e-06,
"loss": 0.5974,
"num_tokens": 221955584.0,
"step": 4730
},
{
"epoch": 0.8081583888035501,
"grad_norm": 0.8881679738093546,
"learning_rate": 9.385603251102084e-06,
"loss": 0.5706,
"num_tokens": 222421916.0,
"step": 4735
},
{
"epoch": 0.8090117767537123,
"grad_norm": 0.8639817388609756,
"learning_rate": 9.348011010650937e-06,
"loss": 0.5617,
"num_tokens": 222840212.0,
"step": 4740
},
{
"epoch": 0.8098651647038744,
"grad_norm": 0.7798641236952212,
"learning_rate": 9.310563339900272e-06,
"loss": 0.5672,
"num_tokens": 223329381.0,
"step": 4745
},
{
"epoch": 0.8107185526540365,
"grad_norm": 0.9982648728468079,
"learning_rate": 9.273260537098315e-06,
"loss": 0.5857,
"num_tokens": 223856776.0,
"step": 4750
},
{
"epoch": 0.8115719406041987,
"grad_norm": 0.8385007623825974,
"learning_rate": 9.236102899339519e-06,
"loss": 0.5767,
"num_tokens": 224327062.0,
"step": 4755
},
{
"epoch": 0.8124253285543608,
"grad_norm": 0.8360601966958555,
"learning_rate": 9.199090722562156e-06,
"loss": 0.5808,
"num_tokens": 224773409.0,
"step": 4760
},
{
"epoch": 0.813278716504523,
"grad_norm": 0.8512635733155746,
"learning_rate": 9.162224301546025e-06,
"loss": 0.5538,
"num_tokens": 225233757.0,
"step": 4765
},
{
"epoch": 0.8141321044546851,
"grad_norm": 0.8816024503899602,
"learning_rate": 9.125503929910035e-06,
"loss": 0.5847,
"num_tokens": 225737840.0,
"step": 4770
},
{
"epoch": 0.8149854924048472,
"grad_norm": 0.8511656206892778,
"learning_rate": 9.08892990010992e-06,
"loss": 0.5321,
"num_tokens": 226192735.0,
"step": 4775
},
{
"epoch": 0.8158388803550094,
"grad_norm": 0.8822874517324099,
"learning_rate": 9.052502503435873e-06,
"loss": 0.5657,
"num_tokens": 226708301.0,
"step": 4780
},
{
"epoch": 0.8166922683051715,
"grad_norm": 1.4766944705049547,
"learning_rate": 9.016222030010259e-06,
"loss": 0.5858,
"num_tokens": 227130751.0,
"step": 4785
},
{
"epoch": 0.8175456562553337,
"grad_norm": 0.9592968439333777,
"learning_rate": 8.980088768785271e-06,
"loss": 0.5858,
"num_tokens": 227615269.0,
"step": 4790
},
{
"epoch": 0.8183990442054958,
"grad_norm": 0.8141808313694623,
"learning_rate": 8.94410300754067e-06,
"loss": 0.5629,
"num_tokens": 228124400.0,
"step": 4795
},
{
"epoch": 0.819252432155658,
"grad_norm": 0.8149199518274812,
"learning_rate": 8.908265032881438e-06,
"loss": 0.6274,
"num_tokens": 228615374.0,
"step": 4800
},
{
"epoch": 0.8201058201058201,
"grad_norm": 0.7908437674204951,
"learning_rate": 8.872575130235533e-06,
"loss": 0.5894,
"num_tokens": 229119350.0,
"step": 4805
},
{
"epoch": 0.8209592080559822,
"grad_norm": 0.8506638324105442,
"learning_rate": 8.837033583851625e-06,
"loss": 0.6181,
"num_tokens": 229579333.0,
"step": 4810
},
{
"epoch": 0.8218125960061444,
"grad_norm": 0.9547144252263968,
"learning_rate": 8.801640676796811e-06,
"loss": 0.57,
"num_tokens": 230025206.0,
"step": 4815
},
{
"epoch": 0.8226659839563065,
"grad_norm": 0.8678414895568546,
"learning_rate": 8.76639669095434e-06,
"loss": 0.6118,
"num_tokens": 230517205.0,
"step": 4820
},
{
"epoch": 0.8235193719064687,
"grad_norm": 0.7943130767209409,
"learning_rate": 8.73130190702143e-06,
"loss": 0.5543,
"num_tokens": 230934827.0,
"step": 4825
},
{
"epoch": 0.8243727598566308,
"grad_norm": 0.9543452649607513,
"learning_rate": 8.696356604506964e-06,
"loss": 0.6102,
"num_tokens": 231385653.0,
"step": 4830
},
{
"epoch": 0.8252261478067929,
"grad_norm": 0.8397955922079047,
"learning_rate": 8.66156106172932e-06,
"loss": 0.5431,
"num_tokens": 231886693.0,
"step": 4835
},
{
"epoch": 0.8260795357569551,
"grad_norm": 0.8996229035875397,
"learning_rate": 8.62691555581411e-06,
"loss": 0.55,
"num_tokens": 232345381.0,
"step": 4840
},
{
"epoch": 0.8269329237071172,
"grad_norm": 0.8009699462259144,
"learning_rate": 8.592420362691994e-06,
"loss": 0.5681,
"num_tokens": 232881440.0,
"step": 4845
},
{
"epoch": 0.8277863116572794,
"grad_norm": 0.8342452775283754,
"learning_rate": 8.558075757096502e-06,
"loss": 0.5901,
"num_tokens": 233383237.0,
"step": 4850
},
{
"epoch": 0.8286396996074415,
"grad_norm": 0.7805241990962766,
"learning_rate": 8.523882012561792e-06,
"loss": 0.5532,
"num_tokens": 233853374.0,
"step": 4855
},
{
"epoch": 0.8294930875576036,
"grad_norm": 0.8964409321897953,
"learning_rate": 8.489839401420538e-06,
"loss": 0.5426,
"num_tokens": 234278092.0,
"step": 4860
},
{
"epoch": 0.8303464755077659,
"grad_norm": 0.8811351414159762,
"learning_rate": 8.455948194801706e-06,
"loss": 0.5938,
"num_tokens": 234751112.0,
"step": 4865
},
{
"epoch": 0.831199863457928,
"grad_norm": 0.7503250035269912,
"learning_rate": 8.422208662628415e-06,
"loss": 0.5571,
"num_tokens": 235269180.0,
"step": 4870
},
{
"epoch": 0.8320532514080902,
"grad_norm": 0.8600870648780747,
"learning_rate": 8.388621073615803e-06,
"loss": 0.6083,
"num_tokens": 235766057.0,
"step": 4875
},
{
"epoch": 0.8329066393582523,
"grad_norm": 0.7556334243138391,
"learning_rate": 8.355185695268858e-06,
"loss": 0.5586,
"num_tokens": 236263389.0,
"step": 4880
},
{
"epoch": 0.8337600273084144,
"grad_norm": 0.7869421912173387,
"learning_rate": 8.321902793880301e-06,
"loss": 0.5719,
"num_tokens": 236705262.0,
"step": 4885
},
{
"epoch": 0.8346134152585766,
"grad_norm": 0.7798357656578301,
"learning_rate": 8.28877263452848e-06,
"loss": 0.6194,
"num_tokens": 237259536.0,
"step": 4890
},
{
"epoch": 0.8354668032087387,
"grad_norm": 0.767779208940415,
"learning_rate": 8.255795481075228e-06,
"loss": 0.5648,
"num_tokens": 237769080.0,
"step": 4895
},
{
"epoch": 0.8363201911589009,
"grad_norm": 0.9854382778495104,
"learning_rate": 8.222971596163792e-06,
"loss": 0.5442,
"num_tokens": 238251727.0,
"step": 4900
},
{
"epoch": 0.837173579109063,
"grad_norm": 0.833319716288937,
"learning_rate": 8.19030124121671e-06,
"loss": 0.5461,
"num_tokens": 238648424.0,
"step": 4905
},
{
"epoch": 0.8380269670592251,
"grad_norm": 0.854597338972544,
"learning_rate": 8.157784676433764e-06,
"loss": 0.571,
"num_tokens": 239134618.0,
"step": 4910
},
{
"epoch": 0.8388803550093873,
"grad_norm": 0.8500312839651374,
"learning_rate": 8.125422160789878e-06,
"loss": 0.5711,
"num_tokens": 239603031.0,
"step": 4915
},
{
"epoch": 0.8397337429595494,
"grad_norm": 0.7630165552793707,
"learning_rate": 8.093213952033072e-06,
"loss": 0.5828,
"num_tokens": 240081011.0,
"step": 4920
},
{
"epoch": 0.8405871309097116,
"grad_norm": 0.7564955060386162,
"learning_rate": 8.061160306682406e-06,
"loss": 0.57,
"num_tokens": 240536075.0,
"step": 4925
},
{
"epoch": 0.8414405188598737,
"grad_norm": 0.8295745453389126,
"learning_rate": 8.029261480025922e-06,
"loss": 0.5609,
"num_tokens": 240964799.0,
"step": 4930
},
{
"epoch": 0.8422939068100358,
"grad_norm": 0.8572743861594461,
"learning_rate": 7.997517726118644e-06,
"loss": 0.5735,
"num_tokens": 241438662.0,
"step": 4935
},
{
"epoch": 0.843147294760198,
"grad_norm": 0.8689601209375987,
"learning_rate": 7.965929297780515e-06,
"loss": 0.5735,
"num_tokens": 241906415.0,
"step": 4940
},
{
"epoch": 0.8440006827103601,
"grad_norm": 0.8096196074976684,
"learning_rate": 7.934496446594417e-06,
"loss": 0.5712,
"num_tokens": 242373880.0,
"step": 4945
},
{
"epoch": 0.8448540706605223,
"grad_norm": 0.7535492611876398,
"learning_rate": 7.903219422904158e-06,
"loss": 0.5612,
"num_tokens": 242864038.0,
"step": 4950
},
{
"epoch": 0.8457074586106844,
"grad_norm": 0.7504541427432688,
"learning_rate": 7.87209847581245e-06,
"loss": 0.5956,
"num_tokens": 243431736.0,
"step": 4955
},
{
"epoch": 0.8465608465608465,
"grad_norm": 0.7428325861767835,
"learning_rate": 7.841133853178975e-06,
"loss": 0.5354,
"num_tokens": 243860094.0,
"step": 4960
},
{
"epoch": 0.8474142345110087,
"grad_norm": 0.7861488676367593,
"learning_rate": 7.810325801618365e-06,
"loss": 0.6067,
"num_tokens": 244395207.0,
"step": 4965
},
{
"epoch": 0.8482676224611708,
"grad_norm": 0.7669494987596239,
"learning_rate": 7.779674566498263e-06,
"loss": 0.5757,
"num_tokens": 244897324.0,
"step": 4970
},
{
"epoch": 0.849121010411333,
"grad_norm": 0.7909377762889123,
"learning_rate": 7.749180391937372e-06,
"loss": 0.5762,
"num_tokens": 245340722.0,
"step": 4975
},
{
"epoch": 0.8499743983614951,
"grad_norm": 0.7883324294633338,
"learning_rate": 7.718843520803487e-06,
"loss": 0.572,
"num_tokens": 245808193.0,
"step": 4980
},
{
"epoch": 0.8508277863116572,
"grad_norm": 0.7826348291990151,
"learning_rate": 7.688664194711592e-06,
"loss": 0.5989,
"num_tokens": 246277986.0,
"step": 4985
},
{
"epoch": 0.8516811742618194,
"grad_norm": 0.8490644704046821,
"learning_rate": 7.658642654021904e-06,
"loss": 0.5914,
"num_tokens": 246691850.0,
"step": 4990
},
{
"epoch": 0.8525345622119815,
"grad_norm": 1.0022405608345875,
"learning_rate": 7.628779137837981e-06,
"loss": 0.58,
"num_tokens": 247203770.0,
"step": 4995
},
{
"epoch": 0.8533879501621437,
"grad_norm": 0.8084244306668955,
"learning_rate": 7.5990738840048174e-06,
"loss": 0.5894,
"num_tokens": 247690866.0,
"step": 5000
},
{
"epoch": 0.8542413381123058,
"grad_norm": 0.8985497283703163,
"learning_rate": 7.569527129106931e-06,
"loss": 0.5861,
"num_tokens": 248161325.0,
"step": 5005
},
{
"epoch": 0.855094726062468,
"grad_norm": 0.7873859767195008,
"learning_rate": 7.540139108466496e-06,
"loss": 0.588,
"num_tokens": 248652400.0,
"step": 5010
},
{
"epoch": 0.8559481140126302,
"grad_norm": 0.819987556398134,
"learning_rate": 7.510910056141456e-06,
"loss": 0.5709,
"num_tokens": 249134729.0,
"step": 5015
},
{
"epoch": 0.8568015019627923,
"grad_norm": 0.7492864937367162,
"learning_rate": 7.481840204923681e-06,
"loss": 0.585,
"num_tokens": 249661501.0,
"step": 5020
},
{
"epoch": 0.8576548899129545,
"grad_norm": 0.8040792030511285,
"learning_rate": 7.452929786337096e-06,
"loss": 0.5965,
"num_tokens": 250217631.0,
"step": 5025
},
{
"epoch": 0.8585082778631166,
"grad_norm": 0.900081931997727,
"learning_rate": 7.424179030635831e-06,
"loss": 0.5641,
"num_tokens": 250690443.0,
"step": 5030
},
{
"epoch": 0.8593616658132787,
"grad_norm": 0.8160666402757599,
"learning_rate": 7.395588166802412e-06,
"loss": 0.5955,
"num_tokens": 251173634.0,
"step": 5035
},
{
"epoch": 0.8602150537634409,
"grad_norm": 0.8391441098982425,
"learning_rate": 7.367157422545904e-06,
"loss": 0.5855,
"num_tokens": 251638405.0,
"step": 5040
},
{
"epoch": 0.861068441713603,
"grad_norm": 0.7940539632042635,
"learning_rate": 7.338887024300134e-06,
"loss": 0.5572,
"num_tokens": 252139392.0,
"step": 5045
},
{
"epoch": 0.8619218296637652,
"grad_norm": 0.7894583290273409,
"learning_rate": 7.310777197221854e-06,
"loss": 0.5523,
"num_tokens": 252543885.0,
"step": 5050
},
{
"epoch": 0.8627752176139273,
"grad_norm": 0.7911532831982829,
"learning_rate": 7.282828165188976e-06,
"loss": 0.5868,
"num_tokens": 253005760.0,
"step": 5055
},
{
"epoch": 0.8636286055640894,
"grad_norm": 0.8585063316932868,
"learning_rate": 7.255040150798771e-06,
"loss": 0.5848,
"num_tokens": 253509550.0,
"step": 5060
},
{
"epoch": 0.8644819935142516,
"grad_norm": 0.7240412622759334,
"learning_rate": 7.227413375366089e-06,
"loss": 0.5845,
"num_tokens": 254008860.0,
"step": 5065
},
{
"epoch": 0.8653353814644137,
"grad_norm": 0.8250225040930351,
"learning_rate": 7.199948058921629e-06,
"loss": 0.6073,
"num_tokens": 254522809.0,
"step": 5070
},
{
"epoch": 0.8661887694145759,
"grad_norm": 0.9326895880461082,
"learning_rate": 7.1726444202101535e-06,
"loss": 0.6093,
"num_tokens": 255005160.0,
"step": 5075
},
{
"epoch": 0.867042157364738,
"grad_norm": 0.7994442628252905,
"learning_rate": 7.145502676688759e-06,
"loss": 0.5745,
"num_tokens": 255461423.0,
"step": 5080
},
{
"epoch": 0.8678955453149002,
"grad_norm": 0.9707923615366039,
"learning_rate": 7.1185230445251535e-06,
"loss": 0.5934,
"num_tokens": 255944452.0,
"step": 5085
},
{
"epoch": 0.8687489332650623,
"grad_norm": 0.796895516510252,
"learning_rate": 7.091705738595911e-06,
"loss": 0.5487,
"num_tokens": 256455794.0,
"step": 5090
},
{
"epoch": 0.8696023212152244,
"grad_norm": 0.7400404717908586,
"learning_rate": 7.065050972484788e-06,
"loss": 0.5577,
"num_tokens": 256935670.0,
"step": 5095
},
{
"epoch": 0.8704557091653866,
"grad_norm": 0.7988037904803611,
"learning_rate": 7.038558958481001e-06,
"loss": 0.5727,
"num_tokens": 257407700.0,
"step": 5100
},
{
"epoch": 0.8713090971155487,
"grad_norm": 0.9147730742138965,
"learning_rate": 7.01222990757754e-06,
"loss": 0.5934,
"num_tokens": 257838492.0,
"step": 5105
},
{
"epoch": 0.8721624850657109,
"grad_norm": 0.7135980779624871,
"learning_rate": 6.986064029469508e-06,
"loss": 0.5336,
"num_tokens": 258365498.0,
"step": 5110
},
{
"epoch": 0.873015873015873,
"grad_norm": 0.7544369621525995,
"learning_rate": 6.9600615325524115e-06,
"loss": 0.559,
"num_tokens": 258844796.0,
"step": 5115
},
{
"epoch": 0.8738692609660351,
"grad_norm": 0.7444148640752304,
"learning_rate": 6.934222623920547e-06,
"loss": 0.5602,
"num_tokens": 259349061.0,
"step": 5120
},
{
"epoch": 0.8747226489161973,
"grad_norm": 0.9049523089278464,
"learning_rate": 6.908547509365305e-06,
"loss": 0.5395,
"num_tokens": 259756016.0,
"step": 5125
},
{
"epoch": 0.8755760368663594,
"grad_norm": 0.7527681089262206,
"learning_rate": 6.883036393373579e-06,
"loss": 0.5912,
"num_tokens": 260313344.0,
"step": 5130
},
{
"epoch": 0.8764294248165216,
"grad_norm": 0.9028818740280382,
"learning_rate": 6.857689479126099e-06,
"loss": 0.5528,
"num_tokens": 260737923.0,
"step": 5135
},
{
"epoch": 0.8772828127666837,
"grad_norm": 0.793355764631848,
"learning_rate": 6.8325069684958235e-06,
"loss": 0.5845,
"num_tokens": 261263489.0,
"step": 5140
},
{
"epoch": 0.8781362007168458,
"grad_norm": 0.785894856367147,
"learning_rate": 6.8074890620463394e-06,
"loss": 0.5578,
"num_tokens": 261788584.0,
"step": 5145
},
{
"epoch": 0.878989588667008,
"grad_norm": 0.8143390310414678,
"learning_rate": 6.782635959030259e-06,
"loss": 0.5688,
"num_tokens": 262287010.0,
"step": 5150
},
{
"epoch": 0.8798429766171701,
"grad_norm": 0.865141361269142,
"learning_rate": 6.7579478573876366e-06,
"loss": 0.5842,
"num_tokens": 262746009.0,
"step": 5155
},
{
"epoch": 0.8806963645673324,
"grad_norm": 0.9092110398635419,
"learning_rate": 6.733424953744391e-06,
"loss": 0.5753,
"num_tokens": 263182542.0,
"step": 5160
},
{
"epoch": 0.8815497525174945,
"grad_norm": 0.821394284330915,
"learning_rate": 6.709067443410733e-06,
"loss": 0.5951,
"num_tokens": 263729263.0,
"step": 5165
},
{
"epoch": 0.8824031404676566,
"grad_norm": 0.7488745968684951,
"learning_rate": 6.684875520379618e-06,
"loss": 0.5433,
"num_tokens": 264227619.0,
"step": 5170
},
{
"epoch": 0.8832565284178188,
"grad_norm": 0.976772151141878,
"learning_rate": 6.66084937732519e-06,
"loss": 0.6446,
"num_tokens": 264718377.0,
"step": 5175
},
{
"epoch": 0.8841099163679809,
"grad_norm": 0.7642964720283203,
"learning_rate": 6.636989205601276e-06,
"loss": 0.5897,
"num_tokens": 265270188.0,
"step": 5180
},
{
"epoch": 0.8849633043181431,
"grad_norm": 0.8028339592698639,
"learning_rate": 6.613295195239816e-06,
"loss": 0.561,
"num_tokens": 265753973.0,
"step": 5185
},
{
"epoch": 0.8858166922683052,
"grad_norm": 0.7776641112247487,
"learning_rate": 6.589767534949384e-06,
"loss": 0.5511,
"num_tokens": 266247179.0,
"step": 5190
},
{
"epoch": 0.8866700802184673,
"grad_norm": 0.7276998937386446,
"learning_rate": 6.5664064121136865e-06,
"loss": 0.5465,
"num_tokens": 266736076.0,
"step": 5195
},
{
"epoch": 0.8875234681686295,
"grad_norm": 0.8212452792937524,
"learning_rate": 6.543212012790038e-06,
"loss": 0.5752,
"num_tokens": 267195187.0,
"step": 5200
},
{
"epoch": 0.8883768561187916,
"grad_norm": 0.7939352940851623,
"learning_rate": 6.520184521707923e-06,
"loss": 0.5677,
"num_tokens": 267663496.0,
"step": 5205
},
{
"epoch": 0.8892302440689538,
"grad_norm": 0.9467450707219625,
"learning_rate": 6.497324122267478e-06,
"loss": 0.6117,
"num_tokens": 268161021.0,
"step": 5210
},
{
"epoch": 0.8900836320191159,
"grad_norm": 0.8245860155572584,
"learning_rate": 6.474630996538078e-06,
"loss": 0.5591,
"num_tokens": 268673121.0,
"step": 5215
},
{
"epoch": 0.890937019969278,
"grad_norm": 0.7590094334951016,
"learning_rate": 6.452105325256852e-06,
"loss": 0.568,
"num_tokens": 269179279.0,
"step": 5220
},
{
"epoch": 0.8917904079194402,
"grad_norm": 0.8217521579318463,
"learning_rate": 6.429747287827254e-06,
"loss": 0.5807,
"num_tokens": 269693196.0,
"step": 5225
},
{
"epoch": 0.8926437958696023,
"grad_norm": 0.8623514216744014,
"learning_rate": 6.407557062317632e-06,
"loss": 0.5582,
"num_tokens": 270166668.0,
"step": 5230
},
{
"epoch": 0.8934971838197645,
"grad_norm": 0.8533922181449645,
"learning_rate": 6.38553482545982e-06,
"loss": 0.5716,
"num_tokens": 270670889.0,
"step": 5235
},
{
"epoch": 0.8943505717699266,
"grad_norm": 0.7713411612044356,
"learning_rate": 6.36368075264772e-06,
"loss": 0.5555,
"num_tokens": 271154055.0,
"step": 5240
},
{
"epoch": 0.8952039597200887,
"grad_norm": 0.7525851117026119,
"learning_rate": 6.341995017935916e-06,
"loss": 0.5926,
"num_tokens": 271718782.0,
"step": 5245
},
{
"epoch": 0.8960573476702509,
"grad_norm": 0.8380438697228516,
"learning_rate": 6.320477794038258e-06,
"loss": 0.5639,
"num_tokens": 272179817.0,
"step": 5250
},
{
"epoch": 0.896910735620413,
"grad_norm": 0.7363618546018679,
"learning_rate": 6.299129252326541e-06,
"loss": 0.6016,
"num_tokens": 272681025.0,
"step": 5255
},
{
"epoch": 0.8977641235705752,
"grad_norm": 0.964225303945823,
"learning_rate": 6.277949562829075e-06,
"loss": 0.5891,
"num_tokens": 273165716.0,
"step": 5260
},
{
"epoch": 0.8986175115207373,
"grad_norm": 0.806927745635919,
"learning_rate": 6.256938894229389e-06,
"loss": 0.5705,
"num_tokens": 273715683.0,
"step": 5265
},
{
"epoch": 0.8994708994708994,
"grad_norm": 0.7821754782396905,
"learning_rate": 6.236097413864841e-06,
"loss": 0.5697,
"num_tokens": 274153339.0,
"step": 5270
},
{
"epoch": 0.9003242874210616,
"grad_norm": 0.9491365135771385,
"learning_rate": 6.215425287725328e-06,
"loss": 0.5629,
"num_tokens": 274622449.0,
"step": 5275
},
{
"epoch": 0.9011776753712237,
"grad_norm": 0.8490653535475988,
"learning_rate": 6.194922680451922e-06,
"loss": 0.5641,
"num_tokens": 275099879.0,
"step": 5280
},
{
"epoch": 0.9020310633213859,
"grad_norm": 0.7060635522815266,
"learning_rate": 6.17458975533559e-06,
"loss": 0.5723,
"num_tokens": 275615026.0,
"step": 5285
},
{
"epoch": 0.902884451271548,
"grad_norm": 0.8929218635182984,
"learning_rate": 6.1544266743158805e-06,
"loss": 0.5758,
"num_tokens": 276105435.0,
"step": 5290
},
{
"epoch": 0.9037378392217101,
"grad_norm": 0.8900936194728736,
"learning_rate": 6.134433597979634e-06,
"loss": 0.5814,
"num_tokens": 276567753.0,
"step": 5295
},
{
"epoch": 0.9045912271718723,
"grad_norm": 0.7450155592638278,
"learning_rate": 6.114610685559708e-06,
"loss": 0.5492,
"num_tokens": 277067496.0,
"step": 5300
},
{
"epoch": 0.9054446151220344,
"grad_norm": 0.822036818022831,
"learning_rate": 6.09495809493371e-06,
"loss": 0.5945,
"num_tokens": 277531967.0,
"step": 5305
},
{
"epoch": 0.9062980030721967,
"grad_norm": 0.7866502345790104,
"learning_rate": 6.0754759826227225e-06,
"loss": 0.5558,
"num_tokens": 278023533.0,
"step": 5310
},
{
"epoch": 0.9071513910223588,
"grad_norm": 0.8423352400084989,
"learning_rate": 6.056164503790092e-06,
"loss": 0.5801,
"num_tokens": 278474312.0,
"step": 5315
},
{
"epoch": 0.9080047789725209,
"grad_norm": 0.9494363071010109,
"learning_rate": 6.0370238122401495e-06,
"loss": 0.5597,
"num_tokens": 278896745.0,
"step": 5320
},
{
"epoch": 0.9088581669226831,
"grad_norm": 0.8282491259672817,
"learning_rate": 6.01805406041702e-06,
"loss": 0.5626,
"num_tokens": 279349709.0,
"step": 5325
},
{
"epoch": 0.9097115548728452,
"grad_norm": 0.8108165719871948,
"learning_rate": 5.999255399403401e-06,
"loss": 0.5843,
"num_tokens": 279854864.0,
"step": 5330
},
{
"epoch": 0.9105649428230074,
"grad_norm": 1.368074602666708,
"learning_rate": 5.980627978919339e-06,
"loss": 0.573,
"num_tokens": 280321831.0,
"step": 5335
},
{
"epoch": 0.9114183307731695,
"grad_norm": 0.865994558880661,
"learning_rate": 5.962171947321067e-06,
"loss": 0.5722,
"num_tokens": 280833419.0,
"step": 5340
},
{
"epoch": 0.9122717187233317,
"grad_norm": 0.8067476350501013,
"learning_rate": 5.943887451599798e-06,
"loss": 0.5628,
"num_tokens": 281281758.0,
"step": 5345
},
{
"epoch": 0.9131251066734938,
"grad_norm": 0.7854491662288046,
"learning_rate": 5.925774637380573e-06,
"loss": 0.5867,
"num_tokens": 281796725.0,
"step": 5350
},
{
"epoch": 0.9139784946236559,
"grad_norm": 0.7889550827339223,
"learning_rate": 5.9078336489210895e-06,
"loss": 0.5945,
"num_tokens": 282294087.0,
"step": 5355
},
{
"epoch": 0.9148318825738181,
"grad_norm": 0.8663650736244458,
"learning_rate": 5.890064629110552e-06,
"loss": 0.6051,
"num_tokens": 282763597.0,
"step": 5360
},
{
"epoch": 0.9156852705239802,
"grad_norm": 0.8701454268029402,
"learning_rate": 5.8724677194685435e-06,
"loss": 0.5515,
"num_tokens": 283196267.0,
"step": 5365
},
{
"epoch": 0.9165386584741424,
"grad_norm": 0.7957477705163372,
"learning_rate": 5.855043060143887e-06,
"loss": 0.5779,
"num_tokens": 283679766.0,
"step": 5370
},
{
"epoch": 0.9173920464243045,
"grad_norm": 0.7978470026361059,
"learning_rate": 5.83779078991354e-06,
"loss": 0.5329,
"num_tokens": 284144403.0,
"step": 5375
},
{
"epoch": 0.9182454343744666,
"grad_norm": 0.8716938721156829,
"learning_rate": 5.820711046181488e-06,
"loss": 0.5652,
"num_tokens": 284612089.0,
"step": 5380
},
{
"epoch": 0.9190988223246288,
"grad_norm": 0.7905417532595569,
"learning_rate": 5.803803964977634e-06,
"loss": 0.5381,
"num_tokens": 285094795.0,
"step": 5385
},
{
"epoch": 0.9199522102747909,
"grad_norm": 0.8101830629318585,
"learning_rate": 5.7870696809567425e-06,
"loss": 0.5626,
"num_tokens": 285541742.0,
"step": 5390
},
{
"epoch": 0.9208055982249531,
"grad_norm": 0.8322898676886099,
"learning_rate": 5.770508327397339e-06,
"loss": 0.587,
"num_tokens": 286021840.0,
"step": 5395
},
{
"epoch": 0.9216589861751152,
"grad_norm": 0.8824032296667493,
"learning_rate": 5.754120036200669e-06,
"loss": 0.6291,
"num_tokens": 286505617.0,
"step": 5400
},
{
"epoch": 0.9225123741252773,
"grad_norm": 0.7998666514749609,
"learning_rate": 5.7379049378896406e-06,
"loss": 0.548,
"num_tokens": 286948538.0,
"step": 5405
},
{
"epoch": 0.9233657620754395,
"grad_norm": 0.9057331244599551,
"learning_rate": 5.721863161607775e-06,
"loss": 0.5991,
"num_tokens": 287462501.0,
"step": 5410
},
{
"epoch": 0.9242191500256016,
"grad_norm": 0.8123865828941764,
"learning_rate": 5.705994835118203e-06,
"loss": 0.607,
"num_tokens": 287916310.0,
"step": 5415
},
{
"epoch": 0.9250725379757638,
"grad_norm": 0.8199261801620987,
"learning_rate": 5.6903000848026165e-06,
"loss": 0.5809,
"num_tokens": 288427512.0,
"step": 5420
},
{
"epoch": 0.9259259259259259,
"grad_norm": 0.7633767277902096,
"learning_rate": 5.674779035660291e-06,
"loss": 0.5555,
"num_tokens": 288889633.0,
"step": 5425
},
{
"epoch": 0.926779313876088,
"grad_norm": 0.7638580491242525,
"learning_rate": 5.659431811307065e-06,
"loss": 0.5673,
"num_tokens": 289335039.0,
"step": 5430
},
{
"epoch": 0.9276327018262502,
"grad_norm": 0.7529647509629525,
"learning_rate": 5.644258533974374e-06,
"loss": 0.5793,
"num_tokens": 289786616.0,
"step": 5435
},
{
"epoch": 0.9284860897764123,
"grad_norm": 0.836400859683071,
"learning_rate": 5.629259324508267e-06,
"loss": 0.5867,
"num_tokens": 290255858.0,
"step": 5440
},
{
"epoch": 0.9293394777265745,
"grad_norm": 0.8755510963029428,
"learning_rate": 5.614434302368449e-06,
"loss": 0.5546,
"num_tokens": 290698740.0,
"step": 5445
},
{
"epoch": 0.9301928656767366,
"grad_norm": 0.8332440496913869,
"learning_rate": 5.599783585627322e-06,
"loss": 0.5484,
"num_tokens": 291138185.0,
"step": 5450
},
{
"epoch": 0.9310462536268987,
"grad_norm": 0.8426341821246235,
"learning_rate": 5.585307290969054e-06,
"loss": 0.5719,
"num_tokens": 291643871.0,
"step": 5455
},
{
"epoch": 0.931899641577061,
"grad_norm": 0.8678942218248095,
"learning_rate": 5.571005533688649e-06,
"loss": 0.5553,
"num_tokens": 292113768.0,
"step": 5460
},
{
"epoch": 0.932753029527223,
"grad_norm": 0.7594279684222428,
"learning_rate": 5.556878427691023e-06,
"loss": 0.5625,
"num_tokens": 292583353.0,
"step": 5465
},
{
"epoch": 0.9336064174773853,
"grad_norm": 0.7842725351760226,
"learning_rate": 5.542926085490093e-06,
"loss": 0.5387,
"num_tokens": 293080307.0,
"step": 5470
},
{
"epoch": 0.9344598054275474,
"grad_norm": 0.826007070503848,
"learning_rate": 5.529148618207897e-06,
"loss": 0.5479,
"num_tokens": 293565490.0,
"step": 5475
},
{
"epoch": 0.9353131933777095,
"grad_norm": 0.8597332041464362,
"learning_rate": 5.515546135573695e-06,
"loss": 0.6213,
"num_tokens": 294083350.0,
"step": 5480
},
{
"epoch": 0.9361665813278717,
"grad_norm": 0.8219571384073128,
"learning_rate": 5.5021187459230964e-06,
"loss": 0.5651,
"num_tokens": 294552165.0,
"step": 5485
},
{
"epoch": 0.9370199692780338,
"grad_norm": 0.8559937062906311,
"learning_rate": 5.4888665561972065e-06,
"loss": 0.5765,
"num_tokens": 294991459.0,
"step": 5490
},
{
"epoch": 0.937873357228196,
"grad_norm": 0.7579381758259627,
"learning_rate": 5.475789671941761e-06,
"loss": 0.6216,
"num_tokens": 295566010.0,
"step": 5495
},
{
"epoch": 0.9387267451783581,
"grad_norm": 0.7621531478864432,
"learning_rate": 5.462888197306301e-06,
"loss": 0.5396,
"num_tokens": 296035873.0,
"step": 5500
},
{
"epoch": 0.9395801331285202,
"grad_norm": 0.8260330228111058,
"learning_rate": 5.450162235043325e-06,
"loss": 0.5954,
"num_tokens": 296548316.0,
"step": 5505
},
{
"epoch": 0.9404335210786824,
"grad_norm": 0.7709410941202116,
"learning_rate": 5.43761188650749e-06,
"loss": 0.5849,
"num_tokens": 297048138.0,
"step": 5510
},
{
"epoch": 0.9412869090288445,
"grad_norm": 0.7507662055546271,
"learning_rate": 5.425237251654792e-06,
"loss": 0.5376,
"num_tokens": 297516443.0,
"step": 5515
},
{
"epoch": 0.9421402969790067,
"grad_norm": 0.8591780584259089,
"learning_rate": 5.41303842904177e-06,
"loss": 0.6037,
"num_tokens": 297956377.0,
"step": 5520
},
{
"epoch": 0.9429936849291688,
"grad_norm": 0.7529771128056942,
"learning_rate": 5.401015515824727e-06,
"loss": 0.5635,
"num_tokens": 298486321.0,
"step": 5525
},
{
"epoch": 0.9438470728793309,
"grad_norm": 0.8051753782069618,
"learning_rate": 5.389168607758956e-06,
"loss": 0.586,
"num_tokens": 299001151.0,
"step": 5530
},
{
"epoch": 0.9447004608294931,
"grad_norm": 0.8410405788431461,
"learning_rate": 5.377497799197965e-06,
"loss": 0.5875,
"num_tokens": 299468717.0,
"step": 5535
},
{
"epoch": 0.9455538487796552,
"grad_norm": 0.8088038460686016,
"learning_rate": 5.366003183092747e-06,
"loss": 0.5779,
"num_tokens": 299913523.0,
"step": 5540
},
{
"epoch": 0.9464072367298174,
"grad_norm": 0.9065078781042983,
"learning_rate": 5.354684850991019e-06,
"loss": 0.5678,
"num_tokens": 300388047.0,
"step": 5545
},
{
"epoch": 0.9472606246799795,
"grad_norm": 0.915648202403995,
"learning_rate": 5.343542893036508e-06,
"loss": 0.5536,
"num_tokens": 300834485.0,
"step": 5550
},
{
"epoch": 0.9481140126301416,
"grad_norm": 0.8343832418261607,
"learning_rate": 5.332577397968218e-06,
"loss": 0.5899,
"num_tokens": 301298744.0,
"step": 5555
},
{
"epoch": 0.9489674005803038,
"grad_norm": 0.7289519169355831,
"learning_rate": 5.321788453119741e-06,
"loss": 0.5762,
"num_tokens": 301755872.0,
"step": 5560
},
{
"epoch": 0.9498207885304659,
"grad_norm": 0.8430411206851547,
"learning_rate": 5.3111761444185486e-06,
"loss": 0.5741,
"num_tokens": 302223432.0,
"step": 5565
},
{
"epoch": 0.9506741764806281,
"grad_norm": 0.7548208551690323,
"learning_rate": 5.300740556385312e-06,
"loss": 0.6051,
"num_tokens": 302734843.0,
"step": 5570
},
{
"epoch": 0.9515275644307902,
"grad_norm": 0.9660243301325951,
"learning_rate": 5.29048177213323e-06,
"loss": 0.6151,
"num_tokens": 303156803.0,
"step": 5575
},
{
"epoch": 0.9523809523809523,
"grad_norm": 0.7993982834474233,
"learning_rate": 5.280399873367359e-06,
"loss": 0.5612,
"num_tokens": 303649911.0,
"step": 5580
},
{
"epoch": 0.9532343403311145,
"grad_norm": 0.9320720781344985,
"learning_rate": 5.270494940383981e-06,
"loss": 0.5702,
"num_tokens": 304114480.0,
"step": 5585
},
{
"epoch": 0.9540877282812766,
"grad_norm": 0.8829913819415941,
"learning_rate": 5.260767052069932e-06,
"loss": 0.5835,
"num_tokens": 304603280.0,
"step": 5590
},
{
"epoch": 0.9549411162314388,
"grad_norm": 0.8333512888706996,
"learning_rate": 5.251216285902014e-06,
"loss": 0.5597,
"num_tokens": 305082191.0,
"step": 5595
},
{
"epoch": 0.955794504181601,
"grad_norm": 0.7482070040515338,
"learning_rate": 5.241842717946349e-06,
"loss": 0.5598,
"num_tokens": 305542730.0,
"step": 5600
},
{
"epoch": 0.956647892131763,
"grad_norm": 0.8316101679517978,
"learning_rate": 5.232646422857779e-06,
"loss": 0.5351,
"num_tokens": 306042529.0,
"step": 5605
},
{
"epoch": 0.9575012800819253,
"grad_norm": 0.8066685285673841,
"learning_rate": 5.2236274738792755e-06,
"loss": 0.56,
"num_tokens": 306498168.0,
"step": 5610
},
{
"epoch": 0.9583546680320874,
"grad_norm": 0.8308438231158456,
"learning_rate": 5.214785942841354e-06,
"loss": 0.5731,
"num_tokens": 307010799.0,
"step": 5615
},
{
"epoch": 0.9592080559822496,
"grad_norm": 0.7360643184841489,
"learning_rate": 5.206121900161511e-06,
"loss": 0.5608,
"num_tokens": 307537300.0,
"step": 5620
},
{
"epoch": 0.9600614439324117,
"grad_norm": 0.764899408471559,
"learning_rate": 5.197635414843641e-06,
"loss": 0.5587,
"num_tokens": 308038692.0,
"step": 5625
},
{
"epoch": 0.9609148318825739,
"grad_norm": 0.771106174868778,
"learning_rate": 5.189326554477508e-06,
"loss": 0.6011,
"num_tokens": 308532583.0,
"step": 5630
},
{
"epoch": 0.961768219832736,
"grad_norm": 0.8673796675256896,
"learning_rate": 5.181195385238204e-06,
"loss": 0.5583,
"num_tokens": 308972109.0,
"step": 5635
},
{
"epoch": 0.9626216077828981,
"grad_norm": 0.8023969340172891,
"learning_rate": 5.173241971885606e-06,
"loss": 0.5415,
"num_tokens": 309426949.0,
"step": 5640
},
{
"epoch": 0.9634749957330603,
"grad_norm": 0.7207659472227531,
"learning_rate": 5.1654663777638825e-06,
"loss": 0.5689,
"num_tokens": 309918947.0,
"step": 5645
},
{
"epoch": 0.9643283836832224,
"grad_norm": 0.9340552949332678,
"learning_rate": 5.15786866480098e-06,
"loss": 0.5728,
"num_tokens": 310388816.0,
"step": 5650
},
{
"epoch": 0.9651817716333846,
"grad_norm": 0.8775884081140152,
"learning_rate": 5.150448893508114e-06,
"loss": 0.5577,
"num_tokens": 310788976.0,
"step": 5655
},
{
"epoch": 0.9660351595835467,
"grad_norm": 0.862770324032615,
"learning_rate": 5.143207122979319e-06,
"loss": 0.5742,
"num_tokens": 311244321.0,
"step": 5660
},
{
"epoch": 0.9668885475337088,
"grad_norm": 0.8645037498593092,
"learning_rate": 5.136143410890947e-06,
"loss": 0.6224,
"num_tokens": 311720200.0,
"step": 5665
},
{
"epoch": 0.967741935483871,
"grad_norm": 0.7700789746051657,
"learning_rate": 5.129257813501227e-06,
"loss": 0.5511,
"num_tokens": 312179788.0,
"step": 5670
},
{
"epoch": 0.9685953234340331,
"grad_norm": 1.0031075094614286,
"learning_rate": 5.122550385649811e-06,
"loss": 0.5569,
"num_tokens": 312622136.0,
"step": 5675
},
{
"epoch": 0.9694487113841953,
"grad_norm": 0.8550909317145511,
"learning_rate": 5.116021180757339e-06,
"loss": 0.5757,
"num_tokens": 313113611.0,
"step": 5680
},
{
"epoch": 0.9703020993343574,
"grad_norm": 0.8269524914604716,
"learning_rate": 5.1096702508250065e-06,
"loss": 0.549,
"num_tokens": 313614384.0,
"step": 5685
},
{
"epoch": 0.9711554872845195,
"grad_norm": 0.8723888693586144,
"learning_rate": 5.103497646434162e-06,
"loss": 0.6138,
"num_tokens": 314088892.0,
"step": 5690
},
{
"epoch": 0.9720088752346817,
"grad_norm": 1.005485841547129,
"learning_rate": 5.0975034167458985e-06,
"loss": 0.6324,
"num_tokens": 314588739.0,
"step": 5695
},
{
"epoch": 0.9728622631848438,
"grad_norm": 0.9615462609522011,
"learning_rate": 5.0916876095006525e-06,
"loss": 0.5696,
"num_tokens": 315051225.0,
"step": 5700
},
{
"epoch": 0.973715651135006,
"grad_norm": 0.9044747223815061,
"learning_rate": 5.086050271017843e-06,
"loss": 0.5698,
"num_tokens": 315515610.0,
"step": 5705
},
{
"epoch": 0.9745690390851681,
"grad_norm": 0.9352208596040631,
"learning_rate": 5.080591446195489e-06,
"loss": 0.5828,
"num_tokens": 315957594.0,
"step": 5710
},
{
"epoch": 0.9754224270353302,
"grad_norm": 0.7315299597342139,
"learning_rate": 5.075311178509852e-06,
"loss": 0.5462,
"num_tokens": 316377461.0,
"step": 5715
},
{
"epoch": 0.9762758149854924,
"grad_norm": 0.9452274514037523,
"learning_rate": 5.070209510015099e-06,
"loss": 0.6164,
"num_tokens": 316867560.0,
"step": 5720
},
{
"epoch": 0.9771292029356545,
"grad_norm": 0.8371279249883385,
"learning_rate": 5.065286481342953e-06,
"loss": 0.5534,
"num_tokens": 317298878.0,
"step": 5725
},
{
"epoch": 0.9779825908858167,
"grad_norm": 0.8014870954247736,
"learning_rate": 5.060542131702389e-06,
"loss": 0.5693,
"num_tokens": 317820296.0,
"step": 5730
},
{
"epoch": 0.9788359788359788,
"grad_norm": 0.909292736397671,
"learning_rate": 5.055976498879303e-06,
"loss": 0.5763,
"num_tokens": 318337653.0,
"step": 5735
},
{
"epoch": 0.9796893667861409,
"grad_norm": 0.8132523377203885,
"learning_rate": 5.05158961923622e-06,
"loss": 0.5716,
"num_tokens": 318780172.0,
"step": 5740
},
{
"epoch": 0.9805427547363031,
"grad_norm": 0.8716679753297544,
"learning_rate": 5.047381527712007e-06,
"loss": 0.5827,
"num_tokens": 319265582.0,
"step": 5745
},
{
"epoch": 0.9813961426864652,
"grad_norm": 0.9185765003687043,
"learning_rate": 5.0433522578215845e-06,
"loss": 0.585,
"num_tokens": 319710867.0,
"step": 5750
},
{
"epoch": 0.9822495306366275,
"grad_norm": 0.682401230441357,
"learning_rate": 5.039501841655672e-06,
"loss": 0.5582,
"num_tokens": 320197204.0,
"step": 5755
},
{
"epoch": 0.9831029185867896,
"grad_norm": 0.9090161531763877,
"learning_rate": 5.035830309880523e-06,
"loss": 0.5464,
"num_tokens": 320667324.0,
"step": 5760
},
{
"epoch": 0.9839563065369517,
"grad_norm": 0.902665666969385,
"learning_rate": 5.032337691737683e-06,
"loss": 0.5731,
"num_tokens": 321110869.0,
"step": 5765
},
{
"epoch": 0.9848096944871139,
"grad_norm": 0.8605159320287843,
"learning_rate": 5.0290240150437645e-06,
"loss": 0.5466,
"num_tokens": 321575498.0,
"step": 5770
},
{
"epoch": 0.985663082437276,
"grad_norm": 0.7295626658824659,
"learning_rate": 5.025889306190208e-06,
"loss": 0.5792,
"num_tokens": 322113584.0,
"step": 5775
},
{
"epoch": 0.9865164703874382,
"grad_norm": 0.7249267684266418,
"learning_rate": 5.0229335901430926e-06,
"loss": 0.5929,
"num_tokens": 322595635.0,
"step": 5780
},
{
"epoch": 0.9873698583376003,
"grad_norm": 0.7282049411658805,
"learning_rate": 5.020156890442924e-06,
"loss": 0.5452,
"num_tokens": 323066444.0,
"step": 5785
},
{
"epoch": 0.9882232462877624,
"grad_norm": 0.7731368697074033,
"learning_rate": 5.017559229204447e-06,
"loss": 0.5625,
"num_tokens": 323576730.0,
"step": 5790
},
{
"epoch": 0.9890766342379246,
"grad_norm": 0.8143826121511745,
"learning_rate": 5.015140627116475e-06,
"loss": 0.5574,
"num_tokens": 324026506.0,
"step": 5795
},
{
"epoch": 0.9899300221880867,
"grad_norm": 0.8762523071927865,
"learning_rate": 5.012901103441723e-06,
"loss": 0.5586,
"num_tokens": 324513537.0,
"step": 5800
},
{
"epoch": 0.9907834101382489,
"grad_norm": 0.9521303946801305,
"learning_rate": 5.010840676016651e-06,
"loss": 0.5537,
"num_tokens": 324953710.0,
"step": 5805
},
{
"epoch": 0.991636798088411,
"grad_norm": 0.8448341812920022,
"learning_rate": 5.008959361251331e-06,
"loss": 0.5736,
"num_tokens": 325415446.0,
"step": 5810
},
{
"epoch": 0.9924901860385731,
"grad_norm": 0.8913613215917732,
"learning_rate": 5.007257174129304e-06,
"loss": 0.5843,
"num_tokens": 325886164.0,
"step": 5815
},
{
"epoch": 0.9933435739887353,
"grad_norm": 0.8125616463451861,
"learning_rate": 5.00573412820747e-06,
"loss": 0.6087,
"num_tokens": 326345284.0,
"step": 5820
},
{
"epoch": 0.9941969619388974,
"grad_norm": 0.8354108870278607,
"learning_rate": 5.004390235615973e-06,
"loss": 0.5695,
"num_tokens": 326821080.0,
"step": 5825
},
{
"epoch": 0.9950503498890596,
"grad_norm": 0.874952932031631,
"learning_rate": 5.003225507058114e-06,
"loss": 0.5765,
"num_tokens": 327273974.0,
"step": 5830
},
{
"epoch": 0.9959037378392217,
"grad_norm": 0.8005811663402586,
"learning_rate": 5.002239951810257e-06,
"loss": 0.5482,
"num_tokens": 327795818.0,
"step": 5835
},
{
"epoch": 0.9967571257893838,
"grad_norm": 0.733021531503028,
"learning_rate": 5.001433577721758e-06,
"loss": 0.5602,
"num_tokens": 328290879.0,
"step": 5840
},
{
"epoch": 0.997610513739546,
"grad_norm": 0.906113294495654,
"learning_rate": 5.000806391214903e-06,
"loss": 0.5923,
"num_tokens": 328734615.0,
"step": 5845
},
{
"epoch": 0.9984639016897081,
"grad_norm": 0.7589826908596056,
"learning_rate": 5.000358397284853e-06,
"loss": 0.5371,
"num_tokens": 329195352.0,
"step": 5850
},
{
"epoch": 0.9993172896398703,
"grad_norm": 0.9083053676708959,
"learning_rate": 5.0000895994996155e-06,
"loss": 0.5777,
"num_tokens": 329702919.0,
"step": 5855
},
{
"epoch": 1.0,
"num_tokens": 330043597.0,
"step": 5859,
"total_flos": 566103768268800.0,
"train_loss": 0.349827840967499,
"train_runtime": 19944.0659,
"train_samples_per_second": 4.7,
"train_steps_per_second": 0.294
}
],
"logging_steps": 5,
"max_steps": 5859,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 566103768268800.0,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}