{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 762,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.019714144898965006,
"grad_norm": 2.673164129257202,
"learning_rate": 1.3114754098360657e-06,
"loss": 3.8588,
"step": 5
},
{
"epoch": 0.03942828979793001,
"grad_norm": 2.886237621307373,
"learning_rate": 2.9508196721311478e-06,
"loss": 3.8163,
"step": 10
},
{
"epoch": 0.05914243469689502,
"grad_norm": 2.4650022983551025,
"learning_rate": 4.59016393442623e-06,
"loss": 3.7443,
"step": 15
},
{
"epoch": 0.07885657959586002,
"grad_norm": 2.4647622108459473,
"learning_rate": 6.229508196721312e-06,
"loss": 3.7573,
"step": 20
},
{
"epoch": 0.09857072449482504,
"grad_norm": 2.5846645832061768,
"learning_rate": 7.868852459016394e-06,
"loss": 3.6941,
"step": 25
},
{
"epoch": 0.11828486939379004,
"grad_norm": 2.3263237476348877,
"learning_rate": 9.508196721311476e-06,
"loss": 3.5506,
"step": 30
},
{
"epoch": 0.13799901429275505,
"grad_norm": 2.181896448135376,
"learning_rate": 1.1147540983606558e-05,
"loss": 3.4485,
"step": 35
},
{
"epoch": 0.15771315919172005,
"grad_norm": 2.374994993209839,
"learning_rate": 1.2786885245901639e-05,
"loss": 3.269,
"step": 40
},
{
"epoch": 0.17742730409068508,
"grad_norm": 2.852327585220337,
"learning_rate": 1.4426229508196722e-05,
"loss": 3.1914,
"step": 45
},
{
"epoch": 0.19714144898965008,
"grad_norm": 2.855822801589966,
"learning_rate": 1.6065573770491805e-05,
"loss": 2.9482,
"step": 50
},
{
"epoch": 0.21685559388861508,
"grad_norm": 3.2098145484924316,
"learning_rate": 1.7704918032786887e-05,
"loss": 2.7581,
"step": 55
},
{
"epoch": 0.23656973878758009,
"grad_norm": 2.033719062805176,
"learning_rate": 1.934426229508197e-05,
"loss": 2.5988,
"step": 60
},
{
"epoch": 0.2562838836865451,
"grad_norm": 2.0503222942352295,
"learning_rate": 2.098360655737705e-05,
"loss": 2.6184,
"step": 65
},
{
"epoch": 0.2759980285855101,
"grad_norm": 1.858931303024292,
"learning_rate": 2.262295081967213e-05,
"loss": 2.5111,
"step": 70
},
{
"epoch": 0.2957121734844751,
"grad_norm": 1.6557573080062866,
"learning_rate": 2.4262295081967215e-05,
"loss": 2.4524,
"step": 75
},
{
"epoch": 0.3154263183834401,
"grad_norm": 1.706704020500183,
"learning_rate": 2.5901639344262297e-05,
"loss": 2.4317,
"step": 80
},
{
"epoch": 0.3351404632824051,
"grad_norm": 1.4746320247650146,
"learning_rate": 2.754098360655738e-05,
"loss": 2.3915,
"step": 85
},
{
"epoch": 0.35485460818137016,
"grad_norm": 1.7562377452850342,
"learning_rate": 2.9180327868852458e-05,
"loss": 2.3791,
"step": 90
},
{
"epoch": 0.37456875308033516,
"grad_norm": 1.7075871229171753,
"learning_rate": 3.0819672131147544e-05,
"loss": 2.3717,
"step": 95
},
{
"epoch": 0.39428289797930016,
"grad_norm": 1.5588061809539795,
"learning_rate": 3.245901639344263e-05,
"loss": 2.3924,
"step": 100
},
{
"epoch": 0.41399704287826516,
"grad_norm": 1.5635536909103394,
"learning_rate": 3.409836065573771e-05,
"loss": 2.3031,
"step": 105
},
{
"epoch": 0.43371118777723017,
"grad_norm": 1.7249213457107544,
"learning_rate": 3.5737704918032786e-05,
"loss": 2.3186,
"step": 110
},
{
"epoch": 0.45342533267619517,
"grad_norm": 1.868545651435852,
"learning_rate": 3.737704918032787e-05,
"loss": 2.1715,
"step": 115
},
{
"epoch": 0.47313947757516017,
"grad_norm": 1.9135463237762451,
"learning_rate": 3.901639344262295e-05,
"loss": 2.2488,
"step": 120
},
{
"epoch": 0.4928536224741252,
"grad_norm": 1.895492434501648,
"learning_rate": 4.0655737704918036e-05,
"loss": 2.2387,
"step": 125
},
{
"epoch": 0.5125677673730902,
"grad_norm": 1.767385721206665,
"learning_rate": 4.229508196721312e-05,
"loss": 2.2541,
"step": 130
},
{
"epoch": 0.5322819122720552,
"grad_norm": 2.0554378032684326,
"learning_rate": 4.3934426229508194e-05,
"loss": 2.1672,
"step": 135
},
{
"epoch": 0.5519960571710202,
"grad_norm": 2.3369956016540527,
"learning_rate": 4.557377049180328e-05,
"loss": 2.223,
"step": 140
},
{
"epoch": 0.5717102020699852,
"grad_norm": 2.0532915592193604,
"learning_rate": 4.7213114754098365e-05,
"loss": 2.1007,
"step": 145
},
{
"epoch": 0.5914243469689502,
"grad_norm": 2.2023046016693115,
"learning_rate": 4.885245901639344e-05,
"loss": 2.1027,
"step": 150
},
{
"epoch": 0.6111384918679152,
"grad_norm": 2.194356918334961,
"learning_rate": 5.049180327868853e-05,
"loss": 2.1188,
"step": 155
},
{
"epoch": 0.6308526367668802,
"grad_norm": 2.6036267280578613,
"learning_rate": 5.213114754098361e-05,
"loss": 1.9685,
"step": 160
},
{
"epoch": 0.6505667816658453,
"grad_norm": 2.6643617153167725,
"learning_rate": 5.3770491803278686e-05,
"loss": 2.0843,
"step": 165
},
{
"epoch": 0.6702809265648102,
"grad_norm": 2.3738605976104736,
"learning_rate": 5.540983606557377e-05,
"loss": 2.0489,
"step": 170
},
{
"epoch": 0.6899950714637753,
"grad_norm": 2.43137526512146,
"learning_rate": 5.704918032786886e-05,
"loss": 1.976,
"step": 175
},
{
"epoch": 0.7097092163627403,
"grad_norm": 3.0450685024261475,
"learning_rate": 5.868852459016394e-05,
"loss": 1.8117,
"step": 180
},
{
"epoch": 0.7294233612617053,
"grad_norm": 2.9214789867401123,
"learning_rate": 6.032786885245902e-05,
"loss": 1.8229,
"step": 185
},
{
"epoch": 0.7491375061606703,
"grad_norm": 2.7570457458496094,
"learning_rate": 6.19672131147541e-05,
"loss": 1.8662,
"step": 190
},
{
"epoch": 0.7688516510596353,
"grad_norm": 4.113077640533447,
"learning_rate": 6.360655737704918e-05,
"loss": 1.7928,
"step": 195
},
{
"epoch": 0.7885657959586003,
"grad_norm": 3.127991199493408,
"learning_rate": 6.524590163934427e-05,
"loss": 1.8187,
"step": 200
},
{
"epoch": 0.8082799408575653,
"grad_norm": 3.4384043216705322,
"learning_rate": 6.688524590163935e-05,
"loss": 1.7548,
"step": 205
},
{
"epoch": 0.8279940857565303,
"grad_norm": 3.2650253772735596,
"learning_rate": 6.852459016393443e-05,
"loss": 1.8635,
"step": 210
},
{
"epoch": 0.8477082306554953,
"grad_norm": 3.676208019256592,
"learning_rate": 7.016393442622952e-05,
"loss": 1.6462,
"step": 215
},
{
"epoch": 0.8674223755544603,
"grad_norm": 3.6363656520843506,
"learning_rate": 7.180327868852459e-05,
"loss": 1.6826,
"step": 220
},
{
"epoch": 0.8871365204534253,
"grad_norm": 3.487661123275757,
"learning_rate": 7.344262295081968e-05,
"loss": 1.7096,
"step": 225
},
{
"epoch": 0.9068506653523903,
"grad_norm": 4.129843235015869,
"learning_rate": 7.508196721311476e-05,
"loss": 1.6489,
"step": 230
},
{
"epoch": 0.9265648102513554,
"grad_norm": 3.7981042861938477,
"learning_rate": 7.672131147540984e-05,
"loss": 1.5388,
"step": 235
},
{
"epoch": 0.9462789551503203,
"grad_norm": 4.129542827606201,
"learning_rate": 7.836065573770493e-05,
"loss": 1.5883,
"step": 240
},
{
"epoch": 0.9659931000492854,
"grad_norm": 3.7202744483947754,
"learning_rate": 8e-05,
"loss": 1.5327,
"step": 245
},
{
"epoch": 0.9857072449482503,
"grad_norm": 5.224013805389404,
"learning_rate": 8.163934426229509e-05,
"loss": 1.5075,
"step": 250
},
{
"epoch": 1.003942828979793,
"grad_norm": 4.3526787757873535,
"learning_rate": 8.327868852459016e-05,
"loss": 1.4976,
"step": 255
},
{
"epoch": 1.023656973878758,
"grad_norm": 5.239654541015625,
"learning_rate": 8.491803278688524e-05,
"loss": 1.3648,
"step": 260
},
{
"epoch": 1.043371118777723,
"grad_norm": 4.757369518280029,
"learning_rate": 8.655737704918033e-05,
"loss": 1.3312,
"step": 265
},
{
"epoch": 1.063085263676688,
"grad_norm": 4.107004642486572,
"learning_rate": 8.819672131147541e-05,
"loss": 1.3081,
"step": 270
},
{
"epoch": 1.082799408575653,
"grad_norm": 3.8866994380950928,
"learning_rate": 8.98360655737705e-05,
"loss": 1.3473,
"step": 275
},
{
"epoch": 1.102513553474618,
"grad_norm": 4.225423812866211,
"learning_rate": 9.147540983606557e-05,
"loss": 1.3429,
"step": 280
},
{
"epoch": 1.122227698373583,
"grad_norm": 4.068089485168457,
"learning_rate": 9.311475409836066e-05,
"loss": 1.3851,
"step": 285
},
{
"epoch": 1.141941843272548,
"grad_norm": 4.4080986976623535,
"learning_rate": 9.475409836065574e-05,
"loss": 1.1531,
"step": 290
},
{
"epoch": 1.1616559881715132,
"grad_norm": 3.994210720062256,
"learning_rate": 9.639344262295082e-05,
"loss": 1.1968,
"step": 295
},
{
"epoch": 1.181370133070478,
"grad_norm": 4.020788669586182,
"learning_rate": 9.803278688524591e-05,
"loss": 1.1876,
"step": 300
},
{
"epoch": 1.201084277969443,
"grad_norm": 5.0161848068237305,
"learning_rate": 9.967213114754099e-05,
"loss": 1.135,
"step": 305
},
{
"epoch": 1.2207984228684081,
"grad_norm": 4.258621692657471,
"learning_rate": 9.999947530447293e-05,
"loss": 1.0355,
"step": 310
},
{
"epoch": 1.2405125677673732,
"grad_norm": 4.326329708099365,
"learning_rate": 9.99973437477677e-05,
"loss": 1.0635,
"step": 315
},
{
"epoch": 1.2602267126663382,
"grad_norm": 3.8772778511047363,
"learning_rate": 9.999357260626116e-05,
"loss": 1.1091,
"step": 320
},
{
"epoch": 1.279940857565303,
"grad_norm": 4.675949573516846,
"learning_rate": 9.998816200362199e-05,
"loss": 1.0763,
"step": 325
},
{
"epoch": 1.2996550024642681,
"grad_norm": 4.871826171875,
"learning_rate": 9.998111211728248e-05,
"loss": 1.016,
"step": 330
},
{
"epoch": 1.3193691473632332,
"grad_norm": 4.208927154541016,
"learning_rate": 9.99724231784326e-05,
"loss": 0.9334,
"step": 335
},
{
"epoch": 1.339083292262198,
"grad_norm": 3.859124183654785,
"learning_rate": 9.996209547201258e-05,
"loss": 0.9612,
"step": 340
},
{
"epoch": 1.358797437161163,
"grad_norm": 4.167286396026611,
"learning_rate": 9.99501293367034e-05,
"loss": 1.071,
"step": 345
},
{
"epoch": 1.3785115820601281,
"grad_norm": 4.229668617248535,
"learning_rate": 9.993652516491579e-05,
"loss": 0.9526,
"step": 350
},
{
"epoch": 1.3982257269590932,
"grad_norm": 6.326845645904541,
"learning_rate": 9.992128340277729e-05,
"loss": 0.8992,
"step": 355
},
{
"epoch": 1.4179398718580583,
"grad_norm": 4.119451522827148,
"learning_rate": 9.990440455011769e-05,
"loss": 0.9638,
"step": 360
},
{
"epoch": 1.437654016757023,
"grad_norm": 4.288192272186279,
"learning_rate": 9.988588916045263e-05,
"loss": 0.849,
"step": 365
},
{
"epoch": 1.4573681616559881,
"grad_norm": 3.985097885131836,
"learning_rate": 9.986573784096536e-05,
"loss": 0.8638,
"step": 370
},
{
"epoch": 1.4770823065549532,
"grad_norm": 3.398552894592285,
"learning_rate": 9.984395125248695e-05,
"loss": 0.8869,
"step": 375
},
{
"epoch": 1.4967964514539183,
"grad_norm": 5.5323486328125,
"learning_rate": 9.982053010947455e-05,
"loss": 0.8194,
"step": 380
},
{
"epoch": 1.5165105963528833,
"grad_norm": 4.199021816253662,
"learning_rate": 9.979547517998796e-05,
"loss": 0.7118,
"step": 385
},
{
"epoch": 1.5362247412518482,
"grad_norm": 4.152290344238281,
"learning_rate": 9.976878728566443e-05,
"loss": 0.8509,
"step": 390
},
{
"epoch": 1.5559388861508132,
"grad_norm": 3.820864200592041,
"learning_rate": 9.974046730169183e-05,
"loss": 0.768,
"step": 395
},
{
"epoch": 1.5756530310497783,
"grad_norm": 3.8033151626586914,
"learning_rate": 9.971051615677974e-05,
"loss": 0.7861,
"step": 400
},
{
"epoch": 1.595367175948743,
"grad_norm": 4.548523902893066,
"learning_rate": 9.967893483312922e-05,
"loss": 0.8347,
"step": 405
},
{
"epoch": 1.6150813208477084,
"grad_norm": 3.6572163105010986,
"learning_rate": 9.964572436640045e-05,
"loss": 0.7201,
"step": 410
},
{
"epoch": 1.6347954657466732,
"grad_norm": 3.980424404144287,
"learning_rate": 9.961088584567882e-05,
"loss": 0.7655,
"step": 415
},
{
"epoch": 1.6545096106456383,
"grad_norm": 3.2318334579467773,
"learning_rate": 9.95744204134392e-05,
"loss": 0.8285,
"step": 420
},
{
"epoch": 1.6742237555446033,
"grad_norm": 4.426093578338623,
"learning_rate": 9.953632926550847e-05,
"loss": 0.6971,
"step": 425
},
{
"epoch": 1.6939379004435682,
"grad_norm": 3.0529074668884277,
"learning_rate": 9.949661365102637e-05,
"loss": 0.7098,
"step": 430
},
{
"epoch": 1.7136520453425332,
"grad_norm": 3.948803424835205,
"learning_rate": 9.945527487240442e-05,
"loss": 0.7815,
"step": 435
},
{
"epoch": 1.7333661902414983,
"grad_norm": 3.2549209594726562,
"learning_rate": 9.941231428528332e-05,
"loss": 0.7385,
"step": 440
},
{
"epoch": 1.7530803351404631,
"grad_norm": 3.8000028133392334,
"learning_rate": 9.93677332984884e-05,
"loss": 0.7357,
"step": 445
},
{
"epoch": 1.7727944800394284,
"grad_norm": 3.5841259956359863,
"learning_rate": 9.932153337398354e-05,
"loss": 0.6641,
"step": 450
},
{
"epoch": 1.7925086249383932,
"grad_norm": 4.315596580505371,
"learning_rate": 9.927371602682305e-05,
"loss": 0.6193,
"step": 455
},
{
"epoch": 1.8122227698373583,
"grad_norm": 3.2834675312042236,
"learning_rate": 9.92242828251022e-05,
"loss": 0.5768,
"step": 460
},
{
"epoch": 1.8319369147363234,
"grad_norm": 3.127601385116577,
"learning_rate": 9.917323538990561e-05,
"loss": 0.5912,
"step": 465
},
{
"epoch": 1.8516510596352882,
"grad_norm": 3.2609682083129883,
"learning_rate": 9.912057539525419e-05,
"loss": 0.6093,
"step": 470
},
{
"epoch": 1.8713652045342535,
"grad_norm": 3.4914708137512207,
"learning_rate": 9.906630456805024e-05,
"loss": 0.5924,
"step": 475
},
{
"epoch": 1.8910793494332183,
"grad_norm": 3.5819380283355713,
"learning_rate": 9.901042468802074e-05,
"loss": 0.6426,
"step": 480
},
{
"epoch": 1.9107934943321834,
"grad_norm": 3.237508773803711,
"learning_rate": 9.89529375876591e-05,
"loss": 0.5933,
"step": 485
},
{
"epoch": 1.9305076392311484,
"grad_norm": 3.832315444946289,
"learning_rate": 9.889384515216501e-05,
"loss": 0.5767,
"step": 490
},
{
"epoch": 1.9502217841301133,
"grad_norm": 3.097031593322754,
"learning_rate": 9.883314931938258e-05,
"loss": 0.5225,
"step": 495
},
{
"epoch": 1.9699359290290783,
"grad_norm": 3.5058581829071045,
"learning_rate": 9.877085207973684e-05,
"loss": 0.5591,
"step": 500
},
{
"epoch": 1.9896500739280434,
"grad_norm": 3.0875792503356934,
"learning_rate": 9.870695547616851e-05,
"loss": 0.537,
"step": 505
},
{
"epoch": 2.007885657959586,
"grad_norm": 3.042051315307617,
"learning_rate": 9.864146160406684e-05,
"loss": 0.5084,
"step": 510
},
{
"epoch": 2.027599802858551,
"grad_norm": 3.661208152770996,
"learning_rate": 9.857437261120115e-05,
"loss": 0.4259,
"step": 515
},
{
"epoch": 2.047313947757516,
"grad_norm": 2.7599899768829346,
"learning_rate": 9.850569069765012e-05,
"loss": 0.4659,
"step": 520
},
{
"epoch": 2.0670280926564812,
"grad_norm": 3.0007238388061523,
"learning_rate": 9.843541811572988e-05,
"loss": 0.4666,
"step": 525
},
{
"epoch": 2.086742237555446,
"grad_norm": 3.00451397895813,
"learning_rate": 9.836355716991999e-05,
"loss": 0.4842,
"step": 530
},
{
"epoch": 2.106456382454411,
"grad_norm": 3.2380623817443848,
"learning_rate": 9.82901102167879e-05,
"loss": 0.4566,
"step": 535
},
{
"epoch": 2.126170527353376,
"grad_norm": 3.130038022994995,
"learning_rate": 9.821507966491177e-05,
"loss": 0.4407,
"step": 540
},
{
"epoch": 2.145884672252341,
"grad_norm": 3.2306203842163086,
"learning_rate": 9.813846797480134e-05,
"loss": 0.4002,
"step": 545
},
{
"epoch": 2.165598817151306,
"grad_norm": 3.315145969390869,
"learning_rate": 9.806027765881734e-05,
"loss": 0.4392,
"step": 550
},
{
"epoch": 2.185312962050271,
"grad_norm": 2.3235769271850586,
"learning_rate": 9.798051128108907e-05,
"loss": 0.4443,
"step": 555
},
{
"epoch": 2.205027106949236,
"grad_norm": 2.693516731262207,
"learning_rate": 9.78991714574303e-05,
"loss": 0.4259,
"step": 560
},
{
"epoch": 2.2247412518482013,
"grad_norm": 2.472679853439331,
"learning_rate": 9.781626085525352e-05,
"loss": 0.4294,
"step": 565
},
{
"epoch": 2.244455396747166,
"grad_norm": 2.6586391925811768,
"learning_rate": 9.773178219348242e-05,
"loss": 0.4111,
"step": 570
},
{
"epoch": 2.264169541646131,
"grad_norm": 2.869844913482666,
"learning_rate": 9.764573824246277e-05,
"loss": 0.4128,
"step": 575
},
{
"epoch": 2.283883686545096,
"grad_norm": 2.2448887825012207,
"learning_rate": 9.755813182387158e-05,
"loss": 0.4833,
"step": 580
},
{
"epoch": 2.303597831444061,
"grad_norm": 2.5775742530822754,
"learning_rate": 9.74689658106245e-05,
"loss": 0.4466,
"step": 585
},
{
"epoch": 2.3233119763430263,
"grad_norm": 2.392392158508301,
"learning_rate": 9.73782431267817e-05,
"loss": 0.4098,
"step": 590
},
{
"epoch": 2.343026121241991,
"grad_norm": 2.7074978351593018,
"learning_rate": 9.728596674745187e-05,
"loss": 0.4585,
"step": 595
},
{
"epoch": 2.362740266140956,
"grad_norm": 3.289433240890503,
"learning_rate": 9.719213969869478e-05,
"loss": 0.3965,
"step": 600
},
{
"epoch": 2.3824544110399213,
"grad_norm": 2.337214708328247,
"learning_rate": 9.709676505742193e-05,
"loss": 0.3742,
"step": 605
},
{
"epoch": 2.402168555938886,
"grad_norm": 2.2772057056427,
"learning_rate": 9.699984595129575e-05,
"loss": 0.3871,
"step": 610
},
{
"epoch": 2.421882700837851,
"grad_norm": 2.8387715816497803,
"learning_rate": 9.690138555862691e-05,
"loss": 0.3897,
"step": 615
},
{
"epoch": 2.4415968457368162,
"grad_norm": 2.725186586380005,
"learning_rate": 9.680138710827025e-05,
"loss": 0.4116,
"step": 620
},
{
"epoch": 2.461310990635781,
"grad_norm": 2.4635608196258545,
"learning_rate": 9.669985387951877e-05,
"loss": 0.3498,
"step": 625
},
{
"epoch": 2.4810251355347464,
"grad_norm": 3.7746026515960693,
"learning_rate": 9.659678920199612e-05,
"loss": 0.351,
"step": 630
},
{
"epoch": 2.500739280433711,
"grad_norm": 2.5471270084381104,
"learning_rate": 9.649219645554741e-05,
"loss": 0.3738,
"step": 635
},
{
"epoch": 2.5204534253326765,
"grad_norm": 2.378277063369751,
"learning_rate": 9.638607907012844e-05,
"loss": 0.403,
"step": 640
},
{
"epoch": 2.5401675702316413,
"grad_norm": 2.655937433242798,
"learning_rate": 9.627844052569312e-05,
"loss": 0.4509,
"step": 645
},
{
"epoch": 2.559881715130606,
"grad_norm": 2.7430179119110107,
"learning_rate": 9.616928435207938e-05,
"loss": 0.4314,
"step": 650
},
{
"epoch": 2.5795958600295714,
"grad_norm": 3.2100863456726074,
"learning_rate": 9.605861412889347e-05,
"loss": 0.3806,
"step": 655
},
{
"epoch": 2.5993100049285363,
"grad_norm": 2.856989860534668,
"learning_rate": 9.594643348539251e-05,
"loss": 0.4194,
"step": 660
},
{
"epoch": 2.619024149827501,
"grad_norm": 2.215460777282715,
"learning_rate": 9.583274610036549e-05,
"loss": 0.3617,
"step": 665
},
{
"epoch": 2.6387382947264664,
"grad_norm": 2.89630389213562,
"learning_rate": 9.571755570201266e-05,
"loss": 0.4555,
"step": 670
},
{
"epoch": 2.658452439625431,
"grad_norm": 2.8467066287994385,
"learning_rate": 9.560086606782322e-05,
"loss": 0.3644,
"step": 675
},
{
"epoch": 2.678166584524396,
"grad_norm": 2.772758722305298,
"learning_rate": 9.548268102445145e-05,
"loss": 0.3363,
"step": 680
},
{
"epoch": 2.6978807294233613,
"grad_norm": 2.4616968631744385,
"learning_rate": 9.53630044475913e-05,
"loss": 0.3377,
"step": 685
},
{
"epoch": 2.717594874322326,
"grad_norm": 2.449794292449951,
"learning_rate": 9.524184026184917e-05,
"loss": 0.4089,
"step": 690
},
{
"epoch": 2.7373090192212914,
"grad_norm": 2.1945290565490723,
"learning_rate": 9.511919244061532e-05,
"loss": 0.3989,
"step": 695
},
{
"epoch": 2.7570231641202563,
"grad_norm": 2.0283315181732178,
"learning_rate": 9.499506500593345e-05,
"loss": 0.4989,
"step": 700
},
{
"epoch": 2.7767373090192216,
"grad_norm": 2.6042144298553467,
"learning_rate": 9.486946202836898e-05,
"loss": 0.3996,
"step": 705
},
{
"epoch": 2.7964514539181864,
"grad_norm": 2.1515965461730957,
"learning_rate": 9.474238762687532e-05,
"loss": 0.3421,
"step": 710
},
{
"epoch": 2.8161655988171512,
"grad_norm": 2.147844076156616,
"learning_rate": 9.461384596865904e-05,
"loss": 0.4217,
"step": 715
},
{
"epoch": 2.8358797437161165,
"grad_norm": 2.502347707748413,
"learning_rate": 9.448384126904306e-05,
"loss": 0.4129,
"step": 720
},
{
"epoch": 2.8555938886150813,
"grad_norm": 2.373370409011841,
"learning_rate": 9.435237779132845e-05,
"loss": 0.3447,
"step": 725
},
{
"epoch": 2.875308033514046,
"grad_norm": 2.6700592041015625,
"learning_rate": 9.421945984665465e-05,
"loss": 0.355,
"step": 730
},
{
"epoch": 2.8950221784130115,
"grad_norm": 2.2917869091033936,
"learning_rate": 9.408509179385806e-05,
"loss": 0.3461,
"step": 735
},
{
"epoch": 2.9147363233119763,
"grad_norm": 1.9684882164001465,
"learning_rate": 9.394927803932909e-05,
"loss": 0.3912,
"step": 740
},
{
"epoch": 2.934450468210941,
"grad_norm": 2.7318103313446045,
"learning_rate": 9.381202303686773e-05,
"loss": 0.385,
"step": 745
},
{
"epoch": 2.9541646131099064,
"grad_norm": 2.490328311920166,
"learning_rate": 9.367333128753741e-05,
"loss": 0.3136,
"step": 750
},
{
"epoch": 2.9738787580088712,
"grad_norm": 1.991101861000061,
"learning_rate": 9.353320733951745e-05,
"loss": 0.3771,
"step": 755
},
{
"epoch": 2.9935929029078365,
"grad_norm": 2.4289438724517822,
"learning_rate": 9.339165578795383e-05,
"loss": 0.3484,
"step": 760
}
],
"logging_steps": 5,
"max_steps": 3048,
"num_input_tokens_seen": 0,
"num_train_epochs": 12,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.1958325696180224e+16,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}