{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.2917653815780028,
"eval_steps": 500,
"global_step": 2250,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00574341302318903,
"grad_norm": 3.515625,
"learning_rate": 1.9920000000000002e-05,
"loss": 1.3417,
"step": 10
},
{
"epoch": 0.01148682604637806,
"grad_norm": 2.609375,
"learning_rate": 1.9831111111111113e-05,
"loss": 0.654,
"step": 20
},
{
"epoch": 0.01723023906956709,
"grad_norm": 2.171875,
"learning_rate": 1.9742222222222223e-05,
"loss": 0.6043,
"step": 30
},
{
"epoch": 0.02297365209275612,
"grad_norm": 2.296875,
"learning_rate": 1.9653333333333334e-05,
"loss": 0.5744,
"step": 40
},
{
"epoch": 0.02871706511594515,
"grad_norm": 1.890625,
"learning_rate": 1.9564444444444444e-05,
"loss": 0.563,
"step": 50
},
{
"epoch": 0.03446047813913418,
"grad_norm": 2.3125,
"learning_rate": 1.9475555555555558e-05,
"loss": 0.5506,
"step": 60
},
{
"epoch": 0.04020389116232321,
"grad_norm": 1.984375,
"learning_rate": 1.938666666666667e-05,
"loss": 0.563,
"step": 70
},
{
"epoch": 0.04594730418551224,
"grad_norm": 2.125,
"learning_rate": 1.929777777777778e-05,
"loss": 0.561,
"step": 80
},
{
"epoch": 0.05169071720870127,
"grad_norm": 1.9765625,
"learning_rate": 1.920888888888889e-05,
"loss": 0.5288,
"step": 90
},
{
"epoch": 0.0574341302318903,
"grad_norm": 2.109375,
"learning_rate": 1.912e-05,
"loss": 0.5693,
"step": 100
},
{
"epoch": 0.06317754325507934,
"grad_norm": 2.21875,
"learning_rate": 1.9031111111111114e-05,
"loss": 0.5353,
"step": 110
},
{
"epoch": 0.06892095627826836,
"grad_norm": 1.9140625,
"learning_rate": 1.8942222222222224e-05,
"loss": 0.5383,
"step": 120
},
{
"epoch": 0.0746643693014574,
"grad_norm": 2.40625,
"learning_rate": 1.8853333333333335e-05,
"loss": 0.4849,
"step": 130
},
{
"epoch": 0.08040778232464642,
"grad_norm": 1.8671875,
"learning_rate": 1.8764444444444445e-05,
"loss": 0.5155,
"step": 140
},
{
"epoch": 0.08615119534783545,
"grad_norm": 2.265625,
"learning_rate": 1.867555555555556e-05,
"loss": 0.5071,
"step": 150
},
{
"epoch": 0.09189460837102448,
"grad_norm": 2.03125,
"learning_rate": 1.858666666666667e-05,
"loss": 0.5065,
"step": 160
},
{
"epoch": 0.09763802139421351,
"grad_norm": 1.84375,
"learning_rate": 1.849777777777778e-05,
"loss": 0.509,
"step": 170
},
{
"epoch": 0.10338143441740254,
"grad_norm": 1.703125,
"learning_rate": 1.840888888888889e-05,
"loss": 0.5076,
"step": 180
},
{
"epoch": 0.10912484744059157,
"grad_norm": 2.0,
"learning_rate": 1.832e-05,
"loss": 0.4804,
"step": 190
},
{
"epoch": 0.1148682604637806,
"grad_norm": 1.8828125,
"learning_rate": 1.823111111111111e-05,
"loss": 0.4584,
"step": 200
},
{
"epoch": 0.12061167348696963,
"grad_norm": 1.8046875,
"learning_rate": 1.814222222222222e-05,
"loss": 0.4951,
"step": 210
},
{
"epoch": 0.12635508651015867,
"grad_norm": 1.7265625,
"learning_rate": 1.8053333333333332e-05,
"loss": 0.4758,
"step": 220
},
{
"epoch": 0.13209849953334768,
"grad_norm": 2.1875,
"learning_rate": 1.7964444444444446e-05,
"loss": 0.5146,
"step": 230
},
{
"epoch": 0.13784191255653672,
"grad_norm": 2.140625,
"learning_rate": 1.7875555555555556e-05,
"loss": 0.5004,
"step": 240
},
{
"epoch": 0.14358532557972575,
"grad_norm": 2.234375,
"learning_rate": 1.7786666666666667e-05,
"loss": 0.4744,
"step": 250
},
{
"epoch": 0.1493287386029148,
"grad_norm": 2.078125,
"learning_rate": 1.7697777777777777e-05,
"loss": 0.4943,
"step": 260
},
{
"epoch": 0.1550721516261038,
"grad_norm": 2.046875,
"learning_rate": 1.760888888888889e-05,
"loss": 0.4751,
"step": 270
},
{
"epoch": 0.16081556464929284,
"grad_norm": 2.09375,
"learning_rate": 1.752e-05,
"loss": 0.4599,
"step": 280
},
{
"epoch": 0.16655897767248187,
"grad_norm": 1.8984375,
"learning_rate": 1.7431111111111112e-05,
"loss": 0.4656,
"step": 290
},
{
"epoch": 0.1723023906956709,
"grad_norm": 1.9609375,
"learning_rate": 1.7342222222222226e-05,
"loss": 0.4242,
"step": 300
},
{
"epoch": 0.17804580371885995,
"grad_norm": 2.0625,
"learning_rate": 1.7253333333333336e-05,
"loss": 0.4804,
"step": 310
},
{
"epoch": 0.18378921674204896,
"grad_norm": 1.8046875,
"learning_rate": 1.7164444444444447e-05,
"loss": 0.4736,
"step": 320
},
{
"epoch": 0.189532629765238,
"grad_norm": 2.0625,
"learning_rate": 1.7075555555555557e-05,
"loss": 0.4317,
"step": 330
},
{
"epoch": 0.19527604278842703,
"grad_norm": 2.46875,
"learning_rate": 1.6986666666666668e-05,
"loss": 0.4752,
"step": 340
},
{
"epoch": 0.20101945581161607,
"grad_norm": 1.8359375,
"learning_rate": 1.6897777777777778e-05,
"loss": 0.3932,
"step": 350
},
{
"epoch": 0.20676286883480507,
"grad_norm": 2.109375,
"learning_rate": 1.680888888888889e-05,
"loss": 0.4742,
"step": 360
},
{
"epoch": 0.2125062818579941,
"grad_norm": 2.34375,
"learning_rate": 1.672e-05,
"loss": 0.4542,
"step": 370
},
{
"epoch": 0.21824969488118315,
"grad_norm": 1.9140625,
"learning_rate": 1.6631111111111113e-05,
"loss": 0.4284,
"step": 380
},
{
"epoch": 0.22399310790437219,
"grad_norm": 1.90625,
"learning_rate": 1.6542222222222223e-05,
"loss": 0.4232,
"step": 390
},
{
"epoch": 0.2297365209275612,
"grad_norm": 1.890625,
"learning_rate": 1.6453333333333334e-05,
"loss": 0.4299,
"step": 400
},
{
"epoch": 0.23547993395075023,
"grad_norm": 2.375,
"learning_rate": 1.6364444444444444e-05,
"loss": 0.4246,
"step": 410
},
{
"epoch": 0.24122334697393927,
"grad_norm": 2.015625,
"learning_rate": 1.6275555555555558e-05,
"loss": 0.4215,
"step": 420
},
{
"epoch": 0.2469667599971283,
"grad_norm": 1.9375,
"learning_rate": 1.618666666666667e-05,
"loss": 0.4398,
"step": 430
},
{
"epoch": 0.25271017302031734,
"grad_norm": 2.1875,
"learning_rate": 1.609777777777778e-05,
"loss": 0.4319,
"step": 440
},
{
"epoch": 0.2584535860435064,
"grad_norm": 2.109375,
"learning_rate": 1.600888888888889e-05,
"loss": 0.4227,
"step": 450
},
{
"epoch": 0.26419699906669536,
"grad_norm": 2.390625,
"learning_rate": 1.5920000000000003e-05,
"loss": 0.431,
"step": 460
},
{
"epoch": 0.2699404120898844,
"grad_norm": 2.421875,
"learning_rate": 1.5831111111111114e-05,
"loss": 0.4323,
"step": 470
},
{
"epoch": 0.27568382511307343,
"grad_norm": 2.15625,
"learning_rate": 1.5742222222222224e-05,
"loss": 0.4417,
"step": 480
},
{
"epoch": 0.28142723813626247,
"grad_norm": 1.8984375,
"learning_rate": 1.5653333333333335e-05,
"loss": 0.3823,
"step": 490
},
{
"epoch": 0.2871706511594515,
"grad_norm": 1.9140625,
"learning_rate": 1.5564444444444445e-05,
"loss": 0.394,
"step": 500
},
{
"epoch": 0.29291406418264054,
"grad_norm": 2.171875,
"learning_rate": 1.5475555555555556e-05,
"loss": 0.4017,
"step": 510
},
{
"epoch": 0.2986574772058296,
"grad_norm": 2.4375,
"learning_rate": 1.5386666666666666e-05,
"loss": 0.4121,
"step": 520
},
{
"epoch": 0.3044008902290186,
"grad_norm": 2.078125,
"learning_rate": 1.5297777777777777e-05,
"loss": 0.3843,
"step": 530
},
{
"epoch": 0.3101443032522076,
"grad_norm": 2.34375,
"learning_rate": 1.520888888888889e-05,
"loss": 0.4395,
"step": 540
},
{
"epoch": 0.31588771627539664,
"grad_norm": 2.25,
"learning_rate": 1.5120000000000001e-05,
"loss": 0.3883,
"step": 550
},
{
"epoch": 0.32163112929858567,
"grad_norm": 1.6171875,
"learning_rate": 1.5031111111111111e-05,
"loss": 0.4089,
"step": 560
},
{
"epoch": 0.3273745423217747,
"grad_norm": 1.734375,
"learning_rate": 1.4942222222222222e-05,
"loss": 0.3895,
"step": 570
},
{
"epoch": 0.33311795534496375,
"grad_norm": 1.9453125,
"learning_rate": 1.4853333333333336e-05,
"loss": 0.4084,
"step": 580
},
{
"epoch": 0.3388613683681528,
"grad_norm": 2.171875,
"learning_rate": 1.4764444444444446e-05,
"loss": 0.4083,
"step": 590
},
{
"epoch": 0.3446047813913418,
"grad_norm": 2.109375,
"learning_rate": 1.4675555555555557e-05,
"loss": 0.4037,
"step": 600
},
{
"epoch": 0.35034819441453086,
"grad_norm": 2.53125,
"learning_rate": 1.4586666666666667e-05,
"loss": 0.3538,
"step": 610
},
{
"epoch": 0.3560916074377199,
"grad_norm": 2.015625,
"learning_rate": 1.449777777777778e-05,
"loss": 0.3661,
"step": 620
},
{
"epoch": 0.3618350204609089,
"grad_norm": 1.984375,
"learning_rate": 1.440888888888889e-05,
"loss": 0.3992,
"step": 630
},
{
"epoch": 0.3675784334840979,
"grad_norm": 2.328125,
"learning_rate": 1.432e-05,
"loss": 0.3721,
"step": 640
},
{
"epoch": 0.37332184650728695,
"grad_norm": 2.453125,
"learning_rate": 1.4231111111111114e-05,
"loss": 0.4009,
"step": 650
},
{
"epoch": 0.379065259530476,
"grad_norm": 2.25,
"learning_rate": 1.4142222222222224e-05,
"loss": 0.401,
"step": 660
},
{
"epoch": 0.384808672553665,
"grad_norm": 2.46875,
"learning_rate": 1.4053333333333335e-05,
"loss": 0.3783,
"step": 670
},
{
"epoch": 0.39055208557685406,
"grad_norm": 2.234375,
"learning_rate": 1.3964444444444445e-05,
"loss": 0.4021,
"step": 680
},
{
"epoch": 0.3962954986000431,
"grad_norm": 2.15625,
"learning_rate": 1.3875555555555557e-05,
"loss": 0.3713,
"step": 690
},
{
"epoch": 0.40203891162323213,
"grad_norm": 2.125,
"learning_rate": 1.3786666666666668e-05,
"loss": 0.4036,
"step": 700
},
{
"epoch": 0.4077823246464211,
"grad_norm": 2.03125,
"learning_rate": 1.3697777777777778e-05,
"loss": 0.368,
"step": 710
},
{
"epoch": 0.41352573766961015,
"grad_norm": 2.5625,
"learning_rate": 1.3608888888888889e-05,
"loss": 0.3578,
"step": 720
},
{
"epoch": 0.4192691506927992,
"grad_norm": 2.390625,
"learning_rate": 1.3520000000000003e-05,
"loss": 0.3731,
"step": 730
},
{
"epoch": 0.4250125637159882,
"grad_norm": 2.5,
"learning_rate": 1.3431111111111113e-05,
"loss": 0.3817,
"step": 740
},
{
"epoch": 0.43075597673917726,
"grad_norm": 2.515625,
"learning_rate": 1.3342222222222224e-05,
"loss": 0.3717,
"step": 750
},
{
"epoch": 0.4364993897623663,
"grad_norm": 2.203125,
"learning_rate": 1.3253333333333334e-05,
"loss": 0.3629,
"step": 760
},
{
"epoch": 0.44224280278555533,
"grad_norm": 2.625,
"learning_rate": 1.3164444444444446e-05,
"loss": 0.3698,
"step": 770
},
{
"epoch": 0.44798621580874437,
"grad_norm": 2.0,
"learning_rate": 1.3075555555555557e-05,
"loss": 0.3661,
"step": 780
},
{
"epoch": 0.45372962883193335,
"grad_norm": 2.5,
"learning_rate": 1.2986666666666667e-05,
"loss": 0.342,
"step": 790
},
{
"epoch": 0.4594730418551224,
"grad_norm": 2.046875,
"learning_rate": 1.2897777777777778e-05,
"loss": 0.3862,
"step": 800
},
{
"epoch": 0.4652164548783114,
"grad_norm": 2.40625,
"learning_rate": 1.2808888888888891e-05,
"loss": 0.3436,
"step": 810
},
{
"epoch": 0.47095986790150046,
"grad_norm": 2.3125,
"learning_rate": 1.2720000000000002e-05,
"loss": 0.3374,
"step": 820
},
{
"epoch": 0.4767032809246895,
"grad_norm": 2.15625,
"learning_rate": 1.2631111111111112e-05,
"loss": 0.3648,
"step": 830
},
{
"epoch": 0.48244669394787854,
"grad_norm": 2.25,
"learning_rate": 1.2542222222222223e-05,
"loss": 0.327,
"step": 840
},
{
"epoch": 0.48819010697106757,
"grad_norm": 2.3125,
"learning_rate": 1.2453333333333335e-05,
"loss": 0.3406,
"step": 850
},
{
"epoch": 0.4939335199942566,
"grad_norm": 2.03125,
"learning_rate": 1.2364444444444445e-05,
"loss": 0.3441,
"step": 860
},
{
"epoch": 0.4996769330174456,
"grad_norm": 2.609375,
"learning_rate": 1.2275555555555556e-05,
"loss": 0.3695,
"step": 870
},
{
"epoch": 0.5054203460406347,
"grad_norm": 2.359375,
"learning_rate": 1.2186666666666666e-05,
"loss": 0.3386,
"step": 880
},
{
"epoch": 0.5111637590638237,
"grad_norm": 2.328125,
"learning_rate": 1.209777777777778e-05,
"loss": 0.3478,
"step": 890
},
{
"epoch": 0.5169071720870128,
"grad_norm": 2.078125,
"learning_rate": 1.200888888888889e-05,
"loss": 0.3274,
"step": 900
},
{
"epoch": 0.5226505851102017,
"grad_norm": 2.3125,
"learning_rate": 1.1920000000000001e-05,
"loss": 0.3381,
"step": 910
},
{
"epoch": 0.5283939981333907,
"grad_norm": 2.359375,
"learning_rate": 1.1831111111111112e-05,
"loss": 0.3438,
"step": 920
},
{
"epoch": 0.5341374111565798,
"grad_norm": 2.390625,
"learning_rate": 1.1742222222222224e-05,
"loss": 0.3655,
"step": 930
},
{
"epoch": 0.5398808241797688,
"grad_norm": 2.203125,
"learning_rate": 1.1653333333333334e-05,
"loss": 0.3219,
"step": 940
},
{
"epoch": 0.5456242372029578,
"grad_norm": 2.453125,
"learning_rate": 1.1564444444444445e-05,
"loss": 0.3286,
"step": 950
},
{
"epoch": 0.5513676502261469,
"grad_norm": 2.34375,
"learning_rate": 1.1475555555555557e-05,
"loss": 0.3547,
"step": 960
},
{
"epoch": 0.5571110632493359,
"grad_norm": 2.578125,
"learning_rate": 1.1386666666666669e-05,
"loss": 0.3325,
"step": 970
},
{
"epoch": 0.5628544762725249,
"grad_norm": 2.375,
"learning_rate": 1.129777777777778e-05,
"loss": 0.3711,
"step": 980
},
{
"epoch": 0.568597889295714,
"grad_norm": 2.234375,
"learning_rate": 1.120888888888889e-05,
"loss": 0.3124,
"step": 990
},
{
"epoch": 0.574341302318903,
"grad_norm": 2.234375,
"learning_rate": 1.1120000000000002e-05,
"loss": 0.3289,
"step": 1000
},
{
"epoch": 0.580084715342092,
"grad_norm": 2.234375,
"learning_rate": 1.1031111111111112e-05,
"loss": 0.3218,
"step": 1010
},
{
"epoch": 0.5858281283652811,
"grad_norm": 2.515625,
"learning_rate": 1.0942222222222223e-05,
"loss": 0.3503,
"step": 1020
},
{
"epoch": 0.5915715413884701,
"grad_norm": 2.328125,
"learning_rate": 1.0853333333333333e-05,
"loss": 0.3551,
"step": 1030
},
{
"epoch": 0.5973149544116592,
"grad_norm": 2.453125,
"learning_rate": 1.0764444444444445e-05,
"loss": 0.3194,
"step": 1040
},
{
"epoch": 0.6030583674348482,
"grad_norm": 2.1875,
"learning_rate": 1.0675555555555558e-05,
"loss": 0.3478,
"step": 1050
},
{
"epoch": 0.6088017804580372,
"grad_norm": 2.265625,
"learning_rate": 1.0586666666666668e-05,
"loss": 0.3202,
"step": 1060
},
{
"epoch": 0.6145451934812263,
"grad_norm": 1.9140625,
"learning_rate": 1.0497777777777779e-05,
"loss": 0.3095,
"step": 1070
},
{
"epoch": 0.6202886065044152,
"grad_norm": 2.71875,
"learning_rate": 1.040888888888889e-05,
"loss": 0.3112,
"step": 1080
},
{
"epoch": 0.6260320195276042,
"grad_norm": 2.359375,
"learning_rate": 1.0320000000000001e-05,
"loss": 0.3115,
"step": 1090
},
{
"epoch": 0.6317754325507933,
"grad_norm": 2.296875,
"learning_rate": 1.0231111111111112e-05,
"loss": 0.3296,
"step": 1100
},
{
"epoch": 0.6375188455739823,
"grad_norm": 2.703125,
"learning_rate": 1.0142222222222222e-05,
"loss": 0.279,
"step": 1110
},
{
"epoch": 0.6432622585971713,
"grad_norm": 2.71875,
"learning_rate": 1.0053333333333334e-05,
"loss": 0.3153,
"step": 1120
},
{
"epoch": 0.6490056716203604,
"grad_norm": 1.953125,
"learning_rate": 9.964444444444445e-06,
"loss": 0.3039,
"step": 1130
},
{
"epoch": 0.6547490846435494,
"grad_norm": 2.140625,
"learning_rate": 9.875555555555557e-06,
"loss": 0.3238,
"step": 1140
},
{
"epoch": 0.6604924976667385,
"grad_norm": 2.25,
"learning_rate": 9.786666666666667e-06,
"loss": 0.3143,
"step": 1150
},
{
"epoch": 0.6662359106899275,
"grad_norm": 2.546875,
"learning_rate": 9.697777777777778e-06,
"loss": 0.3145,
"step": 1160
},
{
"epoch": 0.6719793237131165,
"grad_norm": 2.15625,
"learning_rate": 9.60888888888889e-06,
"loss": 0.3034,
"step": 1170
},
{
"epoch": 0.6777227367363056,
"grad_norm": 3.875,
"learning_rate": 9.52e-06,
"loss": 0.2813,
"step": 1180
},
{
"epoch": 0.6834661497594946,
"grad_norm": 2.046875,
"learning_rate": 9.431111111111112e-06,
"loss": 0.3211,
"step": 1190
},
{
"epoch": 0.6892095627826836,
"grad_norm": 2.515625,
"learning_rate": 9.342222222222223e-06,
"loss": 0.2954,
"step": 1200
},
{
"epoch": 0.6949529758058727,
"grad_norm": 2.34375,
"learning_rate": 9.253333333333333e-06,
"loss": 0.2969,
"step": 1210
},
{
"epoch": 0.7006963888290617,
"grad_norm": 2.171875,
"learning_rate": 9.164444444444446e-06,
"loss": 0.2942,
"step": 1220
},
{
"epoch": 0.7064398018522507,
"grad_norm": 2.0,
"learning_rate": 9.075555555555556e-06,
"loss": 0.321,
"step": 1230
},
{
"epoch": 0.7121832148754398,
"grad_norm": 2.171875,
"learning_rate": 8.986666666666666e-06,
"loss": 0.2949,
"step": 1240
},
{
"epoch": 0.7179266278986287,
"grad_norm": 2.609375,
"learning_rate": 8.897777777777779e-06,
"loss": 0.3111,
"step": 1250
},
{
"epoch": 0.7236700409218177,
"grad_norm": 2.234375,
"learning_rate": 8.80888888888889e-06,
"loss": 0.3199,
"step": 1260
},
{
"epoch": 0.7294134539450068,
"grad_norm": 3.171875,
"learning_rate": 8.720000000000001e-06,
"loss": 0.3239,
"step": 1270
},
{
"epoch": 0.7351568669681958,
"grad_norm": 2.640625,
"learning_rate": 8.631111111111112e-06,
"loss": 0.3203,
"step": 1280
},
{
"epoch": 0.7409002799913849,
"grad_norm": 2.3125,
"learning_rate": 8.542222222222222e-06,
"loss": 0.3071,
"step": 1290
},
{
"epoch": 0.7466436930145739,
"grad_norm": 2.328125,
"learning_rate": 8.453333333333334e-06,
"loss": 0.3296,
"step": 1300
},
{
"epoch": 0.7523871060377629,
"grad_norm": 2.296875,
"learning_rate": 8.364444444444445e-06,
"loss": 0.2994,
"step": 1310
},
{
"epoch": 0.758130519060952,
"grad_norm": 2.390625,
"learning_rate": 8.275555555555557e-06,
"loss": 0.2661,
"step": 1320
},
{
"epoch": 0.763873932084141,
"grad_norm": 2.34375,
"learning_rate": 8.186666666666667e-06,
"loss": 0.3194,
"step": 1330
},
{
"epoch": 0.76961734510733,
"grad_norm": 2.28125,
"learning_rate": 8.09777777777778e-06,
"loss": 0.283,
"step": 1340
},
{
"epoch": 0.7753607581305191,
"grad_norm": 2.734375,
"learning_rate": 8.00888888888889e-06,
"loss": 0.3358,
"step": 1350
},
{
"epoch": 0.7811041711537081,
"grad_norm": 2.53125,
"learning_rate": 7.92e-06,
"loss": 0.3041,
"step": 1360
},
{
"epoch": 0.7868475841768972,
"grad_norm": 2.671875,
"learning_rate": 7.831111111111111e-06,
"loss": 0.2701,
"step": 1370
},
{
"epoch": 0.7925909972000862,
"grad_norm": 2.53125,
"learning_rate": 7.742222222222223e-06,
"loss": 0.2783,
"step": 1380
},
{
"epoch": 0.7983344102232752,
"grad_norm": 2.296875,
"learning_rate": 7.653333333333333e-06,
"loss": 0.2989,
"step": 1390
},
{
"epoch": 0.8040778232464643,
"grad_norm": 3.09375,
"learning_rate": 7.564444444444446e-06,
"loss": 0.2711,
"step": 1400
},
{
"epoch": 0.8098212362696532,
"grad_norm": 2.21875,
"learning_rate": 7.475555555555556e-06,
"loss": 0.29,
"step": 1410
},
{
"epoch": 0.8155646492928422,
"grad_norm": 2.359375,
"learning_rate": 7.386666666666667e-06,
"loss": 0.3208,
"step": 1420
},
{
"epoch": 0.8213080623160313,
"grad_norm": 2.21875,
"learning_rate": 7.297777777777778e-06,
"loss": 0.2838,
"step": 1430
},
{
"epoch": 0.8270514753392203,
"grad_norm": 2.828125,
"learning_rate": 7.20888888888889e-06,
"loss": 0.3104,
"step": 1440
},
{
"epoch": 0.8327948883624093,
"grad_norm": 2.3125,
"learning_rate": 7.1200000000000004e-06,
"loss": 0.2805,
"step": 1450
},
{
"epoch": 0.8385383013855984,
"grad_norm": 1.71875,
"learning_rate": 7.031111111111112e-06,
"loss": 0.2542,
"step": 1460
},
{
"epoch": 0.8442817144087874,
"grad_norm": 2.1875,
"learning_rate": 6.942222222222222e-06,
"loss": 0.2698,
"step": 1470
},
{
"epoch": 0.8500251274319764,
"grad_norm": 2.671875,
"learning_rate": 6.853333333333334e-06,
"loss": 0.2938,
"step": 1480
},
{
"epoch": 0.8557685404551655,
"grad_norm": 2.484375,
"learning_rate": 6.764444444444445e-06,
"loss": 0.2657,
"step": 1490
},
{
"epoch": 0.8615119534783545,
"grad_norm": 2.125,
"learning_rate": 6.675555555555556e-06,
"loss": 0.3194,
"step": 1500
},
{
"epoch": 0.8672553665015436,
"grad_norm": 3.0,
"learning_rate": 6.5866666666666666e-06,
"loss": 0.2863,
"step": 1510
},
{
"epoch": 0.8729987795247326,
"grad_norm": 2.640625,
"learning_rate": 6.497777777777779e-06,
"loss": 0.2636,
"step": 1520
},
{
"epoch": 0.8787421925479216,
"grad_norm": 2.1875,
"learning_rate": 6.408888888888889e-06,
"loss": 0.2537,
"step": 1530
},
{
"epoch": 0.8844856055711107,
"grad_norm": 2.265625,
"learning_rate": 6.3200000000000005e-06,
"loss": 0.2915,
"step": 1540
},
{
"epoch": 0.8902290185942997,
"grad_norm": 2.53125,
"learning_rate": 6.231111111111111e-06,
"loss": 0.2814,
"step": 1550
},
{
"epoch": 0.8959724316174887,
"grad_norm": 2.421875,
"learning_rate": 6.142222222222223e-06,
"loss": 0.2508,
"step": 1560
},
{
"epoch": 0.9017158446406778,
"grad_norm": 1.8984375,
"learning_rate": 6.0533333333333335e-06,
"loss": 0.2499,
"step": 1570
},
{
"epoch": 0.9074592576638667,
"grad_norm": 3.515625,
"learning_rate": 5.964444444444445e-06,
"loss": 0.2779,
"step": 1580
},
{
"epoch": 0.9132026706870557,
"grad_norm": 2.09375,
"learning_rate": 5.875555555555556e-06,
"loss": 0.2703,
"step": 1590
},
{
"epoch": 0.9189460837102448,
"grad_norm": 2.515625,
"learning_rate": 5.7866666666666674e-06,
"loss": 0.2851,
"step": 1600
},
{
"epoch": 0.9246894967334338,
"grad_norm": 2.53125,
"learning_rate": 5.697777777777779e-06,
"loss": 0.2771,
"step": 1610
},
{
"epoch": 0.9304329097566229,
"grad_norm": 2.515625,
"learning_rate": 5.608888888888889e-06,
"loss": 0.2545,
"step": 1620
},
{
"epoch": 0.9361763227798119,
"grad_norm": 2.171875,
"learning_rate": 5.5200000000000005e-06,
"loss": 0.2534,
"step": 1630
},
{
"epoch": 0.9419197358030009,
"grad_norm": 2.65625,
"learning_rate": 5.431111111111112e-06,
"loss": 0.2851,
"step": 1640
},
{
"epoch": 0.94766314882619,
"grad_norm": 2.109375,
"learning_rate": 5.342222222222223e-06,
"loss": 0.3072,
"step": 1650
},
{
"epoch": 0.953406561849379,
"grad_norm": 2.171875,
"learning_rate": 5.2533333333333336e-06,
"loss": 0.2709,
"step": 1660
},
{
"epoch": 0.959149974872568,
"grad_norm": 2.359375,
"learning_rate": 5.164444444444445e-06,
"loss": 0.2876,
"step": 1670
},
{
"epoch": 0.9648933878957571,
"grad_norm": 2.015625,
"learning_rate": 5.075555555555556e-06,
"loss": 0.2824,
"step": 1680
},
{
"epoch": 0.9706368009189461,
"grad_norm": 2.515625,
"learning_rate": 4.986666666666667e-06,
"loss": 0.2867,
"step": 1690
},
{
"epoch": 0.9763802139421351,
"grad_norm": 2.421875,
"learning_rate": 4.897777777777778e-06,
"loss": 0.2711,
"step": 1700
},
{
"epoch": 0.9821236269653242,
"grad_norm": 2.375,
"learning_rate": 4.808888888888889e-06,
"loss": 0.3008,
"step": 1710
},
{
"epoch": 0.9878670399885132,
"grad_norm": 1.9296875,
"learning_rate": 4.7200000000000005e-06,
"loss": 0.2649,
"step": 1720
},
{
"epoch": 0.9936104530117023,
"grad_norm": 1.9296875,
"learning_rate": 4.631111111111111e-06,
"loss": 0.2625,
"step": 1730
},
{
"epoch": 0.9993538660348912,
"grad_norm": 2.359375,
"learning_rate": 4.542222222222223e-06,
"loss": 0.2995,
"step": 1740
},
{
"epoch": 1.0045947304185512,
"grad_norm": 2.078125,
"learning_rate": 4.453333333333334e-06,
"loss": 0.2862,
"step": 1750
},
{
"epoch": 1.0103381434417402,
"grad_norm": 1.953125,
"learning_rate": 4.364444444444445e-06,
"loss": 0.24,
"step": 1760
},
{
"epoch": 1.0160815564649293,
"grad_norm": 2.28125,
"learning_rate": 4.275555555555556e-06,
"loss": 0.2394,
"step": 1770
},
{
"epoch": 1.0218249694881183,
"grad_norm": 1.796875,
"learning_rate": 4.1866666666666675e-06,
"loss": 0.2177,
"step": 1780
},
{
"epoch": 1.0275683825113073,
"grad_norm": 2.28125,
"learning_rate": 4.097777777777778e-06,
"loss": 0.2514,
"step": 1790
},
{
"epoch": 1.0333117955344964,
"grad_norm": 1.890625,
"learning_rate": 4.008888888888889e-06,
"loss": 0.2158,
"step": 1800
},
{
"epoch": 1.0390552085576854,
"grad_norm": 2.015625,
"learning_rate": 3.920000000000001e-06,
"loss": 0.2377,
"step": 1810
},
{
"epoch": 1.0447986215808744,
"grad_norm": 2.5,
"learning_rate": 3.831111111111112e-06,
"loss": 0.2193,
"step": 1820
},
{
"epoch": 1.0505420346040635,
"grad_norm": 2.40625,
"learning_rate": 3.7422222222222228e-06,
"loss": 0.2354,
"step": 1830
},
{
"epoch": 1.0562854476272525,
"grad_norm": 1.8984375,
"learning_rate": 3.6533333333333336e-06,
"loss": 0.2315,
"step": 1840
},
{
"epoch": 1.0620288606504416,
"grad_norm": 2.484375,
"learning_rate": 3.564444444444445e-06,
"loss": 0.1987,
"step": 1850
},
{
"epoch": 1.0677722736736306,
"grad_norm": 2.5,
"learning_rate": 3.475555555555556e-06,
"loss": 0.2276,
"step": 1860
},
{
"epoch": 1.0735156866968196,
"grad_norm": 2.90625,
"learning_rate": 3.386666666666667e-06,
"loss": 0.225,
"step": 1870
},
{
"epoch": 1.0792590997200087,
"grad_norm": 1.8046875,
"learning_rate": 3.297777777777778e-06,
"loss": 0.1992,
"step": 1880
},
{
"epoch": 1.0850025127431977,
"grad_norm": 2.265625,
"learning_rate": 3.2088888888888893e-06,
"loss": 0.2068,
"step": 1890
},
{
"epoch": 1.0907459257663867,
"grad_norm": 2.296875,
"learning_rate": 3.12e-06,
"loss": 0.225,
"step": 1900
},
{
"epoch": 1.0964893387895758,
"grad_norm": 2.703125,
"learning_rate": 3.0311111111111115e-06,
"loss": 0.2173,
"step": 1910
},
{
"epoch": 1.1022327518127648,
"grad_norm": 2.625,
"learning_rate": 2.9422222222222224e-06,
"loss": 0.2348,
"step": 1920
},
{
"epoch": 1.1079761648359538,
"grad_norm": 1.796875,
"learning_rate": 2.8533333333333337e-06,
"loss": 0.2427,
"step": 1930
},
{
"epoch": 1.1137195778591429,
"grad_norm": 2.0625,
"learning_rate": 2.7644444444444446e-06,
"loss": 0.2386,
"step": 1940
},
{
"epoch": 1.119462990882332,
"grad_norm": 2.140625,
"learning_rate": 2.675555555555556e-06,
"loss": 0.2433,
"step": 1950
},
{
"epoch": 1.125206403905521,
"grad_norm": 2.25,
"learning_rate": 2.5866666666666667e-06,
"loss": 0.2448,
"step": 1960
},
{
"epoch": 1.1309498169287098,
"grad_norm": 2.140625,
"learning_rate": 2.497777777777778e-06,
"loss": 0.2304,
"step": 1970
},
{
"epoch": 1.136693229951899,
"grad_norm": 2.09375,
"learning_rate": 2.4088888888888894e-06,
"loss": 0.2304,
"step": 1980
},
{
"epoch": 1.1424366429750878,
"grad_norm": 2.515625,
"learning_rate": 2.3200000000000002e-06,
"loss": 0.236,
"step": 1990
},
{
"epoch": 1.1481800559982769,
"grad_norm": 2.125,
"learning_rate": 2.2311111111111115e-06,
"loss": 0.2367,
"step": 2000
},
{
"epoch": 1.153923469021466,
"grad_norm": 2.484375,
"learning_rate": 2.1422222222222224e-06,
"loss": 0.2178,
"step": 2010
},
{
"epoch": 1.159666882044655,
"grad_norm": 2.53125,
"learning_rate": 2.0533333333333337e-06,
"loss": 0.2333,
"step": 2020
},
{
"epoch": 1.165410295067844,
"grad_norm": 3.09375,
"learning_rate": 1.9644444444444446e-06,
"loss": 0.2586,
"step": 2030
},
{
"epoch": 1.171153708091033,
"grad_norm": 2.953125,
"learning_rate": 1.8755555555555557e-06,
"loss": 0.2483,
"step": 2040
},
{
"epoch": 1.176897121114222,
"grad_norm": 2.25,
"learning_rate": 1.7866666666666668e-06,
"loss": 0.2505,
"step": 2050
},
{
"epoch": 1.182640534137411,
"grad_norm": 1.96875,
"learning_rate": 1.6977777777777779e-06,
"loss": 0.208,
"step": 2060
},
{
"epoch": 1.1883839471606001,
"grad_norm": 2.390625,
"learning_rate": 1.608888888888889e-06,
"loss": 0.2279,
"step": 2070
},
{
"epoch": 1.1941273601837892,
"grad_norm": 2.171875,
"learning_rate": 1.52e-06,
"loss": 0.2002,
"step": 2080
},
{
"epoch": 1.1998707732069782,
"grad_norm": 2.28125,
"learning_rate": 1.4311111111111111e-06,
"loss": 0.2204,
"step": 2090
},
{
"epoch": 1.2056141862301673,
"grad_norm": 2.453125,
"learning_rate": 1.3422222222222222e-06,
"loss": 0.2339,
"step": 2100
},
{
"epoch": 1.2113575992533563,
"grad_norm": 1.875,
"learning_rate": 1.2533333333333333e-06,
"loss": 0.2362,
"step": 2110
},
{
"epoch": 1.2171010122765453,
"grad_norm": 2.484375,
"learning_rate": 1.1644444444444446e-06,
"loss": 0.2324,
"step": 2120
},
{
"epoch": 1.2228444252997344,
"grad_norm": 2.640625,
"learning_rate": 1.0755555555555557e-06,
"loss": 0.2404,
"step": 2130
},
{
"epoch": 1.2285878383229234,
"grad_norm": 2.265625,
"learning_rate": 9.866666666666668e-07,
"loss": 0.202,
"step": 2140
},
{
"epoch": 1.2343312513461124,
"grad_norm": 2.421875,
"learning_rate": 8.977777777777778e-07,
"loss": 0.2306,
"step": 2150
},
{
"epoch": 1.2400746643693015,
"grad_norm": 2.46875,
"learning_rate": 8.088888888888889e-07,
"loss": 0.2094,
"step": 2160
},
{
"epoch": 1.2458180773924905,
"grad_norm": 2.09375,
"learning_rate": 7.2e-07,
"loss": 0.2177,
"step": 2170
},
{
"epoch": 1.2515614904156795,
"grad_norm": 2.515625,
"learning_rate": 6.311111111111112e-07,
"loss": 0.2508,
"step": 2180
},
{
"epoch": 1.2573049034388686,
"grad_norm": 2.15625,
"learning_rate": 5.422222222222223e-07,
"loss": 0.2231,
"step": 2190
},
{
"epoch": 1.2630483164620576,
"grad_norm": 1.7109375,
"learning_rate": 4.533333333333334e-07,
"loss": 0.2178,
"step": 2200
},
{
"epoch": 1.2687917294852467,
"grad_norm": 2.5625,
"learning_rate": 3.644444444444445e-07,
"loss": 0.194,
"step": 2210
},
{
"epoch": 1.2745351425084357,
"grad_norm": 2.125,
"learning_rate": 2.7555555555555555e-07,
"loss": 0.2302,
"step": 2220
},
{
"epoch": 1.2802785555316247,
"grad_norm": 2.328125,
"learning_rate": 1.866666666666667e-07,
"loss": 0.2347,
"step": 2230
},
{
"epoch": 1.2860219685548138,
"grad_norm": 2.1875,
"learning_rate": 9.777777777777779e-08,
"loss": 0.2061,
"step": 2240
},
{
"epoch": 1.2917653815780028,
"grad_norm": 2.828125,
"learning_rate": 8.88888888888889e-09,
"loss": 0.2463,
"step": 2250
}
],
"logging_steps": 10,
"max_steps": 2250,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.0743805199693906e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}