| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.0, | |
| "eval_steps": 500, | |
| "global_step": 2860, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0006993006993006993, | |
| "grad_norm": 4.37381113540513, | |
| "learning_rate": 3.4965034965034967e-08, | |
| "loss": 2.0811, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0034965034965034965, | |
| "grad_norm": 4.252217899298764, | |
| "learning_rate": 1.7482517482517484e-07, | |
| "loss": 2.1063, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.006993006993006993, | |
| "grad_norm": 4.404207861857175, | |
| "learning_rate": 3.496503496503497e-07, | |
| "loss": 2.1104, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.01048951048951049, | |
| "grad_norm": 3.3737210251020002, | |
| "learning_rate": 5.244755244755246e-07, | |
| "loss": 1.9985, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.013986013986013986, | |
| "grad_norm": 2.021686734185303, | |
| "learning_rate": 6.993006993006994e-07, | |
| "loss": 1.8118, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.017482517482517484, | |
| "grad_norm": 2.0309163807573354, | |
| "learning_rate": 8.741258741258741e-07, | |
| "loss": 1.5367, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.02097902097902098, | |
| "grad_norm": 2.0173956831253403, | |
| "learning_rate": 1.0489510489510491e-06, | |
| "loss": 1.2332, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.024475524475524476, | |
| "grad_norm": 0.6292284380908406, | |
| "learning_rate": 1.2237762237762238e-06, | |
| "loss": 0.8794, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.027972027972027972, | |
| "grad_norm": 0.5873925928700078, | |
| "learning_rate": 1.3986013986013987e-06, | |
| "loss": 0.7875, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.03146853146853147, | |
| "grad_norm": 0.49151785822031857, | |
| "learning_rate": 1.5734265734265736e-06, | |
| "loss": 0.7022, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.03496503496503497, | |
| "grad_norm": 0.40138283435220545, | |
| "learning_rate": 1.7482517482517483e-06, | |
| "loss": 0.6464, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.038461538461538464, | |
| "grad_norm": 0.32203725166447594, | |
| "learning_rate": 1.9230769230769234e-06, | |
| "loss": 0.5814, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.04195804195804196, | |
| "grad_norm": 0.26055998901566146, | |
| "learning_rate": 2.0979020979020983e-06, | |
| "loss": 0.5595, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.045454545454545456, | |
| "grad_norm": 0.2551008009829932, | |
| "learning_rate": 2.2727272727272728e-06, | |
| "loss": 0.5608, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.04895104895104895, | |
| "grad_norm": 0.23665791665183983, | |
| "learning_rate": 2.4475524475524477e-06, | |
| "loss": 0.5354, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.05244755244755245, | |
| "grad_norm": 0.22961308429968375, | |
| "learning_rate": 2.6223776223776225e-06, | |
| "loss": 0.5105, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.055944055944055944, | |
| "grad_norm": 0.2543666802898581, | |
| "learning_rate": 2.7972027972027974e-06, | |
| "loss": 0.4921, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.05944055944055944, | |
| "grad_norm": 0.2660361645798006, | |
| "learning_rate": 2.972027972027972e-06, | |
| "loss": 0.5034, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.06293706293706294, | |
| "grad_norm": 0.3014764103123489, | |
| "learning_rate": 3.1468531468531472e-06, | |
| "loss": 0.4792, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.06643356643356643, | |
| "grad_norm": 0.27689109931667516, | |
| "learning_rate": 3.321678321678322e-06, | |
| "loss": 0.4603, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.06993006993006994, | |
| "grad_norm": 0.2625100242281368, | |
| "learning_rate": 3.4965034965034966e-06, | |
| "loss": 0.4719, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.07342657342657342, | |
| "grad_norm": 0.26217583226922436, | |
| "learning_rate": 3.6713286713286715e-06, | |
| "loss": 0.4474, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.07692307692307693, | |
| "grad_norm": 0.20880050238815698, | |
| "learning_rate": 3.846153846153847e-06, | |
| "loss": 0.45, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.08041958041958042, | |
| "grad_norm": 0.2607881388634329, | |
| "learning_rate": 4.020979020979021e-06, | |
| "loss": 0.4116, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.08391608391608392, | |
| "grad_norm": 0.19276061867830058, | |
| "learning_rate": 4.195804195804197e-06, | |
| "loss": 0.4011, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.08741258741258741, | |
| "grad_norm": 0.206872497416213, | |
| "learning_rate": 4.3706293706293715e-06, | |
| "loss": 0.4227, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.09090909090909091, | |
| "grad_norm": 0.21844912873919067, | |
| "learning_rate": 4.5454545454545455e-06, | |
| "loss": 0.4244, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.0944055944055944, | |
| "grad_norm": 0.17251200513626644, | |
| "learning_rate": 4.72027972027972e-06, | |
| "loss": 0.4495, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.0979020979020979, | |
| "grad_norm": 0.18127640733082095, | |
| "learning_rate": 4.895104895104895e-06, | |
| "loss": 0.4103, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.10139860139860139, | |
| "grad_norm": 0.1622267706785198, | |
| "learning_rate": 5.06993006993007e-06, | |
| "loss": 0.4178, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.1048951048951049, | |
| "grad_norm": 0.16311308748836312, | |
| "learning_rate": 5.244755244755245e-06, | |
| "loss": 0.4145, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.10839160839160839, | |
| "grad_norm": 0.15870077394566784, | |
| "learning_rate": 5.41958041958042e-06, | |
| "loss": 0.3946, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.11188811188811189, | |
| "grad_norm": 0.1575025763124978, | |
| "learning_rate": 5.594405594405595e-06, | |
| "loss": 0.4431, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.11538461538461539, | |
| "grad_norm": 0.19464925796584603, | |
| "learning_rate": 5.769230769230769e-06, | |
| "loss": 0.4082, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.11888111888111888, | |
| "grad_norm": 0.19779522561895393, | |
| "learning_rate": 5.944055944055944e-06, | |
| "loss": 0.3833, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.12237762237762238, | |
| "grad_norm": 0.15172576025838877, | |
| "learning_rate": 6.1188811188811196e-06, | |
| "loss": 0.3827, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.1258741258741259, | |
| "grad_norm": 0.14304151028864226, | |
| "learning_rate": 6.2937062937062944e-06, | |
| "loss": 0.3996, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.12937062937062938, | |
| "grad_norm": 0.1397289499019953, | |
| "learning_rate": 6.468531468531469e-06, | |
| "loss": 0.4001, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.13286713286713286, | |
| "grad_norm": 0.12356479142674442, | |
| "learning_rate": 6.643356643356644e-06, | |
| "loss": 0.3631, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.13636363636363635, | |
| "grad_norm": 0.1456501947340643, | |
| "learning_rate": 6.818181818181818e-06, | |
| "loss": 0.399, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.13986013986013987, | |
| "grad_norm": 0.1777441051019193, | |
| "learning_rate": 6.993006993006993e-06, | |
| "loss": 0.4013, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.14335664335664336, | |
| "grad_norm": 0.15674206290465487, | |
| "learning_rate": 7.167832167832168e-06, | |
| "loss": 0.3846, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.14685314685314685, | |
| "grad_norm": 0.14334403555842584, | |
| "learning_rate": 7.342657342657343e-06, | |
| "loss": 0.3978, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.15034965034965034, | |
| "grad_norm": 0.12195727463513337, | |
| "learning_rate": 7.517482517482519e-06, | |
| "loss": 0.4025, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.15384615384615385, | |
| "grad_norm": 0.13655485264527165, | |
| "learning_rate": 7.692307692307694e-06, | |
| "loss": 0.3907, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.15734265734265734, | |
| "grad_norm": 0.1526742197214791, | |
| "learning_rate": 7.867132867132867e-06, | |
| "loss": 0.4056, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.16083916083916083, | |
| "grad_norm": 0.19679006943755223, | |
| "learning_rate": 8.041958041958042e-06, | |
| "loss": 0.3913, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.16433566433566432, | |
| "grad_norm": 0.1374898822284108, | |
| "learning_rate": 8.216783216783217e-06, | |
| "loss": 0.395, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.16783216783216784, | |
| "grad_norm": 0.1316377878353779, | |
| "learning_rate": 8.391608391608393e-06, | |
| "loss": 0.3913, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.17132867132867133, | |
| "grad_norm": 0.1360866720355628, | |
| "learning_rate": 8.566433566433568e-06, | |
| "loss": 0.389, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.17482517482517482, | |
| "grad_norm": 0.12896458401156194, | |
| "learning_rate": 8.741258741258743e-06, | |
| "loss": 0.3948, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.17832167832167833, | |
| "grad_norm": 0.13668138902696825, | |
| "learning_rate": 8.916083916083916e-06, | |
| "loss": 0.4162, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.18181818181818182, | |
| "grad_norm": 0.11375569205265923, | |
| "learning_rate": 9.090909090909091e-06, | |
| "loss": 0.3583, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.1853146853146853, | |
| "grad_norm": 0.13923337751226186, | |
| "learning_rate": 9.265734265734266e-06, | |
| "loss": 0.3679, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.1888111888111888, | |
| "grad_norm": 0.12548306730535067, | |
| "learning_rate": 9.44055944055944e-06, | |
| "loss": 0.3809, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.19230769230769232, | |
| "grad_norm": 0.12606188346945477, | |
| "learning_rate": 9.615384615384616e-06, | |
| "loss": 0.3971, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.1958041958041958, | |
| "grad_norm": 0.12354250313388718, | |
| "learning_rate": 9.79020979020979e-06, | |
| "loss": 0.4049, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.1993006993006993, | |
| "grad_norm": 0.13053765656410934, | |
| "learning_rate": 9.965034965034966e-06, | |
| "loss": 0.3845, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.20279720279720279, | |
| "grad_norm": 0.11608842411220202, | |
| "learning_rate": 9.999940414335223e-06, | |
| "loss": 0.3743, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.2062937062937063, | |
| "grad_norm": 0.13765081981314303, | |
| "learning_rate": 9.999698350006063e-06, | |
| "loss": 0.3708, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.2097902097902098, | |
| "grad_norm": 0.12381330340903716, | |
| "learning_rate": 9.999270091916259e-06, | |
| "loss": 0.3813, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.21328671328671328, | |
| "grad_norm": 0.11405435023477081, | |
| "learning_rate": 9.998655656014563e-06, | |
| "loss": 0.3962, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.21678321678321677, | |
| "grad_norm": 0.12904825154350827, | |
| "learning_rate": 9.997855065183185e-06, | |
| "loss": 0.3669, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.2202797202797203, | |
| "grad_norm": 0.11527332078152458, | |
| "learning_rate": 9.996868349236927e-06, | |
| "loss": 0.3585, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.22377622377622378, | |
| "grad_norm": 0.11397307562641748, | |
| "learning_rate": 9.995695544922076e-06, | |
| "loss": 0.3722, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.22727272727272727, | |
| "grad_norm": 0.12094683417044083, | |
| "learning_rate": 9.994336695915041e-06, | |
| "loss": 0.3746, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.23076923076923078, | |
| "grad_norm": 0.11457644069457708, | |
| "learning_rate": 9.992791852820709e-06, | |
| "loss": 0.3755, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.23426573426573427, | |
| "grad_norm": 0.10010657801032996, | |
| "learning_rate": 9.991061073170585e-06, | |
| "loss": 0.3655, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.23776223776223776, | |
| "grad_norm": 0.11875607366309843, | |
| "learning_rate": 9.98914442142063e-06, | |
| "loss": 0.3567, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.24125874125874125, | |
| "grad_norm": 0.12184450566051146, | |
| "learning_rate": 9.98704196894887e-06, | |
| "loss": 0.3843, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.24475524475524477, | |
| "grad_norm": 0.11503255592868246, | |
| "learning_rate": 9.984753794052735e-06, | |
| "loss": 0.353, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.24825174825174826, | |
| "grad_norm": 0.11359123753881316, | |
| "learning_rate": 9.982279981946143e-06, | |
| "loss": 0.3645, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.2517482517482518, | |
| "grad_norm": 0.12099169961130332, | |
| "learning_rate": 9.97962062475633e-06, | |
| "loss": 0.3854, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.25524475524475526, | |
| "grad_norm": 0.13217619060267752, | |
| "learning_rate": 9.976775821520412e-06, | |
| "loss": 0.3721, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.25874125874125875, | |
| "grad_norm": 0.09612633521304394, | |
| "learning_rate": 9.973745678181705e-06, | |
| "loss": 0.3457, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.26223776223776224, | |
| "grad_norm": 0.10517669260119224, | |
| "learning_rate": 9.970530307585774e-06, | |
| "loss": 0.3565, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.26573426573426573, | |
| "grad_norm": 0.11544943707626291, | |
| "learning_rate": 9.967129829476227e-06, | |
| "loss": 0.4004, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.2692307692307692, | |
| "grad_norm": 0.11395353055045045, | |
| "learning_rate": 9.96354437049027e-06, | |
| "loss": 0.3604, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.2727272727272727, | |
| "grad_norm": 0.1132567024291244, | |
| "learning_rate": 9.959774064153977e-06, | |
| "loss": 0.3779, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.2762237762237762, | |
| "grad_norm": 0.09873707074062922, | |
| "learning_rate": 9.955819050877321e-06, | |
| "loss": 0.3567, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.27972027972027974, | |
| "grad_norm": 0.10782299582051598, | |
| "learning_rate": 9.951679477948946e-06, | |
| "loss": 0.3864, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.28321678321678323, | |
| "grad_norm": 0.11261432971654586, | |
| "learning_rate": 9.947355499530685e-06, | |
| "loss": 0.3599, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.2867132867132867, | |
| "grad_norm": 0.12074994776023469, | |
| "learning_rate": 9.942847276651812e-06, | |
| "loss": 0.3284, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.2902097902097902, | |
| "grad_norm": 0.09741746253500448, | |
| "learning_rate": 9.93815497720305e-06, | |
| "loss": 0.3474, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.2937062937062937, | |
| "grad_norm": 0.11397076083204073, | |
| "learning_rate": 9.933278775930317e-06, | |
| "loss": 0.3556, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.2972027972027972, | |
| "grad_norm": 0.11555465535329675, | |
| "learning_rate": 9.928218854428223e-06, | |
| "loss": 0.3611, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.3006993006993007, | |
| "grad_norm": 0.11398129108565634, | |
| "learning_rate": 9.922975401133292e-06, | |
| "loss": 0.3666, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.3041958041958042, | |
| "grad_norm": 0.1047587143979841, | |
| "learning_rate": 9.917548611316969e-06, | |
| "loss": 0.3255, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.3076923076923077, | |
| "grad_norm": 0.09926317278084681, | |
| "learning_rate": 9.911938687078324e-06, | |
| "loss": 0.3533, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.3111888111888112, | |
| "grad_norm": 0.09642382922146973, | |
| "learning_rate": 9.90614583733654e-06, | |
| "loss": 0.3468, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.3146853146853147, | |
| "grad_norm": 0.10269176659026831, | |
| "learning_rate": 9.900170277823129e-06, | |
| "loss": 0.3487, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.3181818181818182, | |
| "grad_norm": 0.10195209542391877, | |
| "learning_rate": 9.894012231073895e-06, | |
| "loss": 0.3615, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.32167832167832167, | |
| "grad_norm": 0.10911945373160642, | |
| "learning_rate": 9.887671926420649e-06, | |
| "loss": 0.3624, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.32517482517482516, | |
| "grad_norm": 0.11633743875381607, | |
| "learning_rate": 9.881149599982671e-06, | |
| "loss": 0.3736, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.32867132867132864, | |
| "grad_norm": 0.0919962699070435, | |
| "learning_rate": 9.874445494657912e-06, | |
| "loss": 0.3364, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.3321678321678322, | |
| "grad_norm": 0.1169385489960078, | |
| "learning_rate": 9.86755986011395e-06, | |
| "loss": 0.3585, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.3356643356643357, | |
| "grad_norm": 0.11072840564933388, | |
| "learning_rate": 9.860492952778695e-06, | |
| "loss": 0.3441, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.33916083916083917, | |
| "grad_norm": 0.09838108948679272, | |
| "learning_rate": 9.853245035830834e-06, | |
| "loss": 0.3637, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.34265734265734266, | |
| "grad_norm": 0.09612232855439369, | |
| "learning_rate": 9.845816379190037e-06, | |
| "loss": 0.3473, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.34615384615384615, | |
| "grad_norm": 0.09998498487369935, | |
| "learning_rate": 9.838207259506891e-06, | |
| "loss": 0.3551, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.34965034965034963, | |
| "grad_norm": 0.10258972737213708, | |
| "learning_rate": 9.83041796015262e-06, | |
| "loss": 0.3796, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.3531468531468531, | |
| "grad_norm": 0.09995067780945936, | |
| "learning_rate": 9.82244877120851e-06, | |
| "loss": 0.3538, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.35664335664335667, | |
| "grad_norm": 0.10479687563865843, | |
| "learning_rate": 9.814299989455118e-06, | |
| "loss": 0.3782, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.36013986013986016, | |
| "grad_norm": 0.11535884576394473, | |
| "learning_rate": 9.805971918361215e-06, | |
| "loss": 0.3874, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.36363636363636365, | |
| "grad_norm": 0.09319987111020978, | |
| "learning_rate": 9.797464868072489e-06, | |
| "loss": 0.3216, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.36713286713286714, | |
| "grad_norm": 0.11218854149013563, | |
| "learning_rate": 9.788779155399988e-06, | |
| "loss": 0.3519, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.3706293706293706, | |
| "grad_norm": 0.10463389683892751, | |
| "learning_rate": 9.779915103808328e-06, | |
| "loss": 0.3487, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.3741258741258741, | |
| "grad_norm": 0.10312783064956889, | |
| "learning_rate": 9.770873043403648e-06, | |
| "loss": 0.3779, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.3776223776223776, | |
| "grad_norm": 0.09457645559277009, | |
| "learning_rate": 9.761653310921307e-06, | |
| "loss": 0.349, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.3811188811188811, | |
| "grad_norm": 0.10044623941110373, | |
| "learning_rate": 9.752256249713352e-06, | |
| "loss": 0.344, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.38461538461538464, | |
| "grad_norm": 0.10269026724616583, | |
| "learning_rate": 9.742682209735727e-06, | |
| "loss": 0.3409, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.3881118881118881, | |
| "grad_norm": 0.10855761396744447, | |
| "learning_rate": 9.73293154753525e-06, | |
| "loss": 0.3418, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.3916083916083916, | |
| "grad_norm": 0.11951416935360595, | |
| "learning_rate": 9.723004626236314e-06, | |
| "loss": 0.3754, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.3951048951048951, | |
| "grad_norm": 0.09145664665291538, | |
| "learning_rate": 9.712901815527387e-06, | |
| "loss": 0.3667, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.3986013986013986, | |
| "grad_norm": 0.09322645373153875, | |
| "learning_rate": 9.702623491647232e-06, | |
| "loss": 0.3578, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.4020979020979021, | |
| "grad_norm": 0.09436496121677602, | |
| "learning_rate": 9.692170037370899e-06, | |
| "loss": 0.36, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.40559440559440557, | |
| "grad_norm": 0.08980733979197301, | |
| "learning_rate": 9.68154184199546e-06, | |
| "loss": 0.3339, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.4090909090909091, | |
| "grad_norm": 0.10968764839801004, | |
| "learning_rate": 9.670739301325534e-06, | |
| "loss": 0.371, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.4125874125874126, | |
| "grad_norm": 0.10841544769993913, | |
| "learning_rate": 9.659762817658524e-06, | |
| "loss": 0.3561, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.4160839160839161, | |
| "grad_norm": 0.11208125619377086, | |
| "learning_rate": 9.648612799769644e-06, | |
| "loss": 0.3665, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.4195804195804196, | |
| "grad_norm": 0.09710993092315441, | |
| "learning_rate": 9.6372896628967e-06, | |
| "loss": 0.3487, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.4230769230769231, | |
| "grad_norm": 0.08611220115875796, | |
| "learning_rate": 9.62579382872462e-06, | |
| "loss": 0.3149, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.42657342657342656, | |
| "grad_norm": 0.09422154430787315, | |
| "learning_rate": 9.614125725369748e-06, | |
| "loss": 0.367, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.43006993006993005, | |
| "grad_norm": 0.10419775648300175, | |
| "learning_rate": 9.60228578736391e-06, | |
| "loss": 0.3339, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.43356643356643354, | |
| "grad_norm": 0.09182548432657049, | |
| "learning_rate": 9.590274455638225e-06, | |
| "loss": 0.3599, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.4370629370629371, | |
| "grad_norm": 0.09732070144300518, | |
| "learning_rate": 9.578092177506683e-06, | |
| "loss": 0.3677, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.4405594405594406, | |
| "grad_norm": 0.10356238281777523, | |
| "learning_rate": 9.565739406649492e-06, | |
| "loss": 0.3603, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.44405594405594406, | |
| "grad_norm": 0.09542634755210361, | |
| "learning_rate": 9.553216603096175e-06, | |
| "loss": 0.3368, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.44755244755244755, | |
| "grad_norm": 0.10330899480093415, | |
| "learning_rate": 9.540524233208449e-06, | |
| "loss": 0.3809, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.45104895104895104, | |
| "grad_norm": 0.0990570314447027, | |
| "learning_rate": 9.527662769662842e-06, | |
| "loss": 0.3321, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.45454545454545453, | |
| "grad_norm": 0.098605126175185, | |
| "learning_rate": 9.514632691433108e-06, | |
| "loss": 0.3362, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.458041958041958, | |
| "grad_norm": 0.11280604058910865, | |
| "learning_rate": 9.501434483772371e-06, | |
| "loss": 0.3502, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.46153846153846156, | |
| "grad_norm": 0.10690820736424175, | |
| "learning_rate": 9.488068638195072e-06, | |
| "loss": 0.3792, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.46503496503496505, | |
| "grad_norm": 0.10685361016021448, | |
| "learning_rate": 9.474535652458647e-06, | |
| "loss": 0.3372, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.46853146853146854, | |
| "grad_norm": 0.091863754283505, | |
| "learning_rate": 9.460836030545007e-06, | |
| "loss": 0.3279, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.47202797202797203, | |
| "grad_norm": 0.10137019023149173, | |
| "learning_rate": 9.446970282641754e-06, | |
| "loss": 0.3326, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.4755244755244755, | |
| "grad_norm": 0.09644694864886368, | |
| "learning_rate": 9.43293892512319e-06, | |
| "loss": 0.3238, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.479020979020979, | |
| "grad_norm": 0.09447255461234934, | |
| "learning_rate": 9.418742480531086e-06, | |
| "loss": 0.3482, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.4825174825174825, | |
| "grad_norm": 0.08135682687879796, | |
| "learning_rate": 9.404381477555216e-06, | |
| "loss": 0.3275, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.486013986013986, | |
| "grad_norm": 0.10069955308963059, | |
| "learning_rate": 9.38985645101368e-06, | |
| "loss": 0.3332, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.48951048951048953, | |
| "grad_norm": 0.09628893677315106, | |
| "learning_rate": 9.375167941832974e-06, | |
| "loss": 0.344, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.493006993006993, | |
| "grad_norm": 0.08809106076365064, | |
| "learning_rate": 9.360316497027849e-06, | |
| "loss": 0.3376, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.4965034965034965, | |
| "grad_norm": 0.09490092152704185, | |
| "learning_rate": 9.345302669680947e-06, | |
| "loss": 0.3419, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.10279282225217452, | |
| "learning_rate": 9.330127018922195e-06, | |
| "loss": 0.3574, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.5034965034965035, | |
| "grad_norm": 0.10554593393741951, | |
| "learning_rate": 9.314790109907983e-06, | |
| "loss": 0.3372, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.506993006993007, | |
| "grad_norm": 0.11231474223944753, | |
| "learning_rate": 9.299292513800129e-06, | |
| "loss": 0.3419, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.5104895104895105, | |
| "grad_norm": 0.09498236350352282, | |
| "learning_rate": 9.283634807744586e-06, | |
| "loss": 0.3171, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.513986013986014, | |
| "grad_norm": 0.09593334219573614, | |
| "learning_rate": 9.267817574849971e-06, | |
| "loss": 0.3485, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.5174825174825175, | |
| "grad_norm": 0.09742681439006996, | |
| "learning_rate": 9.251841404165835e-06, | |
| "loss": 0.3378, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.5209790209790209, | |
| "grad_norm": 0.0983937674421585, | |
| "learning_rate": 9.235706890660735e-06, | |
| "loss": 0.3493, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.5244755244755245, | |
| "grad_norm": 0.0915357872203584, | |
| "learning_rate": 9.219414635200065e-06, | |
| "loss": 0.3417, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.527972027972028, | |
| "grad_norm": 0.09146853221285936, | |
| "learning_rate": 9.202965244523696e-06, | |
| "loss": 0.3414, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.5314685314685315, | |
| "grad_norm": 0.09486109366945525, | |
| "learning_rate": 9.18635933122337e-06, | |
| "loss": 0.3777, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.534965034965035, | |
| "grad_norm": 0.09445667455904146, | |
| "learning_rate": 9.169597513719881e-06, | |
| "loss": 0.3517, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.5384615384615384, | |
| "grad_norm": 0.10056106300174576, | |
| "learning_rate": 9.152680416240059e-06, | |
| "loss": 0.3329, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.541958041958042, | |
| "grad_norm": 0.09747043642900897, | |
| "learning_rate": 9.135608668793511e-06, | |
| "loss": 0.3577, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.5454545454545454, | |
| "grad_norm": 0.1098420896663471, | |
| "learning_rate": 9.118382907149164e-06, | |
| "loss": 0.3471, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.548951048951049, | |
| "grad_norm": 0.10101868252033282, | |
| "learning_rate": 9.10100377281159e-06, | |
| "loss": 0.352, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.5524475524475524, | |
| "grad_norm": 0.09509262269603147, | |
| "learning_rate": 9.08347191299711e-06, | |
| "loss": 0.3453, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.5559440559440559, | |
| "grad_norm": 0.10223863291064357, | |
| "learning_rate": 9.065787980609696e-06, | |
| "loss": 0.3323, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.5594405594405595, | |
| "grad_norm": 0.09307831695448808, | |
| "learning_rate": 9.047952634216652e-06, | |
| "loss": 0.3189, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.5629370629370629, | |
| "grad_norm": 0.09664775207053762, | |
| "learning_rate": 9.029966538024097e-06, | |
| "loss": 0.3335, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.5664335664335665, | |
| "grad_norm": 0.10330830247866737, | |
| "learning_rate": 9.011830361852217e-06, | |
| "loss": 0.298, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.5699300699300699, | |
| "grad_norm": 0.09238529639878745, | |
| "learning_rate": 8.993544781110328e-06, | |
| "loss": 0.3515, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.5734265734265734, | |
| "grad_norm": 0.09946980186720472, | |
| "learning_rate": 8.975110476771724e-06, | |
| "loss": 0.3325, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.5769230769230769, | |
| "grad_norm": 0.10461426738254295, | |
| "learning_rate": 8.95652813534831e-06, | |
| "loss": 0.3461, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.5804195804195804, | |
| "grad_norm": 0.10521038266819552, | |
| "learning_rate": 8.937798448865043e-06, | |
| "loss": 0.3509, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.583916083916084, | |
| "grad_norm": 0.09296107569017567, | |
| "learning_rate": 8.918922114834156e-06, | |
| "loss": 0.3591, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.5874125874125874, | |
| "grad_norm": 0.09978737012221923, | |
| "learning_rate": 8.899899836229186e-06, | |
| "loss": 0.3494, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.5909090909090909, | |
| "grad_norm": 0.09887611819542517, | |
| "learning_rate": 8.880732321458785e-06, | |
| "loss": 0.3474, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.5944055944055944, | |
| "grad_norm": 0.09194493203548675, | |
| "learning_rate": 8.861420284340352e-06, | |
| "loss": 0.3287, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.5979020979020979, | |
| "grad_norm": 0.10774254756172354, | |
| "learning_rate": 8.841964444073437e-06, | |
| "loss": 0.3261, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.6013986013986014, | |
| "grad_norm": 0.08277889114743786, | |
| "learning_rate": 8.822365525212968e-06, | |
| "loss": 0.3298, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.6048951048951049, | |
| "grad_norm": 0.09403899388431841, | |
| "learning_rate": 8.802624257642262e-06, | |
| "loss": 0.3239, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.6083916083916084, | |
| "grad_norm": 0.09869839895903458, | |
| "learning_rate": 8.782741376545837e-06, | |
| "loss": 0.3271, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.6118881118881119, | |
| "grad_norm": 0.08742562281618353, | |
| "learning_rate": 8.76271762238205e-06, | |
| "loss": 0.3222, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.6153846153846154, | |
| "grad_norm": 0.10669936980092758, | |
| "learning_rate": 8.742553740855507e-06, | |
| "loss": 0.3343, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.6188811188811189, | |
| "grad_norm": 0.09705466668865588, | |
| "learning_rate": 8.722250482889293e-06, | |
| "loss": 0.3365, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.6223776223776224, | |
| "grad_norm": 0.10026304811604293, | |
| "learning_rate": 8.701808604597022e-06, | |
| "loss": 0.3268, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.6258741258741258, | |
| "grad_norm": 0.09499836148950658, | |
| "learning_rate": 8.681228867254657e-06, | |
| "loss": 0.3205, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.6293706293706294, | |
| "grad_norm": 0.09620440611439918, | |
| "learning_rate": 8.660512037272173e-06, | |
| "loss": 0.3377, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.6328671328671329, | |
| "grad_norm": 0.08591400161657516, | |
| "learning_rate": 8.639658886165012e-06, | |
| "loss": 0.3312, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.6363636363636364, | |
| "grad_norm": 0.09176587951531126, | |
| "learning_rate": 8.61867019052535e-06, | |
| "loss": 0.3288, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.6398601398601399, | |
| "grad_norm": 0.09488106076865752, | |
| "learning_rate": 8.597546731993182e-06, | |
| "loss": 0.3386, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.6433566433566433, | |
| "grad_norm": 0.08369635401062202, | |
| "learning_rate": 8.576289297227201e-06, | |
| "loss": 0.3054, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.6468531468531469, | |
| "grad_norm": 0.09730501351801213, | |
| "learning_rate": 8.554898677875509e-06, | |
| "loss": 0.3227, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.6503496503496503, | |
| "grad_norm": 0.1001056960078254, | |
| "learning_rate": 8.533375670546142e-06, | |
| "loss": 0.3492, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.6538461538461539, | |
| "grad_norm": 0.08957014382624598, | |
| "learning_rate": 8.511721076777388e-06, | |
| "loss": 0.3545, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.6573426573426573, | |
| "grad_norm": 0.09078278112834585, | |
| "learning_rate": 8.48993570300795e-06, | |
| "loss": 0.3284, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.6608391608391608, | |
| "grad_norm": 0.08656855201647384, | |
| "learning_rate": 8.46802036054691e-06, | |
| "loss": 0.3342, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.6643356643356644, | |
| "grad_norm": 0.09414987082795111, | |
| "learning_rate": 8.445975865543509e-06, | |
| "loss": 0.3362, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.6678321678321678, | |
| "grad_norm": 0.10356006952243683, | |
| "learning_rate": 8.423803038956763e-06, | |
| "loss": 0.3452, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.6713286713286714, | |
| "grad_norm": 0.09745963990707376, | |
| "learning_rate": 8.401502706524884e-06, | |
| "loss": 0.3245, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.6748251748251748, | |
| "grad_norm": 0.12682185311194272, | |
| "learning_rate": 8.379075698734523e-06, | |
| "loss": 0.3195, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.6783216783216783, | |
| "grad_norm": 0.10273803895554465, | |
| "learning_rate": 8.356522850789852e-06, | |
| "loss": 0.3183, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.6818181818181818, | |
| "grad_norm": 0.08936523805510559, | |
| "learning_rate": 8.33384500258146e-06, | |
| "loss": 0.3094, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.6853146853146853, | |
| "grad_norm": 0.09076443538007455, | |
| "learning_rate": 8.31104299865506e-06, | |
| "loss": 0.3209, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.6888111888111889, | |
| "grad_norm": 0.10360319946526882, | |
| "learning_rate": 8.288117688180064e-06, | |
| "loss": 0.3393, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.6923076923076923, | |
| "grad_norm": 0.09990142074597995, | |
| "learning_rate": 8.265069924917925e-06, | |
| "loss": 0.3143, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.6958041958041958, | |
| "grad_norm": 0.09863731703774027, | |
| "learning_rate": 8.241900567190376e-06, | |
| "loss": 0.3318, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.6993006993006993, | |
| "grad_norm": 0.0891504924468397, | |
| "learning_rate": 8.218610477847435e-06, | |
| "loss": 0.3469, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.7027972027972028, | |
| "grad_norm": 0.0903949668682383, | |
| "learning_rate": 8.1952005242353e-06, | |
| "loss": 0.3084, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.7062937062937062, | |
| "grad_norm": 0.0954433441080251, | |
| "learning_rate": 8.171671578164023e-06, | |
| "loss": 0.3315, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.7097902097902098, | |
| "grad_norm": 0.08559449666366757, | |
| "learning_rate": 8.148024515875057e-06, | |
| "loss": 0.3446, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.7132867132867133, | |
| "grad_norm": 0.09175217509532477, | |
| "learning_rate": 8.124260218008627e-06, | |
| "loss": 0.3376, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.7167832167832168, | |
| "grad_norm": 0.08175979271190216, | |
| "learning_rate": 8.100379569570919e-06, | |
| "loss": 0.3018, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.7202797202797203, | |
| "grad_norm": 0.10086175825188673, | |
| "learning_rate": 8.076383459901138e-06, | |
| "loss": 0.3602, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.7237762237762237, | |
| "grad_norm": 0.10549585300259735, | |
| "learning_rate": 8.052272782638375e-06, | |
| "loss": 0.3685, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.7272727272727273, | |
| "grad_norm": 0.11673660774686632, | |
| "learning_rate": 8.028048435688333e-06, | |
| "loss": 0.3421, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.7307692307692307, | |
| "grad_norm": 0.09147779127352527, | |
| "learning_rate": 8.003711321189895e-06, | |
| "loss": 0.3272, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.7342657342657343, | |
| "grad_norm": 0.095327743964242, | |
| "learning_rate": 7.979262345481511e-06, | |
| "loss": 0.326, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.7377622377622378, | |
| "grad_norm": 0.09278699522643637, | |
| "learning_rate": 7.95470241906746e-06, | |
| "loss": 0.3277, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.7412587412587412, | |
| "grad_norm": 0.09014483968822934, | |
| "learning_rate": 7.930032456583931e-06, | |
| "loss": 0.301, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.7447552447552448, | |
| "grad_norm": 0.10302938884427425, | |
| "learning_rate": 7.905253376764973e-06, | |
| "loss": 0.3294, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.7482517482517482, | |
| "grad_norm": 0.09555338249314554, | |
| "learning_rate": 7.880366102408266e-06, | |
| "loss": 0.3144, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.7517482517482518, | |
| "grad_norm": 0.09838205058263788, | |
| "learning_rate": 7.85537156034077e-06, | |
| "loss": 0.3116, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.7552447552447552, | |
| "grad_norm": 0.0902348464066785, | |
| "learning_rate": 7.830270681384196e-06, | |
| "loss": 0.3273, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.7587412587412588, | |
| "grad_norm": 0.10197904027133771, | |
| "learning_rate": 7.805064400320348e-06, | |
| "loss": 0.337, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.7622377622377622, | |
| "grad_norm": 0.09136556755535169, | |
| "learning_rate": 7.779753655856313e-06, | |
| "loss": 0.3146, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.7657342657342657, | |
| "grad_norm": 0.10390395983392695, | |
| "learning_rate": 7.754339390589497e-06, | |
| "loss": 0.3455, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.7692307692307693, | |
| "grad_norm": 0.08873006190996087, | |
| "learning_rate": 7.728822550972523e-06, | |
| "loss": 0.3189, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.7727272727272727, | |
| "grad_norm": 0.10767216954218523, | |
| "learning_rate": 7.703204087277989e-06, | |
| "loss": 0.335, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.7762237762237763, | |
| "grad_norm": 0.09226028528791834, | |
| "learning_rate": 7.67748495356307e-06, | |
| "loss": 0.309, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.7797202797202797, | |
| "grad_norm": 0.09501355043720337, | |
| "learning_rate": 7.651666107633996e-06, | |
| "loss": 0.3131, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.7832167832167832, | |
| "grad_norm": 0.10030664866053496, | |
| "learning_rate": 7.625748511010382e-06, | |
| "loss": 0.3368, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.7867132867132867, | |
| "grad_norm": 0.0960730084438704, | |
| "learning_rate": 7.599733128889413e-06, | |
| "loss": 0.311, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.7902097902097902, | |
| "grad_norm": 0.09631301174932007, | |
| "learning_rate": 7.573620930109906e-06, | |
| "loss": 0.3275, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.7937062937062938, | |
| "grad_norm": 0.09326550273629058, | |
| "learning_rate": 7.547412887116224e-06, | |
| "loss": 0.3193, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.7972027972027972, | |
| "grad_norm": 0.09732171619011357, | |
| "learning_rate": 7.521109975922069e-06, | |
| "loss": 0.3317, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.8006993006993007, | |
| "grad_norm": 0.08975727645012774, | |
| "learning_rate": 7.494713176074128e-06, | |
| "loss": 0.3232, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.8041958041958042, | |
| "grad_norm": 0.10268943209415242, | |
| "learning_rate": 7.468223470615593e-06, | |
| "loss": 0.3549, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.8076923076923077, | |
| "grad_norm": 0.09719763483337895, | |
| "learning_rate": 7.441641846049557e-06, | |
| "loss": 0.3217, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.8111888111888111, | |
| "grad_norm": 0.09866887439065343, | |
| "learning_rate": 7.414969292302267e-06, | |
| "loss": 0.3276, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.8146853146853147, | |
| "grad_norm": 0.09508520292838865, | |
| "learning_rate": 7.388206802686272e-06, | |
| "loss": 0.325, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.8181818181818182, | |
| "grad_norm": 0.10885901013429214, | |
| "learning_rate": 7.361355373863415e-06, | |
| "loss": 0.309, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.8216783216783217, | |
| "grad_norm": 0.100534618852991, | |
| "learning_rate": 7.334416005807726e-06, | |
| "loss": 0.3206, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.8251748251748252, | |
| "grad_norm": 0.12043671832245108, | |
| "learning_rate": 7.307389701768183e-06, | |
| "loss": 0.3299, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.8286713286713286, | |
| "grad_norm": 0.08393550874271685, | |
| "learning_rate": 7.280277468231343e-06, | |
| "loss": 0.3262, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.8321678321678322, | |
| "grad_norm": 0.11121015663273821, | |
| "learning_rate": 7.253080314883863e-06, | |
| "loss": 0.2895, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.8356643356643356, | |
| "grad_norm": 0.09185059584253953, | |
| "learning_rate": 7.2257992545749045e-06, | |
| "loss": 0.3225, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.8391608391608392, | |
| "grad_norm": 0.08881396203220153, | |
| "learning_rate": 7.198435303278401e-06, | |
| "loss": 0.3276, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.8426573426573427, | |
| "grad_norm": 0.09746576829873795, | |
| "learning_rate": 7.170989480055233e-06, | |
| "loss": 0.3455, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.8461538461538461, | |
| "grad_norm": 0.09888839565795066, | |
| "learning_rate": 7.143462807015271e-06, | |
| "loss": 0.3392, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.8496503496503497, | |
| "grad_norm": 0.09592592285144286, | |
| "learning_rate": 7.1158563092793165e-06, | |
| "loss": 0.3091, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.8531468531468531, | |
| "grad_norm": 0.0977987635493939, | |
| "learning_rate": 7.08817101494092e-06, | |
| "loss": 0.3165, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.8566433566433567, | |
| "grad_norm": 0.10380850676568182, | |
| "learning_rate": 7.060407955028098e-06, | |
| "loss": 0.3115, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.8601398601398601, | |
| "grad_norm": 0.09447180359888213, | |
| "learning_rate": 7.032568163464932e-06, | |
| "loss": 0.3207, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.8636363636363636, | |
| "grad_norm": 0.10061269710408283, | |
| "learning_rate": 7.004652677033069e-06, | |
| "loss": 0.3161, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.8671328671328671, | |
| "grad_norm": 0.09609425213694053, | |
| "learning_rate": 6.976662535333107e-06, | |
| "loss": 0.3136, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.8706293706293706, | |
| "grad_norm": 0.09598094845731318, | |
| "learning_rate": 6.948598780745885e-06, | |
| "loss": 0.3009, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.8741258741258742, | |
| "grad_norm": 0.10427104049050953, | |
| "learning_rate": 6.920462458393652e-06, | |
| "loss": 0.3428, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.8776223776223776, | |
| "grad_norm": 0.0994920233360674, | |
| "learning_rate": 6.892254616101158e-06, | |
| "loss": 0.3263, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.8811188811188811, | |
| "grad_norm": 0.10387800775103824, | |
| "learning_rate": 6.86397630435663e-06, | |
| "loss": 0.2997, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.8846153846153846, | |
| "grad_norm": 0.10508396462654218, | |
| "learning_rate": 6.8356285762726385e-06, | |
| "loss": 0.3192, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 0.8881118881118881, | |
| "grad_norm": 0.10589020185350513, | |
| "learning_rate": 6.807212487546897e-06, | |
| "loss": 0.2999, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.8916083916083916, | |
| "grad_norm": 0.09769265892044267, | |
| "learning_rate": 6.7787290964229325e-06, | |
| "loss": 0.3131, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 0.8951048951048951, | |
| "grad_norm": 0.09679683640782744, | |
| "learning_rate": 6.750179463650681e-06, | |
| "loss": 0.3268, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.8986013986013986, | |
| "grad_norm": 0.09209711149231241, | |
| "learning_rate": 6.721564652446987e-06, | |
| "loss": 0.3229, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 0.9020979020979021, | |
| "grad_norm": 0.11288807662970714, | |
| "learning_rate": 6.692885728456001e-06, | |
| "loss": 0.3331, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.9055944055944056, | |
| "grad_norm": 0.10191677161615505, | |
| "learning_rate": 6.6641437597095005e-06, | |
| "loss": 0.3037, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 0.9090909090909091, | |
| "grad_norm": 0.10997644202751486, | |
| "learning_rate": 6.635339816587109e-06, | |
| "loss": 0.2949, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.9125874125874126, | |
| "grad_norm": 0.09140651524432652, | |
| "learning_rate": 6.606474971776442e-06, | |
| "loss": 0.3409, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 0.916083916083916, | |
| "grad_norm": 0.08471656771823269, | |
| "learning_rate": 6.577550300233151e-06, | |
| "loss": 0.3238, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.9195804195804196, | |
| "grad_norm": 0.09813671908189145, | |
| "learning_rate": 6.548566879140897e-06, | |
| "loss": 0.3082, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 0.9230769230769231, | |
| "grad_norm": 0.09826741583981999, | |
| "learning_rate": 6.519525787871235e-06, | |
| "loss": 0.3289, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.9265734265734266, | |
| "grad_norm": 0.08826203150682889, | |
| "learning_rate": 6.490428107943409e-06, | |
| "loss": 0.3116, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 0.9300699300699301, | |
| "grad_norm": 0.11047210665505915, | |
| "learning_rate": 6.461274922984087e-06, | |
| "loss": 0.3096, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.9335664335664335, | |
| "grad_norm": 0.09280424318322683, | |
| "learning_rate": 6.4320673186869954e-06, | |
| "loss": 0.3022, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 0.9370629370629371, | |
| "grad_norm": 0.09428713209319824, | |
| "learning_rate": 6.402806382772494e-06, | |
| "loss": 0.3194, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.9405594405594405, | |
| "grad_norm": 0.09817792403050692, | |
| "learning_rate": 6.373493204947065e-06, | |
| "loss": 0.3042, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 0.9440559440559441, | |
| "grad_norm": 0.08292232832861181, | |
| "learning_rate": 6.344128876862729e-06, | |
| "loss": 0.2787, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.9475524475524476, | |
| "grad_norm": 0.1060764346689841, | |
| "learning_rate": 6.3147144920763985e-06, | |
| "loss": 0.3323, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 0.951048951048951, | |
| "grad_norm": 0.0908264916657139, | |
| "learning_rate": 6.2852511460091406e-06, | |
| "loss": 0.2884, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.9545454545454546, | |
| "grad_norm": 0.11839968846971091, | |
| "learning_rate": 6.255739935905396e-06, | |
| "loss": 0.3125, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 0.958041958041958, | |
| "grad_norm": 0.11288513824262601, | |
| "learning_rate": 6.226181960792107e-06, | |
| "loss": 0.3063, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.9615384615384616, | |
| "grad_norm": 0.10439467637777479, | |
| "learning_rate": 6.1965783214377895e-06, | |
| "loss": 0.3115, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 0.965034965034965, | |
| "grad_norm": 0.09149993105772337, | |
| "learning_rate": 6.166930120311545e-06, | |
| "loss": 0.2981, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.9685314685314685, | |
| "grad_norm": 0.10624923889838721, | |
| "learning_rate": 6.137238461541998e-06, | |
| "loss": 0.3083, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 0.972027972027972, | |
| "grad_norm": 0.09916502005960598, | |
| "learning_rate": 6.1075044508761804e-06, | |
| "loss": 0.3213, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.9755244755244755, | |
| "grad_norm": 0.11294055771283724, | |
| "learning_rate": 6.077729195638349e-06, | |
| "loss": 0.3038, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 0.9790209790209791, | |
| "grad_norm": 0.10125490787864704, | |
| "learning_rate": 6.047913804688751e-06, | |
| "loss": 0.3204, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.9825174825174825, | |
| "grad_norm": 0.1015505902245628, | |
| "learning_rate": 6.018059388382327e-06, | |
| "loss": 0.3272, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 0.986013986013986, | |
| "grad_norm": 0.0997218077059797, | |
| "learning_rate": 5.988167058527361e-06, | |
| "loss": 0.3095, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.9895104895104895, | |
| "grad_norm": 0.11184085378893502, | |
| "learning_rate": 5.958237928344077e-06, | |
| "loss": 0.3102, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 0.993006993006993, | |
| "grad_norm": 0.09497844419150701, | |
| "learning_rate": 5.928273112423177e-06, | |
| "loss": 0.3357, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.9965034965034965, | |
| "grad_norm": 0.09785679757144553, | |
| "learning_rate": 5.898273726684338e-06, | |
| "loss": 0.3177, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.10414865345603919, | |
| "learning_rate": 5.8682408883346535e-06, | |
| "loss": 0.3216, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 0.3180939555168152, | |
| "eval_runtime": 33.8853, | |
| "eval_samples_per_second": 20.481, | |
| "eval_steps_per_second": 5.135, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 1.0034965034965035, | |
| "grad_norm": 0.09860050229670009, | |
| "learning_rate": 5.838175715827015e-06, | |
| "loss": 0.283, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 1.006993006993007, | |
| "grad_norm": 0.09643250585976647, | |
| "learning_rate": 5.808079328818488e-06, | |
| "loss": 0.2831, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 1.0104895104895104, | |
| "grad_norm": 0.11960210700385758, | |
| "learning_rate": 5.777952848128578e-06, | |
| "loss": 0.306, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 1.013986013986014, | |
| "grad_norm": 0.09213590484634555, | |
| "learning_rate": 5.747797395697525e-06, | |
| "loss": 0.2905, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 1.0174825174825175, | |
| "grad_norm": 0.11181608085903355, | |
| "learning_rate": 5.717614094544497e-06, | |
| "loss": 0.2965, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 1.020979020979021, | |
| "grad_norm": 0.11274993170515431, | |
| "learning_rate": 5.687404068725779e-06, | |
| "loss": 0.2882, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 1.0244755244755244, | |
| "grad_norm": 0.10367759599697986, | |
| "learning_rate": 5.657168443292909e-06, | |
| "loss": 0.2848, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 1.027972027972028, | |
| "grad_norm": 0.0941238300478702, | |
| "learning_rate": 5.626908344250778e-06, | |
| "loss": 0.2992, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 1.0314685314685315, | |
| "grad_norm": 0.09619381493436249, | |
| "learning_rate": 5.5966248985157e-06, | |
| "loss": 0.2812, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 1.034965034965035, | |
| "grad_norm": 0.10400313409523196, | |
| "learning_rate": 5.566319233873446e-06, | |
| "loss": 0.3058, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 1.0384615384615385, | |
| "grad_norm": 0.10574597951326378, | |
| "learning_rate": 5.53599247893724e-06, | |
| "loss": 0.2758, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 1.0419580419580419, | |
| "grad_norm": 0.10462360020319139, | |
| "learning_rate": 5.5056457631057245e-06, | |
| "loss": 0.2746, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 1.0454545454545454, | |
| "grad_norm": 0.11618910922950017, | |
| "learning_rate": 5.475280216520913e-06, | |
| "loss": 0.2823, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 1.048951048951049, | |
| "grad_norm": 0.12712963054282944, | |
| "learning_rate": 5.444896970026092e-06, | |
| "loss": 0.2835, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 1.0524475524475525, | |
| "grad_norm": 0.09958988336796508, | |
| "learning_rate": 5.414497155123708e-06, | |
| "loss": 0.2653, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 1.055944055944056, | |
| "grad_norm": 0.1080659427330968, | |
| "learning_rate": 5.384081903933235e-06, | |
| "loss": 0.2949, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 1.0594405594405594, | |
| "grad_norm": 0.11665708479307332, | |
| "learning_rate": 5.353652349149007e-06, | |
| "loss": 0.278, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 1.062937062937063, | |
| "grad_norm": 0.12826080199663958, | |
| "learning_rate": 5.323209623998043e-06, | |
| "loss": 0.2768, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 1.0664335664335665, | |
| "grad_norm": 0.0931839219725714, | |
| "learning_rate": 5.292754862197831e-06, | |
| "loss": 0.2777, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 1.06993006993007, | |
| "grad_norm": 0.107320551035206, | |
| "learning_rate": 5.2622891979141265e-06, | |
| "loss": 0.2816, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 1.0734265734265733, | |
| "grad_norm": 0.1061573386524815, | |
| "learning_rate": 5.231813765718698e-06, | |
| "loss": 0.2895, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 1.0769230769230769, | |
| "grad_norm": 0.1579249118505657, | |
| "learning_rate": 5.201329700547077e-06, | |
| "loss": 0.2634, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 1.0804195804195804, | |
| "grad_norm": 0.10185189280525993, | |
| "learning_rate": 5.170838137656304e-06, | |
| "loss": 0.2808, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 1.083916083916084, | |
| "grad_norm": 0.11469755609644863, | |
| "learning_rate": 5.140340212582639e-06, | |
| "loss": 0.2817, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 1.0874125874125875, | |
| "grad_norm": 0.09969775272157483, | |
| "learning_rate": 5.109837061099274e-06, | |
| "loss": 0.2905, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 1.0909090909090908, | |
| "grad_norm": 0.11204511989520233, | |
| "learning_rate": 5.07932981917404e-06, | |
| "loss": 0.2983, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 1.0944055944055944, | |
| "grad_norm": 0.10859999114246341, | |
| "learning_rate": 5.0488196229271005e-06, | |
| "loss": 0.2817, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 1.097902097902098, | |
| "grad_norm": 0.10124983316705717, | |
| "learning_rate": 5.018307608588637e-06, | |
| "loss": 0.2694, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 1.1013986013986015, | |
| "grad_norm": 0.09673028033429812, | |
| "learning_rate": 4.9877949124565415e-06, | |
| "loss": 0.2804, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 1.104895104895105, | |
| "grad_norm": 0.12406697294731686, | |
| "learning_rate": 4.957282670854099e-06, | |
| "loss": 0.2842, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 1.1083916083916083, | |
| "grad_norm": 0.10389483855300476, | |
| "learning_rate": 4.926772020087663e-06, | |
| "loss": 0.3067, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 1.1118881118881119, | |
| "grad_norm": 0.11624832680414443, | |
| "learning_rate": 4.896264096404343e-06, | |
| "loss": 0.2989, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 1.1153846153846154, | |
| "grad_norm": 0.10001637455582953, | |
| "learning_rate": 4.865760035949695e-06, | |
| "loss": 0.3016, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 1.118881118881119, | |
| "grad_norm": 0.11275995426749594, | |
| "learning_rate": 4.835260974725397e-06, | |
| "loss": 0.3062, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 1.1223776223776223, | |
| "grad_norm": 0.09710030416818968, | |
| "learning_rate": 4.804768048546955e-06, | |
| "loss": 0.3021, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 1.1258741258741258, | |
| "grad_norm": 0.11969077356400575, | |
| "learning_rate": 4.774282393001397e-06, | |
| "loss": 0.2839, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 1.1293706293706294, | |
| "grad_norm": 0.1165475840983693, | |
| "learning_rate": 4.743805143404989e-06, | |
| "loss": 0.3033, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 1.132867132867133, | |
| "grad_norm": 0.11943387092333048, | |
| "learning_rate": 4.713337434760949e-06, | |
| "loss": 0.2911, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 1.1363636363636362, | |
| "grad_norm": 0.11663805469555324, | |
| "learning_rate": 4.682880401717178e-06, | |
| "loss": 0.2585, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 1.1398601398601398, | |
| "grad_norm": 0.10669609950388007, | |
| "learning_rate": 4.652435178524013e-06, | |
| "loss": 0.2907, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 1.1433566433566433, | |
| "grad_norm": 0.11232703578650871, | |
| "learning_rate": 4.622002898991972e-06, | |
| "loss": 0.2628, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 1.1468531468531469, | |
| "grad_norm": 0.11335115400681652, | |
| "learning_rate": 4.591584696449549e-06, | |
| "loss": 0.2793, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 1.1503496503496504, | |
| "grad_norm": 0.12058621719051137, | |
| "learning_rate": 4.561181703700986e-06, | |
| "loss": 0.2874, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 1.1538461538461537, | |
| "grad_norm": 0.1106409571474046, | |
| "learning_rate": 4.530795052984104e-06, | |
| "loss": 0.3156, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 1.1573426573426573, | |
| "grad_norm": 0.09597427027532647, | |
| "learning_rate": 4.500425875928129e-06, | |
| "loss": 0.2507, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 1.1608391608391608, | |
| "grad_norm": 0.11526214137962942, | |
| "learning_rate": 4.470075303511546e-06, | |
| "loss": 0.2992, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 1.1643356643356644, | |
| "grad_norm": 0.10720902756157567, | |
| "learning_rate": 4.439744466019993e-06, | |
| "loss": 0.3007, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 1.167832167832168, | |
| "grad_norm": 0.10095686169764254, | |
| "learning_rate": 4.4094344930041515e-06, | |
| "loss": 0.2827, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 1.1713286713286712, | |
| "grad_norm": 0.12768377416510107, | |
| "learning_rate": 4.379146513237695e-06, | |
| "loss": 0.2874, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 1.1748251748251748, | |
| "grad_norm": 0.098356265123962, | |
| "learning_rate": 4.348881654675241e-06, | |
| "loss": 0.2678, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 1.1783216783216783, | |
| "grad_norm": 0.12024264878083898, | |
| "learning_rate": 4.318641044410358e-06, | |
| "loss": 0.27, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 1.1818181818181819, | |
| "grad_norm": 0.10256378318597871, | |
| "learning_rate": 4.2884258086335755e-06, | |
| "loss": 0.2621, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 1.1853146853146854, | |
| "grad_norm": 0.123973849351433, | |
| "learning_rate": 4.258237072590455e-06, | |
| "loss": 0.2723, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 1.1888111888111887, | |
| "grad_norm": 0.12348063377350953, | |
| "learning_rate": 4.228075960539684e-06, | |
| "loss": 0.2971, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 1.1923076923076923, | |
| "grad_norm": 0.13820520258807037, | |
| "learning_rate": 4.1979435957111984e-06, | |
| "loss": 0.2959, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 1.1958041958041958, | |
| "grad_norm": 0.10508613597681296, | |
| "learning_rate": 4.167841100264369e-06, | |
| "loss": 0.2765, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 1.1993006993006994, | |
| "grad_norm": 0.11352798455372175, | |
| "learning_rate": 4.1377695952461905e-06, | |
| "loss": 0.2595, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 1.2027972027972027, | |
| "grad_norm": 0.11589669656742159, | |
| "learning_rate": 4.107730200549549e-06, | |
| "loss": 0.2673, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 1.2062937062937062, | |
| "grad_norm": 0.11665313294655853, | |
| "learning_rate": 4.0777240348715085e-06, | |
| "loss": 0.2986, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 1.2097902097902098, | |
| "grad_norm": 0.11700009297269698, | |
| "learning_rate": 4.04775221567165e-06, | |
| "loss": 0.2735, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 1.2132867132867133, | |
| "grad_norm": 0.10262056570150567, | |
| "learning_rate": 4.017815859130462e-06, | |
| "loss": 0.2802, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 1.2167832167832167, | |
| "grad_norm": 0.12088842148152552, | |
| "learning_rate": 3.987916080107762e-06, | |
| "loss": 0.2595, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 1.2202797202797202, | |
| "grad_norm": 0.123222073537631, | |
| "learning_rate": 3.958053992101189e-06, | |
| "loss": 0.2728, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 1.2237762237762237, | |
| "grad_norm": 0.14415879676636115, | |
| "learning_rate": 3.928230707204729e-06, | |
| "loss": 0.2757, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 1.2272727272727273, | |
| "grad_norm": 0.1059023549630983, | |
| "learning_rate": 3.898447336067297e-06, | |
| "loss": 0.268, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 1.2307692307692308, | |
| "grad_norm": 0.12235246365024488, | |
| "learning_rate": 3.86870498785139e-06, | |
| "loss": 0.2793, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 1.2342657342657342, | |
| "grad_norm": 0.1266933996658085, | |
| "learning_rate": 3.8390047701917625e-06, | |
| "loss": 0.2791, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 1.2377622377622377, | |
| "grad_norm": 0.1230245533106991, | |
| "learning_rate": 3.8093477891541864e-06, | |
| "loss": 0.3001, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 1.2412587412587412, | |
| "grad_norm": 0.11654806074283108, | |
| "learning_rate": 3.779735149194258e-06, | |
| "loss": 0.2826, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 1.2447552447552448, | |
| "grad_norm": 0.10770540503295759, | |
| "learning_rate": 3.750167953116272e-06, | |
| "loss": 0.2865, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 1.2482517482517483, | |
| "grad_norm": 0.11414218881414812, | |
| "learning_rate": 3.720647302032144e-06, | |
| "loss": 0.2742, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 1.2517482517482517, | |
| "grad_norm": 0.11830539545059722, | |
| "learning_rate": 3.6911742953204046e-06, | |
| "loss": 0.2767, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 1.2552447552447552, | |
| "grad_norm": 0.10011126409724302, | |
| "learning_rate": 3.6617500305852676e-06, | |
| "loss": 0.2552, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 1.2587412587412588, | |
| "grad_norm": 0.12742089574311471, | |
| "learning_rate": 3.6323756036157386e-06, | |
| "loss": 0.301, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 1.2622377622377623, | |
| "grad_norm": 0.12157261199386625, | |
| "learning_rate": 3.6030521083448256e-06, | |
| "loss": 0.275, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 1.2657342657342658, | |
| "grad_norm": 0.13018781979587624, | |
| "learning_rate": 3.5737806368087776e-06, | |
| "loss": 0.2791, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 1.2692307692307692, | |
| "grad_norm": 0.11472896479036702, | |
| "learning_rate": 3.544562279106436e-06, | |
| "loss": 0.2597, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 1.2727272727272727, | |
| "grad_norm": 0.13983756485000562, | |
| "learning_rate": 3.5153981233586277e-06, | |
| "loss": 0.2793, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 1.2762237762237763, | |
| "grad_norm": 0.12377750647958802, | |
| "learning_rate": 3.48628925566764e-06, | |
| "loss": 0.2779, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 1.2797202797202798, | |
| "grad_norm": 0.13033680974681505, | |
| "learning_rate": 3.4572367600767874e-06, | |
| "loss": 0.284, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 1.2832167832167833, | |
| "grad_norm": 0.1252056404310726, | |
| "learning_rate": 3.428241718530022e-06, | |
| "loss": 0.2704, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 1.2867132867132867, | |
| "grad_norm": 0.118752876893844, | |
| "learning_rate": 3.3993052108316566e-06, | |
| "loss": 0.2698, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 1.2902097902097902, | |
| "grad_norm": 0.12876147193567186, | |
| "learning_rate": 3.370428314606138e-06, | |
| "loss": 0.2885, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 1.2937062937062938, | |
| "grad_norm": 0.11959862346971185, | |
| "learning_rate": 3.3416121052579297e-06, | |
| "loss": 0.2695, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 1.297202797202797, | |
| "grad_norm": 0.13176076976769643, | |
| "learning_rate": 3.3128576559314507e-06, | |
| "loss": 0.2777, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 1.3006993006993006, | |
| "grad_norm": 0.1279070658782311, | |
| "learning_rate": 3.2841660374711144e-06, | |
| "loss": 0.2697, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 1.3041958041958042, | |
| "grad_norm": 0.13489716472147867, | |
| "learning_rate": 3.2555383183814544e-06, | |
| "loss": 0.3036, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 1.3076923076923077, | |
| "grad_norm": 0.12334889072330245, | |
| "learning_rate": 3.226975564787322e-06, | |
| "loss": 0.2705, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 1.3111888111888113, | |
| "grad_norm": 0.13592582265687866, | |
| "learning_rate": 3.198478840394196e-06, | |
| "loss": 0.2941, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 1.3146853146853146, | |
| "grad_norm": 0.11390774415602396, | |
| "learning_rate": 3.170049206448553e-06, | |
| "loss": 0.2824, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 1.3181818181818181, | |
| "grad_norm": 0.15890655850394841, | |
| "learning_rate": 3.141687721698363e-06, | |
| "loss": 0.2653, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 1.3216783216783217, | |
| "grad_norm": 0.13577126735405054, | |
| "learning_rate": 3.113395442353646e-06, | |
| "loss": 0.2963, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 1.3251748251748252, | |
| "grad_norm": 0.1271334783456917, | |
| "learning_rate": 3.0851734220471434e-06, | |
| "loss": 0.2676, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 1.3286713286713288, | |
| "grad_norm": 0.14583157351563786, | |
| "learning_rate": 3.0570227117950857e-06, | |
| "loss": 0.2831, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 1.332167832167832, | |
| "grad_norm": 0.14995769193849226, | |
| "learning_rate": 3.0289443599580387e-06, | |
| "loss": 0.2717, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 1.3356643356643356, | |
| "grad_norm": 0.12151760728520503, | |
| "learning_rate": 3.0009394122018744e-06, | |
| "loss": 0.2594, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 1.3391608391608392, | |
| "grad_norm": 0.14335474194484363, | |
| "learning_rate": 2.973008911458816e-06, | |
| "loss": 0.2658, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 1.3426573426573427, | |
| "grad_norm": 0.17742533944724417, | |
| "learning_rate": 2.9451538978886165e-06, | |
| "loss": 0.2745, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 1.3461538461538463, | |
| "grad_norm": 0.12048834268944708, | |
| "learning_rate": 2.917375408839803e-06, | |
| "loss": 0.2728, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 1.3496503496503496, | |
| "grad_norm": 0.1309489385175574, | |
| "learning_rate": 2.8896744788110497e-06, | |
| "loss": 0.2603, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 1.3531468531468531, | |
| "grad_norm": 0.12100172035944871, | |
| "learning_rate": 2.86205213941267e-06, | |
| "loss": 0.2497, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 1.3566433566433567, | |
| "grad_norm": 0.13660442658303193, | |
| "learning_rate": 2.8345094193281673e-06, | |
| "loss": 0.2683, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 1.3601398601398602, | |
| "grad_norm": 0.20029948084159513, | |
| "learning_rate": 2.8070473442759517e-06, | |
| "loss": 0.265, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 1.3636363636363638, | |
| "grad_norm": 0.1217718840811214, | |
| "learning_rate": 2.7796669369711294e-06, | |
| "loss": 0.2758, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 1.367132867132867, | |
| "grad_norm": 0.1218788812023423, | |
| "learning_rate": 2.752369217087418e-06, | |
| "loss": 0.2706, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 1.3706293706293706, | |
| "grad_norm": 0.1681619637715074, | |
| "learning_rate": 2.7251552012191763e-06, | |
| "loss": 0.2521, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 1.3741258741258742, | |
| "grad_norm": 0.13215871161118758, | |
| "learning_rate": 2.698025902843533e-06, | |
| "loss": 0.2668, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 1.3776223776223775, | |
| "grad_norm": 0.16597320281641273, | |
| "learning_rate": 2.670982332282661e-06, | |
| "loss": 0.2601, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 1.381118881118881, | |
| "grad_norm": 0.13286398460876903, | |
| "learning_rate": 2.644025496666143e-06, | |
| "loss": 0.2684, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 1.3846153846153846, | |
| "grad_norm": 0.14964743881741074, | |
| "learning_rate": 2.6171563998934605e-06, | |
| "loss": 0.2636, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 1.3881118881118881, | |
| "grad_norm": 0.17737672923194353, | |
| "learning_rate": 2.590376042596621e-06, | |
| "loss": 0.2667, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 1.3916083916083917, | |
| "grad_norm": 0.13696100404924544, | |
| "learning_rate": 2.5636854221028764e-06, | |
| "loss": 0.2692, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 1.395104895104895, | |
| "grad_norm": 0.1359722927391487, | |
| "learning_rate": 2.537085532397595e-06, | |
| "loss": 0.2672, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 1.3986013986013985, | |
| "grad_norm": 0.17097022815814872, | |
| "learning_rate": 2.5105773640872416e-06, | |
| "loss": 0.2561, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 1.402097902097902, | |
| "grad_norm": 0.1414647308761833, | |
| "learning_rate": 2.484161904362481e-06, | |
| "loss": 0.2437, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 1.4055944055944056, | |
| "grad_norm": 0.14581181430999388, | |
| "learning_rate": 2.4578401369614196e-06, | |
| "loss": 0.2733, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 1.4090909090909092, | |
| "grad_norm": 0.16322883114094733, | |
| "learning_rate": 2.4316130421329696e-06, | |
| "loss": 0.2699, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 1.4125874125874125, | |
| "grad_norm": 0.14215927984521137, | |
| "learning_rate": 2.4054815966003365e-06, | |
| "loss": 0.2719, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 1.416083916083916, | |
| "grad_norm": 0.14648779179733568, | |
| "learning_rate": 2.379446773524656e-06, | |
| "loss": 0.2416, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 1.4195804195804196, | |
| "grad_norm": 0.1521431992671824, | |
| "learning_rate": 2.3535095424687443e-06, | |
| "loss": 0.2365, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 1.4230769230769231, | |
| "grad_norm": 0.13347271495618993, | |
| "learning_rate": 2.3276708693609947e-06, | |
| "loss": 0.2695, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 1.4265734265734267, | |
| "grad_norm": 0.1645738689375383, | |
| "learning_rate": 2.301931716459405e-06, | |
| "loss": 0.2714, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 1.43006993006993, | |
| "grad_norm": 0.1601133081604159, | |
| "learning_rate": 2.2762930423157346e-06, | |
| "loss": 0.2628, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 1.4335664335664335, | |
| "grad_norm": 0.15437084886490815, | |
| "learning_rate": 2.2507558017398263e-06, | |
| "loss": 0.2559, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 1.437062937062937, | |
| "grad_norm": 0.15803419436134836, | |
| "learning_rate": 2.2253209457640205e-06, | |
| "loss": 0.2643, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 1.4405594405594406, | |
| "grad_norm": 0.12851123172489884, | |
| "learning_rate": 2.1999894216077633e-06, | |
| "loss": 0.2664, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 1.4440559440559442, | |
| "grad_norm": 0.19040825534872968, | |
| "learning_rate": 2.174762172642319e-06, | |
| "loss": 0.2473, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 1.4475524475524475, | |
| "grad_norm": 0.15065545559977453, | |
| "learning_rate": 2.149640138355632e-06, | |
| "loss": 0.2742, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 1.451048951048951, | |
| "grad_norm": 0.1922974353635457, | |
| "learning_rate": 2.1246242543173613e-06, | |
| "loss": 0.275, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 1.4545454545454546, | |
| "grad_norm": 0.17982162397533608, | |
| "learning_rate": 2.09971545214401e-06, | |
| "loss": 0.2635, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 1.458041958041958, | |
| "grad_norm": 0.1738289418178212, | |
| "learning_rate": 2.074914659464254e-06, | |
| "loss": 0.2381, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 1.4615384615384617, | |
| "grad_norm": 0.12853571069602446, | |
| "learning_rate": 2.050222799884387e-06, | |
| "loss": 0.2352, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 1.465034965034965, | |
| "grad_norm": 0.15868082671286524, | |
| "learning_rate": 2.0256407929539203e-06, | |
| "loss": 0.2701, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 1.4685314685314685, | |
| "grad_norm": 0.15024281799339492, | |
| "learning_rate": 2.0011695541313514e-06, | |
| "loss": 0.2687, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 1.472027972027972, | |
| "grad_norm": 0.16538424584383676, | |
| "learning_rate": 1.9768099947500535e-06, | |
| "loss": 0.2665, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 1.4755244755244754, | |
| "grad_norm": 0.1469847725186328, | |
| "learning_rate": 1.9525630219843524e-06, | |
| "loss": 0.2592, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 1.479020979020979, | |
| "grad_norm": 0.19914120331298027, | |
| "learning_rate": 1.9284295388157328e-06, | |
| "loss": 0.2526, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 1.4825174825174825, | |
| "grad_norm": 0.14171359694212368, | |
| "learning_rate": 1.9044104439992145e-06, | |
| "loss": 0.2571, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 1.486013986013986, | |
| "grad_norm": 0.15468593500001104, | |
| "learning_rate": 1.880506632029883e-06, | |
| "loss": 0.2665, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 1.4895104895104896, | |
| "grad_norm": 0.17533297642498313, | |
| "learning_rate": 1.8567189931095691e-06, | |
| "loss": 0.2493, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 1.493006993006993, | |
| "grad_norm": 0.158575042035332, | |
| "learning_rate": 1.8330484131137093e-06, | |
| "loss": 0.2643, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 1.4965034965034965, | |
| "grad_norm": 0.1699070377257086, | |
| "learning_rate": 1.8094957735583463e-06, | |
| "loss": 0.2424, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.16191985547326435, | |
| "learning_rate": 1.7860619515673034e-06, | |
| "loss": 0.2442, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 1.5034965034965035, | |
| "grad_norm": 0.16249677192430212, | |
| "learning_rate": 1.7627478198395193e-06, | |
| "loss": 0.2755, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 1.506993006993007, | |
| "grad_norm": 0.16721652302600015, | |
| "learning_rate": 1.7395542466165488e-06, | |
| "loss": 0.2743, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 1.5104895104895104, | |
| "grad_norm": 0.1659393589666137, | |
| "learning_rate": 1.7164820956502237e-06, | |
| "loss": 0.2637, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 1.513986013986014, | |
| "grad_norm": 0.2015968042608536, | |
| "learning_rate": 1.6935322261704929e-06, | |
| "loss": 0.2439, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 1.5174825174825175, | |
| "grad_norm": 0.14206056623724686, | |
| "learning_rate": 1.670705492853421e-06, | |
| "loss": 0.2501, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 1.5209790209790208, | |
| "grad_norm": 0.16138600835314076, | |
| "learning_rate": 1.6480027457893572e-06, | |
| "loss": 0.24, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 1.5244755244755246, | |
| "grad_norm": 0.16457265389043566, | |
| "learning_rate": 1.6254248304512816e-06, | |
| "loss": 0.2563, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 1.527972027972028, | |
| "grad_norm": 0.1795356890102337, | |
| "learning_rate": 1.6029725876633085e-06, | |
| "loss": 0.263, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 1.5314685314685315, | |
| "grad_norm": 0.1664397601665864, | |
| "learning_rate": 1.580646853569393e-06, | |
| "loss": 0.24, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 1.534965034965035, | |
| "grad_norm": 0.14718176529521387, | |
| "learning_rate": 1.5584484596021694e-06, | |
| "loss": 0.2428, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 1.5384615384615383, | |
| "grad_norm": 0.14279558469667486, | |
| "learning_rate": 1.5363782324520033e-06, | |
| "loss": 0.249, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 1.541958041958042, | |
| "grad_norm": 0.15140009314370015, | |
| "learning_rate": 1.514436994036201e-06, | |
| "loss": 0.2495, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 1.5454545454545454, | |
| "grad_norm": 0.1619202860450826, | |
| "learning_rate": 1.4926255614683931e-06, | |
| "loss": 0.249, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 1.548951048951049, | |
| "grad_norm": 0.1716893915072177, | |
| "learning_rate": 1.4709447470281219e-06, | |
| "loss": 0.2588, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 1.5524475524475525, | |
| "grad_norm": 0.16886404646815958, | |
| "learning_rate": 1.4493953581305686e-06, | |
| "loss": 0.238, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 1.5559440559440558, | |
| "grad_norm": 0.16562116632419963, | |
| "learning_rate": 1.427978197296503e-06, | |
| "loss": 0.256, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 1.5594405594405596, | |
| "grad_norm": 0.1556634989889314, | |
| "learning_rate": 1.406694062122389e-06, | |
| "loss": 0.2378, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 1.562937062937063, | |
| "grad_norm": 0.1855724126302037, | |
| "learning_rate": 1.3855437452506759e-06, | |
| "loss": 0.2542, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 1.5664335664335665, | |
| "grad_norm": 0.16184087877019146, | |
| "learning_rate": 1.3645280343402973e-06, | |
| "loss": 0.2336, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 1.56993006993007, | |
| "grad_norm": 0.16147656381172418, | |
| "learning_rate": 1.3436477120373143e-06, | |
| "loss": 0.2446, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 1.5734265734265733, | |
| "grad_norm": 0.18106165581351266, | |
| "learning_rate": 1.3229035559457887e-06, | |
| "loss": 0.2339, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 1.5769230769230769, | |
| "grad_norm": 0.2111788787476443, | |
| "learning_rate": 1.3022963385988153e-06, | |
| "loss": 0.2269, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 1.5804195804195804, | |
| "grad_norm": 0.1669229456184738, | |
| "learning_rate": 1.2818268274297474e-06, | |
| "loss": 0.2395, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 1.583916083916084, | |
| "grad_norm": 0.17915301565504643, | |
| "learning_rate": 1.2614957847436342e-06, | |
| "loss": 0.2284, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 1.5874125874125875, | |
| "grad_norm": 0.1822294325304649, | |
| "learning_rate": 1.2413039676888089e-06, | |
| "loss": 0.2354, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 1.5909090909090908, | |
| "grad_norm": 0.19524684728719174, | |
| "learning_rate": 1.2212521282287093e-06, | |
| "loss": 0.2561, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 1.5944055944055944, | |
| "grad_norm": 0.21352770506743224, | |
| "learning_rate": 1.2013410131138658e-06, | |
| "loss": 0.2436, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 1.597902097902098, | |
| "grad_norm": 0.18729319265403963, | |
| "learning_rate": 1.1815713638540943e-06, | |
| "loss": 0.2489, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 1.6013986013986012, | |
| "grad_norm": 0.19511685104147358, | |
| "learning_rate": 1.161943916690883e-06, | |
| "loss": 0.2651, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 1.604895104895105, | |
| "grad_norm": 0.17784961830764232, | |
| "learning_rate": 1.1424594025699653e-06, | |
| "loss": 0.2459, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 1.6083916083916083, | |
| "grad_norm": 0.20035091122936416, | |
| "learning_rate": 1.1231185471141126e-06, | |
| "loss": 0.2425, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 1.6118881118881119, | |
| "grad_norm": 0.16193565878333754, | |
| "learning_rate": 1.1039220705961002e-06, | |
| "loss": 0.2277, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 1.6153846153846154, | |
| "grad_norm": 0.20673878902479834, | |
| "learning_rate": 1.0848706879118893e-06, | |
| "loss": 0.2475, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 1.6188811188811187, | |
| "grad_norm": 0.21816354127208326, | |
| "learning_rate": 1.065965108554e-06, | |
| "loss": 0.2578, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 1.6223776223776225, | |
| "grad_norm": 0.16456963901908442, | |
| "learning_rate": 1.047206036585095e-06, | |
| "loss": 0.2317, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 1.6258741258741258, | |
| "grad_norm": 0.2578576181305842, | |
| "learning_rate": 1.0285941706117486e-06, | |
| "loss": 0.2556, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 1.6293706293706294, | |
| "grad_norm": 0.20771441694982992, | |
| "learning_rate": 1.0101302037584427e-06, | |
| "loss": 0.2484, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 1.632867132867133, | |
| "grad_norm": 0.20558365348546553, | |
| "learning_rate": 9.918148236417453e-07, | |
| "loss": 0.2132, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 1.6363636363636362, | |
| "grad_norm": 0.1558966734301512, | |
| "learning_rate": 9.73648712344707e-07, | |
| "loss": 0.2203, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 1.63986013986014, | |
| "grad_norm": 0.2055595235565553, | |
| "learning_rate": 9.5563254639146e-07, | |
| "loss": 0.2185, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 1.6433566433566433, | |
| "grad_norm": 0.23036116631758255, | |
| "learning_rate": 9.377669967220166e-07, | |
| "loss": 0.2427, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 1.6468531468531469, | |
| "grad_norm": 0.2541374406766717, | |
| "learning_rate": 9.200527286672967e-07, | |
| "loss": 0.2406, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 1.6503496503496504, | |
| "grad_norm": 0.1965665252309361, | |
| "learning_rate": 9.02490401924333e-07, | |
| "loss": 0.237, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 1.6538461538461537, | |
| "grad_norm": 0.22022081232887075, | |
| "learning_rate": 8.850806705317183e-07, | |
| "loss": 0.2485, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 1.6573426573426573, | |
| "grad_norm": 0.27501820024516205, | |
| "learning_rate": 8.678241828452399e-07, | |
| "loss": 0.2407, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 1.6608391608391608, | |
| "grad_norm": 0.2068268227528754, | |
| "learning_rate": 8.507215815137337e-07, | |
| "loss": 0.233, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 1.6643356643356644, | |
| "grad_norm": 0.25620799000888017, | |
| "learning_rate": 8.337735034551614e-07, | |
| "loss": 0.2458, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 1.667832167832168, | |
| "grad_norm": 0.20951395283357943, | |
| "learning_rate": 8.169805798328745e-07, | |
| "loss": 0.2374, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 1.6713286713286712, | |
| "grad_norm": 0.1872465721165221, | |
| "learning_rate": 8.003434360321249e-07, | |
| "loss": 0.2465, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 1.6748251748251748, | |
| "grad_norm": 0.18145979976086574, | |
| "learning_rate": 7.838626916367675e-07, | |
| "loss": 0.2148, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 1.6783216783216783, | |
| "grad_norm": 0.24058782422084832, | |
| "learning_rate": 7.675389604061817e-07, | |
| "loss": 0.2345, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 1.6818181818181817, | |
| "grad_norm": 0.20087468348190377, | |
| "learning_rate": 7.513728502524286e-07, | |
| "loss": 0.2286, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 1.6853146853146854, | |
| "grad_norm": 0.17425447474287672, | |
| "learning_rate": 7.353649632175957e-07, | |
| "loss": 0.2256, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 1.6888111888111887, | |
| "grad_norm": 0.2396206296610538, | |
| "learning_rate": 7.195158954513864e-07, | |
| "loss": 0.225, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 1.6923076923076923, | |
| "grad_norm": 0.17549097854271423, | |
| "learning_rate": 7.03826237188916e-07, | |
| "loss": 0.2216, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 1.6958041958041958, | |
| "grad_norm": 0.22053185548722443, | |
| "learning_rate": 6.882965727287305e-07, | |
| "loss": 0.2295, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 1.6993006993006992, | |
| "grad_norm": 0.16904985185465962, | |
| "learning_rate": 6.72927480411047e-07, | |
| "loss": 0.2328, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 1.702797202797203, | |
| "grad_norm": 0.24716116115131842, | |
| "learning_rate": 6.577195325962138e-07, | |
| "loss": 0.2217, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 1.7062937062937062, | |
| "grad_norm": 0.21805674414937837, | |
| "learning_rate": 6.426732956433989e-07, | |
| "loss": 0.2389, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 1.7097902097902098, | |
| "grad_norm": 0.16493951842822327, | |
| "learning_rate": 6.277893298894966e-07, | |
| "loss": 0.2262, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 1.7132867132867133, | |
| "grad_norm": 0.20549852112013453, | |
| "learning_rate": 6.130681896282581e-07, | |
| "loss": 0.248, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 1.7167832167832167, | |
| "grad_norm": 0.24788701675807667, | |
| "learning_rate": 5.985104230896515e-07, | |
| "loss": 0.2464, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 1.7202797202797204, | |
| "grad_norm": 0.2019733596582574, | |
| "learning_rate": 5.841165724194453e-07, | |
| "loss": 0.2316, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 1.7237762237762237, | |
| "grad_norm": 0.23427331783210922, | |
| "learning_rate": 5.698871736590145e-07, | |
| "loss": 0.2353, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 1.7272727272727273, | |
| "grad_norm": 0.19882817499305466, | |
| "learning_rate": 5.558227567253832e-07, | |
| "loss": 0.2331, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 1.7307692307692308, | |
| "grad_norm": 0.1953244944246295, | |
| "learning_rate": 5.41923845391486e-07, | |
| "loss": 0.2249, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 1.7342657342657342, | |
| "grad_norm": 0.20860907438922652, | |
| "learning_rate": 5.281909572666649e-07, | |
| "loss": 0.2371, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 1.737762237762238, | |
| "grad_norm": 0.201292017090407, | |
| "learning_rate": 5.146246037773922e-07, | |
| "loss": 0.2223, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 1.7412587412587412, | |
| "grad_norm": 0.20531608712854288, | |
| "learning_rate": 5.012252901482211e-07, | |
| "loss": 0.2108, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 1.7447552447552448, | |
| "grad_norm": 0.24065289336782011, | |
| "learning_rate": 4.879935153829757e-07, | |
| "loss": 0.2454, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 1.7482517482517483, | |
| "grad_norm": 0.2410587912972817, | |
| "learning_rate": 4.74929772246166e-07, | |
| "loss": 0.2316, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 1.7517482517482517, | |
| "grad_norm": 0.20361234001274467, | |
| "learning_rate": 4.6203454724463523e-07, | |
| "loss": 0.217, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 1.7552447552447552, | |
| "grad_norm": 0.24316077385521911, | |
| "learning_rate": 4.4930832060944453e-07, | |
| "loss": 0.2519, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 1.7587412587412588, | |
| "grad_norm": 0.2588824914869855, | |
| "learning_rate": 4.36751566277982e-07, | |
| "loss": 0.2404, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 1.762237762237762, | |
| "grad_norm": 0.23339078143613176, | |
| "learning_rate": 4.243647518763261e-07, | |
| "loss": 0.2344, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 1.7657342657342658, | |
| "grad_norm": 0.1870709992934403, | |
| "learning_rate": 4.121483387018149e-07, | |
| "loss": 0.2027, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 1.7692307692307692, | |
| "grad_norm": 0.24240712525582014, | |
| "learning_rate": 4.001027817058789e-07, | |
| "loss": 0.2242, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 1.7727272727272727, | |
| "grad_norm": 0.1975158666317177, | |
| "learning_rate": 3.882285294770938e-07, | |
| "loss": 0.2323, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 1.7762237762237763, | |
| "grad_norm": 0.2848982157758639, | |
| "learning_rate": 3.765260242244706e-07, | |
| "loss": 0.2336, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 1.7797202797202796, | |
| "grad_norm": 0.18090559581095472, | |
| "learning_rate": 3.649957017609979e-07, | |
| "loss": 0.2309, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 1.7832167832167833, | |
| "grad_norm": 0.23858822129312376, | |
| "learning_rate": 3.536379914873983e-07, | |
| "loss": 0.2188, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 1.7867132867132867, | |
| "grad_norm": 0.237417705264017, | |
| "learning_rate": 3.4245331637614955e-07, | |
| "loss": 0.2231, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 1.7902097902097902, | |
| "grad_norm": 0.21649790024308277, | |
| "learning_rate": 3.3144209295572493e-07, | |
| "loss": 0.2329, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 1.7937062937062938, | |
| "grad_norm": 0.2390035333944304, | |
| "learning_rate": 3.206047312950811e-07, | |
| "loss": 0.2134, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 1.797202797202797, | |
| "grad_norm": 0.19422897750452517, | |
| "learning_rate": 3.099416349883949e-07, | |
| "loss": 0.2096, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 1.8006993006993008, | |
| "grad_norm": 0.2301108253195982, | |
| "learning_rate": 2.994532011400214e-07, | |
| "loss": 0.2298, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 1.8041958041958042, | |
| "grad_norm": 0.22166892323516244, | |
| "learning_rate": 2.8913982034971475e-07, | |
| "loss": 0.2089, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 1.8076923076923077, | |
| "grad_norm": 0.2627008526014087, | |
| "learning_rate": 2.790018766980773e-07, | |
| "loss": 0.2337, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 1.8111888111888113, | |
| "grad_norm": 0.19527688307645244, | |
| "learning_rate": 2.6903974773225703e-07, | |
| "loss": 0.2277, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 1.8146853146853146, | |
| "grad_norm": 0.24342278846709312, | |
| "learning_rate": 2.592538044518889e-07, | |
| "loss": 0.2122, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 1.8181818181818183, | |
| "grad_norm": 0.23354073502276002, | |
| "learning_rate": 2.4964441129527337e-07, | |
| "loss": 0.2366, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 1.8216783216783217, | |
| "grad_norm": 0.1762278220188555, | |
| "learning_rate": 2.4021192612581177e-07, | |
| "loss": 0.2325, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 1.8251748251748252, | |
| "grad_norm": 0.2630266024192207, | |
| "learning_rate": 2.30956700218673e-07, | |
| "loss": 0.2161, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 1.8286713286713288, | |
| "grad_norm": 0.22252967945056348, | |
| "learning_rate": 2.2187907824771538e-07, | |
| "loss": 0.2256, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 1.832167832167832, | |
| "grad_norm": 0.2652486743034812, | |
| "learning_rate": 2.1297939827264803e-07, | |
| "loss": 0.2344, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 1.8356643356643356, | |
| "grad_norm": 0.24375491611024352, | |
| "learning_rate": 2.0425799172644367e-07, | |
| "loss": 0.2071, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 1.8391608391608392, | |
| "grad_norm": 0.2279098129494666, | |
| "learning_rate": 1.9571518340299245e-07, | |
| "loss": 0.2189, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 1.8426573426573427, | |
| "grad_norm": 0.2725869066752257, | |
| "learning_rate": 1.873512914450104e-07, | |
| "loss": 0.1944, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 1.8461538461538463, | |
| "grad_norm": 0.23392292954377786, | |
| "learning_rate": 1.7916662733218848e-07, | |
| "loss": 0.2054, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 1.8496503496503496, | |
| "grad_norm": 0.23155242190332045, | |
| "learning_rate": 1.711614958695934e-07, | |
| "loss": 0.2137, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 1.8531468531468531, | |
| "grad_norm": 0.23009398789106972, | |
| "learning_rate": 1.6333619517631795e-07, | |
| "loss": 0.2266, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 1.8566433566433567, | |
| "grad_norm": 0.21611380202153502, | |
| "learning_rate": 1.5569101667437414e-07, | |
| "loss": 0.2273, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 1.86013986013986, | |
| "grad_norm": 0.2497907255920255, | |
| "learning_rate": 1.4822624507784932e-07, | |
| "loss": 0.2129, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 1.8636363636363638, | |
| "grad_norm": 0.21645263133309026, | |
| "learning_rate": 1.4094215838229176e-07, | |
| "loss": 0.2235, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 1.867132867132867, | |
| "grad_norm": 0.21828951546632747, | |
| "learning_rate": 1.3383902785436664e-07, | |
| "loss": 0.2149, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 1.8706293706293706, | |
| "grad_norm": 0.24393086475970577, | |
| "learning_rate": 1.2691711802175043e-07, | |
| "loss": 0.2008, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 1.8741258741258742, | |
| "grad_norm": 0.3188244652652861, | |
| "learning_rate": 1.2017668666327752e-07, | |
| "loss": 0.224, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 1.8776223776223775, | |
| "grad_norm": 0.3213240172913652, | |
| "learning_rate": 1.1361798479934737e-07, | |
| "loss": 0.2384, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 1.8811188811188813, | |
| "grad_norm": 0.1812445700799793, | |
| "learning_rate": 1.0724125668256647e-07, | |
| "loss": 0.2033, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 1.8846153846153846, | |
| "grad_norm": 0.22488642034657966, | |
| "learning_rate": 1.0104673978866164e-07, | |
| "loss": 0.2351, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 1.8881118881118881, | |
| "grad_norm": 0.21308817150324919, | |
| "learning_rate": 9.50346648076278e-08, | |
| "loss": 0.2177, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 1.8916083916083917, | |
| "grad_norm": 0.24780476048232253, | |
| "learning_rate": 8.920525563514249e-08, | |
| "loss": 0.2228, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 1.895104895104895, | |
| "grad_norm": 0.21720420251139552, | |
| "learning_rate": 8.355872936422759e-08, | |
| "loss": 0.2159, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 1.8986013986013988, | |
| "grad_norm": 0.25202284291362476, | |
| "learning_rate": 7.809529627716028e-08, | |
| "loss": 0.2367, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 1.902097902097902, | |
| "grad_norm": 0.19775164747072374, | |
| "learning_rate": 7.281515983764698e-08, | |
| "loss": 0.2072, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 1.9055944055944056, | |
| "grad_norm": 0.2562434879212032, | |
| "learning_rate": 6.771851668324226e-08, | |
| "loss": 0.2482, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 1.9090909090909092, | |
| "grad_norm": 0.26892430221524966, | |
| "learning_rate": 6.280555661802857e-08, | |
| "loss": 0.1994, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 1.9125874125874125, | |
| "grad_norm": 0.2887096340492057, | |
| "learning_rate": 5.807646260554578e-08, | |
| "loss": 0.2084, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 1.916083916083916, | |
| "grad_norm": 0.24777131941697597, | |
| "learning_rate": 5.353141076197888e-08, | |
| "loss": 0.2178, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 1.9195804195804196, | |
| "grad_norm": 0.22689623187545083, | |
| "learning_rate": 4.9170570349598713e-08, | |
| "loss": 0.2419, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 1.9230769230769231, | |
| "grad_norm": 0.2684480264633364, | |
| "learning_rate": 4.499410377045765e-08, | |
| "loss": 0.2267, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 1.9265734265734267, | |
| "grad_norm": 0.24416583221125596, | |
| "learning_rate": 4.100216656034328e-08, | |
| "loss": 0.2266, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 1.93006993006993, | |
| "grad_norm": 0.2302231356461665, | |
| "learning_rate": 3.7194907382985256e-08, | |
| "loss": 0.2166, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 1.9335664335664335, | |
| "grad_norm": 0.23467133093429907, | |
| "learning_rate": 3.357246802451752e-08, | |
| "loss": 0.2209, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 1.937062937062937, | |
| "grad_norm": 0.23392229422089317, | |
| "learning_rate": 3.013498338820031e-08, | |
| "loss": 0.1973, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 1.9405594405594404, | |
| "grad_norm": 0.20759708547706576, | |
| "learning_rate": 2.6882581489395264e-08, | |
| "loss": 0.2111, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 1.9440559440559442, | |
| "grad_norm": 0.20208456863131452, | |
| "learning_rate": 2.381538345079759e-08, | |
| "loss": 0.1976, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 1.9475524475524475, | |
| "grad_norm": 0.22727363631709724, | |
| "learning_rate": 2.0933503497926886e-08, | |
| "loss": 0.2331, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 1.951048951048951, | |
| "grad_norm": 0.2581882163809143, | |
| "learning_rate": 1.8237048954869997e-08, | |
| "loss": 0.2348, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 1.9545454545454546, | |
| "grad_norm": 0.2146854759288832, | |
| "learning_rate": 1.5726120240288632e-08, | |
| "loss": 0.2221, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 1.958041958041958, | |
| "grad_norm": 0.21344973354357963, | |
| "learning_rate": 1.3400810863675174e-08, | |
| "loss": 0.2164, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 1.9615384615384617, | |
| "grad_norm": 0.24854344588537242, | |
| "learning_rate": 1.1261207421874309e-08, | |
| "loss": 0.214, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 1.965034965034965, | |
| "grad_norm": 0.21448547178069746, | |
| "learning_rate": 9.307389595855088e-09, | |
| "loss": 0.2176, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 1.9685314685314685, | |
| "grad_norm": 0.20534250574992022, | |
| "learning_rate": 7.539430147745497e-09, | |
| "loss": 0.2149, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 1.972027972027972, | |
| "grad_norm": 0.2305338569267447, | |
| "learning_rate": 5.957394918120196e-09, | |
| "loss": 0.2336, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 1.9755244755244754, | |
| "grad_norm": 0.254113356726984, | |
| "learning_rate": 4.561342823553028e-09, | |
| "loss": 0.223, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 1.9790209790209792, | |
| "grad_norm": 0.23571564569853407, | |
| "learning_rate": 3.351325854417109e-09, | |
| "loss": 0.2357, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 1.9825174825174825, | |
| "grad_norm": 0.2321024664149865, | |
| "learning_rate": 2.327389072953601e-09, | |
| "loss": 0.2178, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 1.986013986013986, | |
| "grad_norm": 0.22568993609404986, | |
| "learning_rate": 1.4895706115908293e-09, | |
| "loss": 0.2094, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 1.9895104895104896, | |
| "grad_norm": 0.18478258906243253, | |
| "learning_rate": 8.379016715254207e-10, | |
| "loss": 0.2142, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 1.993006993006993, | |
| "grad_norm": 0.22552534213497075, | |
| "learning_rate": 3.724065215593431e-10, | |
| "loss": 0.2227, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 1.9965034965034965, | |
| "grad_norm": 0.242945516186064, | |
| "learning_rate": 9.310249719729491e-11, | |
| "loss": 0.2105, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.2576848560938581, | |
| "learning_rate": 0.0, | |
| "loss": 0.2218, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_loss": 0.24351981282234192, | |
| "eval_runtime": 32.4748, | |
| "eval_samples_per_second": 21.37, | |
| "eval_steps_per_second": 5.358, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "step": 2860, | |
| "total_flos": 6.695403983227322e+18, | |
| "train_loss": 0.32075889967121446, | |
| "train_runtime": 20338.0941, | |
| "train_samples_per_second": 6.75, | |
| "train_steps_per_second": 0.141 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 2860, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 6.695403983227322e+18, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
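
For readers who want to work with this log programmatically, below is a minimal, illustrative sketch of how the state could be read back and summarized with the Python standard library. It assumes the JSON above has been saved verbatim as a plain file named `trainer_state.json`; that filename and the variable names in the snippet are assumptions made for the example, not something recorded in the log itself.

```python
# Minimal sketch (not part of the original log): load the trainer state
# and summarize the training-loss curve and final evaluation.
# Assumes the JSON has been saved as "trainer_state.json" in the working
# directory; the path and names below are illustrative only.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic training-loss entries (they carry a "loss" key);
# the trailing records hold "eval_loss" and the run-level summary instead.
train_entries = [e for e in state["log_history"] if "loss" in e and "step" in e]

first, last = train_entries[0], train_entries[-1]
print(f"loss entries logged: {len(train_entries)} (every {state['logging_steps']} steps)")
print(f"loss at step {first['step']}: {first['loss']:.4f}")
print(f"loss at step {last['step']}: {last['loss']:.4f}")

# Final evaluation record, if one was written at the end of training.
eval_entries = [e for e in state["log_history"] if "eval_loss" in e]
if eval_entries:
    print(f"final eval_loss: {eval_entries[-1]['eval_loss']:.4f}")
```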