{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1090,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0009174311926605505,
      "grad_norm": 8.590026968869019,
      "learning_rate": 1.8348623853211012e-07,
      "loss": 1.1365,
      "step": 1
    },
    {
      "epoch": 0.0045871559633027525,
      "grad_norm": 7.486574820466339,
      "learning_rate": 9.174311926605506e-07,
      "loss": 1.1311,
      "step": 5
    },
    {
      "epoch": 0.009174311926605505,
      "grad_norm": 4.039589713379416,
      "learning_rate": 1.8348623853211011e-06,
      "loss": 1.0455,
      "step": 10
    },
    {
      "epoch": 0.013761467889908258,
      "grad_norm": 3.7172848594571635,
      "learning_rate": 2.7522935779816517e-06,
      "loss": 1.0276,
      "step": 15
    },
    {
      "epoch": 0.01834862385321101,
      "grad_norm": 2.6728393178954097,
      "learning_rate": 3.6697247706422022e-06,
      "loss": 0.9965,
      "step": 20
    },
    {
      "epoch": 0.022935779816513763,
      "grad_norm": 2.1111759195076383,
      "learning_rate": 4.587155963302753e-06,
      "loss": 0.9946,
      "step": 25
    },
    {
      "epoch": 0.027522935779816515,
      "grad_norm": 2.2065873188991474,
      "learning_rate": 5.504587155963303e-06,
      "loss": 0.9918,
      "step": 30
    },
    {
      "epoch": 0.03211009174311927,
      "grad_norm": 2.3927751227272838,
      "learning_rate": 6.422018348623854e-06,
      "loss": 0.9876,
      "step": 35
    },
    {
      "epoch": 0.03669724770642202,
      "grad_norm": 2.198198830881205,
      "learning_rate": 7.3394495412844045e-06,
      "loss": 0.9949,
      "step": 40
    },
    {
      "epoch": 0.04128440366972477,
      "grad_norm": 2.3775388175981913,
      "learning_rate": 8.256880733944956e-06,
      "loss": 0.9785,
      "step": 45
    },
    {
      "epoch": 0.045871559633027525,
      "grad_norm": 2.3636192230061694,
      "learning_rate": 9.174311926605506e-06,
      "loss": 0.9851,
      "step": 50
    },
    {
      "epoch": 0.05045871559633028,
      "grad_norm": 2.551773091121245,
      "learning_rate": 1.0091743119266055e-05,
      "loss": 0.9935,
      "step": 55
    },
    {
      "epoch": 0.05504587155963303,
      "grad_norm": 2.075016097277958,
      "learning_rate": 1.1009174311926607e-05,
      "loss": 0.9969,
      "step": 60
    },
    {
      "epoch": 0.05963302752293578,
      "grad_norm": 2.288649040057403,
      "learning_rate": 1.1926605504587156e-05,
      "loss": 0.98,
      "step": 65
    },
    {
      "epoch": 0.06422018348623854,
      "grad_norm": 2.7581294894600163,
      "learning_rate": 1.2844036697247708e-05,
      "loss": 0.9864,
      "step": 70
    },
    {
      "epoch": 0.06880733944954129,
      "grad_norm": 2.727748671508115,
      "learning_rate": 1.3761467889908258e-05,
      "loss": 0.9998,
      "step": 75
    },
    {
      "epoch": 0.07339449541284404,
      "grad_norm": 2.513424527269286,
      "learning_rate": 1.4678899082568809e-05,
      "loss": 0.9892,
      "step": 80
    },
    {
      "epoch": 0.0779816513761468,
      "grad_norm": 2.0479456147390644,
      "learning_rate": 1.559633027522936e-05,
      "loss": 0.9927,
      "step": 85
    },
    {
      "epoch": 0.08256880733944955,
      "grad_norm": 2.0867568861039487,
      "learning_rate": 1.6513761467889912e-05,
      "loss": 1.0121,
      "step": 90
    },
    {
      "epoch": 0.0871559633027523,
      "grad_norm": 2.8827900604302603,
      "learning_rate": 1.743119266055046e-05,
      "loss": 1.0201,
      "step": 95
    },
    {
      "epoch": 0.09174311926605505,
      "grad_norm": 2.0709329055230112,
      "learning_rate": 1.834862385321101e-05,
      "loss": 0.996,
      "step": 100
    },
    {
      "epoch": 0.0963302752293578,
      "grad_norm": 2.5702081185928103,
      "learning_rate": 1.9266055045871563e-05,
      "loss": 1.0116,
      "step": 105
    },
    {
      "epoch": 0.10091743119266056,
      "grad_norm": 2.1947935409816486,
      "learning_rate": 1.999994872196626e-05,
      "loss": 1.0224,
      "step": 110
    },
    {
      "epoch": 0.10550458715596331,
      "grad_norm": 1.9964111903602946,
      "learning_rate": 1.9998154046002822e-05,
      "loss": 1.0057,
      "step": 115
    },
    {
      "epoch": 0.11009174311926606,
      "grad_norm": 1.8945144621768437,
      "learning_rate": 1.999379599421534e-05,
      "loss": 1.0062,
      "step": 120
    },
    {
      "epoch": 0.11467889908256881,
      "grad_norm": 1.8242468916557006,
      "learning_rate": 1.9986875683942535e-05,
      "loss": 1.0154,
      "step": 125
    },
    {
      "epoch": 0.11926605504587157,
      "grad_norm": 1.8529020549594846,
      "learning_rate": 1.9977394889447526e-05,
      "loss": 1.0127,
      "step": 130
    },
    {
      "epoch": 0.12385321100917432,
      "grad_norm": 1.8387109925666045,
      "learning_rate": 1.9965356041462954e-05,
      "loss": 0.9942,
      "step": 135
    },
    {
      "epoch": 0.12844036697247707,
      "grad_norm": 2.0279915275712623,
      "learning_rate": 1.9950762226567783e-05,
      "loss": 1.0222,
      "step": 140
    },
    {
      "epoch": 0.13302752293577982,
      "grad_norm": 2.1108737110478994,
      "learning_rate": 1.9933617186395917e-05,
      "loss": 1.0291,
      "step": 145
    },
    {
      "epoch": 0.13761467889908258,
      "grad_norm": 2.1293431381675427,
      "learning_rate": 1.9913925316676946e-05,
      "loss": 1.0215,
      "step": 150
    },
    {
      "epoch": 0.14220183486238533,
      "grad_norm": 1.9622943928954406,
      "learning_rate": 1.9891691666109112e-05,
      "loss": 1.0447,
      "step": 155
    },
    {
      "epoch": 0.14678899082568808,
      "grad_norm": 1.9552108684711231,
      "learning_rate": 1.9866921935064907e-05,
      "loss": 1.034,
      "step": 160
    },
    {
      "epoch": 0.15137614678899083,
      "grad_norm": 1.9190740305620202,
      "learning_rate": 1.9839622474129595e-05,
      "loss": 1.0446,
      "step": 165
    },
    {
      "epoch": 0.1559633027522936,
      "grad_norm": 1.8020873014525902,
      "learning_rate": 1.9809800282473014e-05,
      "loss": 1.0322,
      "step": 170
    },
    {
      "epoch": 0.16055045871559634,
      "grad_norm": 1.6996799547703936,
      "learning_rate": 1.977746300605507e-05,
      "loss": 1.0383,
      "step": 175
    },
    {
      "epoch": 0.1651376146788991,
      "grad_norm": 2.187092931511103,
      "learning_rate": 1.9742618935665478e-05,
      "loss": 1.0294,
      "step": 180
    },
    {
      "epoch": 0.16972477064220184,
      "grad_norm": 2.0403921365189963,
      "learning_rate": 1.9705277004798072e-05,
      "loss": 1.0349,
      "step": 185
    },
    {
      "epoch": 0.1743119266055046,
      "grad_norm": 1.8496031208738504,
      "learning_rate": 1.9665446787360444e-05,
      "loss": 1.0404,
      "step": 190
    },
    {
      "epoch": 0.17889908256880735,
      "grad_norm": 1.8823992627267585,
      "learning_rate": 1.9623138495219292e-05,
      "loss": 1.0209,
      "step": 195
    },
    {
      "epoch": 0.1834862385321101,
      "grad_norm": 2.095822495179804,
      "learning_rate": 1.957836297558229e-05,
      "loss": 1.0222,
      "step": 200
    },
    {
      "epoch": 0.18807339449541285,
      "grad_norm": 2.4911998482939355,
      "learning_rate": 1.9531131708217005e-05,
      "loss": 1.027,
      "step": 205
    },
    {
      "epoch": 0.1926605504587156,
      "grad_norm": 1.821496874433616,
      "learning_rate": 1.948145680250766e-05,
      "loss": 1.0231,
      "step": 210
    },
    {
      "epoch": 0.19724770642201836,
      "grad_norm": 1.759877910585677,
      "learning_rate": 1.9429350994350483e-05,
      "loss": 1.0345,
      "step": 215
    },
    {
      "epoch": 0.2018348623853211,
      "grad_norm": 1.8748142613908458,
      "learning_rate": 1.93748276428884e-05,
      "loss": 1.0408,
      "step": 220
    },
    {
      "epoch": 0.20642201834862386,
      "grad_norm": 2.0102856321068514,
      "learning_rate": 1.931790072708596e-05,
      "loss": 1.0245,
      "step": 225
    },
    {
      "epoch": 0.21100917431192662,
      "grad_norm": 1.8044235525523873,
      "learning_rate": 1.9258584842145342e-05,
      "loss": 1.0434,
      "step": 230
    },
    {
      "epoch": 0.21559633027522937,
      "grad_norm": 2.1107770756344193,
      "learning_rate": 1.9196895195764363e-05,
      "loss": 1.0216,
      "step": 235
    },
    {
      "epoch": 0.22018348623853212,
      "grad_norm": 1.7741183736117787,
      "learning_rate": 1.913284760423745e-05,
      "loss": 1.0259,
      "step": 240
    },
    {
      "epoch": 0.22477064220183487,
      "grad_norm": 1.8121132100517001,
      "learning_rate": 1.9066458488400586e-05,
      "loss": 1.0249,
      "step": 245
    },
    {
      "epoch": 0.22935779816513763,
      "grad_norm": 1.6374262690884738,
      "learning_rate": 1.8997744869421248e-05,
      "loss": 1.02,
      "step": 250
    },
    {
      "epoch": 0.23394495412844038,
      "grad_norm": 1.7269378718345392,
      "learning_rate": 1.8926724364434447e-05,
      "loss": 1.013,
      "step": 255
    },
    {
      "epoch": 0.23853211009174313,
      "grad_norm": 1.7554961488227134,
      "learning_rate": 1.8853415182025953e-05,
      "loss": 1.0275,
      "step": 260
    },
    {
      "epoch": 0.24311926605504589,
      "grad_norm": 1.888907283355598,
      "learning_rate": 1.8777836117563894e-05,
      "loss": 1.0307,
      "step": 265
    },
    {
      "epoch": 0.24770642201834864,
      "grad_norm": 1.6943826060007061,
      "learning_rate": 1.8700006548379898e-05,
      "loss": 1.021,
      "step": 270
    },
    {
      "epoch": 0.25229357798165136,
      "grad_norm": 1.622531543885841,
      "learning_rate": 1.861994642880105e-05,
      "loss": 1.0392,
      "step": 275
    },
    {
      "epoch": 0.25688073394495414,
      "grad_norm": 1.5095301144772582,
      "learning_rate": 1.8537676285033886e-05,
      "loss": 1.0279,
      "step": 280
    },
    {
      "epoch": 0.26146788990825687,
      "grad_norm": 1.6417056350190902,
      "learning_rate": 1.845321720990181e-05,
      "loss": 1.0296,
      "step": 285
    },
    {
      "epoch": 0.26605504587155965,
      "grad_norm": 1.5722941630740008,
      "learning_rate": 1.8366590857437182e-05,
      "loss": 1.0334,
      "step": 290
    },
    {
      "epoch": 0.2706422018348624,
      "grad_norm": 1.8638842301102823,
      "learning_rate": 1.8277819437329577e-05,
      "loss": 1.0215,
      "step": 295
    },
    {
      "epoch": 0.27522935779816515,
      "grad_norm": 1.63281187814375,
      "learning_rate": 1.8186925709231534e-05,
      "loss": 1.0341,
      "step": 300
    },
    {
      "epoch": 0.2798165137614679,
      "grad_norm": 1.661078648257904,
      "learning_rate": 1.809393297692334e-05,
      "loss": 1.0246,
      "step": 305
    },
    {
      "epoch": 0.28440366972477066,
      "grad_norm": 1.7482598431925447,
      "learning_rate": 1.799886508233829e-05,
      "loss": 1.0442,
      "step": 310
    },
    {
      "epoch": 0.2889908256880734,
      "grad_norm": 1.577636868272626,
      "learning_rate": 1.790174639944997e-05,
      "loss": 1.0369,
      "step": 315
    },
    {
      "epoch": 0.29357798165137616,
      "grad_norm": 1.7044142589762405,
      "learning_rate": 1.780260182802314e-05,
      "loss": 1.0239,
      "step": 320
    },
    {
      "epoch": 0.2981651376146789,
      "grad_norm": 1.7546359102106406,
      "learning_rate": 1.7701456787229805e-05,
      "loss": 1.0327,
      "step": 325
    },
    {
      "epoch": 0.30275229357798167,
      "grad_norm": 1.5039111648273473,
      "learning_rate": 1.7598337209132142e-05,
      "loss": 1.0157,
      "step": 330
    },
    {
      "epoch": 0.3073394495412844,
      "grad_norm": 1.6602442070381076,
      "learning_rate": 1.7493269532033882e-05,
      "loss": 1.0182,
      "step": 335
    },
    {
      "epoch": 0.3119266055045872,
      "grad_norm": 1.53831993823906,
      "learning_rate": 1.738628069370195e-05,
      "loss": 1.029,
      "step": 340
    },
    {
      "epoch": 0.3165137614678899,
      "grad_norm": 1.6805290811393307,
      "learning_rate": 1.7277398124460022e-05,
      "loss": 1.0235,
      "step": 345
    },
    {
      "epoch": 0.3211009174311927,
      "grad_norm": 1.4853849203076779,
      "learning_rate": 1.71666497401558e-05,
      "loss": 0.9925,
      "step": 350
    },
    {
      "epoch": 0.3256880733944954,
      "grad_norm": 1.672232887496304,
      "learning_rate": 1.7054063935003813e-05,
      "loss": 1.0252,
      "step": 355
    },
    {
      "epoch": 0.3302752293577982,
      "grad_norm": 1.6345254123456943,
      "learning_rate": 1.6939669574305565e-05,
      "loss": 1.0085,
      "step": 360
    },
    {
      "epoch": 0.3348623853211009,
      "grad_norm": 1.5262133900432817,
      "learning_rate": 1.6823495987048922e-05,
      "loss": 1.0105,
      "step": 365
    },
    {
      "epoch": 0.3394495412844037,
      "grad_norm": 1.5242935487304878,
      "learning_rate": 1.6705572958388576e-05,
      "loss": 1.0108,
      "step": 370
    },
    {
      "epoch": 0.3440366972477064,
      "grad_norm": 1.6718301781389493,
      "learning_rate": 1.6585930722009602e-05,
      "loss": 1.0347,
      "step": 375
    },
    {
      "epoch": 0.3486238532110092,
      "grad_norm": 1.4913729762956598,
      "learning_rate": 1.6464599952375998e-05,
      "loss": 1.0232,
      "step": 380
    },
    {
      "epoch": 0.3532110091743119,
      "grad_norm": 1.5351597927925422,
      "learning_rate": 1.63416117568662e-05,
      "loss": 1.0177,
      "step": 385
    },
    {
      "epoch": 0.3577981651376147,
      "grad_norm": 1.5623411713080309,
      "learning_rate": 1.621699766779763e-05,
      "loss": 1.0144,
      "step": 390
    },
    {
      "epoch": 0.3623853211009174,
      "grad_norm": 1.4434170639068806,
      "learning_rate": 1.6090789634342278e-05,
      "loss": 1.005,
      "step": 395
    },
    {
      "epoch": 0.3669724770642202,
      "grad_norm": 1.524722181019825,
      "learning_rate": 1.5963020014335437e-05,
      "loss": 1.0134,
      "step": 400
    },
    {
      "epoch": 0.37155963302752293,
      "grad_norm": 1.5856120759168468,
      "learning_rate": 1.583372156597961e-05,
      "loss": 1.0059,
      "step": 405
    },
    {
      "epoch": 0.3761467889908257,
      "grad_norm": 1.4415082474651109,
      "learning_rate": 1.570292743944583e-05,
      "loss": 1.0029,
      "step": 410
    },
    {
      "epoch": 0.38073394495412843,
      "grad_norm": 1.4260675517472883,
      "learning_rate": 1.557067116837444e-05,
      "loss": 1.0219,
      "step": 415
    },
    {
      "epoch": 0.3853211009174312,
      "grad_norm": 1.468487983706826,
      "learning_rate": 1.5436986661277578e-05,
      "loss": 1.013,
      "step": 420
    },
    {
      "epoch": 0.38990825688073394,
      "grad_norm": 1.5254325276571765,
      "learning_rate": 1.530190819284555e-05,
      "loss": 1.0034,
      "step": 425
    },
    {
      "epoch": 0.3944954128440367,
      "grad_norm": 1.5798795100356116,
      "learning_rate": 1.5165470395159314e-05,
      "loss": 1.0332,
      "step": 430
    },
    {
      "epoch": 0.39908256880733944,
      "grad_norm": 1.552200364694848,
      "learning_rate": 1.5027708248811331e-05,
      "loss": 1.0007,
      "step": 435
    },
    {
      "epoch": 0.4036697247706422,
      "grad_norm": 1.4214931631494656,
      "learning_rate": 1.4888657073937077e-05,
      "loss": 1.0094,
      "step": 440
    },
    {
      "epoch": 0.40825688073394495,
      "grad_norm": 1.4843255397237436,
      "learning_rate": 1.4748352521159492e-05,
      "loss": 0.9961,
      "step": 445
    },
    {
      "epoch": 0.41284403669724773,
      "grad_norm": 1.4632988494224077,
      "learning_rate": 1.4606830562448692e-05,
      "loss": 1.0058,
      "step": 450
    },
    {
      "epoch": 0.41743119266055045,
      "grad_norm": 1.4953822979383466,
      "learning_rate": 1.4464127481899312e-05,
      "loss": 1.008,
      "step": 455
    },
    {
      "epoch": 0.42201834862385323,
      "grad_norm": 1.4559315828564945,
      "learning_rate": 1.4320279866427798e-05,
      "loss": 1.0065,
      "step": 460
    },
    {
      "epoch": 0.42660550458715596,
      "grad_norm": 1.6922761610390848,
      "learning_rate": 1.4175324596392075e-05,
      "loss": 1.0205,
      "step": 465
    },
    {
      "epoch": 0.43119266055045874,
      "grad_norm": 1.590358388704108,
      "learning_rate": 1.402929883613599e-05,
      "loss": 1.0069,
      "step": 470
    },
    {
      "epoch": 0.43577981651376146,
      "grad_norm": 1.4546641964451503,
      "learning_rate": 1.3882240024460928e-05,
      "loss": 0.9926,
      "step": 475
    },
    {
      "epoch": 0.44036697247706424,
      "grad_norm": 1.4097757101446433,
      "learning_rate": 1.3734185865027061e-05,
      "loss": 1.0109,
      "step": 480
    },
    {
      "epoch": 0.44495412844036697,
      "grad_norm": 1.4718695959287078,
      "learning_rate": 1.358517431668672e-05,
      "loss": 0.9992,
      "step": 485
    },
    {
      "epoch": 0.44954128440366975,
      "grad_norm": 1.3849859496158945,
      "learning_rate": 1.3435243583752294e-05,
      "loss": 0.9943,
      "step": 490
    },
    {
      "epoch": 0.4541284403669725,
      "grad_norm": 1.4115509115184626,
      "learning_rate": 1.3284432106201233e-05,
      "loss": 0.9882,
      "step": 495
    },
    {
      "epoch": 0.45871559633027525,
      "grad_norm": 1.541052954984908,
      "learning_rate": 1.313277854982062e-05,
      "loss": 1.0081,
      "step": 500
    },
    {
      "epoch": 0.463302752293578,
      "grad_norm": 1.6604691549617376,
      "learning_rate": 1.2980321796293838e-05,
      "loss": 0.9893,
      "step": 505
    },
    {
      "epoch": 0.46788990825688076,
      "grad_norm": 1.5419924069079967,
      "learning_rate": 1.2827100933231904e-05,
      "loss": 1.005,
      "step": 510
    },
    {
      "epoch": 0.4724770642201835,
      "grad_norm": 1.5391926952486923,
      "learning_rate": 1.2673155244151985e-05,
      "loss": 1.0106,
      "step": 515
    },
    {
      "epoch": 0.47706422018348627,
      "grad_norm": 1.5041092670535134,
      "learning_rate": 1.2518524198405699e-05,
      "loss": 0.9993,
      "step": 520
    },
    {
      "epoch": 0.481651376146789,
      "grad_norm": 1.5768343545674215,
      "learning_rate": 1.2363247441059775e-05,
      "loss": 0.9841,
      "step": 525
    },
    {
      "epoch": 0.48623853211009177,
      "grad_norm": 1.4584802149012241,
      "learning_rate": 1.2207364782731657e-05,
      "loss": 1.0082,
      "step": 530
    },
    {
      "epoch": 0.4908256880733945,
      "grad_norm": 1.428960359443167,
      "learning_rate": 1.2050916189382646e-05,
      "loss": 0.9731,
      "step": 535
    },
    {
      "epoch": 0.4954128440366973,
      "grad_norm": 1.4606267386566858,
      "learning_rate": 1.189394177207125e-05,
      "loss": 0.9795,
      "step": 540
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.376913229782971,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.9881,
      "step": 545
    },
    {
      "epoch": 0.5045871559633027,
      "grad_norm": 1.3836549478959417,
      "learning_rate": 1.1578576573543541e-05,
      "loss": 0.9641,
      "step": 550
    },
    {
      "epoch": 0.5091743119266054,
      "grad_norm": 1.356149707622323,
      "learning_rate": 1.1420266647205232e-05,
      "loss": 0.9744,
      "step": 555
    },
    {
      "epoch": 0.5137614678899083,
      "grad_norm": 1.3827419350659662,
      "learning_rate": 1.1261592585930576e-05,
      "loss": 0.9839,
      "step": 560
    },
    {
      "epoch": 0.518348623853211,
      "grad_norm": 1.4379779871173162,
      "learning_rate": 1.1102595071354471e-05,
      "loss": 0.9851,
      "step": 565
    },
    {
      "epoch": 0.5229357798165137,
      "grad_norm": 1.399288614224523,
      "learning_rate": 1.0943314868040365e-05,
      "loss": 0.9781,
      "step": 570
    },
    {
      "epoch": 0.5275229357798165,
      "grad_norm": 1.4412830988024221,
      "learning_rate": 1.0783792813028828e-05,
      "loss": 0.9881,
      "step": 575
    },
    {
      "epoch": 0.5321100917431193,
      "grad_norm": 1.365677938278276,
      "learning_rate": 1.0624069805367558e-05,
      "loss": 0.9696,
      "step": 580
    },
    {
      "epoch": 0.536697247706422,
      "grad_norm": 1.3825640573901334,
      "learning_rate": 1.0464186795625481e-05,
      "loss": 0.9726,
      "step": 585
    },
    {
      "epoch": 0.5412844036697247,
      "grad_norm": 1.4955792558472396,
      "learning_rate": 1.0304184775393642e-05,
      "loss": 0.9874,
      "step": 590
    },
    {
      "epoch": 0.5458715596330275,
      "grad_norm": 1.3824032912359923,
      "learning_rate": 1.0144104766775574e-05,
      "loss": 0.9695,
      "step": 595
    },
    {
      "epoch": 0.5504587155963303,
      "grad_norm": 1.3477277386187514,
      "learning_rate": 9.983987811869863e-06,
      "loss": 0.9617,
      "step": 600
    },
    {
      "epoch": 0.555045871559633,
      "grad_norm": 1.4036097176796005,
      "learning_rate": 9.823874962247565e-06,
      "loss": 0.9847,
      "step": 605
    },
    {
      "epoch": 0.5596330275229358,
      "grad_norm": 1.4536087002525018,
      "learning_rate": 9.663807268427197e-06,
      "loss": 0.9493,
      "step": 610
    },
    {
      "epoch": 0.5642201834862385,
      "grad_norm": 1.3938810806635806,
      "learning_rate": 9.503825769350016e-06,
      "loss": 0.9759,
      "step": 615
    },
    {
      "epoch": 0.5688073394495413,
      "grad_norm": 1.4313464690072646,
      "learning_rate": 9.343971481858246e-06,
      "loss": 0.9718,
      "step": 620
    },
    {
      "epoch": 0.573394495412844,
      "grad_norm": 1.3483789746433568,
      "learning_rate": 9.184285390178978e-06,
      "loss": 0.9685,
      "step": 625
    },
    {
      "epoch": 0.5779816513761468,
      "grad_norm": 1.3437326617499212,
      "learning_rate": 9.024808435416435e-06,
      "loss": 0.9565,
      "step": 630
    },
    {
      "epoch": 0.5825688073394495,
      "grad_norm": 1.3899537082883675,
      "learning_rate": 8.865581505055292e-06,
      "loss": 0.966,
      "step": 635
    },
    {
      "epoch": 0.5871559633027523,
      "grad_norm": 1.4049970453901013,
      "learning_rate": 8.706645422477739e-06,
      "loss": 0.9609,
      "step": 640
    },
    {
      "epoch": 0.591743119266055,
      "grad_norm": 1.563627127558928,
      "learning_rate": 8.548040936496989e-06,
      "loss": 0.9669,
      "step": 645
    },
    {
      "epoch": 0.5963302752293578,
      "grad_norm": 1.4175659667522775,
      "learning_rate": 8.389808710909881e-06,
      "loss": 0.969,
      "step": 650
    },
    {
      "epoch": 0.6009174311926605,
      "grad_norm": 1.3887375134393394,
      "learning_rate": 8.231989314071318e-06,
      "loss": 0.9672,
      "step": 655
    },
    {
      "epoch": 0.6055045871559633,
      "grad_norm": 1.3886857636850283,
      "learning_rate": 8.07462320849313e-06,
      "loss": 0.9825,
      "step": 660
    },
    {
      "epoch": 0.6100917431192661,
      "grad_norm": 1.3852031395838529,
      "learning_rate": 7.917750740470116e-06,
      "loss": 0.9612,
      "step": 665
    },
    {
      "epoch": 0.6146788990825688,
      "grad_norm": 1.3564902677213335,
      "learning_rate": 7.761412129735853e-06,
      "loss": 0.9606,
      "step": 670
    },
    {
      "epoch": 0.6192660550458715,
      "grad_norm": 1.364640541839942,
      "learning_rate": 7.605647459150961e-06,
      "loss": 0.9699,
      "step": 675
    },
    {
      "epoch": 0.6238532110091743,
      "grad_norm": 1.3628791024834306,
      "learning_rate": 7.4504966644264775e-06,
      "loss": 0.9499,
      "step": 680
    },
    {
      "epoch": 0.6284403669724771,
      "grad_norm": 1.3160716541619537,
      "learning_rate": 7.295999523884921e-06,
      "loss": 0.9605,
      "step": 685
    },
    {
      "epoch": 0.6330275229357798,
      "grad_norm": 1.3601615038063317,
      "learning_rate": 7.142195648261747e-06,
      "loss": 0.9712,
      "step": 690
    },
    {
      "epoch": 0.6376146788990825,
      "grad_norm": 1.3879140081588965,
      "learning_rate": 6.989124470549746e-06,
      "loss": 0.9397,
      "step": 695
    },
    {
      "epoch": 0.6422018348623854,
      "grad_norm": 1.3184740100520171,
      "learning_rate": 6.83682523588902e-06,
      "loss": 0.9636,
      "step": 700
    },
    {
      "epoch": 0.6467889908256881,
      "grad_norm": 1.3571390496693267,
      "learning_rate": 6.685336991505122e-06,
      "loss": 0.9661,
      "step": 705
    },
    {
      "epoch": 0.6513761467889908,
      "grad_norm": 1.306435428954974,
      "learning_rate": 6.5346985766979384e-06,
      "loss": 0.9546,
      "step": 710
    },
    {
      "epoch": 0.6559633027522935,
      "grad_norm": 1.3143087769244612,
      "learning_rate": 6.384948612883872e-06,
      "loss": 0.9599,
      "step": 715
    },
    {
      "epoch": 0.6605504587155964,
      "grad_norm": 1.2717322661805426,
      "learning_rate": 6.2361254936939e-06,
      "loss": 0.9589,
      "step": 720
    },
    {
      "epoch": 0.6651376146788991,
      "grad_norm": 1.304695248009233,
      "learning_rate": 6.0882673751300235e-06,
      "loss": 0.9477,
      "step": 725
    },
    {
      "epoch": 0.6697247706422018,
      "grad_norm": 1.3414217532090449,
      "learning_rate": 5.941412165782645e-06,
      "loss": 0.9568,
      "step": 730
    },
    {
      "epoch": 0.6743119266055045,
      "grad_norm": 1.4013089394139884,
      "learning_rate": 5.79559751711138e-06,
      "loss": 0.9482,
      "step": 735
    },
    {
      "epoch": 0.6788990825688074,
      "grad_norm": 1.3044699444068533,
      "learning_rate": 5.650860813791786e-06,
      "loss": 0.9486,
      "step": 740
    },
    {
      "epoch": 0.6834862385321101,
      "grad_norm": 1.3828781502865561,
      "learning_rate": 5.507239164130501e-06,
      "loss": 0.9489,
      "step": 745
    },
    {
      "epoch": 0.6880733944954128,
      "grad_norm": 1.3314126321208273,
      "learning_rate": 5.364769390551225e-06,
      "loss": 0.969,
      "step": 750
    },
    {
      "epoch": 0.6926605504587156,
      "grad_norm": 1.2942289271606044,
      "learning_rate": 5.223488020154028e-06,
      "loss": 0.96,
      "step": 755
    },
    {
      "epoch": 0.6972477064220184,
      "grad_norm": 1.3357321457662634,
      "learning_rate": 5.083431275350312e-06,
      "loss": 0.9258,
      "step": 760
    },
    {
      "epoch": 0.7018348623853211,
      "grad_norm": 1.3041211686044023,
      "learning_rate": 4.9446350645759885e-06,
      "loss": 0.9417,
      "step": 765
    },
    {
      "epoch": 0.7064220183486238,
      "grad_norm": 1.380479241153724,
      "learning_rate": 4.807134973085036e-06,
      "loss": 0.9666,
      "step": 770
    },
    {
      "epoch": 0.7110091743119266,
      "grad_norm": 1.2413378517127889,
      "learning_rate": 4.670966253826027e-06,
      "loss": 0.9352,
      "step": 775
    },
    {
      "epoch": 0.7155963302752294,
      "grad_norm": 1.4796267075105034,
      "learning_rate": 4.53616381840377e-06,
      "loss": 0.9473,
      "step": 780
    },
    {
      "epoch": 0.7201834862385321,
      "grad_norm": 1.3651275142718018,
      "learning_rate": 4.402762228128531e-06,
      "loss": 0.9392,
      "step": 785
    },
    {
      "epoch": 0.7247706422018348,
      "grad_norm": 1.4100087288923597,
      "learning_rate": 4.270795685155001e-06,
      "loss": 0.9478,
      "step": 790
    },
    {
      "epoch": 0.7293577981651376,
      "grad_norm": 1.3017707707123873,
      "learning_rate": 4.140298023713416e-06,
      "loss": 0.9301,
      "step": 795
    },
    {
      "epoch": 0.7339449541284404,
      "grad_norm": 1.3267041246218492,
      "learning_rate": 4.0113027014349374e-06,
      "loss": 0.9146,
      "step": 800
    },
    {
      "epoch": 0.7385321100917431,
      "grad_norm": 1.2995509569521342,
      "learning_rate": 3.883842790773647e-06,
      "loss": 0.9573,
      "step": 805
    },
    {
      "epoch": 0.7431192660550459,
      "grad_norm": 1.2910745261862546,
      "learning_rate": 3.757950970527249e-06,
      "loss": 0.9265,
      "step": 810
    },
    {
      "epoch": 0.7477064220183486,
      "grad_norm": 1.2853989305550235,
      "learning_rate": 3.633659517458736e-06,
      "loss": 0.9248,
      "step": 815
    },
    {
      "epoch": 0.7522935779816514,
      "grad_norm": 1.263114329710996,
      "learning_rate": 3.511000298021098e-06,
      "loss": 0.9231,
      "step": 820
    },
    {
      "epoch": 0.7568807339449541,
      "grad_norm": 1.3202969619364027,
      "learning_rate": 3.39000476018726e-06,
      "loss": 0.9265,
      "step": 825
    },
    {
      "epoch": 0.7614678899082569,
      "grad_norm": 1.2837511661684693,
      "learning_rate": 3.2707039253872796e-06,
      "loss": 0.9238,
      "step": 830
    },
    {
      "epoch": 0.7660550458715596,
      "grad_norm": 1.272334565345566,
      "learning_rate": 3.153128380554941e-06,
      "loss": 0.9318,
      "step": 835
    },
    {
      "epoch": 0.7706422018348624,
      "grad_norm": 1.2911962822321414,
      "learning_rate": 3.037308270285709e-06,
      "loss": 0.9229,
      "step": 840
    },
    {
      "epoch": 0.7752293577981652,
      "grad_norm": 1.2871010544124712,
      "learning_rate": 2.923273289108115e-06,
      "loss": 0.9409,
      "step": 845
    },
    {
      "epoch": 0.7798165137614679,
      "grad_norm": 1.2957020048675643,
      "learning_rate": 2.8110526738705345e-06,
      "loss": 0.9243,
      "step": 850
    },
    {
      "epoch": 0.7844036697247706,
      "grad_norm": 1.291643068745679,
      "learning_rate": 2.700675196245288e-06,
      "loss": 0.9396,
      "step": 855
    },
    {
      "epoch": 0.7889908256880734,
      "grad_norm": 1.2442630476677876,
      "learning_rate": 2.592169155352031e-06,
      "loss": 0.9416,
      "step": 860
    },
    {
      "epoch": 0.7935779816513762,
      "grad_norm": 1.2808835563133167,
      "learning_rate": 2.485562370502279e-06,
      "loss": 0.9428,
      "step": 865
    },
    {
      "epoch": 0.7981651376146789,
      "grad_norm": 1.322945652372357,
      "learning_rate": 2.3808821740669608e-06,
      "loss": 0.9358,
      "step": 870
    },
    {
      "epoch": 0.8027522935779816,
      "grad_norm": 1.3273655469543255,
      "learning_rate": 2.2781554044688015e-06,
      "loss": 0.9379,
      "step": 875
    },
    {
      "epoch": 0.8073394495412844,
      "grad_norm": 1.2953684297066224,
      "learning_rate": 2.1774083993013715e-06,
      "loss": 0.935,
      "step": 880
    },
    {
      "epoch": 0.8119266055045872,
      "grad_norm": 1.314047012294258,
      "learning_rate": 2.0786669885765044e-06,
      "loss": 0.9273,
      "step": 885
    },
    {
      "epoch": 0.8165137614678899,
      "grad_norm": 1.2910450886812495,
      "learning_rate": 1.981956488101898e-06,
      "loss": 0.9406,
      "step": 890
    },
    {
      "epoch": 0.8211009174311926,
      "grad_norm": 1.2889304700514808,
      "learning_rate": 1.8873016929904942e-06,
      "loss": 0.9423,
      "step": 895
    },
    {
      "epoch": 0.8256880733944955,
      "grad_norm": 1.2584483146134866,
      "learning_rate": 1.7947268713034128e-06,
      "loss": 0.9119,
      "step": 900
    },
    {
      "epoch": 0.8302752293577982,
      "grad_norm": 1.2306571475976114,
      "learning_rate": 1.704255757827963e-06,
      "loss": 0.9147,
      "step": 905
    },
    {
      "epoch": 0.8348623853211009,
      "grad_norm": 1.2996923914258511,
      "learning_rate": 1.6159115479924259e-06,
      "loss": 0.9309,
      "step": 910
    },
    {
      "epoch": 0.8394495412844036,
      "grad_norm": 1.3030591756269578,
      "learning_rate": 1.529716891919074e-06,
      "loss": 0.9393,
      "step": 915
    },
    {
      "epoch": 0.8440366972477065,
      "grad_norm": 1.289888704070606,
      "learning_rate": 1.4456938886170413e-06,
      "loss": 0.9106,
      "step": 920
    },
    {
      "epoch": 0.8486238532110092,
      "grad_norm": 1.3368041078863482,
      "learning_rate": 1.3638640803164516e-06,
      "loss": 0.9242,
      "step": 925
    },
    {
      "epoch": 0.8532110091743119,
      "grad_norm": 1.2917360929177786,
      "learning_rate": 1.2842484469453365e-06,
      "loss": 0.9143,
      "step": 930
    },
    {
      "epoch": 0.8577981651376146,
      "grad_norm": 1.2939657342934223,
      "learning_rate": 1.2068674007506787e-06,
      "loss": 0.9286,
      "step": 935
    },
    {
      "epoch": 0.8623853211009175,
      "grad_norm": 1.3560403644461199,
      "learning_rate": 1.1317407810650372e-06,
      "loss": 0.9264,
      "step": 940
    },
    {
      "epoch": 0.8669724770642202,
      "grad_norm": 1.2478045531537327,
      "learning_rate": 1.0588878492200261e-06,
      "loss": 0.9319,
      "step": 945
    },
    {
      "epoch": 0.8715596330275229,
      "grad_norm": 1.2774964714280372,
      "learning_rate": 9.883272836080116e-07,
      "loss": 0.9195,
      "step": 950
    },
    {
      "epoch": 0.8761467889908257,
      "grad_norm": 1.2810847178825988,
      "learning_rate": 9.200771748932513e-07,
      "loss": 0.9327,
      "step": 955
    },
    {
      "epoch": 0.8807339449541285,
      "grad_norm": 1.300184919031024,
      "learning_rate": 8.541550213737171e-07,
      "loss": 0.9115,
      "step": 960
    },
    {
      "epoch": 0.8853211009174312,
      "grad_norm": 1.2673226234465946,
      "learning_rate": 7.905777244947954e-07,
      "loss": 0.9182,
      "step": 965
    },
    {
      "epoch": 0.8899082568807339,
      "grad_norm": 1.2812373865753657,
      "learning_rate": 7.293615845160196e-07,
      "loss": 0.9289,
      "step": 970
    },
    {
      "epoch": 0.8944954128440367,
      "grad_norm": 1.2598363534447818,
      "learning_rate": 6.705222963319191e-07,
      "loss": 0.9375,
      "step": 975
    },
    {
      "epoch": 0.8990825688073395,
      "grad_norm": 1.311487874418335,
      "learning_rate": 6.140749454480932e-07,
      "loss": 0.9116,
      "step": 980
    },
    {
      "epoch": 0.9036697247706422,
      "grad_norm": 1.2877511107465471,
      "learning_rate": 5.600340041135133e-07,
      "loss": 0.9185,
      "step": 985
    },
    {
      "epoch": 0.908256880733945,
      "grad_norm": 1.2931543960319436,
      "learning_rate": 5.0841332761005e-07,
      "loss": 0.9332,
      "step": 990
    },
    {
      "epoch": 0.9128440366972477,
      "grad_norm": 1.180427274170459,
      "learning_rate": 4.592261507001994e-07,
      "loss": 0.9144,
      "step": 995
    },
    {
      "epoch": 0.9174311926605505,
      "grad_norm": 1.2599014895347989,
      "learning_rate": 4.124850842338779e-07,
      "loss": 0.9195,
      "step": 1000
    },
    {
      "epoch": 0.9220183486238532,
      "grad_norm": 1.2650407345597277,
      "learning_rate": 3.6820211191520127e-07,
      "loss": 0.9233,
      "step": 1005
    },
    {
      "epoch": 0.926605504587156,
      "grad_norm": 1.2740336020686558,
      "learning_rate": 3.263885872300343e-07,
      "loss": 0.9229,
      "step": 1010
    },
    {
      "epoch": 0.9311926605504587,
      "grad_norm": 1.2585813421221876,
      "learning_rate": 2.870552305351382e-07,
      "loss": 0.9004,
      "step": 1015
    },
    {
      "epoch": 0.9357798165137615,
      "grad_norm": 1.265046561795281,
      "learning_rate": 2.5021212630962246e-07,
      "loss": 0.9273,
      "step": 1020
    },
    {
      "epoch": 0.9403669724770642,
      "grad_norm": 1.2115732603108056,
      "learning_rate": 2.158687205694443e-07,
      "loss": 0.907,
      "step": 1025
    },
    {
      "epoch": 0.944954128440367,
      "grad_norm": 1.2383709828148504,
      "learning_rate": 1.840338184455881e-07,
      "loss": 0.9273,
      "step": 1030
    },
    {
      "epoch": 0.9495412844036697,
      "grad_norm": 1.2701853729566557,
      "learning_rate": 1.5471558192656776e-07,
      "loss": 0.9162,
      "step": 1035
    },
    {
      "epoch": 0.9541284403669725,
      "grad_norm": 1.2175490882454665,
      "learning_rate": 1.279215277658097e-07,
      "loss": 0.9324,
      "step": 1040
    },
    {
      "epoch": 0.9587155963302753,
      "grad_norm": 1.2654519384436929,
      "learning_rate": 1.0365852555447642e-07,
      "loss": 0.9251,
      "step": 1045
    },
    {
      "epoch": 0.963302752293578,
      "grad_norm": 1.2262401130217693,
      "learning_rate": 8.19327959602012e-08,
      "loss": 0.9347,
      "step": 1050
    },
    {
      "epoch": 0.9678899082568807,
      "grad_norm": 1.252498856154254,
      "learning_rate": 6.274990913221035e-08,
      "loss": 0.9287,
      "step": 1055
    },
    {
      "epoch": 0.9724770642201835,
      "grad_norm": 1.2498743795801759,
      "learning_rate": 4.6114783273213395e-08,
      "loss": 0.9137,
      "step": 1060
    },
    {
      "epoch": 0.9770642201834863,
      "grad_norm": 1.2653583861940565,
      "learning_rate": 3.203168337845508e-08,
      "loss": 0.9067,
      "step": 1065
    },
    {
      "epoch": 0.981651376146789,
      "grad_norm": 1.2716276154907005,
      "learning_rate": 2.05042201422323e-08,
      "loss": 0.9157,
      "step": 1070
    },
    {
      "epoch": 0.9862385321100917,
      "grad_norm": 1.2922733325507376,
      "learning_rate": 1.1535349032167908e-08,
      "loss": 0.8985,
      "step": 1075
    },
    {
      "epoch": 0.9908256880733946,
      "grad_norm": 1.2484185326437052,
      "learning_rate": 5.127369531473525e-09,
      "loss": 0.9282,
      "step": 1080
    },
    {
      "epoch": 0.9954128440366973,
      "grad_norm": 1.2964676313466048,
      "learning_rate": 1.2819245493955746e-09,
      "loss": 0.9246,
      "step": 1085
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.33366290382446,
      "learning_rate": 0.0,
      "loss": 0.9201,
      "step": 1090
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.9365631341934204,
      "eval_runtime": 506.8306,
      "eval_samples_per_second": 30.446,
      "eval_steps_per_second": 0.477,
      "step": 1090
    },
    {
      "epoch": 1.0,
      "step": 1090,
      "total_flos": 456447649382400.0,
      "train_loss": 0.9783915571116526,
      "train_runtime": 14076.1137,
      "train_samples_per_second": 9.905,
      "train_steps_per_second": 0.077
    }
  ],
  "logging_steps": 5,
  "max_steps": 1090,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 456447649382400.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}