{
  "best_global_step": 3272,
  "best_metric": 0.9839899931917298,
  "best_model_checkpoint": "voice_gender_classification/checkpoint-3272",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 3272,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0003056234718826406,
      "grad_norm": 1.0770392417907715,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.6859,
      "step": 1
    },
    {
      "epoch": 0.003056234718826406,
      "grad_norm": 1.2716485261917114,
      "learning_rate": 6e-06,
      "loss": 0.7011,
      "step": 10
    },
    {
      "epoch": 0.006112469437652812,
      "grad_norm": 1.0392799377441406,
      "learning_rate": 1.2e-05,
      "loss": 0.6944,
      "step": 20
    },
    {
      "epoch": 0.009168704156479218,
      "grad_norm": 0.628126859664917,
      "learning_rate": 1.8e-05,
      "loss": 0.6592,
      "step": 30
    },
    {
      "epoch": 0.012224938875305624,
      "grad_norm": 2.359282970428467,
      "learning_rate": 2.4e-05,
      "loss": 0.6314,
      "step": 40
    },
    {
      "epoch": 0.015281173594132029,
      "grad_norm": 4.273380756378174,
      "learning_rate": 3e-05,
      "loss": 0.5976,
      "step": 50
    },
    {
      "epoch": 0.018337408312958436,
      "grad_norm": 1.5336124897003174,
      "learning_rate": 2.9906890130353816e-05,
      "loss": 0.4168,
      "step": 60
    },
    {
      "epoch": 0.021393643031784843,
      "grad_norm": 1.772170066833496,
      "learning_rate": 2.9823091247672255e-05,
      "loss": 0.3254,
      "step": 70
    },
    {
      "epoch": 0.02444987775061125,
      "grad_norm": 1.347057580947876,
      "learning_rate": 2.973929236499069e-05,
      "loss": 0.1641,
      "step": 80
    },
    {
      "epoch": 0.02750611246943765,
      "grad_norm": 0.8756974339485168,
      "learning_rate": 2.9646182495344507e-05,
      "loss": 0.2623,
      "step": 90
    },
    {
      "epoch": 0.030562347188264057,
      "grad_norm": 0.594032347202301,
      "learning_rate": 2.9553072625698325e-05,
      "loss": 0.0924,
      "step": 100
    },
    {
      "epoch": 0.03361858190709047,
      "grad_norm": 15.337114334106445,
      "learning_rate": 2.946927374301676e-05,
      "loss": 0.4399,
      "step": 110
    },
    {
      "epoch": 0.03667481662591687,
      "grad_norm": 0.4488385319709778,
      "learning_rate": 2.937616387337058e-05,
      "loss": 0.044,
      "step": 120
    },
    {
      "epoch": 0.03973105134474328,
      "grad_norm": 1.3066495656967163,
      "learning_rate": 2.9283054003724395e-05,
      "loss": 0.2036,
      "step": 130
    },
    {
      "epoch": 0.042787286063569685,
      "grad_norm": 0.36801964044570923,
      "learning_rate": 2.919925512104283e-05,
      "loss": 0.1376,
      "step": 140
    },
    {
      "epoch": 0.04584352078239609,
      "grad_norm": 0.35944098234176636,
      "learning_rate": 2.910614525139665e-05,
      "loss": 0.2113,
      "step": 150
    },
    {
      "epoch": 0.0488997555012225,
      "grad_norm": 0.32566192746162415,
      "learning_rate": 2.9013035381750465e-05,
      "loss": 0.1613,
      "step": 160
    },
    {
      "epoch": 0.0519559902200489,
      "grad_norm": 0.3194483816623688,
      "learning_rate": 2.8919925512104284e-05,
      "loss": 0.1142,
      "step": 170
    },
    {
      "epoch": 0.0550122249388753,
      "grad_norm": 0.35201185941696167,
      "learning_rate": 2.88268156424581e-05,
      "loss": 0.2022,
      "step": 180
    },
    {
      "epoch": 0.05806845965770171,
      "grad_norm": 0.3429718017578125,
      "learning_rate": 2.8733705772811922e-05,
      "loss": 0.1527,
      "step": 190
    },
    {
      "epoch": 0.061124694376528114,
      "grad_norm": 0.3786994516849518,
      "learning_rate": 2.8640595903165737e-05,
      "loss": 0.1623,
      "step": 200
    },
    {
      "epoch": 0.06418092909535453,
      "grad_norm": 0.4179794192314148,
      "learning_rate": 2.8547486033519556e-05,
      "loss": 0.1551,
      "step": 210
    },
    {
      "epoch": 0.06723716381418093,
      "grad_norm": 83.91354370117188,
      "learning_rate": 2.845437616387337e-05,
      "loss": 0.2994,
      "step": 220
    },
    {
      "epoch": 0.07029339853300734,
      "grad_norm": 0.2769756615161896,
      "learning_rate": 2.8361266294227187e-05,
      "loss": 0.1539,
      "step": 230
    },
    {
      "epoch": 0.07334963325183375,
      "grad_norm": 0.2955838441848755,
      "learning_rate": 2.8268156424581005e-05,
      "loss": 0.1634,
      "step": 240
    },
    {
      "epoch": 0.07640586797066015,
      "grad_norm": 0.2536284029483795,
      "learning_rate": 2.817504655493482e-05,
      "loss": 0.1137,
      "step": 250
    },
    {
      "epoch": 0.07946210268948656,
      "grad_norm": 1.7733937501907349,
      "learning_rate": 2.8081936685288643e-05,
      "loss": 0.3204,
      "step": 260
    },
    {
      "epoch": 0.08251833740831296,
      "grad_norm": 0.36826714873313904,
      "learning_rate": 2.798882681564246e-05,
      "loss": 0.1624,
      "step": 270
    },
    {
      "epoch": 0.08557457212713937,
      "grad_norm": 0.3160645067691803,
      "learning_rate": 2.7895716945996277e-05,
      "loss": 0.1015,
      "step": 280
    },
    {
      "epoch": 0.08863080684596578,
      "grad_norm": 0.24478046596050262,
      "learning_rate": 2.7802607076350093e-05,
      "loss": 0.1955,
      "step": 290
    },
    {
      "epoch": 0.09168704156479218,
      "grad_norm": 14.753984451293945,
      "learning_rate": 2.770949720670391e-05,
      "loss": 0.1996,
      "step": 300
    },
    {
      "epoch": 0.09474327628361859,
      "grad_norm": 1.5840058326721191,
      "learning_rate": 2.7616387337057727e-05,
      "loss": 0.1386,
      "step": 310
    },
    {
      "epoch": 0.097799511002445,
      "grad_norm": 0.23863820731639862,
      "learning_rate": 2.752327746741155e-05,
      "loss": 0.2088,
      "step": 320
    },
    {
      "epoch": 0.1008557457212714,
      "grad_norm": 1.5646458864212036,
      "learning_rate": 2.7430167597765364e-05,
      "loss": 0.16,
      "step": 330
    },
    {
      "epoch": 0.1039119804400978,
      "grad_norm": 0.25270429253578186,
      "learning_rate": 2.7337057728119183e-05,
      "loss": 0.0747,
      "step": 340
    },
    {
      "epoch": 0.1069682151589242,
      "grad_norm": 1.4299941062927246,
      "learning_rate": 2.7243947858473e-05,
      "loss": 0.1075,
      "step": 350
    },
    {
      "epoch": 0.1100244498777506,
      "grad_norm": 20.335739135742188,
      "learning_rate": 2.7150837988826817e-05,
      "loss": 0.2605,
      "step": 360
    },
    {
      "epoch": 0.11308068459657701,
      "grad_norm": 0.2346445471048355,
      "learning_rate": 2.7057728119180633e-05,
      "loss": 0.0954,
      "step": 370
    },
    {
      "epoch": 0.11613691931540342,
      "grad_norm": 0.23116770386695862,
      "learning_rate": 2.696461824953445e-05,
      "loss": 0.0641,
      "step": 380
    },
    {
      "epoch": 0.11919315403422982,
      "grad_norm": 1.6760469675064087,
      "learning_rate": 2.687150837988827e-05,
      "loss": 0.0678,
      "step": 390
    },
    {
      "epoch": 0.12224938875305623,
      "grad_norm": 0.1794356405735016,
      "learning_rate": 2.6778398510242085e-05,
      "loss": 0.0609,
      "step": 400
    },
    {
      "epoch": 0.12530562347188265,
      "grad_norm": 0.18168634176254272,
      "learning_rate": 2.6685288640595904e-05,
      "loss": 0.0611,
      "step": 410
    },
    {
      "epoch": 0.12836185819070906,
      "grad_norm": 0.2647036612033844,
      "learning_rate": 2.659217877094972e-05,
      "loss": 0.0598,
      "step": 420
    },
    {
      "epoch": 0.13141809290953546,
      "grad_norm": 0.2929624915122986,
      "learning_rate": 2.649906890130354e-05,
      "loss": 0.1172,
      "step": 430
    },
    {
      "epoch": 0.13447432762836187,
      "grad_norm": 0.7274528741836548,
      "learning_rate": 2.6405959031657354e-05,
      "loss": 0.1237,
      "step": 440
    },
    {
      "epoch": 0.13753056234718827,
      "grad_norm": 1.7421795129776,
      "learning_rate": 2.6312849162011176e-05,
      "loss": 0.2341,
      "step": 450
    },
    {
      "epoch": 0.14058679706601468,
      "grad_norm": 0.6214892268180847,
      "learning_rate": 2.621973929236499e-05,
      "loss": 0.1648,
      "step": 460
    },
    {
      "epoch": 0.14364303178484109,
      "grad_norm": 0.21029511094093323,
      "learning_rate": 2.612662942271881e-05,
      "loss": 0.0573,
      "step": 470
    },
    {
      "epoch": 0.1466992665036675,
      "grad_norm": 0.181521475315094,
      "learning_rate": 2.6033519553072625e-05,
      "loss": 0.2243,
      "step": 480
    },
    {
      "epoch": 0.1497555012224939,
      "grad_norm": 0.2883399724960327,
      "learning_rate": 2.5940409683426444e-05,
      "loss": 0.1662,
      "step": 490
    },
    {
      "epoch": 0.1528117359413203,
      "grad_norm": 0.1559794545173645,
      "learning_rate": 2.584729981378026e-05,
      "loss": 0.1764,
      "step": 500
    },
    {
      "epoch": 0.1558679706601467,
      "grad_norm": 0.4264441728591919,
      "learning_rate": 2.575418994413408e-05,
      "loss": 0.0789,
      "step": 510
    },
    {
      "epoch": 0.15892420537897312,
      "grad_norm": 0.320130318403244,
      "learning_rate": 2.5661080074487897e-05,
      "loss": 0.1565,
      "step": 520
    },
    {
      "epoch": 0.16198044009779952,
      "grad_norm": 6.056375503540039,
      "learning_rate": 2.5567970204841716e-05,
      "loss": 0.2424,
      "step": 530
    },
    {
      "epoch": 0.16503667481662593,
      "grad_norm": 4.100960731506348,
      "learning_rate": 2.547486033519553e-05,
      "loss": 0.0672,
      "step": 540
    },
    {
      "epoch": 0.16809290953545233,
      "grad_norm": 4.6566901206970215,
      "learning_rate": 2.538175046554935e-05,
      "loss": 0.1897,
      "step": 550
    },
    {
      "epoch": 0.17114914425427874,
      "grad_norm": 3.481842279434204,
      "learning_rate": 2.5288640595903165e-05,
      "loss": 0.1389,
      "step": 560
    },
    {
      "epoch": 0.17420537897310515,
      "grad_norm": 0.45788800716400146,
      "learning_rate": 2.519553072625698e-05,
      "loss": 0.0967,
      "step": 570
    },
    {
      "epoch": 0.17726161369193155,
      "grad_norm": 0.1905224472284317,
      "learning_rate": 2.5102420856610803e-05,
      "loss": 0.0771,
      "step": 580
    },
    {
      "epoch": 0.18031784841075796,
      "grad_norm": 0.1771785467863083,
      "learning_rate": 2.5009310986964618e-05,
      "loss": 0.1548,
      "step": 590
    },
    {
      "epoch": 0.18337408312958436,
      "grad_norm": 0.18358071148395538,
      "learning_rate": 2.4916201117318437e-05,
      "loss": 0.2571,
      "step": 600
    },
    {
      "epoch": 0.18643031784841077,
      "grad_norm": 0.3116374611854553,
      "learning_rate": 2.4823091247672252e-05,
      "loss": 0.1225,
      "step": 610
    },
    {
      "epoch": 0.18948655256723718,
      "grad_norm": 0.2802579998970032,
      "learning_rate": 2.472998137802607e-05,
      "loss": 0.0988,
      "step": 620
    },
    {
      "epoch": 0.19254278728606358,
      "grad_norm": 43.23971176147461,
      "learning_rate": 2.4636871508379887e-05,
      "loss": 0.2569,
      "step": 630
    },
    {
      "epoch": 0.19559902200489,
      "grad_norm": 0.25425320863723755,
      "learning_rate": 2.454376163873371e-05,
      "loss": 0.0759,
      "step": 640
    },
    {
      "epoch": 0.1986552567237164,
      "grad_norm": 0.16143210232257843,
      "learning_rate": 2.4450651769087524e-05,
      "loss": 0.0666,
      "step": 650
    },
    {
      "epoch": 0.2017114914425428,
      "grad_norm": 0.15112750232219696,
      "learning_rate": 2.4357541899441343e-05,
      "loss": 0.1222,
      "step": 660
    },
    {
      "epoch": 0.2047677261613692,
      "grad_norm": 0.1990450918674469,
      "learning_rate": 2.4264432029795158e-05,
      "loss": 0.1612,
      "step": 670
    },
    {
      "epoch": 0.2078239608801956,
      "grad_norm": 0.23198798298835754,
      "learning_rate": 2.4171322160148977e-05,
      "loss": 0.0139,
      "step": 680
    },
    {
      "epoch": 0.210880195599022,
      "grad_norm": 0.2512992024421692,
      "learning_rate": 2.4078212290502792e-05,
      "loss": 0.2073,
      "step": 690
    },
    {
      "epoch": 0.2139364303178484,
      "grad_norm": 0.29186299443244934,
      "learning_rate": 2.398510242085661e-05,
      "loss": 0.0141,
      "step": 700
    },
    {
      "epoch": 0.2169926650366748,
      "grad_norm": 0.20607629418373108,
      "learning_rate": 2.389199255121043e-05,
      "loss": 0.1886,
      "step": 710
    },
    {
      "epoch": 0.2200488997555012,
      "grad_norm": 0.40326663851737976,
      "learning_rate": 2.379888268156425e-05,
      "loss": 0.082,
      "step": 720
    },
    {
      "epoch": 0.22310513447432762,
      "grad_norm": 0.18325626850128174,
      "learning_rate": 2.3705772811918064e-05,
      "loss": 0.0628,
      "step": 730
    },
    {
      "epoch": 0.22616136919315402,
      "grad_norm": 0.1717202067375183,
      "learning_rate": 2.361266294227188e-05,
      "loss": 0.1665,
      "step": 740
    },
    {
      "epoch": 0.22921760391198043,
      "grad_norm": 1.470982313156128,
      "learning_rate": 2.3519553072625698e-05,
      "loss": 0.1568,
      "step": 750
    },
    {
      "epoch": 0.23227383863080683,
      "grad_norm": 0.1533995270729065,
      "learning_rate": 2.3426443202979514e-05,
      "loss": 0.0565,
      "step": 760
    },
    {
      "epoch": 0.23533007334963324,
      "grad_norm": 0.18549972772598267,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 0.1877,
      "step": 770
    },
    {
      "epoch": 0.23838630806845965,
      "grad_norm": 1.3424246311187744,
      "learning_rate": 2.324022346368715e-05,
      "loss": 0.1371,
      "step": 780
    },
    {
      "epoch": 0.24144254278728605,
      "grad_norm": 1.0893691778182983,
      "learning_rate": 2.314711359404097e-05,
      "loss": 0.1286,
      "step": 790
    },
    {
      "epoch": 0.24449877750611246,
      "grad_norm": 0.309053897857666,
      "learning_rate": 2.3054003724394785e-05,
      "loss": 0.0637,
      "step": 800
    },
    {
      "epoch": 0.24755501222493886,
      "grad_norm": 0.2743883728981018,
      "learning_rate": 2.2960893854748604e-05,
      "loss": 0.0151,
      "step": 810
    },
    {
      "epoch": 0.2506112469437653,
      "grad_norm": 1.4966446161270142,
      "learning_rate": 2.286778398510242e-05,
      "loss": 0.1031,
      "step": 820
    },
    {
      "epoch": 0.2536674816625917,
      "grad_norm": 0.22224830090999603,
      "learning_rate": 2.2774674115456238e-05,
      "loss": 0.0618,
      "step": 830
    },
    {
      "epoch": 0.2567237163814181,
      "grad_norm": 0.17919765412807465,
      "learning_rate": 2.2681564245810057e-05,
      "loss": 0.0593,
      "step": 840
    },
    {
      "epoch": 0.2597799511002445,
      "grad_norm": 0.09819658100605011,
      "learning_rate": 2.2588454376163876e-05,
      "loss": 0.0781,
      "step": 850
    },
    {
      "epoch": 0.2628361858190709,
      "grad_norm": 0.22454911470413208,
      "learning_rate": 2.249534450651769e-05,
      "loss": 0.1045,
      "step": 860
    },
    {
      "epoch": 0.2658924205378973,
      "grad_norm": 49.3597526550293,
      "learning_rate": 2.240223463687151e-05,
      "loss": 0.087,
      "step": 870
    },
    {
      "epoch": 0.26894865525672373,
      "grad_norm": 1.6358880996704102,
      "learning_rate": 2.2309124767225325e-05,
      "loss": 0.2507,
      "step": 880
    },
    {
      "epoch": 0.2720048899755501,
      "grad_norm": 0.15142078697681427,
      "learning_rate": 2.2216014897579144e-05,
      "loss": 0.1193,
      "step": 890
    },
    {
      "epoch": 0.27506112469437655,
      "grad_norm": 0.1405174732208252,
      "learning_rate": 2.2122905027932963e-05,
      "loss": 0.0527,
      "step": 900
    },
    {
      "epoch": 0.2781173594132029,
      "grad_norm": 0.1312502920627594,
      "learning_rate": 2.2029795158286778e-05,
      "loss": 0.1309,
      "step": 910
    },
    {
      "epoch": 0.28117359413202936,
      "grad_norm": 0.11085129529237747,
      "learning_rate": 2.1936685288640597e-05,
      "loss": 0.0598,
      "step": 920
    },
    {
      "epoch": 0.28422982885085574,
      "grad_norm": 0.13175728917121887,
      "learning_rate": 2.1843575418994412e-05,
      "loss": 0.0601,
      "step": 930
    },
    {
      "epoch": 0.28728606356968217,
      "grad_norm": 0.13237424194812775,
      "learning_rate": 2.175046554934823e-05,
      "loss": 0.1338,
      "step": 940
    },
    {
      "epoch": 0.29034229828850855,
      "grad_norm": 1.7460986375808716,
      "learning_rate": 2.1657355679702047e-05,
      "loss": 0.1575,
      "step": 950
    },
    {
      "epoch": 0.293398533007335,
      "grad_norm": 0.1591300666332245,
      "learning_rate": 2.156424581005587e-05,
      "loss": 0.0535,
      "step": 960
    },
    {
      "epoch": 0.29645476772616136,
      "grad_norm": 0.1477421522140503,
      "learning_rate": 2.1471135940409684e-05,
      "loss": 0.0701,
      "step": 970
    },
    {
      "epoch": 0.2995110024449878,
      "grad_norm": 0.15832221508026123,
      "learning_rate": 2.1378026070763503e-05,
      "loss": 0.2401,
      "step": 980
    },
    {
      "epoch": 0.3025672371638142,
      "grad_norm": 0.2130369395017624,
      "learning_rate": 2.1284916201117318e-05,
      "loss": 0.2158,
      "step": 990
    },
    {
      "epoch": 0.3056234718826406,
      "grad_norm": 0.1542048305273056,
      "learning_rate": 2.1191806331471137e-05,
      "loss": 0.0971,
      "step": 1000
    },
    {
      "epoch": 0.308679706601467,
      "grad_norm": 3.364166021347046,
      "learning_rate": 2.1098696461824952e-05,
      "loss": 0.1079,
      "step": 1010
    },
    {
      "epoch": 0.3117359413202934,
      "grad_norm": 0.14105622470378876,
      "learning_rate": 2.100558659217877e-05,
      "loss": 0.0597,
      "step": 1020
    },
    {
      "epoch": 0.3147921760391198,
      "grad_norm": 0.1316232830286026,
      "learning_rate": 2.091247672253259e-05,
      "loss": 0.1159,
      "step": 1030
    },
    {
      "epoch": 0.31784841075794623,
      "grad_norm": 0.48321789503097534,
      "learning_rate": 2.081936685288641e-05,
      "loss": 0.078,
      "step": 1040
    },
    {
      "epoch": 0.3209046454767726,
      "grad_norm": 0.1567317247390747,
      "learning_rate": 2.0726256983240224e-05,
      "loss": 0.204,
      "step": 1050
    },
    {
      "epoch": 0.32396088019559904,
      "grad_norm": 6.398591995239258,
      "learning_rate": 2.0633147113594043e-05,
      "loss": 0.3119,
      "step": 1060
    },
    {
      "epoch": 0.3270171149144254,
      "grad_norm": 0.20879434049129486,
      "learning_rate": 2.0540037243947858e-05,
      "loss": 0.1167,
      "step": 1070
    },
    {
      "epoch": 0.33007334963325186,
      "grad_norm": 0.23976916074752808,
      "learning_rate": 2.0446927374301674e-05,
      "loss": 0.1077,
      "step": 1080
    },
    {
      "epoch": 0.33312958435207823,
      "grad_norm": 0.17524395883083344,
      "learning_rate": 2.0353817504655496e-05,
      "loss": 0.0814,
      "step": 1090
    },
    {
      "epoch": 0.33618581907090467,
      "grad_norm": 0.36440449953079224,
      "learning_rate": 2.026070763500931e-05,
      "loss": 0.0589,
      "step": 1100
    },
    {
      "epoch": 0.33924205378973105,
      "grad_norm": 0.15378913283348083,
      "learning_rate": 2.016759776536313e-05,
      "loss": 0.0112,
      "step": 1110
    },
    {
      "epoch": 0.3422982885085575,
      "grad_norm": 0.14528077840805054,
      "learning_rate": 2.0074487895716945e-05,
      "loss": 0.1187,
      "step": 1120
    },
    {
      "epoch": 0.34535452322738386,
      "grad_norm": 0.12367907911539078,
      "learning_rate": 1.9990689013035385e-05,
      "loss": 0.0722,
      "step": 1130
    },
    {
      "epoch": 0.3484107579462103,
      "grad_norm": 0.09661256521940231,
      "learning_rate": 1.98975791433892e-05,
      "loss": 0.0052,
      "step": 1140
    },
    {
      "epoch": 0.35146699266503667,
      "grad_norm": 0.7434007525444031,
      "learning_rate": 1.980446927374302e-05,
      "loss": 0.005,
      "step": 1150
    },
    {
      "epoch": 0.3545232273838631,
      "grad_norm": 0.04170653969049454,
      "learning_rate": 1.9711359404096834e-05,
      "loss": 0.0036,
      "step": 1160
    },
    {
      "epoch": 0.3575794621026895,
      "grad_norm": 0.07466982305049896,
      "learning_rate": 1.961824953445065e-05,
      "loss": 0.0684,
      "step": 1170
    },
    {
      "epoch": 0.3606356968215159,
      "grad_norm": 0.05339043587446213,
      "learning_rate": 1.952513966480447e-05,
      "loss": 0.0064,
      "step": 1180
    },
    {
      "epoch": 0.3636919315403423,
      "grad_norm": 4.3722052574157715,
      "learning_rate": 1.9432029795158287e-05,
      "loss": 0.295,
      "step": 1190
    },
    {
      "epoch": 0.36674816625916873,
      "grad_norm": 0.04944440722465515,
      "learning_rate": 1.9338919925512106e-05,
      "loss": 0.0644,
      "step": 1200
    },
    {
      "epoch": 0.3698044009779951,
      "grad_norm": 0.09254731982946396,
      "learning_rate": 1.924581005586592e-05,
      "loss": 0.2032,
      "step": 1210
    },
    {
      "epoch": 0.37286063569682154,
      "grad_norm": 2.1284565925598145,
      "learning_rate": 1.915270018621974e-05,
      "loss": 0.1224,
      "step": 1220
    },
    {
      "epoch": 0.3759168704156479,
      "grad_norm": 0.1618586629629135,
      "learning_rate": 1.9059590316573555e-05,
      "loss": 0.0694,
      "step": 1230
    },
    {
      "epoch": 0.37897310513447435,
      "grad_norm": 0.20215436816215515,
      "learning_rate": 1.8966480446927374e-05,
      "loss": 0.0642,
      "step": 1240
    },
    {
      "epoch": 0.38202933985330073,
      "grad_norm": 0.11647956073284149,
      "learning_rate": 1.8873370577281193e-05,
      "loss": 0.0268,
      "step": 1250
    },
    {
      "epoch": 0.38508557457212717,
      "grad_norm": 2.9735450744628906,
      "learning_rate": 1.878026070763501e-05,
      "loss": 0.1907,
      "step": 1260
    },
    {
      "epoch": 0.38814180929095354,
      "grad_norm": 0.16742634773254395,
      "learning_rate": 1.8687150837988827e-05,
      "loss": 0.068,
      "step": 1270
    },
    {
      "epoch": 0.39119804400978,
      "grad_norm": 0.1201523020863533,
      "learning_rate": 1.8594040968342646e-05,
      "loss": 0.0627,
      "step": 1280
    },
    {
      "epoch": 0.39425427872860636,
      "grad_norm": 0.07464749366044998,
      "learning_rate": 1.850093109869646e-05,
      "loss": 0.0132,
      "step": 1290
    },
    {
      "epoch": 0.3973105134474328,
      "grad_norm": 0.12164530158042908,
      "learning_rate": 1.840782122905028e-05,
      "loss": 0.2341,
      "step": 1300
    },
    {
      "epoch": 0.40036674816625917,
      "grad_norm": 0.09393124282360077,
      "learning_rate": 1.83147113594041e-05,
      "loss": 0.061,
      "step": 1310
    },
    {
      "epoch": 0.4034229828850856,
      "grad_norm": 8.743972778320312,
      "learning_rate": 1.8221601489757917e-05,
      "loss": 0.1509,
      "step": 1320
    },
    {
      "epoch": 0.406479217603912,
      "grad_norm": 0.22810806334018707,
      "learning_rate": 1.8128491620111733e-05,
      "loss": 0.0187,
      "step": 1330
    },
    {
      "epoch": 0.4095354523227384,
      "grad_norm": 0.15852580964565277,
      "learning_rate": 1.8035381750465548e-05,
      "loss": 0.051,
      "step": 1340
    },
    {
      "epoch": 0.4125916870415648,
      "grad_norm": 2.218247652053833,
      "learning_rate": 1.7942271880819367e-05,
      "loss": 0.151,
      "step": 1350
    },
    {
      "epoch": 0.4156479217603912,
      "grad_norm": 0.0791810154914856,
      "learning_rate": 1.7849162011173182e-05,
      "loss": 0.352,
      "step": 1360
    },
    {
      "epoch": 0.4187041564792176,
      "grad_norm": 0.2023414820432663,
      "learning_rate": 1.7756052141527e-05,
      "loss": 0.2002,
      "step": 1370
    },
    {
      "epoch": 0.421760391198044,
      "grad_norm": 4.077162265777588,
      "learning_rate": 1.766294227188082e-05,
      "loss": 0.0924,
      "step": 1380
    },
    {
      "epoch": 0.4248166259168704,
      "grad_norm": 0.16036756336688995,
      "learning_rate": 1.756983240223464e-05,
      "loss": 0.1015,
      "step": 1390
    },
    {
      "epoch": 0.4278728606356968,
      "grad_norm": 12.642723083496094,
      "learning_rate": 1.7476722532588454e-05,
      "loss": 0.0885,
      "step": 1400
    },
    {
      "epoch": 0.43092909535452323,
      "grad_norm": 0.20661431550979614,
      "learning_rate": 1.7383612662942273e-05,
      "loss": 0.1037,
      "step": 1410
    },
    {
      "epoch": 0.4339853300733496,
      "grad_norm": 0.14822793006896973,
      "learning_rate": 1.7290502793296088e-05,
      "loss": 0.0837,
      "step": 1420
    },
    {
      "epoch": 0.43704156479217604,
      "grad_norm": 0.12976513803005219,
      "learning_rate": 1.7197392923649907e-05,
      "loss": 0.2077,
      "step": 1430
    },
    {
      "epoch": 0.4400977995110024,
      "grad_norm": 0.045029882341623306,
      "learning_rate": 1.7104283054003726e-05,
      "loss": 0.0824,
      "step": 1440
    },
    {
      "epoch": 0.44315403422982885,
      "grad_norm": 0.19809339940547943,
      "learning_rate": 1.7011173184357544e-05,
      "loss": 0.1054,
      "step": 1450
    },
    {
      "epoch": 0.44621026894865523,
      "grad_norm": 0.2574075162410736,
      "learning_rate": 1.691806331471136e-05,
      "loss": 0.0878,
      "step": 1460
    },
    {
      "epoch": 0.44926650366748166,
      "grad_norm": 1.8627293109893799,
      "learning_rate": 1.682495344506518e-05,
      "loss": 0.1102,
      "step": 1470
    },
    {
      "epoch": 0.45232273838630804,
      "grad_norm": 0.1813037395477295,
      "learning_rate": 1.6731843575418994e-05,
      "loss": 0.1346,
      "step": 1480
    },
    {
      "epoch": 0.4553789731051345,
      "grad_norm": 0.21230725944042206,
      "learning_rate": 1.6638733705772813e-05,
      "loss": 0.0377,
      "step": 1490
    },
    {
      "epoch": 0.45843520782396086,
      "grad_norm": 0.17976613342761993,
      "learning_rate": 1.654562383612663e-05,
      "loss": 0.1334,
      "step": 1500
    },
    {
      "epoch": 0.4614914425427873,
      "grad_norm": 0.13542696833610535,
      "learning_rate": 1.6452513966480447e-05,
      "loss": 0.0935,
      "step": 1510
    },
    {
      "epoch": 0.46454767726161367,
      "grad_norm": 0.1337755024433136,
      "learning_rate": 1.6359404096834266e-05,
      "loss": 0.0073,
      "step": 1520
    },
    {
      "epoch": 0.4676039119804401,
      "grad_norm": 0.052432287484407425,
      "learning_rate": 1.626629422718808e-05,
      "loss": 0.1279,
      "step": 1530
    },
    {
      "epoch": 0.4706601466992665,
      "grad_norm": 0.06683123856782913,
      "learning_rate": 1.61731843575419e-05,
      "loss": 0.0042,
      "step": 1540
    },
    {
      "epoch": 0.4737163814180929,
      "grad_norm": 0.09111201763153076,
      "learning_rate": 1.6080074487895715e-05,
      "loss": 0.0046,
      "step": 1550
    },
    {
      "epoch": 0.4767726161369193,
      "grad_norm": 0.053649771958589554,
      "learning_rate": 1.5986964618249534e-05,
      "loss": 0.1305,
      "step": 1560
    },
    {
      "epoch": 0.4798288508557457,
      "grad_norm": 0.10080774873495102,
      "learning_rate": 1.5893854748603353e-05,
      "loss": 0.1217,
      "step": 1570
    },
    {
      "epoch": 0.4828850855745721,
      "grad_norm": 34.571510314941406,
      "learning_rate": 1.580074487895717e-05,
      "loss": 0.036,
      "step": 1580
    },
    {
      "epoch": 0.48594132029339854,
      "grad_norm": 0.07344861328601837,
      "learning_rate": 1.5707635009310987e-05,
      "loss": 0.1111,
      "step": 1590
    },
    {
      "epoch": 0.4889975550122249,
      "grad_norm": 4.016622543334961,
      "learning_rate": 1.5614525139664806e-05,
      "loss": 0.1141,
      "step": 1600
    },
    {
      "epoch": 0.49205378973105135,
      "grad_norm": 0.12781716883182526,
      "learning_rate": 1.552141527001862e-05,
      "loss": 0.1436,
      "step": 1610
    },
    {
      "epoch": 0.49511002444987773,
      "grad_norm": 0.28127339482307434,
      "learning_rate": 1.542830540037244e-05,
      "loss": 0.0141,
      "step": 1620
    },
    {
      "epoch": 0.49816625916870416,
      "grad_norm": 110.2169418334961,
      "learning_rate": 1.533519553072626e-05,
      "loss": 0.1011,
      "step": 1630
    },
    {
      "epoch": 0.5012224938875306,
      "grad_norm": 0.11259926855564117,
      "learning_rate": 1.5242085661080076e-05,
      "loss": 0.1317,
      "step": 1640
    },
    {
      "epoch": 0.5042787286063569,
      "grad_norm": 54.546661376953125,
      "learning_rate": 1.5148975791433893e-05,
      "loss": 0.1059,
      "step": 1650
    },
    {
      "epoch": 0.5073349633251834,
      "grad_norm": 0.039869241416454315,
      "learning_rate": 1.5055865921787711e-05,
      "loss": 0.0576,
      "step": 1660
    },
    {
      "epoch": 0.5103911980440098,
      "grad_norm": 0.09830798953771591,
      "learning_rate": 1.4962756052141527e-05,
      "loss": 0.0096,
      "step": 1670
    },
    {
      "epoch": 0.5134474327628362,
      "grad_norm": 0.07597620040178299,
      "learning_rate": 1.4869646182495346e-05,
      "loss": 0.051,
      "step": 1680
    },
    {
      "epoch": 0.5165036674816625,
      "grad_norm": 0.05168398097157478,
      "learning_rate": 1.4776536312849163e-05,
      "loss": 0.1166,
      "step": 1690
    },
    {
      "epoch": 0.519559902200489,
      "grad_norm": 93.93925476074219,
      "learning_rate": 1.468342644320298e-05,
      "loss": 0.0429,
      "step": 1700
    },
    {
      "epoch": 0.5226161369193154,
      "grad_norm": 0.9389681220054626,
      "learning_rate": 1.4590316573556799e-05,
      "loss": 0.0065,
      "step": 1710
    },
    {
      "epoch": 0.5256723716381418,
      "grad_norm": 0.09277983009815216,
      "learning_rate": 1.4497206703910616e-05,
      "loss": 0.0635,
      "step": 1720
    },
    {
      "epoch": 0.5287286063569682,
      "grad_norm": 0.09135166555643082,
      "learning_rate": 1.4404096834264433e-05,
      "loss": 0.1598,
      "step": 1730
    },
    {
      "epoch": 0.5317848410757946,
      "grad_norm": 0.07920227199792862,
      "learning_rate": 1.431098696461825e-05,
      "loss": 0.0586,
      "step": 1740
    },
    {
      "epoch": 0.534841075794621,
      "grad_norm": 0.0759720727801323,
      "learning_rate": 1.4217877094972069e-05,
      "loss": 0.1053,
      "step": 1750
    },
    {
      "epoch": 0.5378973105134475,
      "grad_norm": 0.07295390963554382,
      "learning_rate": 1.4124767225325884e-05,
      "loss": 0.0636,
      "step": 1760
    },
    {
      "epoch": 0.5409535452322738,
      "grad_norm": 0.1437848061323166,
      "learning_rate": 1.4031657355679701e-05,
      "loss": 0.1748,
      "step": 1770
    },
    {
      "epoch": 0.5440097799511002,
      "grad_norm": 0.0709507092833519,
      "learning_rate": 1.393854748603352e-05,
      "loss": 0.0113,
      "step": 1780
    },
    {
      "epoch": 0.5470660146699267,
      "grad_norm": 0.06068747863173485,
      "learning_rate": 1.3845437616387337e-05,
      "loss": 0.1883,
      "step": 1790
    },
    {
      "epoch": 0.5501222493887531,
      "grad_norm": 0.2686610221862793,
      "learning_rate": 1.3752327746741154e-05,
      "loss": 0.1691,
      "step": 1800
    },
    {
      "epoch": 0.5531784841075794,
      "grad_norm": 0.09411593526601791,
      "learning_rate": 1.3659217877094973e-05,
      "loss": 0.0983,
      "step": 1810
    },
    {
      "epoch": 0.5562347188264058,
      "grad_norm": 0.22031310200691223,
      "learning_rate": 1.356610800744879e-05,
      "loss": 0.0591,
      "step": 1820
    },
    {
      "epoch": 0.5592909535452323,
      "grad_norm": 0.09072048962116241,
      "learning_rate": 1.3472998137802607e-05,
      "loss": 0.0111,
      "step": 1830
    },
    {
      "epoch": 0.5623471882640587,
      "grad_norm": 0.10641049593687057,
      "learning_rate": 1.3379888268156426e-05,
      "loss": 0.1188,
      "step": 1840
    },
    {
      "epoch": 0.565403422982885,
      "grad_norm": 0.07925461232662201,
      "learning_rate": 1.3286778398510243e-05,
      "loss": 0.0603,
      "step": 1850
    },
    {
      "epoch": 0.5684596577017115,
      "grad_norm": 0.16015411913394928,
      "learning_rate": 1.319366852886406e-05,
      "loss": 0.062,
      "step": 1860
    },
    {
      "epoch": 0.5715158924205379,
      "grad_norm": 1.8697471618652344,
      "learning_rate": 1.3100558659217879e-05,
      "loss": 0.1608,
      "step": 1870
    },
    {
      "epoch": 0.5745721271393643,
      "grad_norm": 0.1413722038269043,
      "learning_rate": 1.3007448789571696e-05,
      "loss": 0.061,
      "step": 1880
    },
    {
      "epoch": 0.5776283618581907,
      "grad_norm": 0.1544978767633438,
      "learning_rate": 1.2914338919925513e-05,
      "loss": 0.0677,
      "step": 1890
    },
    {
      "epoch": 0.5806845965770171,
      "grad_norm": 0.1221311092376709,
      "learning_rate": 1.282122905027933e-05,
      "loss": 0.1088,
      "step": 1900
    },
    {
      "epoch": 0.5837408312958435,
      "grad_norm": 1.616721272468567,
      "learning_rate": 1.2728119180633149e-05,
      "loss": 0.1209,
      "step": 1910
    },
    {
      "epoch": 0.58679706601467,
      "grad_norm": 6.469699382781982,
      "learning_rate": 1.2635009310986966e-05,
      "loss": 0.1047,
      "step": 1920
    },
    {
      "epoch": 0.5898533007334963,
      "grad_norm": 0.13257375359535217,
      "learning_rate": 1.2541899441340781e-05,
      "loss": 0.1008,
      "step": 1930
    },
    {
      "epoch": 0.5929095354523227,
      "grad_norm": 5.230037689208984,
      "learning_rate": 1.24487895716946e-05,
      "loss": 0.1843,
      "step": 1940
    },
    {
      "epoch": 0.5959657701711492,
      "grad_norm": 5.640364170074463,
      "learning_rate": 1.2355679702048417e-05,
      "loss": 0.1216,
      "step": 1950
    },
    {
      "epoch": 0.5990220048899756,
      "grad_norm": 0.05993470177054405,
      "learning_rate": 1.2262569832402234e-05,
      "loss": 0.0991,
      "step": 1960
    },
    {
      "epoch": 0.6020782396088019,
      "grad_norm": 0.12238603085279465,
      "learning_rate": 1.2169459962756053e-05,
      "loss": 0.0109,
      "step": 1970
    },
    {
      "epoch": 0.6051344743276283,
      "grad_norm": 0.16531647741794586,
      "learning_rate": 1.207635009310987e-05,
      "loss": 0.0523,
      "step": 1980
    },
    {
      "epoch": 0.6081907090464548,
      "grad_norm": 0.07218731194734573,
      "learning_rate": 1.1983240223463687e-05,
      "loss": 0.0054,
      "step": 1990
    },
    {
      "epoch": 0.6112469437652812,
      "grad_norm": 0.04113167151808739,
      "learning_rate": 1.1890130353817506e-05,
      "loss": 0.0674,
      "step": 2000
    },
    {
      "epoch": 0.6143031784841075,
      "grad_norm": 13.622767448425293,
      "learning_rate": 1.1797020484171323e-05,
      "loss": 0.1255,
      "step": 2010
    },
    {
      "epoch": 0.617359413202934,
      "grad_norm": 0.06221568211913109,
      "learning_rate": 1.170391061452514e-05,
      "loss": 0.0043,
      "step": 2020
    },
    {
      "epoch": 0.6204156479217604,
      "grad_norm": 0.04879147931933403,
      "learning_rate": 1.1610800744878958e-05,
      "loss": 0.0603,
      "step": 2030
    },
    {
      "epoch": 0.6234718826405868,
      "grad_norm": 0.09293865412473679,
      "learning_rate": 1.1517690875232776e-05,
      "loss": 0.1467,
      "step": 2040
    },
    {
      "epoch": 0.6265281173594132,
      "grad_norm": 0.10396531224250793,
      "learning_rate": 1.1424581005586593e-05,
      "loss": 0.0688,
      "step": 2050
    },
    {
      "epoch": 0.6295843520782396,
      "grad_norm": 0.2048744261264801,
      "learning_rate": 1.133147113594041e-05,
      "loss": 0.0627,
      "step": 2060
    },
    {
      "epoch": 0.632640586797066,
      "grad_norm": 0.05055548995733261,
      "learning_rate": 1.1238361266294228e-05,
      "loss": 0.0662,
      "step": 2070
    },
    {
      "epoch": 0.6356968215158925,
      "grad_norm": 2.0886988639831543,
      "learning_rate": 1.1145251396648046e-05,
      "loss": 0.1094,
      "step": 2080
    },
    {
      "epoch": 0.6387530562347188,
      "grad_norm": 0.11593261361122131,
      "learning_rate": 1.1052141527001861e-05,
      "loss": 0.3611,
      "step": 2090
    },
    {
      "epoch": 0.6418092909535452,
      "grad_norm": 0.07993444800376892,
      "learning_rate": 1.095903165735568e-05,
      "loss": 0.0055,
      "step": 2100
    },
    {
      "epoch": 0.6448655256723717,
      "grad_norm": 0.13663113117218018,
      "learning_rate": 1.0865921787709497e-05,
      "loss": 0.0692,
      "step": 2110
    },
    {
      "epoch": 0.6479217603911981,
      "grad_norm": 0.09905444830656052,
      "learning_rate": 1.0772811918063314e-05,
      "loss": 0.008,
      "step": 2120
    },
    {
      "epoch": 0.6509779951100244,
      "grad_norm": 0.06681796908378601,
      "learning_rate": 1.0679702048417133e-05,
      "loss": 0.007,
      "step": 2130
    },
    {
      "epoch": 0.6540342298288508,
      "grad_norm": 0.08089947700500488,
      "learning_rate": 1.058659217877095e-05,
      "loss": 0.0875,
      "step": 2140
    },
    {
      "epoch": 0.6570904645476773,
      "grad_norm": 0.11339417099952698,
      "learning_rate": 1.0493482309124767e-05,
      "loss": 0.1986,
      "step": 2150
    },
    {
      "epoch": 0.6601466992665037,
      "grad_norm": 0.05615869536995888,
      "learning_rate": 1.0400372439478586e-05,
      "loss": 0.1701,
      "step": 2160
    },
    {
      "epoch": 0.66320293398533,
      "grad_norm": 0.09708325564861298,
      "learning_rate": 1.0307262569832403e-05,
      "loss": 0.059,
      "step": 2170
    },
    {
      "epoch": 0.6662591687041565,
      "grad_norm": 0.08505033701658249,
      "learning_rate": 1.021415270018622e-05,
      "loss": 0.2405,
      "step": 2180
    },
    {
      "epoch": 0.6693154034229829,
      "grad_norm": 0.12613414227962494,
      "learning_rate": 1.0121042830540038e-05,
      "loss": 0.0379,
      "step": 2190
    },
    {
      "epoch": 0.6723716381418093,
      "grad_norm": 0.11440026015043259,
      "learning_rate": 1.0027932960893856e-05,
      "loss": 0.0096,
      "step": 2200
    },
    {
      "epoch": 0.6754278728606357,
      "grad_norm": 1.7247629165649414,
      "learning_rate": 9.934823091247673e-06,
      "loss": 0.2441,
      "step": 2210
    },
    {
      "epoch": 0.6784841075794621,
      "grad_norm": 0.2626115679740906,
      "learning_rate": 9.84171322160149e-06,
      "loss": 0.1266,
      "step": 2220
    },
    {
      "epoch": 0.6815403422982885,
      "grad_norm": 0.08933160454034805,
      "learning_rate": 9.748603351955308e-06,
      "loss": 0.0932,
      "step": 2230
    },
    {
      "epoch": 0.684596577017115,
      "grad_norm": 0.18056859076023102,
      "learning_rate": 9.655493482309126e-06,
      "loss": 0.0528,
      "step": 2240
    },
    {
      "epoch": 0.6876528117359413,
      "grad_norm": 0.719739556312561,
      "learning_rate": 9.562383612662943e-06,
      "loss": 0.0848,
      "step": 2250
    },
    {
      "epoch": 0.6907090464547677,
      "grad_norm": 0.11645276844501495,
      "learning_rate": 9.46927374301676e-06,
      "loss": 0.0073,
      "step": 2260
    },
    {
      "epoch": 0.6937652811735942,
      "grad_norm": 0.05040796473622322,
      "learning_rate": 9.376163873370577e-06,
      "loss": 0.1393,
      "step": 2270
    },
    {
      "epoch": 0.6968215158924206,
      "grad_norm": 17.505489349365234,
      "learning_rate": 9.283054003724394e-06,
      "loss": 0.0733,
      "step": 2280
    },
    {
      "epoch": 0.6998777506112469,
      "grad_norm": 1.6304818391799927,
      "learning_rate": 9.189944134078213e-06,
      "loss": 0.0513,
      "step": 2290
    },
    {
      "epoch": 0.7029339853300733,
      "grad_norm": 0.1561206579208374,
      "learning_rate": 9.09683426443203e-06,
      "loss": 0.0575,
      "step": 2300
    },
    {
      "epoch": 0.7059902200488998,
      "grad_norm": 0.07211354374885559,
      "learning_rate": 9.003724394785847e-06,
      "loss": 0.1541,
      "step": 2310
    },
    {
      "epoch": 0.7090464547677262,
      "grad_norm": 0.12376312911510468,
      "learning_rate": 8.910614525139666e-06,
      "loss": 0.0061,
      "step": 2320
    },
    {
      "epoch": 0.7121026894865525,
      "grad_norm": 0.3487497568130493,
      "learning_rate": 8.817504655493483e-06,
      "loss": 0.0553,
      "step": 2330
    },
    {
      "epoch": 0.715158924205379,
      "grad_norm": 0.10781729221343994,
      "learning_rate": 8.7243947858473e-06,
      "loss": 0.0069,
      "step": 2340
    },
    {
      "epoch": 0.7182151589242054,
      "grad_norm": 5.861069679260254,
      "learning_rate": 8.631284916201118e-06,
      "loss": 0.1086,
      "step": 2350
    },
    {
      "epoch": 0.7212713936430318,
      "grad_norm": 0.2850921154022217,
      "learning_rate": 8.538175046554935e-06,
      "loss": 0.0912,
      "step": 2360
    },
    {
      "epoch": 0.7243276283618582,
      "grad_norm": 3.2116122245788574,
      "learning_rate": 8.445065176908753e-06,
      "loss": 0.1441,
      "step": 2370
    },
    {
      "epoch": 0.7273838630806846,
      "grad_norm": 0.4662390947341919,
      "learning_rate": 8.35195530726257e-06,
      "loss": 0.1017,
      "step": 2380
    },
    {
      "epoch": 0.730440097799511,
      "grad_norm": 0.05119713768362999,
      "learning_rate": 8.258845437616388e-06,
      "loss": 0.0102,
      "step": 2390
    },
    {
      "epoch": 0.7334963325183375,
      "grad_norm": 0.0849548801779747,
      "learning_rate": 8.165735567970205e-06,
      "loss": 0.0061,
      "step": 2400
    },
    {
      "epoch": 0.7365525672371638,
      "grad_norm": 0.035956285893917084,
      "learning_rate": 8.072625698324023e-06,
      "loss": 0.0035,
      "step": 2410
    },
    {
      "epoch": 0.7396088019559902,
      "grad_norm": 0.06464776396751404,
      "learning_rate": 7.979515828677841e-06,
      "loss": 0.0466,
      "step": 2420
    },
    {
      "epoch": 0.7426650366748166,
      "grad_norm": 0.5524429082870483,
      "learning_rate": 7.886405959031657e-06,
      "loss": 0.0704,
      "step": 2430
    },
    {
      "epoch": 0.7457212713936431,
      "grad_norm": 0.04895765706896782,
      "learning_rate": 7.793296089385474e-06,
      "loss": 0.0036,
      "step": 2440
    },
    {
      "epoch": 0.7487775061124694,
      "grad_norm": 0.03954509273171425,
      "learning_rate": 7.700186219739293e-06,
      "loss": 0.0032,
      "step": 2450
    },
    {
      "epoch": 0.7518337408312958,
      "grad_norm": 13.51663589477539,
      "learning_rate": 7.60707635009311e-06,
      "loss": 0.063,
      "step": 2460
    },
    {
      "epoch": 0.7548899755501223,
      "grad_norm": 0.11551866680383682,
      "learning_rate": 7.5139664804469275e-06,
      "loss": 0.0028,
      "step": 2470
    },
    {
      "epoch": 0.7579462102689487,
      "grad_norm": 7.975831031799316,
      "learning_rate": 7.420856610800745e-06,
      "loss": 0.2147,
      "step": 2480
    },
    {
      "epoch": 0.761002444987775,
      "grad_norm": 0.10641849786043167,
      "learning_rate": 7.3277467411545625e-06,
      "loss": 0.1884,
      "step": 2490
    },
    {
      "epoch": 0.7640586797066015,
      "grad_norm": 0.3172556757926941,
      "learning_rate": 7.23463687150838e-06,
      "loss": 0.0692,
      "step": 2500
    },
    {
      "epoch": 0.7671149144254279,
      "grad_norm": 0.12498392164707184,
      "learning_rate": 7.1415270018621975e-06,
      "loss": 0.1202,
      "step": 2510
    },
    {
      "epoch": 0.7701711491442543,
      "grad_norm": 0.1759582906961441,
      "learning_rate": 7.0484171322160155e-06,
      "loss": 0.0624,
      "step": 2520
    },
    {
      "epoch": 0.7732273838630807,
      "grad_norm": 0.09976211935281754,
      "learning_rate": 6.9553072625698325e-06,
      "loss": 0.1345,
      "step": 2530
    },
    {
      "epoch": 0.7762836185819071,
      "grad_norm": 0.10823414474725723,
      "learning_rate": 6.8621973929236505e-06,
      "loss": 0.0963,
      "step": 2540
    },
    {
      "epoch": 0.7793398533007335,
      "grad_norm": 0.09119368344545364,
      "learning_rate": 6.7690875232774675e-06,
      "loss": 0.0065,
      "step": 2550
    },
    {
      "epoch": 0.78239608801956,
      "grad_norm": 0.22418056428432465,
      "learning_rate": 6.675977653631285e-06,
      "loss": 0.1055,
      "step": 2560
    },
    {
      "epoch": 0.7854523227383863,
      "grad_norm": 0.16302266716957092,
      "learning_rate": 6.5828677839851025e-06,
      "loss": 0.1129,
      "step": 2570
    },
    {
      "epoch": 0.7885085574572127,
      "grad_norm": 0.12889882922172546,
      "learning_rate": 6.48975791433892e-06,
      "loss": 0.1578,
      "step": 2580
    },
    {
      "epoch": 0.7915647921760391,
      "grad_norm": 0.4161546528339386,
      "learning_rate": 6.3966480446927375e-06,
      "loss": 0.0546,
      "step": 2590
    },
    {
      "epoch": 0.7946210268948656,
      "grad_norm": 0.4652371108531952,
      "learning_rate": 6.3035381750465554e-06,
      "loss": 0.0736,
      "step": 2600
    },
    {
      "epoch": 0.7976772616136919,
      "grad_norm": 0.1676311194896698,
      "learning_rate": 6.2104283054003725e-06,
      "loss": 0.1377,
      "step": 2610
    },
    {
      "epoch": 0.8007334963325183,
      "grad_norm": 0.49725136160850525,
      "learning_rate": 6.1173184357541904e-06,
      "loss": 0.0478,
      "step": 2620
    },
    {
      "epoch": 0.8037897310513448,
      "grad_norm": 0.3372693359851837,
      "learning_rate": 6.0242085661080075e-06,
      "loss": 0.107,
      "step": 2630
    },
    {
      "epoch": 0.8068459657701712,
      "grad_norm": 11.673458099365234,
      "learning_rate": 5.9310986964618254e-06,
      "loss": 0.1794,
      "step": 2640
    },
    {
      "epoch": 0.8099022004889975,
      "grad_norm": 0.13454145193099976,
      "learning_rate": 5.8379888268156425e-06,
      "loss": 0.005,
      "step": 2650
    },
    {
      "epoch": 0.812958435207824,
      "grad_norm": 0.10115115344524384,
      "learning_rate": 5.74487895716946e-06,
      "loss": 0.0428,
      "step": 2660
    },
    {
      "epoch": 0.8160146699266504,
      "grad_norm": 0.1693265289068222,
      "learning_rate": 5.6517690875232775e-06,
      "loss": 0.0553,
      "step": 2670
    },
    {
      "epoch": 0.8190709046454768,
      "grad_norm": 0.040254298597574234,
      "learning_rate": 5.558659217877095e-06,
      "loss": 0.0041,
      "step": 2680
    },
    {
      "epoch": 0.8221271393643031,
      "grad_norm": 0.06211506202816963,
      "learning_rate": 5.4655493482309125e-06,
      "loss": 0.0301,
      "step": 2690
    },
    {
      "epoch": 0.8251833740831296,
      "grad_norm": 0.03791659697890282,
      "learning_rate": 5.37243947858473e-06,
      "loss": 0.1197,
      "step": 2700
    },
    {
      "epoch": 0.828239608801956,
      "grad_norm": 0.06369005143642426,
      "learning_rate": 5.2793296089385475e-06,
      "loss": 0.065,
      "step": 2710
    },
    {
      "epoch": 0.8312958435207825,
      "grad_norm": 0.15808682143688202,
      "learning_rate": 5.186219739292365e-06,
      "loss": 0.0554,
      "step": 2720
    },
    {
      "epoch": 0.8343520782396088,
      "grad_norm": 0.05038405582308769,
      "learning_rate": 5.0931098696461825e-06,
      "loss": 0.082,
      "step": 2730
    },
    {
      "epoch": 0.8374083129584352,
      "grad_norm": 0.26016944646835327,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 0.0731,
      "step": 2740
    },
    {
      "epoch": 0.8404645476772616,
      "grad_norm": 0.03949306160211563,
      "learning_rate": 4.9068901303538175e-06,
      "loss": 0.2097,
      "step": 2750
    },
    {
      "epoch": 0.843520782396088,
      "grad_norm": 0.10841944068670273,
      "learning_rate": 4.813780260707635e-06,
      "loss": 0.1337,
      "step": 2760
    },
    {
      "epoch": 0.8465770171149144,
      "grad_norm": 0.08589453250169754,
      "learning_rate": 4.7206703910614525e-06,
      "loss": 0.1875,
      "step": 2770
    },
    {
      "epoch": 0.8496332518337408,
      "grad_norm": 7.176298141479492,
      "learning_rate": 4.62756052141527e-06,
      "loss": 0.1634,
      "step": 2780
    },
    {
      "epoch": 0.8526894865525673,
      "grad_norm": 0.20672304928302765,
      "learning_rate": 4.5344506517690875e-06,
      "loss": 0.0686,
      "step": 2790
    },
    {
      "epoch": 0.8557457212713936,
      "grad_norm": 0.03756504878401756,
      "learning_rate": 4.441340782122905e-06,
      "loss": 0.121,
      "step": 2800
    },
    {
      "epoch": 0.85880195599022,
      "grad_norm": 0.509375274181366,
      "learning_rate": 4.348230912476723e-06,
      "loss": 0.0217,
      "step": 2810
    },
    {
      "epoch": 0.8618581907090465,
      "grad_norm": 13.620018005371094,
      "learning_rate": 4.2551210428305395e-06,
      "loss": 0.119,
      "step": 2820
    },
    {
      "epoch": 0.8649144254278729,
      "grad_norm": 0.0516686849296093,
      "learning_rate": 4.1620111731843575e-06,
      "loss": 0.0654,
      "step": 2830
    },
    {
      "epoch": 0.8679706601466992,
      "grad_norm": 0.04489528760313988,
      "learning_rate": 4.068901303538175e-06,
      "loss": 0.0572,
      "step": 2840
    },
    {
      "epoch": 0.8710268948655256,
      "grad_norm": 0.06253698468208313,
      "learning_rate": 3.9757914338919925e-06,
      "loss": 0.0448,
      "step": 2850
    },
    {
      "epoch": 0.8740831295843521,
      "grad_norm": 0.06379766017198563,
      "learning_rate": 3.88268156424581e-06,
      "loss": 0.0062,
      "step": 2860
    },
    {
      "epoch": 0.8771393643031785,
      "grad_norm": 0.058733634650707245,
      "learning_rate": 3.789571694599628e-06,
      "loss": 0.0691,
      "step": 2870
    },
    {
      "epoch": 0.8801955990220048,
      "grad_norm": 0.06447366625070572,
      "learning_rate": 3.696461824953445e-06,
      "loss": 0.1691,
      "step": 2880
    },
    {
      "epoch": 0.8832518337408313,
      "grad_norm": 0.04590935632586479,
      "learning_rate": 3.6033519553072625e-06,
      "loss": 0.1227,
      "step": 2890
    },
    {
      "epoch": 0.8863080684596577,
      "grad_norm": 0.39994603395462036,
      "learning_rate": 3.5102420856610804e-06,
      "loss": 0.1183,
      "step": 2900
    },
    {
      "epoch": 0.8893643031784841,
      "grad_norm": 0.396999329328537,
      "learning_rate": 3.417132216014898e-06,
      "loss": 0.0406,
      "step": 2910
    },
    {
      "epoch": 0.8924205378973105,
      "grad_norm": 0.10507692396640778,
      "learning_rate": 3.324022346368715e-06,
      "loss": 0.053,
      "step": 2920
    },
    {
      "epoch": 0.8954767726161369,
      "grad_norm": 0.3681839406490326,
      "learning_rate": 3.2309124767225324e-06,
      "loss": 0.0048,
      "step": 2930
    },
    {
      "epoch": 0.8985330073349633,
      "grad_norm": 0.21031515300273895,
      "learning_rate": 3.1378026070763504e-06,
      "loss": 0.0053,
      "step": 2940
    },
    {
      "epoch": 0.9015892420537898,
      "grad_norm": 6.659632682800293,
      "learning_rate": 3.044692737430168e-06,
      "loss": 0.0991,
      "step": 2950
    },
    {
      "epoch": 0.9046454767726161,
      "grad_norm": 0.049913499504327774,
      "learning_rate": 2.951582867783985e-06,
      "loss": 0.0038,
      "step": 2960
    },
    {
      "epoch": 0.9077017114914425,
      "grad_norm": 0.09325915575027466,
      "learning_rate": 2.8584729981378024e-06,
      "loss": 0.0033,
      "step": 2970
    },
    {
      "epoch": 0.910757946210269,
      "grad_norm": 0.08547964692115784,
      "learning_rate": 2.7653631284916204e-06,
      "loss": 0.0684,
      "step": 2980
    },
    {
      "epoch": 0.9138141809290954,
      "grad_norm": 9.980097770690918,
      "learning_rate": 2.672253258845438e-06,
      "loss": 0.0735,
      "step": 2990
    },
    {
      "epoch": 0.9168704156479217,
      "grad_norm": 0.03152007982134819,
      "learning_rate": 2.5791433891992554e-06,
      "loss": 0.049,
      "step": 3000
    },
    {
      "epoch": 0.9199266503667481,
      "grad_norm": 0.039837196469306946,
      "learning_rate": 2.4860335195530724e-06,
      "loss": 0.0686,
      "step": 3010
    },
    {
      "epoch": 0.9229828850855746,
      "grad_norm": 12.22442626953125,
      "learning_rate": 2.3929236499068903e-06,
      "loss": 0.0554,
      "step": 3020
    },
    {
      "epoch": 0.926039119804401,
      "grad_norm": 0.13149628043174744,
      "learning_rate": 2.299813780260708e-06,
      "loss": 0.1026,
      "step": 3030
    },
    {
      "epoch": 0.9290953545232273,
      "grad_norm": 0.044785063713788986,
      "learning_rate": 2.2067039106145253e-06,
      "loss": 0.0042,
      "step": 3040
    },
    {
      "epoch": 0.9321515892420538,
      "grad_norm": 0.042241331189870834,
      "learning_rate": 2.1135940409683424e-06,
      "loss": 0.0681,
      "step": 3050
    },
    {
      "epoch": 0.9352078239608802,
      "grad_norm": 0.0555758960545063,
      "learning_rate": 2.0204841713221603e-06,
      "loss": 0.0282,
      "step": 3060
    },
    {
      "epoch": 0.9382640586797066,
      "grad_norm": 0.07724986225366592,
      "learning_rate": 1.927374301675978e-06,
      "loss": 0.1247,
      "step": 3070
    },
    {
      "epoch": 0.941320293398533,
      "grad_norm": 0.08575576543807983,
      "learning_rate": 1.8342644320297951e-06,
      "loss": 0.0689,
      "step": 3080
    },
    {
      "epoch": 0.9443765281173594,
      "grad_norm": 0.2837558686733246,
      "learning_rate": 1.7411545623836128e-06,
      "loss": 0.102,
      "step": 3090
    },
    {
      "epoch": 0.9474327628361858,
      "grad_norm": 0.1205146312713623,
      "learning_rate": 1.6480446927374301e-06,
      "loss": 0.1657,
      "step": 3100
    },
    {
      "epoch": 0.9504889975550123,
      "grad_norm": 0.16669517755508423,
      "learning_rate": 1.5549348230912478e-06,
      "loss": 0.0078,
      "step": 3110
    },
    {
      "epoch": 0.9535452322738386,
      "grad_norm": 15.247810363769531,
      "learning_rate": 1.4618249534450651e-06,
      "loss": 0.0175,
      "step": 3120
    },
    {
      "epoch": 0.956601466992665,
      "grad_norm": 0.20827865600585938,
      "learning_rate": 1.3687150837988828e-06,
      "loss": 0.0353,
      "step": 3130
    },
    {
      "epoch": 0.9596577017114915,
      "grad_norm": 0.07442985475063324,
      "learning_rate": 1.2756052141527001e-06,
      "loss": 0.0415,
      "step": 3140
    },
    {
      "epoch": 0.9627139364303179,
      "grad_norm": 0.040485650300979614,
      "learning_rate": 1.1824953445065178e-06,
      "loss": 0.1062,
      "step": 3150
    },
    {
      "epoch": 0.9657701711491442,
      "grad_norm": 0.12251528352499008,
      "learning_rate": 1.0893854748603353e-06,
      "loss": 0.043,
      "step": 3160
    },
    {
      "epoch": 0.9688264058679706,
      "grad_norm": 0.050180476158857346,
      "learning_rate": 9.962756052141528e-07,
      "loss": 0.03,
      "step": 3170
    },
    {
      "epoch": 0.9718826405867971,
      "grad_norm": 0.0506335012614727,
      "learning_rate": 9.031657355679702e-07,
      "loss": 0.2524,
      "step": 3180
    },
    {
      "epoch": 0.9749388753056235,
      "grad_norm": 28.770177841186523,
      "learning_rate": 8.100558659217877e-07,
      "loss": 0.0968,
      "step": 3190
    },
    {
      "epoch": 0.9779951100244498,
      "grad_norm": 0.03078501671552658,
      "learning_rate": 7.169459962756052e-07,
      "loss": 0.0758,
      "step": 3200
    },
    {
      "epoch": 0.9810513447432763,
      "grad_norm": 16.90084457397461,
      "learning_rate": 6.238361266294227e-07,
      "loss": 0.1547,
      "step": 3210
    },
    {
      "epoch": 0.9841075794621027,
      "grad_norm": 0.1555572897195816,
      "learning_rate": 5.307262569832402e-07,
      "loss": 0.0491,
      "step": 3220
    },
    {
      "epoch": 0.9871638141809291,
      "grad_norm": 0.04730997607111931,
      "learning_rate": 4.3761638733705775e-07,
      "loss": 0.0039,
      "step": 3230
    },
    {
      "epoch": 0.9902200488997555,
      "grad_norm": 3.162146806716919,
      "learning_rate": 3.4450651769087525e-07,
      "loss": 0.1387,
      "step": 3240
    },
    {
      "epoch": 0.9932762836185819,
      "grad_norm": 2.292222499847412,
      "learning_rate": 2.5139664804469275e-07,
      "loss": 0.1184,
      "step": 3250
    },
    {
      "epoch": 0.9963325183374083,
      "grad_norm": 0.054889462888240814,
      "learning_rate": 1.5828677839851024e-07,
      "loss": 0.0065,
      "step": 3260
    },
    {
      "epoch": 0.9993887530562348,
      "grad_norm": 0.310970664024353,
      "learning_rate": 6.517690875232774e-08,
      "loss": 0.0087,
      "step": 3270
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9845683728036669,
      "eval_f1": 0.9839899931917298,
      "eval_loss": 0.06383595615625381,
      "eval_model_preparation_time": 0.0032,
      "eval_runtime": 411.4096,
      "eval_samples_per_second": 15.909,
      "eval_steps_per_second": 1.991,
      "step": 3272
    }
  ],
  "logging_steps": 10,
  "max_steps": 3272,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.2958418080824934e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}