| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.0, |
| "eval_steps": 25, |
| "global_step": 781, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0064054659976513295, |
| "grad_norm": 1.095150351524353, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 0.9763, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.012810931995302659, |
| "grad_norm": 1.0511373281478882, |
| "learning_rate": 2.25e-06, |
| "loss": 0.9023, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.019216397992953986, |
| "grad_norm": 0.817418098449707, |
| "learning_rate": 3.5e-06, |
| "loss": 0.9288, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.025621863990605318, |
| "grad_norm": 0.624769926071167, |
| "learning_rate": 4.75e-06, |
| "loss": 0.8766, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.03202732998825664, |
| "grad_norm": 0.5237262845039368, |
| "learning_rate": 4.999659159998194e-06, |
| "loss": 0.8411, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.03843279598590797, |
| "grad_norm": 0.46350979804992676, |
| "learning_rate": 4.998274656771894e-06, |
| "loss": 0.8132, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.0448382619835593, |
| "grad_norm": 0.3592084050178528, |
| "learning_rate": 4.995825777227236e-06, |
| "loss": 0.7663, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.051243727981210636, |
| "grad_norm": 0.34569790959358215, |
| "learning_rate": 4.992313564696022e-06, |
| "loss": 0.7355, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.057649193978861965, |
| "grad_norm": 0.2865957021713257, |
| "learning_rate": 4.9877395155372815e-06, |
| "loss": 0.7632, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.06405465997651329, |
| "grad_norm": 0.25277993083000183, |
| "learning_rate": 4.982105578499759e-06, |
| "loss": 0.7539, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.07046012597416462, |
| "grad_norm": 0.24113819003105164, |
| "learning_rate": 4.975414153891664e-06, |
| "loss": 0.7322, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.07686559197181594, |
| "grad_norm": 0.2370101362466812, |
| "learning_rate": 4.967668092558024e-06, |
| "loss": 0.7242, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.08327105796946728, |
| "grad_norm": 0.23438391089439392, |
| "learning_rate": 4.9588706946661066e-06, |
| "loss": 0.6943, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.0896765239671186, |
| "grad_norm": 0.24512864649295807, |
| "learning_rate": 4.949025708299395e-06, |
| "loss": 0.6866, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.09608198996476994, |
| "grad_norm": 0.22728285193443298, |
| "learning_rate": 4.93813732786074e-06, |
| "loss": 0.6658, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.10248745596242127, |
| "grad_norm": 0.2585676610469818, |
| "learning_rate": 4.926210192285359e-06, |
| "loss": 0.7284, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.1088929219600726, |
| "grad_norm": 0.2257288247346878, |
| "learning_rate": 4.913249383064438e-06, |
| "loss": 0.6844, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.11529838795772393, |
| "grad_norm": 0.2544812262058258, |
| "learning_rate": 4.899260422080195e-06, |
| "loss": 0.6872, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.12170385395537525, |
| "grad_norm": 0.23987820744514465, |
| "learning_rate": 4.884249269253309e-06, |
| "loss": 0.6438, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.12810931995302657, |
| "grad_norm": 0.28527575731277466, |
| "learning_rate": 4.868222320003731e-06, |
| "loss": 0.656, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.13451478595067792, |
| "grad_norm": 0.25415557622909546, |
| "learning_rate": 4.851186402525946e-06, |
| "loss": 0.6562, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.14092025194832924, |
| "grad_norm": 0.21308082342147827, |
| "learning_rate": 4.8331487748798636e-06, |
| "loss": 0.6569, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.14732571794598057, |
| "grad_norm": 0.23867692053318024, |
| "learning_rate": 4.814117121898554e-06, |
| "loss": 0.6158, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.1537311839436319, |
| "grad_norm": 0.23583965003490448, |
| "learning_rate": 4.794099551914173e-06, |
| "loss": 0.6084, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.16013664994128324, |
| "grad_norm": 0.22881825268268585, |
| "learning_rate": 4.773104593303449e-06, |
| "loss": 0.6778, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.16654211593893456, |
| "grad_norm": 0.24278834462165833, |
| "learning_rate": 4.751141190854214e-06, |
| "loss": 0.6071, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.17294758193658588, |
| "grad_norm": 0.2911915183067322, |
| "learning_rate": 4.728218701954525e-06, |
| "loss": 0.6543, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.1793530479342372, |
| "grad_norm": 0.2795209586620331, |
| "learning_rate": 4.704346892606001e-06, |
| "loss": 0.5956, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.18575851393188855, |
| "grad_norm": 0.25558847188949585, |
| "learning_rate": 4.6795359332630694e-06, |
| "loss": 0.6395, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.19216397992953987, |
| "grad_norm": 0.2465032935142517, |
| "learning_rate": 4.653796394499904e-06, |
| "loss": 0.6136, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.1985694459271912, |
| "grad_norm": 0.244729146361351, |
| "learning_rate": 4.627139242506882e-06, |
| "loss": 0.6046, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.20497491192484255, |
| "grad_norm": 0.2569523751735687, |
| "learning_rate": 4.599575834418505e-06, |
| "loss": 0.6483, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.21138037792249387, |
| "grad_norm": 0.29486870765686035, |
| "learning_rate": 4.571117913474749e-06, |
| "loss": 0.6034, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.2177858439201452, |
| "grad_norm": 0.26274922490119934, |
| "learning_rate": 4.541777604017924e-06, |
| "loss": 0.5834, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.2241913099177965, |
| "grad_norm": 0.2968301773071289, |
| "learning_rate": 4.511567406327162e-06, |
| "loss": 0.6063, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.23059677591544786, |
| "grad_norm": 0.301880419254303, |
| "learning_rate": 4.480500191292744e-06, |
| "loss": 0.5965, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.23700224191309918, |
| "grad_norm": 0.27574408054351807, |
| "learning_rate": 4.448589194932521e-06, |
| "loss": 0.5959, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.2434077079107505, |
| "grad_norm": 0.25683072209358215, |
| "learning_rate": 4.415848012752789e-06, |
| "loss": 0.5764, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.24981317390840183, |
| "grad_norm": 0.3023504912853241, |
| "learning_rate": 4.38229059395599e-06, |
| "loss": 0.6013, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.25621863990605315, |
| "grad_norm": 0.2837856113910675, |
| "learning_rate": 4.347931235497738e-06, |
| "loss": 0.6044, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.2626241059037045, |
| "grad_norm": 0.29550132155418396, |
| "learning_rate": 4.312784575995669e-06, |
| "loss": 0.5866, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.26902957190135585, |
| "grad_norm": 0.28646326065063477, |
| "learning_rate": 4.276865589492747e-06, |
| "loss": 0.5838, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.27543503789900714, |
| "grad_norm": 0.3569891154766083, |
| "learning_rate": 4.240189579077649e-06, |
| "loss": 0.5701, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.2818405038966585, |
| "grad_norm": 0.33763790130615234, |
| "learning_rate": 4.202772170364969e-06, |
| "loss": 0.5759, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.2882459698943098, |
| "grad_norm": 0.245514377951622, |
| "learning_rate": 4.164629304838012e-06, |
| "loss": 0.5694, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.29465143589196113, |
| "grad_norm": 0.2917734682559967, |
| "learning_rate": 4.125777233057007e-06, |
| "loss": 0.5944, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.3010569018896125, |
| "grad_norm": 0.2828243374824524, |
| "learning_rate": 4.086232507735648e-06, |
| "loss": 0.5717, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.3074623678872638, |
| "grad_norm": 0.38584598898887634, |
| "learning_rate": 4.0460119766889e-06, |
| "loss": 0.5931, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.3138678338849151, |
| "grad_norm": 0.36006131768226624, |
| "learning_rate": 4.005132775655076e-06, |
| "loss": 0.5564, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.3202732998825665, |
| "grad_norm": 0.372942179441452, |
| "learning_rate": 3.963612320995257e-06, |
| "loss": 0.6025, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.32667876588021777, |
| "grad_norm": 0.28577810525894165, |
| "learning_rate": 3.921468302273137e-06, |
| "loss": 0.5469, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.3330842318778691, |
| "grad_norm": 0.3417142331600189, |
| "learning_rate": 3.8787186747184826e-06, |
| "loss": 0.5342, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.33948969787552047, |
| "grad_norm": 0.35277873277664185, |
| "learning_rate": 3.8353816515774115e-06, |
| "loss": 0.5531, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.34589516387317176, |
| "grad_norm": 0.2886194586753845, |
| "learning_rate": 3.79147569635273e-06, |
| "loss": 0.5431, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.3523006298708231, |
| "grad_norm": 0.29417017102241516, |
| "learning_rate": 3.747019514937663e-06, |
| "loss": 0.5826, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.3587060958684744, |
| "grad_norm": 0.3237856924533844, |
| "learning_rate": 3.70203204764631e-06, |
| "loss": 0.5689, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.36511156186612576, |
| "grad_norm": 0.3846990168094635, |
| "learning_rate": 3.6565324611442234e-06, |
| "loss": 0.5763, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.3715170278637771, |
| "grad_norm": 0.2781718969345093, |
| "learning_rate": 3.6105401402825595e-06, |
| "loss": 0.5549, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.3779224938614284, |
| "grad_norm": 0.2733434736728668, |
| "learning_rate": 3.5640746798392657e-06, |
| "loss": 0.5967, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.38432795985907975, |
| "grad_norm": 0.34851792454719543, |
| "learning_rate": 3.5171558761708334e-06, |
| "loss": 0.5543, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.3907334258567311, |
| "grad_norm": 0.34361544251441956, |
| "learning_rate": 3.469803718778166e-06, |
| "loss": 0.5519, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.3971388918543824, |
| "grad_norm": 0.2903674244880676, |
| "learning_rate": 3.4220383817901625e-06, |
| "loss": 0.562, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.40354435785203374, |
| "grad_norm": 0.31230857968330383, |
| "learning_rate": 3.3738802153686414e-06, |
| "loss": 0.5537, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.4099498238496851, |
| "grad_norm": 0.31388580799102783, |
| "learning_rate": 3.3253497370382605e-06, |
| "loss": 0.5551, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.4163552898473364, |
| "grad_norm": 0.32741594314575195, |
| "learning_rate": 3.2764676229451397e-06, |
| "loss": 0.5498, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.42276075584498773, |
| "grad_norm": 0.2511857748031616, |
| "learning_rate": 3.227254699047904e-06, |
| "loss": 0.5511, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.42916622184263903, |
| "grad_norm": 0.3877544105052948, |
| "learning_rate": 3.177731932244892e-06, |
| "loss": 0.5485, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.4355716878402904, |
| "grad_norm": 0.3445497751235962, |
| "learning_rate": 3.127920421441327e-06, |
| "loss": 0.5639, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.4419771538379417, |
| "grad_norm": 0.3347369432449341, |
| "learning_rate": 3.077841388560243e-06, |
| "loss": 0.5745, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.448382619835593, |
| "grad_norm": 0.36205726861953735, |
| "learning_rate": 3.0275161695009975e-06, |
| "loss": 0.5658, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.45478808583324437, |
| "grad_norm": 0.35722121596336365, |
| "learning_rate": 2.9769662050492276e-06, |
| "loss": 0.5479, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.4611935518308957, |
| "grad_norm": 0.35634711384773254, |
| "learning_rate": 2.926213031742125e-06, |
| "loss": 0.5595, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.467599017828547, |
| "grad_norm": 0.32853105664253235, |
| "learning_rate": 2.8752782726929045e-06, |
| "loss": 0.5429, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.47400448382619836, |
| "grad_norm": 0.3939530551433563, |
| "learning_rate": 2.8241836283784026e-06, |
| "loss": 0.531, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.48040994982384966, |
| "grad_norm": 0.3002113699913025, |
| "learning_rate": 2.7729508673936972e-06, |
| "loss": 0.5596, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.486815415821501, |
| "grad_norm": 0.3216375708580017, |
| "learning_rate": 2.721601817177725e-06, |
| "loss": 0.5292, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.49322088181915236, |
| "grad_norm": 0.3263518810272217, |
| "learning_rate": 2.6701583547138165e-06, |
| "loss": 0.5684, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.49962634781680365, |
| "grad_norm": 0.3253946900367737, |
| "learning_rate": 2.618642397209126e-06, |
| "loss": 0.5264, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.5060318138144551, |
| "grad_norm": 0.3231858015060425, |
| "learning_rate": 2.567075892756924e-06, |
| "loss": 0.5448, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.5124372798121063, |
| "grad_norm": 0.36590418219566345, |
| "learning_rate": 2.5154808109857367e-06, |
| "loss": 0.5248, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.5188427458097576, |
| "grad_norm": 0.3773280084133148, |
| "learning_rate": 2.4638791336992967e-06, |
| "loss": 0.5505, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.525248211807409, |
| "grad_norm": 0.3829450309276581, |
| "learning_rate": 2.4122928455113233e-06, |
| "loss": 0.5466, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.5316536778050603, |
| "grad_norm": 0.3306972086429596, |
| "learning_rate": 2.360743924479093e-06, |
| "loss": 0.5721, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.5380591438027117, |
| "grad_norm": 0.32845625281333923, |
| "learning_rate": 2.3092543327398083e-06, |
| "loss": 0.5177, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.5444646098003629, |
| "grad_norm": 0.3088594079017639, |
| "learning_rate": 2.2578460071537512e-06, |
| "loss": 0.5575, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.5508700757980143, |
| "grad_norm": 0.3912436366081238, |
| "learning_rate": 2.2065408499582e-06, |
| "loss": 0.561, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.5572755417956656, |
| "grad_norm": 0.3839537799358368, |
| "learning_rate": 2.155360719436102e-06, |
| "loss": 0.526, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.563681007793317, |
| "grad_norm": 0.3676234185695648, |
| "learning_rate": 2.1043274206034727e-06, |
| "loss": 0.541, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.5700864737909683, |
| "grad_norm": 0.368198424577713, |
| "learning_rate": 2.0534626959194816e-06, |
| "loss": 0.5212, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.5764919397886196, |
| "grad_norm": 0.34693998098373413, |
| "learning_rate": 2.002788216023203e-06, |
| "loss": 0.5462, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.5828974057862709, |
| "grad_norm": 0.37934619188308716, |
| "learning_rate": 1.9523255705009558e-06, |
| "loss": 0.5423, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.5893028717839223, |
| "grad_norm": 0.3458510637283325, |
| "learning_rate": 1.902096258688174e-06, |
| "loss": 0.4889, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.5957083377815736, |
| "grad_norm": 0.33206406235694885, |
| "learning_rate": 1.8521216805097358e-06, |
| "loss": 0.5378, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.602113803779225, |
| "grad_norm": 0.29139187932014465, |
| "learning_rate": 1.8024231273626424e-06, |
| "loss": 0.5444, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.6085192697768763, |
| "grad_norm": 0.3665342926979065, |
| "learning_rate": 1.7530217730449312e-06, |
| "loss": 0.5276, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.6149247357745276, |
| "grad_norm": 0.32149678468704224, |
| "learning_rate": 1.7039386647346975e-06, |
| "loss": 0.5117, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.6213302017721789, |
| "grad_norm": 0.30389899015426636, |
| "learning_rate": 1.6551947140230568e-06, |
| "loss": 0.5573, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.6277356677698303, |
| "grad_norm": 0.3890632688999176, |
| "learning_rate": 1.6068106880048747e-06, |
| "loss": 0.5395, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.6341411337674816, |
| "grad_norm": 0.36464181542396545, |
| "learning_rate": 1.5588072004310634e-06, |
| "loss": 0.5497, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.640546599765133, |
| "grad_norm": 0.32058826088905334, |
| "learning_rate": 1.5112047029262e-06, |
| "loss": 0.5377, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.6469520657627842, |
| "grad_norm": 0.280649870634079, |
| "learning_rate": 1.4640234762752248e-06, |
| "loss": 0.5349, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.6533575317604355, |
| "grad_norm": 0.36669427156448364, |
| "learning_rate": 1.4172836217829267e-06, |
| "loss": 0.539, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.6597629977580869, |
| "grad_norm": 0.3747427463531494, |
| "learning_rate": 1.3710050527098867e-06, |
| "loss": 0.5582, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.6661684637557382, |
| "grad_norm": 0.3354856073856354, |
| "learning_rate": 1.3252074857885453e-06, |
| "loss": 0.5512, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.6725739297533896, |
| "grad_norm": 0.3852190673351288, |
| "learning_rate": 1.2799104328229928e-06, |
| "loss": 0.5216, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.6789793957510409, |
| "grad_norm": 0.3479975759983063, |
| "learning_rate": 1.2351331923760743e-06, |
| "loss": 0.518, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.6853848617486922, |
| "grad_norm": 0.37335509061813354, |
| "learning_rate": 1.1908948415473418e-06, |
| "loss": 0.5203, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.6917903277463435, |
| "grad_norm": 0.4034595489501953, |
| "learning_rate": 1.1472142278453582e-06, |
| "loss": 0.5157, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.6981957937439949, |
| "grad_norm": 0.2966196537017822, |
| "learning_rate": 1.1041099611578177e-06, |
| "loss": 0.541, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.7046012597416462, |
| "grad_norm": 0.3616897463798523, |
| "learning_rate": 1.0616004058229084e-06, |
| "loss": 0.5254, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.7110067257392976, |
| "grad_norm": 0.3452332317829132, |
| "learning_rate": 1.0197036728052847e-06, |
| "loss": 0.5543, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.7174121917369488, |
| "grad_norm": 0.3313845694065094, |
| "learning_rate": 9.784376119799851e-07, |
| "loss": 0.5294, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.7238176577346002, |
| "grad_norm": 0.37989768385887146, |
| "learning_rate": 9.378198045275968e-07, |
| "loss": 0.5411, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.7302231237322515, |
| "grad_norm": 0.3642790913581848, |
| "learning_rate": 8.97867555443886e-07, |
| "loss": 0.5186, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.7366285897299029, |
| "grad_norm": 0.369183748960495, |
| "learning_rate": 8.585978861670958e-07, |
| "loss": 0.5189, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.7430340557275542, |
| "grad_norm": 0.3428362011909485, |
| "learning_rate": 8.200275273260611e-07, |
| "loss": 0.5326, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.7494395217252056, |
| "grad_norm": 0.33693212270736694, |
| "learning_rate": 7.821729116122126e-07, |
| "loss": 0.5417, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.7558449877228568, |
| "grad_norm": 0.35541847348213196, |
| "learning_rate": 7.450501667785146e-07, |
| "loss": 0.531, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.7622504537205081, |
| "grad_norm": 0.3545955717563629, |
| "learning_rate": 7.086751087683297e-07, |
| "loss": 0.5371, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.7686559197181595, |
| "grad_norm": 0.369098424911499, |
| "learning_rate": 6.730632349771193e-07, |
| "loss": 0.5444, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.7750613857158108, |
| "grad_norm": 0.395386666059494, |
| "learning_rate": 6.3822971764986e-07, |
| "loss": 0.5207, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.7814668517134622, |
| "grad_norm": 0.3929295241832733, |
| "learning_rate": 6.041893974169963e-07, |
| "loss": 0.5223, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.7878723177111134, |
| "grad_norm": 0.38200634717941284, |
| "learning_rate": 5.709567769716678e-07, |
| "loss": 0.5346, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.7942777837087648, |
| "grad_norm": 0.35260388255119324, |
| "learning_rate": 5.385460148909169e-07, |
| "loss": 0.5106, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.8006832497064161, |
| "grad_norm": 0.36902984976768494, |
| "learning_rate": 5.069709196035011e-07, |
| "loss": 0.5333, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.8070887157040675, |
| "grad_norm": 0.36749160289764404, |
| "learning_rate": 4.762449435068914e-07, |
| "loss": 0.5197, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.8134941817017188, |
| "grad_norm": 0.37014660239219666, |
| "learning_rate": 4.4638117723595054e-07, |
| "loss": 0.5492, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.8198996476993702, |
| "grad_norm": 0.3781520426273346, |
| "learning_rate": 4.173923440857358e-07, |
| "loss": 0.5365, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.8263051136970214, |
| "grad_norm": 0.33049196004867554, |
| "learning_rate": 3.892907945908128e-07, |
| "loss": 0.5163, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.8327105796946728, |
| "grad_norm": 0.40030181407928467, |
| "learning_rate": 3.6208850126337595e-07, |
| "loss": 0.5111, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.8391160456923241, |
| "grad_norm": 0.3238008916378021, |
| "learning_rate": 3.357970534924229e-07, |
| "loss": 0.544, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.8455215116899755, |
| "grad_norm": 0.3207038938999176, |
| "learning_rate": 3.104276526061617e-07, |
| "loss": 0.5242, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.8519269776876268, |
| "grad_norm": 0.38148337602615356, |
| "learning_rate": 2.859911070997437e-07, |
| "loss": 0.5358, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.8583324436852781, |
| "grad_norm": 0.44874271750450134, |
| "learning_rate": 2.624978280303628e-07, |
| "loss": 0.5347, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.8647379096829294, |
| "grad_norm": 0.3957948684692383, |
| "learning_rate": 2.3995782458168276e-07, |
| "loss": 0.5401, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.8711433756805808, |
| "grad_norm": 0.398444265127182, |
| "learning_rate": 2.1838069979947945e-07, |
| "loss": 0.524, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.8775488416782321, |
| "grad_norm": 0.37134161591529846, |
| "learning_rate": 1.9777564650031112e-07, |
| "loss": 0.535, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.8839543076758835, |
| "grad_norm": 0.37950778007507324, |
| "learning_rate": 1.7815144335497524e-07, |
| "loss": 0.5358, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.8903597736735348, |
| "grad_norm": 0.4200168251991272, |
| "learning_rate": 1.5951645114839875e-07, |
| "loss": 0.5234, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.896765239671186, |
| "grad_norm": 0.3997882008552551, |
| "learning_rate": 1.4187860921757252e-07, |
| "loss": 0.5437, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.9031707056688374, |
| "grad_norm": 0.405408650636673, |
| "learning_rate": 1.2524543206904188e-07, |
| "loss": 0.5443, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.9095761716664887, |
| "grad_norm": 0.368826299905777, |
| "learning_rate": 1.0962400617738872e-07, |
| "loss": 0.564, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.9159816376641401, |
| "grad_norm": 0.41221150755882263, |
| "learning_rate": 9.502098696608147e-08, |
| "loss": 0.5288, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.9223871036617914, |
| "grad_norm": 0.30681097507476807, |
| "learning_rate": 8.144259597196308e-08, |
| "loss": 0.5356, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.9287925696594427, |
| "grad_norm": 0.3287171423435211, |
| "learning_rate": 6.889461819460485e-08, |
| "loss": 0.5228, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.935198035657094, |
| "grad_norm": 0.39335137605667114, |
| "learning_rate": 5.738239963163472e-08, |
| "loss": 0.5102, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.9416035016547454, |
| "grad_norm": 0.5197286605834961, |
| "learning_rate": 4.691084500110521e-08, |
| "loss": 0.5134, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.9480089676523967, |
| "grad_norm": 0.37825560569763184, |
| "learning_rate": 3.748441565186583e-08, |
| "loss": 0.495, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.9544144336500481, |
| "grad_norm": 0.39631250500679016, |
| "learning_rate": 2.910712766282908e-08, |
| "loss": 0.5227, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.9608198996476993, |
| "grad_norm": 0.40286198258399963, |
| "learning_rate": 2.178255013194075e-08, |
| "loss": 0.5142, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.9672253656453507, |
| "grad_norm": 0.31852659583091736, |
| "learning_rate": 1.5513803655587966e-08, |
| "loss": 0.4985, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.973630831643002, |
| "grad_norm": 0.3686765134334564, |
| "learning_rate": 1.0303558999082974e-08, |
| "loss": 0.5464, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.9800362976406534, |
| "grad_norm": 0.3755616843700409, |
| "learning_rate": 6.1540359588005416e-09, |
| "loss": 0.5158, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.9864417636383047, |
| "grad_norm": 0.31239190697669983, |
| "learning_rate": 3.067002416444198e-09, |
| "loss": 0.499, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.9928472296359561, |
| "grad_norm": 0.3598840534687042, |
| "learning_rate": 1.0437735858506715e-09, |
| "loss": 0.5246, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.9992526956336073, |
| "grad_norm": 0.3162603974342346, |
| "learning_rate": 8.521145264978048e-11, |
| "loss": 0.5619, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.0, |
| "step": 781, |
| "total_flos": 1.4329344655107686e+18, |
| "train_loss": 0.5799389032029312, |
| "train_runtime": 9974.2346, |
| "train_samples_per_second": 0.939, |
| "train_steps_per_second": 0.078 |
| } |
| ], |
| "logging_steps": 5, |
| "max_steps": 781, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 50, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.4329344655107686e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |