{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1089,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00919064851513585,
      "grad_norm": 0.8316559195518494,
      "learning_rate": 8.181818181818183e-06,
      "loss": 0.499,
      "step": 10
    },
    {
      "epoch": 0.0183812970302717,
      "grad_norm": 0.8773086667060852,
      "learning_rate": 1.7272727272727274e-05,
      "loss": 0.5179,
      "step": 20
    },
    {
      "epoch": 0.02757194554540755,
      "grad_norm": 0.9753805994987488,
      "learning_rate": 2.636363636363636e-05,
      "loss": 0.5032,
      "step": 30
    },
    {
      "epoch": 0.0367625940605434,
      "grad_norm": 0.8634973764419556,
      "learning_rate": 3.545454545454546e-05,
      "loss": 0.5424,
      "step": 40
    },
    {
      "epoch": 0.045953242575679246,
      "grad_norm": 0.8948274254798889,
      "learning_rate": 4.454545454545455e-05,
      "loss": 0.5347,
      "step": 50
    },
    {
      "epoch": 0.0551438910908151,
      "grad_norm": 0.9634304642677307,
      "learning_rate": 4.999815378056639e-05,
      "loss": 0.5524,
      "step": 60
    },
    {
      "epoch": 0.06433453960595094,
      "grad_norm": 0.8109133243560791,
      "learning_rate": 4.997738694339449e-05,
      "loss": 0.5651,
      "step": 70
    },
    {
      "epoch": 0.0735251881210868,
      "grad_norm": 0.8288258910179138,
      "learning_rate": 4.993356472749955e-05,
      "loss": 0.6061,
      "step": 80
    },
    {
      "epoch": 0.08271583663622264,
      "grad_norm": 0.7533223628997803,
      "learning_rate": 4.986672758298095e-05,
      "loss": 0.6184,
      "step": 90
    },
    {
      "epoch": 0.09190648515135849,
      "grad_norm": 0.8546445369720459,
      "learning_rate": 4.97769372038695e-05,
      "loss": 0.5879,
      "step": 100
    },
    {
      "epoch": 0.10109713366649434,
      "grad_norm": 0.8864842057228088,
      "learning_rate": 4.966427647118077e-05,
      "loss": 0.6273,
      "step": 110
    },
    {
      "epoch": 0.1102877821816302,
      "grad_norm": 0.7892295122146606,
      "learning_rate": 4.952884937641176e-05,
      "loss": 0.579,
      "step": 120
    },
    {
      "epoch": 0.11947843069676604,
      "grad_norm": 0.7917618751525879,
      "learning_rate": 4.937078092555153e-05,
      "loss": 0.5732,
      "step": 130
    },
    {
      "epoch": 0.12866907921190188,
      "grad_norm": 0.727471113204956,
      "learning_rate": 4.919021702369445e-05,
      "loss": 0.6135,
      "step": 140
    },
    {
      "epoch": 0.13785972772703775,
      "grad_norm": 0.7850656509399414,
      "learning_rate": 4.898732434036244e-05,
      "loss": 0.5946,
      "step": 150
    },
    {
      "epoch": 0.1470503762421736,
      "grad_norm": 0.6990341544151306,
      "learning_rate": 4.876229015566071e-05,
      "loss": 0.6023,
      "step": 160
    },
    {
      "epoch": 0.15624102475730944,
      "grad_norm": 0.7127705216407776,
      "learning_rate": 4.85153221874088e-05,
      "loss": 0.608,
      "step": 170
    },
    {
      "epoch": 0.1654316732724453,
      "grad_norm": 0.778217077255249,
      "learning_rate": 4.824664839940667e-05,
      "loss": 0.5726,
      "step": 180
    },
    {
      "epoch": 0.17462232178758114,
      "grad_norm": 0.7732360363006592,
      "learning_rate": 4.795651679101267e-05,
      "loss": 0.5802,
      "step": 190
    },
    {
      "epoch": 0.18381297030271698,
      "grad_norm": 0.7755158543586731,
      "learning_rate": 4.764519516822774e-05,
      "loss": 0.6277,
      "step": 200
    },
    {
      "epoch": 0.19300361881785283,
      "grad_norm": 0.7841328382492065,
      "learning_rate": 4.731297089649703e-05,
      "loss": 0.6151,
      "step": 210
    },
    {
      "epoch": 0.20219426733298868,
      "grad_norm": 0.7142501473426819,
      "learning_rate": 4.696015063545726e-05,
      "loss": 0.5707,
      "step": 220
    },
    {
      "epoch": 0.21138491584812452,
      "grad_norm": 0.7081753015518188,
      "learning_rate": 4.658706005587443e-05,
      "loss": 0.605,
      "step": 230
    },
    {
      "epoch": 0.2205755643632604,
      "grad_norm": 0.7052861452102661,
      "learning_rate": 4.619404353903353e-05,
      "loss": 0.5783,
      "step": 240
    },
    {
      "epoch": 0.22976621287839624,
      "grad_norm": 0.7916448712348938,
      "learning_rate": 4.578146385885723e-05,
      "loss": 0.6093,
      "step": 250
    },
    {
      "epoch": 0.2389568613935321,
      "grad_norm": 0.8103572130203247,
      "learning_rate": 4.5349701847047496e-05,
      "loss": 0.6432,
      "step": 260
    },
    {
      "epoch": 0.24814750990866793,
      "grad_norm": 0.715683102607727,
      "learning_rate": 4.489915604155879e-05,
      "loss": 0.5833,
      "step": 270
    },
    {
      "epoch": 0.25733815842380375,
      "grad_norm": 0.7637775540351868,
      "learning_rate": 4.4430242318727624e-05,
      "loss": 0.6096,
      "step": 280
    },
    {
      "epoch": 0.26652880693893966,
      "grad_norm": 0.7922810912132263,
      "learning_rate": 4.394339350939787e-05,
      "loss": 0.6341,
      "step": 290
    },
    {
      "epoch": 0.2757194554540755,
      "grad_norm": 0.6947622895240784,
      "learning_rate": 4.34390589993962e-05,
      "loss": 0.6911,
      "step": 300
    },
    {
      "epoch": 0.28491010396921135,
      "grad_norm": 0.74177485704422,
      "learning_rate": 4.2917704314726505e-05,
      "loss": 0.5604,
      "step": 310
    },
    {
      "epoch": 0.2941007524843472,
      "grad_norm": 0.8070971369743347,
      "learning_rate": 4.2379810691866064e-05,
      "loss": 0.6613,
      "step": 320
    },
    {
      "epoch": 0.30329140099948304,
      "grad_norm": 0.7629824280738831,
      "learning_rate": 4.182587463356016e-05,
      "loss": 0.6023,
      "step": 330
    },
    {
      "epoch": 0.3124820495146189,
      "grad_norm": 0.8028804063796997,
      "learning_rate": 4.125640745052522e-05,
      "loss": 0.6769,
      "step": 340
    },
    {
      "epoch": 0.32167269802975473,
      "grad_norm": 0.7006043791770935,
      "learning_rate": 4.067193478948344e-05,
      "loss": 0.6306,
      "step": 350
    },
    {
      "epoch": 0.3308633465448906,
      "grad_norm": 0.6782275438308716,
      "learning_rate": 4.007299614796447e-05,
      "loss": 0.6056,
      "step": 360
    },
    {
      "epoch": 0.3400539950600264,
      "grad_norm": 0.746510922908783,
      "learning_rate": 3.9460144376322256e-05,
      "loss": 0.5728,
      "step": 370
    },
    {
      "epoch": 0.3492446435751623,
      "grad_norm": 0.7944929599761963,
      "learning_rate": 3.8833945167426434e-05,
      "loss": 0.59,
      "step": 380
    },
    {
      "epoch": 0.3584352920902981,
      "grad_norm": 0.7733877897262573,
      "learning_rate": 3.819497653449954e-05,
      "loss": 0.6037,
      "step": 390
    },
    {
      "epoch": 0.36762594060543397,
      "grad_norm": 0.6790263652801514,
      "learning_rate": 3.754382827758179e-05,
      "loss": 0.6148,
      "step": 400
    },
    {
      "epoch": 0.3768165891205698,
      "grad_norm": 0.6839562058448792,
      "learning_rate": 3.688110143911618e-05,
      "loss": 0.5848,
      "step": 410
    },
    {
      "epoch": 0.38600723763570566,
      "grad_norm": 0.8358755707740784,
      "learning_rate": 3.620740774915622e-05,
      "loss": 0.584,
      "step": 420
    },
    {
      "epoch": 0.3951978861508415,
      "grad_norm": 0.6800047755241394,
      "learning_rate": 3.552336906070838e-05,
      "loss": 0.5998,
      "step": 430
    },
    {
      "epoch": 0.40438853466597735,
      "grad_norm": 0.7366592884063721,
      "learning_rate": 3.482961677573074e-05,
      "loss": 0.5783,
      "step": 440
    },
    {
      "epoch": 0.4135791831811132,
      "grad_norm": 0.7072996497154236,
      "learning_rate": 3.41267912623173e-05,
      "loss": 0.561,
      "step": 450
    },
    {
      "epoch": 0.42276983169624904,
      "grad_norm": 0.7341826558113098,
      "learning_rate": 3.341554126360619e-05,
      "loss": 0.5687,
      "step": 460
    },
    {
      "epoch": 0.4319604802113849,
      "grad_norm": 0.9071962237358093,
      "learning_rate": 3.269652329895728e-05,
      "loss": 0.6271,
      "step": 470
    },
    {
      "epoch": 0.4411511287265208,
      "grad_norm": 0.664733350276947,
      "learning_rate": 3.1970401057951905e-05,
      "loss": 0.5826,
      "step": 480
    },
    {
      "epoch": 0.45034177724165664,
      "grad_norm": 0.7106197476387024,
      "learning_rate": 3.12378447877742e-05,
      "loss": 0.581,
      "step": 490
    },
    {
      "epoch": 0.4595324257567925,
      "grad_norm": 0.7017560005187988,
      "learning_rate": 3.049953067453934e-05,
      "loss": 0.5701,
      "step": 500
    },
    {
      "epoch": 0.46872307427192833,
      "grad_norm": 0.7051500678062439,
      "learning_rate": 2.9756140219139957e-05,
      "loss": 0.5772,
      "step": 510
    },
    {
      "epoch": 0.4779137227870642,
      "grad_norm": 0.7792097330093384,
      "learning_rate": 2.900835960818664e-05,
      "loss": 0.6002,
      "step": 520
    },
    {
      "epoch": 0.4871043713022,
      "grad_norm": 0.7199306488037109,
      "learning_rate": 2.8256879080623394e-05,
      "loss": 0.5871,
      "step": 530
    },
    {
      "epoch": 0.49629501981733587,
      "grad_norm": 0.7342507839202881,
      "learning_rate": 2.7502392290602463e-05,
      "loss": 0.6032,
      "step": 540
    },
    {
      "epoch": 0.5054856683324717,
      "grad_norm": 0.7658795118331909,
      "learning_rate": 2.6745595667206873e-05,
      "loss": 0.595,
      "step": 550
    },
    {
      "epoch": 0.5146763168476075,
      "grad_norm": 0.7520851492881775,
      "learning_rate": 2.598718777161152e-05,
      "loss": 0.6061,
      "step": 560
    },
    {
      "epoch": 0.5238669653627434,
      "grad_norm": 0.7485829591751099,
      "learning_rate": 2.5227868652276205e-05,
      "loss": 0.5787,
      "step": 570
    },
    {
      "epoch": 0.5330576138778793,
      "grad_norm": 0.7010724544525146,
      "learning_rate": 2.446833919876584e-05,
      "loss": 0.6055,
      "step": 580
    },
    {
      "epoch": 0.5422482623930152,
      "grad_norm": 0.7274252772331238,
      "learning_rate": 2.3709300494794347e-05,
      "loss": 0.5696,
      "step": 590
    },
    {
      "epoch": 0.551438910908151,
      "grad_norm": 0.6527493000030518,
      "learning_rate": 2.295145317108924e-05,
      "loss": 0.5651,
      "step": 600
    },
    {
      "epoch": 0.5606295594232868,
      "grad_norm": 0.752870500087738,
      "learning_rate": 2.219549675867439e-05,
      "loss": 0.6001,
      "step": 610
    },
    {
      "epoch": 0.5698202079384227,
      "grad_norm": 0.8178837895393372,
      "learning_rate": 2.1442129043167874e-05,
      "loss": 0.6227,
      "step": 620
    },
    {
      "epoch": 0.5790108564535585,
      "grad_norm": 0.7618655562400818,
      "learning_rate": 2.0692045420690925e-05,
      "loss": 0.5446,
      "step": 630
    },
    {
      "epoch": 0.5882015049686944,
      "grad_norm": 0.7821996808052063,
      "learning_rate": 1.9945938255982466e-05,
      "loss": 0.6276,
      "step": 640
    },
    {
      "epoch": 0.5973921534838302,
      "grad_norm": 0.7382956147193909,
      "learning_rate": 1.920449624331179e-05,
      "loss": 0.5368,
      "step": 650
    },
    {
      "epoch": 0.6065828019989661,
      "grad_norm": 0.7067505717277527,
      "learning_rate": 1.8468403770779337e-05,
      "loss": 0.5906,
      "step": 660
    },
    {
      "epoch": 0.6157734505141019,
      "grad_norm": 0.7150029540061951,
      "learning_rate": 1.7738340288592103e-05,
      "loss": 0.5398,
      "step": 670
    },
    {
      "epoch": 0.6249640990292378,
      "grad_norm": 0.6986609697341919,
      "learning_rate": 1.7014979681897164e-05,
      "loss": 0.5463,
      "step": 680
    },
    {
      "epoch": 0.6341547475443736,
      "grad_norm": 0.8039021492004395,
      "learning_rate": 1.629898964875189e-05,
      "loss": 0.5595,
      "step": 690
    },
    {
      "epoch": 0.6433453960595095,
      "grad_norm": 0.772060751914978,
      "learning_rate": 1.5591031083805265e-05,
      "loss": 0.5648,
      "step": 700
    },
    {
      "epoch": 0.6525360445746453,
      "grad_norm": 0.7191370129585266,
      "learning_rate": 1.4891757468258983e-05,
      "loss": 0.5658,
      "step": 710
    },
    {
      "epoch": 0.6617266930897812,
      "grad_norm": 0.692693293094635,
      "learning_rate": 1.4201814266671603e-05,
      "loss": 0.5544,
      "step": 720
    },
    {
      "epoch": 0.670917341604917,
      "grad_norm": 0.8597797155380249,
      "learning_rate": 1.3521838331162439e-05,
      "loss": 0.5671,
      "step": 730
    },
    {
      "epoch": 0.6801079901200529,
      "grad_norm": 0.7617805600166321,
      "learning_rate": 1.2852457313565187e-05,
      "loss": 0.5965,
      "step": 740
    },
    {
      "epoch": 0.6892986386351887,
      "grad_norm": 0.7422612905502319,
      "learning_rate": 1.2194289086073837e-05,
      "loss": 0.5756,
      "step": 750
    },
    {
      "epoch": 0.6984892871503245,
      "grad_norm": 0.6952013969421387,
      "learning_rate": 1.1547941170915686e-05,
      "loss": 0.5227,
      "step": 760
    },
    {
      "epoch": 0.7076799356654604,
      "grad_norm": 0.7390100359916687,
      "learning_rate": 1.0914010179577952e-05,
      "loss": 0.5638,
      "step": 770
    },
    {
      "epoch": 0.7168705841805962,
      "grad_norm": 0.8268740177154541,
      "learning_rate": 1.0293081262105448e-05,
      "loss": 0.5406,
      "step": 780
    },
    {
      "epoch": 0.7260612326957321,
      "grad_norm": 0.8330510854721069,
      "learning_rate": 9.685727566977837e-06,
      "loss": 0.5548,
      "step": 790
    },
    {
      "epoch": 0.7352518812108679,
      "grad_norm": 0.7260822057723999,
      "learning_rate": 9.092509712064875e-06,
      "loss": 0.6003,
      "step": 800
    },
    {
      "epoch": 0.7444425297260038,
      "grad_norm": 0.818632960319519,
      "learning_rate": 8.513975267148013e-06,
      "loss": 0.5384,
      "step": 810
    },
    {
      "epoch": 0.7536331782411396,
      "grad_norm": 0.7371208667755127,
      "learning_rate": 7.950658248486131e-06,
      "loss": 0.5694,
      "step": 820
    },
    {
      "epoch": 0.7628238267562755,
      "grad_norm": 0.7147781848907471,
      "learning_rate": 7.403078625891757e-06,
      "loss": 0.5314,
      "step": 830
    },
    {
      "epoch": 0.7720144752714113,
      "grad_norm": 0.7499004006385803,
      "learning_rate": 6.8717418427729054e-06,
      "loss": 0.5395,
      "step": 840
    },
    {
      "epoch": 0.7812051237865472,
      "grad_norm": 0.8400412797927856,
      "learning_rate": 6.357138349583497e-06,
      "loss": 0.5305,
      "step": 850
    },
    {
      "epoch": 0.790395772301683,
      "grad_norm": 0.783181369304657,
      "learning_rate": 5.859743151113059e-06,
      "loss": 0.5798,
      "step": 860
    },
    {
      "epoch": 0.7995864208168189,
      "grad_norm": 0.7410508394241333,
      "learning_rate": 5.380015368033476e-06,
      "loss": 0.5668,
      "step": 870
    },
    {
      "epoch": 0.8087770693319547,
      "grad_norm": 0.8269330263137817,
      "learning_rate": 4.918397813107678e-06,
      "loss": 0.5836,
      "step": 880
    },
    {
      "epoch": 0.8179677178470905,
      "grad_norm": 0.8294151425361633,
      "learning_rate": 4.4753165824512335e-06,
      "loss": 0.562,
      "step": 890
    },
    {
      "epoch": 0.8271583663622264,
      "grad_norm": 0.798109233379364,
      "learning_rate": 4.051180662224358e-06,
      "loss": 0.5628,
      "step": 900
    },
    {
      "epoch": 0.8363490148773622,
      "grad_norm": 0.7758229970932007,
      "learning_rate": 3.6463815511171646e-06,
      "loss": 0.5716,
      "step": 910
    },
    {
      "epoch": 0.8455396633924981,
      "grad_norm": 0.8326278328895569,
      "learning_rate": 3.2612928989767894e-06,
      "loss": 0.6003,
      "step": 920
    },
    {
      "epoch": 0.8547303119076339,
      "grad_norm": 0.8162908554077148,
      "learning_rate": 2.8962701619098754e-06,
      "loss": 0.5603,
      "step": 930
    },
    {
      "epoch": 0.8639209604227698,
      "grad_norm": 0.7787085771560669,
      "learning_rate": 2.5516502741787963e-06,
      "loss": 0.5119,
      "step": 940
    },
    {
      "epoch": 0.8731116089379056,
      "grad_norm": 0.7192712426185608,
      "learning_rate": 2.2277513371944365e-06,
      "loss": 0.5594,
      "step": 950
    },
    {
      "epoch": 0.8823022574530416,
      "grad_norm": 0.8983786702156067,
      "learning_rate": 1.9248723258926904e-06,
      "loss": 0.56,
      "step": 960
    },
    {
      "epoch": 0.8914929059681774,
      "grad_norm": 0.7046878933906555,
      "learning_rate": 1.6432928127656206e-06,
      "loss": 0.5398,
      "step": 970
    },
    {
      "epoch": 0.9006835544833133,
      "grad_norm": 0.8238187432289124,
      "learning_rate": 1.3832727098020332e-06,
      "loss": 0.5311,
      "step": 980
    },
    {
      "epoch": 0.9098742029984491,
      "grad_norm": 0.8765665888786316,
      "learning_rate": 1.1450520285757161e-06,
      "loss": 0.5756,
      "step": 990
    },
    {
      "epoch": 0.919064851513585,
      "grad_norm": 0.7299582362174988,
      "learning_rate": 9.288506587027429e-07,
      "loss": 0.5271,
      "step": 1000
    },
    {
      "epoch": 0.9282555000287208,
      "grad_norm": 0.9001564383506775,
      "learning_rate": 7.348681648723238e-07,
      "loss": 0.5796,
      "step": 1010
    },
    {
      "epoch": 0.9374461485438567,
      "grad_norm": 0.8588452935218811,
      "learning_rate": 5.632836026386395e-07,
      "loss": 0.5541,
      "step": 1020
    },
    {
      "epoch": 0.9466367970589925,
      "grad_norm": 0.8667751550674438,
      "learning_rate": 4.1425535314356667e-07,
      "loss": 0.5385,
      "step": 1030
    },
    {
      "epoch": 0.9558274455741284,
      "grad_norm": 0.7498716115951538,
      "learning_rate": 2.8792097692297294e-07,
      "loss": 0.5387,
      "step": 1040
    },
    {
      "epoch": 0.9650180940892642,
      "grad_norm": 0.7537438273429871,
      "learning_rate": 1.843970869314593e-07,
      "loss": 0.582,
      "step": 1050
    },
    {
      "epoch": 0.9742087426044,
      "grad_norm": 0.7668646574020386,
      "learning_rate": 1.0377924090273771e-07,
      "loss": 0.5493,
      "step": 1060
    },
    {
      "epoch": 0.9833993911195359,
      "grad_norm": 0.7778952121734619,
      "learning_rate": 4.614185314509856e-08,
      "loss": 0.5798,
      "step": 1070
    },
    {
      "epoch": 0.9925900396346717,
      "grad_norm": 0.8097951412200928,
      "learning_rate": 1.1538125853238768e-08,
      "loss": 0.5628,
      "step": 1080
    },
    {
      "epoch": 1.0,
      "step": 1089,
      "total_flos": 6818710278897664.0,
      "train_loss": 0.5775129214900694,
      "train_runtime": 138111.1092,
      "train_samples_per_second": 1.008,
      "train_steps_per_second": 0.008
    }
  ],
  "logging_steps": 10,
  "max_steps": 1089,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6818710278897664.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}