{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9995320542817033,
  "eval_steps": 500,
  "global_step": 1068,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004679457182966776,
      "grad_norm": 2.2908554548637006,
      "learning_rate": 9.345794392523365e-07,
      "loss": 0.4939,
      "step": 5
    },
    {
      "epoch": 0.009358914365933552,
      "grad_norm": 2.016859310665491,
      "learning_rate": 1.869158878504673e-06,
      "loss": 0.4723,
      "step": 10
    },
    {
      "epoch": 0.014038371548900327,
      "grad_norm": 1.5221155414770415,
      "learning_rate": 2.8037383177570094e-06,
      "loss": 0.4618,
      "step": 15
    },
    {
      "epoch": 0.018717828731867104,
      "grad_norm": 0.5002414337717975,
      "learning_rate": 3.738317757009346e-06,
      "loss": 0.4567,
      "step": 20
    },
    {
      "epoch": 0.02339728591483388,
      "grad_norm": 0.9841161532928453,
      "learning_rate": 4.6728971962616825e-06,
      "loss": 0.4435,
      "step": 25
    },
    {
      "epoch": 0.028076743097800654,
      "grad_norm": 0.6160542794694137,
      "learning_rate": 5.607476635514019e-06,
      "loss": 0.4308,
      "step": 30
    },
    {
      "epoch": 0.03275620028076743,
      "grad_norm": 0.4169222842040913,
      "learning_rate": 6.542056074766355e-06,
      "loss": 0.4254,
      "step": 35
    },
    {
      "epoch": 0.03743565746373421,
      "grad_norm": 0.43151336467869167,
      "learning_rate": 7.476635514018692e-06,
      "loss": 0.4131,
      "step": 40
    },
    {
      "epoch": 0.04211511464670098,
      "grad_norm": 0.27790836608550784,
      "learning_rate": 8.411214953271028e-06,
      "loss": 0.4151,
      "step": 45
    },
    {
      "epoch": 0.04679457182966776,
      "grad_norm": 0.3490921524994091,
      "learning_rate": 9.345794392523365e-06,
      "loss": 0.3969,
      "step": 50
    },
    {
      "epoch": 0.05147402901263454,
      "grad_norm": 0.2784386545031061,
      "learning_rate": 1.02803738317757e-05,
      "loss": 0.3985,
      "step": 55
    },
    {
      "epoch": 0.05615348619560131,
      "grad_norm": 0.2805705696670822,
      "learning_rate": 1.1214953271028037e-05,
      "loss": 0.4125,
      "step": 60
    },
    {
      "epoch": 0.06083294337856809,
      "grad_norm": 0.2525496578964846,
      "learning_rate": 1.2149532710280374e-05,
      "loss": 0.3947,
      "step": 65
    },
    {
      "epoch": 0.06551240056153486,
      "grad_norm": 0.23045102014975796,
      "learning_rate": 1.308411214953271e-05,
      "loss": 0.3955,
      "step": 70
    },
    {
      "epoch": 0.07019185774450164,
      "grad_norm": 0.22022938803676545,
      "learning_rate": 1.4018691588785047e-05,
      "loss": 0.3967,
      "step": 75
    },
    {
      "epoch": 0.07487131492746842,
      "grad_norm": 0.23347833396023018,
      "learning_rate": 1.4953271028037384e-05,
      "loss": 0.3909,
      "step": 80
    },
    {
      "epoch": 0.0795507721104352,
      "grad_norm": 0.228913019838675,
      "learning_rate": 1.588785046728972e-05,
      "loss": 0.3945,
      "step": 85
    },
    {
      "epoch": 0.08423022929340196,
      "grad_norm": 0.2633815411021567,
      "learning_rate": 1.6822429906542056e-05,
      "loss": 0.3824,
      "step": 90
    },
    {
      "epoch": 0.08890968647636874,
      "grad_norm": 0.23342700291557855,
      "learning_rate": 1.7757009345794395e-05,
      "loss": 0.3862,
      "step": 95
    },
    {
      "epoch": 0.09358914365933552,
      "grad_norm": 0.23300433901186635,
      "learning_rate": 1.869158878504673e-05,
      "loss": 0.3919,
      "step": 100
    },
    {
      "epoch": 0.0982686008423023,
      "grad_norm": 0.27373389148512267,
      "learning_rate": 1.9626168224299065e-05,
      "loss": 0.3779,
      "step": 105
    },
    {
      "epoch": 0.10294805802526907,
      "grad_norm": 0.228261846212288,
      "learning_rate": 1.9937565036420395e-05,
      "loss": 0.3936,
      "step": 110
    },
    {
      "epoch": 0.10762751520823584,
      "grad_norm": 0.23489411050580397,
      "learning_rate": 1.9833506763787724e-05,
      "loss": 0.3843,
      "step": 115
    },
    {
      "epoch": 0.11230697239120262,
      "grad_norm": 0.27702591802476917,
      "learning_rate": 1.972944849115505e-05,
      "loss": 0.382,
      "step": 120
    },
    {
      "epoch": 0.1169864295741694,
      "grad_norm": 0.225569280525892,
      "learning_rate": 1.9625390218522374e-05,
      "loss": 0.3819,
      "step": 125
    },
    {
      "epoch": 0.12166588675713617,
      "grad_norm": 0.24311963459537528,
      "learning_rate": 1.95213319458897e-05,
      "loss": 0.3893,
      "step": 130
    },
    {
      "epoch": 0.12634534394010294,
      "grad_norm": 0.260175889528399,
      "learning_rate": 1.9417273673257027e-05,
      "loss": 0.394,
      "step": 135
    },
    {
      "epoch": 0.13102480112306972,
      "grad_norm": 0.2556543935052257,
      "learning_rate": 1.9313215400624352e-05,
      "loss": 0.3872,
      "step": 140
    },
    {
      "epoch": 0.1357042583060365,
      "grad_norm": 0.25822395228887385,
      "learning_rate": 1.9209157127991677e-05,
      "loss": 0.3904,
      "step": 145
    },
    {
      "epoch": 0.14038371548900327,
      "grad_norm": 0.2289827168893927,
      "learning_rate": 1.9105098855359002e-05,
      "loss": 0.3881,
      "step": 150
    },
    {
      "epoch": 0.14506317267197005,
      "grad_norm": 0.2782914404861313,
      "learning_rate": 1.9001040582726327e-05,
      "loss": 0.389,
      "step": 155
    },
    {
      "epoch": 0.14974262985493683,
      "grad_norm": 0.2691987795720794,
      "learning_rate": 1.8896982310093656e-05,
      "loss": 0.3799,
      "step": 160
    },
    {
      "epoch": 0.1544220870379036,
      "grad_norm": 0.22215928015704764,
      "learning_rate": 1.879292403746098e-05,
      "loss": 0.3798,
      "step": 165
    },
    {
      "epoch": 0.1591015442208704,
      "grad_norm": 0.2503740618645921,
      "learning_rate": 1.8688865764828306e-05,
      "loss": 0.3977,
      "step": 170
    },
    {
      "epoch": 0.16378100140383717,
      "grad_norm": 0.22564216914588497,
      "learning_rate": 1.858480749219563e-05,
      "loss": 0.3744,
      "step": 175
    },
    {
      "epoch": 0.16846045858680392,
      "grad_norm": 0.25918009956565563,
      "learning_rate": 1.8480749219562956e-05,
      "loss": 0.3738,
      "step": 180
    },
    {
      "epoch": 0.1731399157697707,
      "grad_norm": 0.2393508145158046,
      "learning_rate": 1.837669094693028e-05,
      "loss": 0.3857,
      "step": 185
    },
    {
      "epoch": 0.17781937295273748,
      "grad_norm": 0.24654174660930164,
      "learning_rate": 1.827263267429761e-05,
      "loss": 0.3903,
      "step": 190
    },
    {
      "epoch": 0.18249883013570425,
      "grad_norm": 0.28662121700475657,
      "learning_rate": 1.8168574401664934e-05,
      "loss": 0.3898,
      "step": 195
    },
    {
      "epoch": 0.18717828731867103,
      "grad_norm": 0.23290473813531298,
      "learning_rate": 1.806451612903226e-05,
      "loss": 0.3768,
      "step": 200
    },
    {
      "epoch": 0.1918577445016378,
      "grad_norm": 0.28192870043577783,
      "learning_rate": 1.7960457856399584e-05,
      "loss": 0.3879,
      "step": 205
    },
    {
      "epoch": 0.1965372016846046,
      "grad_norm": 0.27564026147091214,
      "learning_rate": 1.785639958376691e-05,
      "loss": 0.3865,
      "step": 210
    },
    {
      "epoch": 0.20121665886757137,
      "grad_norm": 0.23229839176465608,
      "learning_rate": 1.7752341311134234e-05,
      "loss": 0.3841,
      "step": 215
    },
    {
      "epoch": 0.20589611605053815,
      "grad_norm": 0.25015151051907036,
      "learning_rate": 1.7648283038501563e-05,
      "loss": 0.3915,
      "step": 220
    },
    {
      "epoch": 0.21057557323350493,
      "grad_norm": 0.24218523885962545,
      "learning_rate": 1.7544224765868888e-05,
      "loss": 0.3845,
      "step": 225
    },
    {
      "epoch": 0.21525503041647168,
      "grad_norm": 0.21893648736945756,
      "learning_rate": 1.7440166493236216e-05,
      "loss": 0.3796,
      "step": 230
    },
    {
      "epoch": 0.21993448759943846,
      "grad_norm": 0.24100134035505158,
      "learning_rate": 1.733610822060354e-05,
      "loss": 0.3809,
      "step": 235
    },
    {
      "epoch": 0.22461394478240523,
      "grad_norm": 0.24831115571580942,
      "learning_rate": 1.7232049947970866e-05,
      "loss": 0.3942,
      "step": 240
    },
    {
      "epoch": 0.229293401965372,
      "grad_norm": 0.2270469313952638,
      "learning_rate": 1.712799167533819e-05,
      "loss": 0.3822,
      "step": 245
    },
    {
      "epoch": 0.2339728591483388,
      "grad_norm": 0.21454127210032897,
      "learning_rate": 1.7023933402705516e-05,
      "loss": 0.3804,
      "step": 250
    },
    {
      "epoch": 0.23865231633130557,
      "grad_norm": 0.2784358974791008,
      "learning_rate": 1.691987513007284e-05,
      "loss": 0.3891,
      "step": 255
    },
    {
      "epoch": 0.24333177351427235,
      "grad_norm": 0.25560963065745107,
      "learning_rate": 1.681581685744017e-05,
      "loss": 0.3785,
      "step": 260
    },
    {
      "epoch": 0.24801123069723913,
      "grad_norm": 0.29030559419788876,
      "learning_rate": 1.6711758584807495e-05,
      "loss": 0.3859,
      "step": 265
    },
    {
      "epoch": 0.2526906878802059,
      "grad_norm": 0.23201558406292724,
      "learning_rate": 1.660770031217482e-05,
      "loss": 0.3905,
      "step": 270
    },
    {
      "epoch": 0.2573701450631727,
      "grad_norm": 0.2230334421902013,
      "learning_rate": 1.6503642039542145e-05,
      "loss": 0.3826,
      "step": 275
    },
    {
      "epoch": 0.26204960224613943,
      "grad_norm": 0.2888881421186973,
      "learning_rate": 1.639958376690947e-05,
      "loss": 0.3854,
      "step": 280
    },
    {
      "epoch": 0.26672905942910624,
      "grad_norm": 0.23960115819463665,
      "learning_rate": 1.6295525494276795e-05,
      "loss": 0.3812,
      "step": 285
    },
    {
      "epoch": 0.271408516612073,
      "grad_norm": 0.219894569143036,
      "learning_rate": 1.6191467221644123e-05,
      "loss": 0.3772,
      "step": 290
    },
    {
      "epoch": 0.2760879737950398,
      "grad_norm": 0.24801341276626648,
      "learning_rate": 1.6087408949011448e-05,
      "loss": 0.3809,
      "step": 295
    },
    {
      "epoch": 0.28076743097800655,
      "grad_norm": 0.2654106880151869,
      "learning_rate": 1.5983350676378773e-05,
      "loss": 0.3805,
      "step": 300
    },
    {
      "epoch": 0.2854468881609733,
      "grad_norm": 0.22635581084433476,
      "learning_rate": 1.5879292403746098e-05,
      "loss": 0.3768,
      "step": 305
    },
    {
      "epoch": 0.2901263453439401,
      "grad_norm": 0.21488693776906298,
      "learning_rate": 1.5775234131113423e-05,
      "loss": 0.3689,
      "step": 310
    },
    {
      "epoch": 0.29480580252690686,
      "grad_norm": 0.26183689025810514,
      "learning_rate": 1.5671175858480748e-05,
      "loss": 0.3735,
      "step": 315
    },
    {
      "epoch": 0.29948525970987366,
      "grad_norm": 0.2694040246267707,
      "learning_rate": 1.5567117585848077e-05,
      "loss": 0.3808,
      "step": 320
    },
    {
      "epoch": 0.3041647168928404,
      "grad_norm": 0.24520431614646418,
      "learning_rate": 1.54630593132154e-05,
      "loss": 0.3854,
      "step": 325
    },
    {
      "epoch": 0.3088441740758072,
      "grad_norm": 0.2564447473549132,
      "learning_rate": 1.5359001040582727e-05,
      "loss": 0.3862,
      "step": 330
    },
    {
      "epoch": 0.31352363125877397,
      "grad_norm": 0.23238465003590167,
      "learning_rate": 1.5254942767950053e-05,
      "loss": 0.3815,
      "step": 335
    },
    {
      "epoch": 0.3182030884417408,
      "grad_norm": 0.2800169601088245,
      "learning_rate": 1.515088449531738e-05,
      "loss": 0.3814,
      "step": 340
    },
    {
      "epoch": 0.32288254562470753,
      "grad_norm": 0.22176804004915363,
      "learning_rate": 1.5046826222684705e-05,
      "loss": 0.3801,
      "step": 345
    },
    {
      "epoch": 0.32756200280767434,
      "grad_norm": 0.23327501996058547,
      "learning_rate": 1.494276795005203e-05,
      "loss": 0.3712,
      "step": 350
    },
    {
      "epoch": 0.3322414599906411,
      "grad_norm": 0.23797943468099234,
      "learning_rate": 1.4838709677419357e-05,
      "loss": 0.3736,
      "step": 355
    },
    {
      "epoch": 0.33692091717360784,
      "grad_norm": 0.21540929537029724,
      "learning_rate": 1.4734651404786682e-05,
      "loss": 0.3785,
      "step": 360
    },
    {
      "epoch": 0.34160037435657464,
      "grad_norm": 0.2820377200347258,
      "learning_rate": 1.4630593132154007e-05,
      "loss": 0.3754,
      "step": 365
    },
    {
      "epoch": 0.3462798315395414,
      "grad_norm": 0.2364483513673407,
      "learning_rate": 1.4526534859521334e-05,
      "loss": 0.3768,
      "step": 370
    },
    {
      "epoch": 0.3509592887225082,
      "grad_norm": 0.23107190478630543,
      "learning_rate": 1.4422476586888659e-05,
      "loss": 0.384,
      "step": 375
    },
    {
      "epoch": 0.35563874590547495,
      "grad_norm": 0.2544369764708807,
      "learning_rate": 1.4318418314255984e-05,
      "loss": 0.3821,
      "step": 380
    },
    {
      "epoch": 0.36031820308844176,
      "grad_norm": 0.2733772251893476,
      "learning_rate": 1.421436004162331e-05,
      "loss": 0.3863,
      "step": 385
    },
    {
      "epoch": 0.3649976602714085,
      "grad_norm": 0.2328825486788227,
      "learning_rate": 1.4110301768990635e-05,
      "loss": 0.3794,
      "step": 390
    },
    {
      "epoch": 0.3696771174543753,
      "grad_norm": 0.22430844792190455,
      "learning_rate": 1.400624349635796e-05,
      "loss": 0.3801,
      "step": 395
    },
    {
      "epoch": 0.37435657463734207,
      "grad_norm": 0.24928578537816098,
      "learning_rate": 1.3902185223725287e-05,
      "loss": 0.391,
      "step": 400
    },
    {
      "epoch": 0.37903603182030887,
      "grad_norm": 0.2314083212055415,
      "learning_rate": 1.3798126951092612e-05,
      "loss": 0.3767,
      "step": 405
    },
    {
      "epoch": 0.3837154890032756,
      "grad_norm": 0.25193607656373374,
      "learning_rate": 1.3694068678459939e-05,
      "loss": 0.3742,
      "step": 410
    },
    {
      "epoch": 0.3883949461862424,
      "grad_norm": 0.26856384975738834,
      "learning_rate": 1.3590010405827266e-05,
      "loss": 0.3893,
      "step": 415
    },
    {
      "epoch": 0.3930744033692092,
      "grad_norm": 0.2410724044047713,
      "learning_rate": 1.348595213319459e-05,
      "loss": 0.3881,
      "step": 420
    },
    {
      "epoch": 0.39775386055217593,
      "grad_norm": 0.22933262352367176,
      "learning_rate": 1.3381893860561916e-05,
      "loss": 0.389,
      "step": 425
    },
    {
      "epoch": 0.40243331773514274,
      "grad_norm": 0.2198762674864796,
      "learning_rate": 1.3277835587929242e-05,
      "loss": 0.3777,
      "step": 430
    },
    {
      "epoch": 0.4071127749181095,
      "grad_norm": 0.2419080712284111,
      "learning_rate": 1.3173777315296567e-05,
      "loss": 0.3675,
      "step": 435
    },
    {
      "epoch": 0.4117922321010763,
      "grad_norm": 0.22323453635381435,
      "learning_rate": 1.3069719042663892e-05,
      "loss": 0.3766,
      "step": 440
    },
    {
      "epoch": 0.41647168928404305,
      "grad_norm": 0.2439496427012552,
      "learning_rate": 1.2965660770031219e-05,
      "loss": 0.3737,
      "step": 445
    },
    {
      "epoch": 0.42115114646700985,
      "grad_norm": 0.21608448050711354,
      "learning_rate": 1.2861602497398544e-05,
      "loss": 0.3762,
      "step": 450
    },
    {
      "epoch": 0.4258306036499766,
      "grad_norm": 0.23222753887880532,
      "learning_rate": 1.2757544224765869e-05,
      "loss": 0.3806,
      "step": 455
    },
    {
      "epoch": 0.43051006083294335,
      "grad_norm": 0.23089602293769354,
      "learning_rate": 1.2653485952133196e-05,
      "loss": 0.3866,
      "step": 460
    },
    {
      "epoch": 0.43518951801591016,
      "grad_norm": 0.22647211350083776,
      "learning_rate": 1.2549427679500521e-05,
      "loss": 0.376,
      "step": 465
    },
    {
      "epoch": 0.4398689751988769,
      "grad_norm": 0.22578583335582927,
      "learning_rate": 1.2445369406867846e-05,
      "loss": 0.3755,
      "step": 470
    },
    {
      "epoch": 0.4445484323818437,
      "grad_norm": 0.25240119086847307,
      "learning_rate": 1.2341311134235173e-05,
      "loss": 0.3705,
      "step": 475
    },
    {
      "epoch": 0.44922788956481047,
      "grad_norm": 0.21696390759958575,
      "learning_rate": 1.2237252861602498e-05,
      "loss": 0.3791,
      "step": 480
    },
    {
      "epoch": 0.4539073467477773,
      "grad_norm": 0.2259330179979768,
      "learning_rate": 1.2133194588969823e-05,
      "loss": 0.3647,
      "step": 485
    },
    {
      "epoch": 0.458586803930744,
      "grad_norm": 0.23734350889969866,
      "learning_rate": 1.202913631633715e-05,
      "loss": 0.3867,
      "step": 490
    },
    {
      "epoch": 0.46326626111371083,
      "grad_norm": 0.29490446479765287,
      "learning_rate": 1.1925078043704474e-05,
      "loss": 0.3802,
      "step": 495
    },
    {
      "epoch": 0.4679457182966776,
      "grad_norm": 0.2539714715149494,
      "learning_rate": 1.1821019771071803e-05,
      "loss": 0.3769,
      "step": 500
    },
    {
      "epoch": 0.4726251754796444,
      "grad_norm": 0.2653171806988273,
      "learning_rate": 1.1716961498439128e-05,
      "loss": 0.3774,
      "step": 505
    },
    {
      "epoch": 0.47730463266261114,
      "grad_norm": 0.22986118422933471,
      "learning_rate": 1.1612903225806453e-05,
      "loss": 0.3761,
      "step": 510
    },
    {
      "epoch": 0.4819840898455779,
      "grad_norm": 0.264468288478972,
      "learning_rate": 1.150884495317378e-05,
      "loss": 0.3768,
      "step": 515
    },
    {
      "epoch": 0.4866635470285447,
      "grad_norm": 0.2275931074116885,
      "learning_rate": 1.1404786680541105e-05,
      "loss": 0.3793,
      "step": 520
    },
    {
      "epoch": 0.49134300421151145,
      "grad_norm": 0.3117840889438511,
      "learning_rate": 1.130072840790843e-05,
      "loss": 0.3767,
      "step": 525
    },
    {
      "epoch": 0.49602246139447825,
      "grad_norm": 0.24416383968722374,
      "learning_rate": 1.1196670135275756e-05,
      "loss": 0.3767,
      "step": 530
    },
    {
      "epoch": 0.5007019185774451,
      "grad_norm": 0.2500367823625742,
      "learning_rate": 1.1092611862643081e-05,
      "loss": 0.3773,
      "step": 535
    },
    {
      "epoch": 0.5053813757604118,
      "grad_norm": 0.23404067505268092,
      "learning_rate": 1.0988553590010406e-05,
      "loss": 0.3789,
      "step": 540
    },
    {
      "epoch": 0.5100608329433786,
      "grad_norm": 0.23836407315387556,
      "learning_rate": 1.0884495317377733e-05,
      "loss": 0.3807,
      "step": 545
    },
    {
      "epoch": 0.5147402901263454,
      "grad_norm": 0.2272928154140146,
      "learning_rate": 1.0780437044745058e-05,
      "loss": 0.378,
      "step": 550
    },
    {
      "epoch": 0.5194197473093122,
      "grad_norm": 0.21603973655035985,
      "learning_rate": 1.0676378772112383e-05,
      "loss": 0.3849,
      "step": 555
    },
    {
      "epoch": 0.5240992044922789,
      "grad_norm": 0.26461820434483596,
      "learning_rate": 1.057232049947971e-05,
      "loss": 0.3807,
      "step": 560
    },
    {
      "epoch": 0.5287786616752457,
      "grad_norm": 0.22573025113669873,
      "learning_rate": 1.0468262226847035e-05,
      "loss": 0.3713,
      "step": 565
    },
    {
      "epoch": 0.5334581188582125,
      "grad_norm": 0.2398814988800669,
      "learning_rate": 1.036420395421436e-05,
      "loss": 0.3741,
      "step": 570
    },
    {
      "epoch": 0.5381375760411792,
      "grad_norm": 0.33008700162835736,
      "learning_rate": 1.0260145681581687e-05,
      "loss": 0.3784,
      "step": 575
    },
    {
      "epoch": 0.542817033224146,
      "grad_norm": 0.2478593026168216,
      "learning_rate": 1.0156087408949012e-05,
      "loss": 0.3797,
      "step": 580
    },
    {
      "epoch": 0.5474964904071128,
      "grad_norm": 0.22416882300252308,
      "learning_rate": 1.0052029136316337e-05,
      "loss": 0.3717,
      "step": 585
    },
    {
      "epoch": 0.5521759475900796,
      "grad_norm": 0.24550032569746283,
      "learning_rate": 9.947970863683663e-06,
      "loss": 0.3716,
      "step": 590
    },
    {
      "epoch": 0.5568554047730463,
      "grad_norm": 0.21152157772573982,
      "learning_rate": 9.843912591050988e-06,
      "loss": 0.3762,
      "step": 595
    },
    {
      "epoch": 0.5615348619560131,
      "grad_norm": 0.26437115475677,
      "learning_rate": 9.739854318418315e-06,
      "loss": 0.3649,
      "step": 600
    },
    {
      "epoch": 0.5662143191389799,
      "grad_norm": 0.24141737859739215,
      "learning_rate": 9.63579604578564e-06,
      "loss": 0.3721,
      "step": 605
    },
    {
      "epoch": 0.5708937763219466,
      "grad_norm": 0.2158535958967608,
      "learning_rate": 9.531737773152965e-06,
      "loss": 0.3736,
      "step": 610
    },
    {
      "epoch": 0.5755732335049134,
      "grad_norm": 0.25210607106723404,
      "learning_rate": 9.427679500520292e-06,
      "loss": 0.3779,
      "step": 615
    },
    {
      "epoch": 0.5802526906878802,
      "grad_norm": 0.24941061104468912,
      "learning_rate": 9.323621227887619e-06,
      "loss": 0.3724,
      "step": 620
    },
    {
      "epoch": 0.584932147870847,
      "grad_norm": 0.23950130390892227,
      "learning_rate": 9.219562955254944e-06,
      "loss": 0.3718,
      "step": 625
    },
    {
      "epoch": 0.5896116050538137,
      "grad_norm": 0.2413173383260915,
      "learning_rate": 9.115504682622269e-06,
      "loss": 0.3736,
      "step": 630
    },
    {
      "epoch": 0.5942910622367805,
      "grad_norm": 0.23891221147926558,
      "learning_rate": 9.011446409989595e-06,
      "loss": 0.3722,
      "step": 635
    },
    {
      "epoch": 0.5989705194197473,
      "grad_norm": 0.21323637844758167,
      "learning_rate": 8.90738813735692e-06,
      "loss": 0.3666,
      "step": 640
    },
    {
      "epoch": 0.6036499766027141,
      "grad_norm": 0.23920099409630857,
      "learning_rate": 8.803329864724245e-06,
      "loss": 0.3833,
      "step": 645
    },
    {
      "epoch": 0.6083294337856808,
      "grad_norm": 0.24543976835408943,
      "learning_rate": 8.699271592091572e-06,
      "loss": 0.3822,
      "step": 650
    },
    {
      "epoch": 0.6130088909686476,
      "grad_norm": 0.23517185442217967,
      "learning_rate": 8.595213319458897e-06,
      "loss": 0.3825,
      "step": 655
    },
    {
      "epoch": 0.6176883481516144,
      "grad_norm": 0.22209203218170853,
      "learning_rate": 8.491155046826224e-06,
      "loss": 0.3742,
      "step": 660
    },
    {
      "epoch": 0.6223678053345811,
      "grad_norm": 0.2118379611031678,
      "learning_rate": 8.387096774193549e-06,
      "loss": 0.3705,
      "step": 665
    },
    {
      "epoch": 0.6270472625175479,
      "grad_norm": 0.24116107131805098,
      "learning_rate": 8.283038501560876e-06,
      "loss": 0.3648,
      "step": 670
    },
    {
      "epoch": 0.6317267197005147,
      "grad_norm": 0.24385133695583516,
      "learning_rate": 8.1789802289282e-06,
      "loss": 0.3846,
      "step": 675
    },
    {
      "epoch": 0.6364061768834816,
      "grad_norm": 0.24780606354176135,
      "learning_rate": 8.074921956295526e-06,
      "loss": 0.3843,
      "step": 680
    },
    {
      "epoch": 0.6410856340664483,
      "grad_norm": 0.267287826257429,
      "learning_rate": 7.970863683662852e-06,
      "loss": 0.3697,
      "step": 685
    },
    {
      "epoch": 0.6457650912494151,
      "grad_norm": 0.2544017525039406,
      "learning_rate": 7.866805411030177e-06,
      "loss": 0.3709,
      "step": 690
    },
    {
      "epoch": 0.6504445484323819,
      "grad_norm": 0.2455819067026457,
      "learning_rate": 7.762747138397502e-06,
      "loss": 0.3701,
      "step": 695
    },
    {
      "epoch": 0.6551240056153487,
      "grad_norm": 0.259501849908747,
      "learning_rate": 7.658688865764829e-06,
      "loss": 0.3772,
      "step": 700
    },
    {
      "epoch": 0.6598034627983154,
      "grad_norm": 0.20856381315062708,
      "learning_rate": 7.554630593132155e-06,
      "loss": 0.3707,
      "step": 705
    },
    {
      "epoch": 0.6644829199812822,
      "grad_norm": 0.2332621123767882,
      "learning_rate": 7.450572320499481e-06,
      "loss": 0.3894,
      "step": 710
    },
    {
      "epoch": 0.669162377164249,
      "grad_norm": 0.20954093237017377,
      "learning_rate": 7.346514047866807e-06,
      "loss": 0.374,
      "step": 715
    },
    {
      "epoch": 0.6738418343472157,
      "grad_norm": 0.2010019285730511,
      "learning_rate": 7.242455775234132e-06,
      "loss": 0.3838,
      "step": 720
    },
    {
      "epoch": 0.6785212915301825,
      "grad_norm": 0.21248927092428815,
      "learning_rate": 7.1383975026014575e-06,
      "loss": 0.3612,
      "step": 725
    },
    {
      "epoch": 0.6832007487131493,
      "grad_norm": 0.1994556422452643,
      "learning_rate": 7.0343392299687825e-06,
      "loss": 0.3724,
      "step": 730
    },
    {
      "epoch": 0.6878802058961161,
      "grad_norm": 0.22422751080621398,
      "learning_rate": 6.930280957336108e-06,
      "loss": 0.3762,
      "step": 735
    },
    {
      "epoch": 0.6925596630790828,
      "grad_norm": 0.21331923051138799,
      "learning_rate": 6.826222684703434e-06,
      "loss": 0.368,
      "step": 740
    },
    {
      "epoch": 0.6972391202620496,
      "grad_norm": 0.2103235038933413,
      "learning_rate": 6.722164412070759e-06,
      "loss": 0.3703,
      "step": 745
    },
    {
      "epoch": 0.7019185774450164,
      "grad_norm": 0.2156780953428616,
      "learning_rate": 6.618106139438086e-06,
      "loss": 0.3786,
      "step": 750
    },
    {
      "epoch": 0.7065980346279832,
      "grad_norm": 0.21490072085384168,
      "learning_rate": 6.514047866805412e-06,
      "loss": 0.3758,
      "step": 755
    },
    {
      "epoch": 0.7112774918109499,
      "grad_norm": 0.2247782023019425,
      "learning_rate": 6.409989594172738e-06,
      "loss": 0.3679,
      "step": 760
    },
    {
      "epoch": 0.7159569489939167,
      "grad_norm": 0.20942665269809468,
      "learning_rate": 6.305931321540063e-06,
      "loss": 0.3652,
      "step": 765
    },
    {
      "epoch": 0.7206364061768835,
      "grad_norm": 0.22300307299120317,
      "learning_rate": 6.201873048907389e-06,
      "loss": 0.3867,
      "step": 770
    },
    {
      "epoch": 0.7253158633598502,
      "grad_norm": 0.21731092424159765,
      "learning_rate": 6.0978147762747145e-06,
      "loss": 0.3699,
      "step": 775
    },
    {
      "epoch": 0.729995320542817,
      "grad_norm": 0.2373583086701352,
      "learning_rate": 5.9937565036420395e-06,
      "loss": 0.3653,
      "step": 780
    },
    {
      "epoch": 0.7346747777257838,
      "grad_norm": 0.2066335501990478,
      "learning_rate": 5.889698231009365e-06,
      "loss": 0.3741,
      "step": 785
    },
    {
      "epoch": 0.7393542349087506,
      "grad_norm": 0.20910016326262895,
      "learning_rate": 5.785639958376691e-06,
      "loss": 0.3724,
      "step": 790
    },
    {
      "epoch": 0.7440336920917173,
      "grad_norm": 0.2235231360878696,
      "learning_rate": 5.681581685744018e-06,
      "loss": 0.3727,
      "step": 795
    },
    {
      "epoch": 0.7487131492746841,
      "grad_norm": 0.2046167285793584,
      "learning_rate": 5.577523413111343e-06,
      "loss": 0.3684,
      "step": 800
    },
    {
      "epoch": 0.7533926064576509,
      "grad_norm": 0.19515222922990866,
      "learning_rate": 5.473465140478669e-06,
      "loss": 0.3787,
      "step": 805
    },
    {
      "epoch": 0.7580720636406177,
      "grad_norm": 0.22525978812547012,
      "learning_rate": 5.369406867845994e-06,
      "loss": 0.3764,
      "step": 810
    },
    {
      "epoch": 0.7627515208235844,
      "grad_norm": 0.22394936960559922,
      "learning_rate": 5.26534859521332e-06,
      "loss": 0.3672,
      "step": 815
    },
    {
      "epoch": 0.7674309780065512,
      "grad_norm": 0.23343895861163633,
      "learning_rate": 5.161290322580646e-06,
      "loss": 0.3676,
      "step": 820
    },
    {
      "epoch": 0.772110435189518,
      "grad_norm": 0.24463148457843642,
      "learning_rate": 5.057232049947971e-06,
      "loss": 0.3728,
      "step": 825
    },
    {
      "epoch": 0.7767898923724847,
      "grad_norm": 0.2187630043298853,
      "learning_rate": 4.953173777315297e-06,
      "loss": 0.3712,
      "step": 830
    },
    {
      "epoch": 0.7814693495554516,
      "grad_norm": 0.21300481471513596,
      "learning_rate": 4.849115504682622e-06,
      "loss": 0.381,
      "step": 835
    },
    {
      "epoch": 0.7861488067384184,
      "grad_norm": 0.20931843918006063,
      "learning_rate": 4.745057232049948e-06,
      "loss": 0.3747,
      "step": 840
    },
    {
      "epoch": 0.7908282639213852,
      "grad_norm": 0.21125591014093664,
      "learning_rate": 4.640998959417274e-06,
      "loss": 0.3735,
      "step": 845
    },
    {
      "epoch": 0.7955077211043519,
      "grad_norm": 0.2110487123545379,
      "learning_rate": 4.5369406867846e-06,
      "loss": 0.3735,
      "step": 850
    },
    {
      "epoch": 0.8001871782873187,
      "grad_norm": 0.22392695446168392,
      "learning_rate": 4.432882414151926e-06,
      "loss": 0.3777,
      "step": 855
    },
    {
      "epoch": 0.8048666354702855,
      "grad_norm": 0.2117452208603545,
      "learning_rate": 4.328824141519251e-06,
      "loss": 0.3761,
      "step": 860
    },
    {
      "epoch": 0.8095460926532522,
      "grad_norm": 0.24234752709613971,
      "learning_rate": 4.224765868886577e-06,
      "loss": 0.3796,
      "step": 865
    },
    {
      "epoch": 0.814225549836219,
      "grad_norm": 0.2025209483621438,
      "learning_rate": 4.120707596253903e-06,
      "loss": 0.3808,
      "step": 870
    },
    {
      "epoch": 0.8189050070191858,
      "grad_norm": 0.2226709019764019,
      "learning_rate": 4.0166493236212285e-06,
      "loss": 0.3752,
      "step": 875
    },
    {
      "epoch": 0.8235844642021526,
      "grad_norm": 0.2210571915933747,
      "learning_rate": 3.912591050988554e-06,
      "loss": 0.3704,
      "step": 880
    },
    {
      "epoch": 0.8282639213851193,
      "grad_norm": 0.23618799628521495,
      "learning_rate": 3.8085327783558793e-06,
      "loss": 0.3737,
      "step": 885
    },
    {
      "epoch": 0.8329433785680861,
      "grad_norm": 0.20617597677407185,
      "learning_rate": 3.7044745057232052e-06,
      "loss": 0.3749,
      "step": 890
    },
    {
      "epoch": 0.8376228357510529,
      "grad_norm": 0.20798543863033134,
      "learning_rate": 3.600416233090531e-06,
      "loss": 0.3687,
      "step": 895
    },
    {
      "epoch": 0.8423022929340197,
      "grad_norm": 0.2089006102637797,
      "learning_rate": 3.496357960457857e-06,
      "loss": 0.3706,
      "step": 900
    },
    {
      "epoch": 0.8469817501169864,
      "grad_norm": 0.20477692589921484,
      "learning_rate": 3.3922996878251824e-06,
      "loss": 0.3702,
      "step": 905
    },
    {
      "epoch": 0.8516612072999532,
      "grad_norm": 0.21774749397741822,
      "learning_rate": 3.288241415192508e-06,
      "loss": 0.3657,
      "step": 910
    },
    {
      "epoch": 0.85634066448292,
      "grad_norm": 0.24858244914099062,
      "learning_rate": 3.1841831425598337e-06,
      "loss": 0.3681,
      "step": 915
    },
    {
      "epoch": 0.8610201216658867,
      "grad_norm": 0.22753648396835552,
      "learning_rate": 3.0801248699271596e-06,
      "loss": 0.3703,
      "step": 920
    },
    {
      "epoch": 0.8656995788488535,
      "grad_norm": 0.2585954998067118,
      "learning_rate": 2.976066597294485e-06,
      "loss": 0.3748,
      "step": 925
    },
    {
      "epoch": 0.8703790360318203,
      "grad_norm": 0.23712893924962872,
      "learning_rate": 2.872008324661811e-06,
      "loss": 0.3724,
      "step": 930
    },
    {
      "epoch": 0.8750584932147871,
      "grad_norm": 0.21078832525011484,
      "learning_rate": 2.7679500520291363e-06,
      "loss": 0.3722,
      "step": 935
    },
    {
      "epoch": 0.8797379503977538,
      "grad_norm": 0.2005875423082211,
      "learning_rate": 2.6638917793964626e-06,
      "loss": 0.364,
      "step": 940
    },
    {
      "epoch": 0.8844174075807206,
      "grad_norm": 0.21183567448111165,
      "learning_rate": 2.559833506763788e-06,
      "loss": 0.3646,
      "step": 945
    },
    {
      "epoch": 0.8890968647636874,
      "grad_norm": 0.23633491944791057,
      "learning_rate": 2.4557752341311135e-06,
      "loss": 0.369,
      "step": 950
    },
    {
      "epoch": 0.8937763219466542,
      "grad_norm": 0.20446185463761749,
      "learning_rate": 2.3517169614984394e-06,
      "loss": 0.3717,
      "step": 955
    },
    {
      "epoch": 0.8984557791296209,
      "grad_norm": 0.21120878547944943,
      "learning_rate": 2.2476586888657652e-06,
      "loss": 0.3691,
      "step": 960
    },
    {
      "epoch": 0.9031352363125877,
      "grad_norm": 0.21926451373870526,
      "learning_rate": 2.1436004162330907e-06,
      "loss": 0.376,
      "step": 965
    },
    {
      "epoch": 0.9078146934955545,
      "grad_norm": 0.21234936601889798,
      "learning_rate": 2.0395421436004166e-06,
      "loss": 0.3721,
      "step": 970
    },
    {
      "epoch": 0.9124941506785212,
      "grad_norm": 0.22032807507001712,
      "learning_rate": 1.935483870967742e-06,
      "loss": 0.3796,
      "step": 975
    },
    {
      "epoch": 0.917173607861488,
      "grad_norm": 0.20048012242437066,
      "learning_rate": 1.8314255983350679e-06,
      "loss": 0.3692,
      "step": 980
    },
    {
      "epoch": 0.9218530650444549,
      "grad_norm": 0.19197425552585032,
      "learning_rate": 1.7273673257023935e-06,
      "loss": 0.3602,
      "step": 985
    },
    {
      "epoch": 0.9265325222274217,
      "grad_norm": 0.19963248316970567,
      "learning_rate": 1.6233090530697194e-06,
      "loss": 0.3687,
      "step": 990
    },
    {
      "epoch": 0.9312119794103884,
      "grad_norm": 0.20252903061869637,
      "learning_rate": 1.5192507804370448e-06,
      "loss": 0.3693,
      "step": 995
    },
    {
      "epoch": 0.9358914365933552,
      "grad_norm": 0.22168638474832342,
      "learning_rate": 1.4151925078043705e-06,
      "loss": 0.3775,
      "step": 1000
    },
    {
      "epoch": 0.940570893776322,
      "grad_norm": 0.21135045250093049,
      "learning_rate": 1.3111342351716964e-06,
      "loss": 0.3707,
      "step": 1005
    },
    {
      "epoch": 0.9452503509592888,
      "grad_norm": 0.20991603381075577,
      "learning_rate": 1.207075962539022e-06,
      "loss": 0.3801,
      "step": 1010
    },
    {
      "epoch": 0.9499298081422555,
      "grad_norm": 0.19588787717171513,
      "learning_rate": 1.1030176899063477e-06,
      "loss": 0.3707,
      "step": 1015
    },
    {
      "epoch": 0.9546092653252223,
      "grad_norm": 0.18850895260293038,
      "learning_rate": 9.989594172736733e-07,
      "loss": 0.3603,
      "step": 1020
    },
    {
      "epoch": 0.9592887225081891,
      "grad_norm": 0.1924405623942021,
      "learning_rate": 8.94901144640999e-07,
      "loss": 0.3663,
      "step": 1025
    },
    {
      "epoch": 0.9639681796911558,
      "grad_norm": 0.1934047488919004,
      "learning_rate": 7.908428720083247e-07,
      "loss": 0.3731,
      "step": 1030
    },
    {
      "epoch": 0.9686476368741226,
      "grad_norm": 0.2027478493287533,
      "learning_rate": 6.867845993756504e-07,
      "loss": 0.3682,
      "step": 1035
    },
    {
      "epoch": 0.9733270940570894,
      "grad_norm": 0.19171827074306003,
      "learning_rate": 5.827263267429762e-07,
      "loss": 0.3643,
      "step": 1040
    },
    {
      "epoch": 0.9780065512400562,
      "grad_norm": 0.20102243551100357,
      "learning_rate": 4.786680541103018e-07,
      "loss": 0.3761,
      "step": 1045
    },
    {
      "epoch": 0.9826860084230229,
      "grad_norm": 0.19333944542660678,
      "learning_rate": 3.7460978147762747e-07,
      "loss": 0.3782,
      "step": 1050
    },
    {
      "epoch": 0.9873654656059897,
      "grad_norm": 0.2031908820930634,
      "learning_rate": 2.705515088449532e-07,
      "loss": 0.3766,
      "step": 1055
    },
    {
      "epoch": 0.9920449227889565,
      "grad_norm": 0.1887326527462366,
      "learning_rate": 1.6649323621227891e-07,
      "loss": 0.376,
      "step": 1060
    },
    {
      "epoch": 0.9967243799719232,
      "grad_norm": 0.195152345665213,
      "learning_rate": 6.243496357960458e-08,
      "loss": 0.3606,
      "step": 1065
    },
    {
      "epoch": 0.9995320542817033,
      "step": 1068,
      "total_flos": 9.145616827248804e+17,
      "train_loss": 0.3807801652267185,
      "train_runtime": 28315.9454,
      "train_samples_per_second": 0.604,
      "train_steps_per_second": 0.038
    }
  ],
  "logging_steps": 5,
  "max_steps": 1068,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.145616827248804e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}