{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.936619718309859,
  "eval_steps": 500,
  "global_step": 1251,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.011737089201877934,
      "grad_norm": 1.6926082666941107,
      "learning_rate": 9.237540571428572e-06,
      "loss": 4.1191,
      "step": 5
    },
    {
      "epoch": 0.023474178403755867,
      "grad_norm": 1.0523580952405567,
      "learning_rate": 2.0784466285714287e-05,
      "loss": 3.9165,
      "step": 10
    },
    {
      "epoch": 0.035211267605633804,
      "grad_norm": 0.9348982322725803,
      "learning_rate": 3.2331392000000005e-05,
      "loss": 3.0751,
      "step": 15
    },
    {
      "epoch": 0.046948356807511735,
      "grad_norm": 0.963142977085111,
      "learning_rate": 4.3878317714285716e-05,
      "loss": 2.3471,
      "step": 20
    },
    {
      "epoch": 0.05868544600938967,
      "grad_norm": 0.41999800522000985,
      "learning_rate": 5.542524342857144e-05,
      "loss": 1.9358,
      "step": 25
    },
    {
      "epoch": 0.07042253521126761,
      "grad_norm": 0.44353131458171546,
      "learning_rate": 6.697216914285716e-05,
      "loss": 1.8648,
      "step": 30
    },
    {
      "epoch": 0.08215962441314555,
      "grad_norm": 1.3157403937206136,
      "learning_rate": 7.851909485714286e-05,
      "loss": 1.874,
      "step": 35
    },
    {
      "epoch": 0.09389671361502347,
      "grad_norm": 0.30804693051898663,
      "learning_rate": 8.082693104266452e-05,
      "loss": 1.8383,
      "step": 40
    },
    {
      "epoch": 0.1056338028169014,
      "grad_norm": 0.27001251328292725,
      "learning_rate": 8.082063867481342e-05,
      "loss": 1.4036,
      "step": 45
    },
    {
      "epoch": 0.11737089201877934,
      "grad_norm": 0.3485335678060715,
      "learning_rate": 8.080950709070086e-05,
      "loss": 1.6265,
      "step": 50
    },
    {
      "epoch": 0.12910798122065728,
      "grad_norm": 0.25009903952840973,
      "learning_rate": 8.079353806798678e-05,
      "loss": 1.5537,
      "step": 55
    },
    {
      "epoch": 0.14084507042253522,
      "grad_norm": 0.283472870261176,
      "learning_rate": 8.07727341568466e-05,
      "loss": 1.5242,
      "step": 60
    },
    {
      "epoch": 0.15258215962441316,
      "grad_norm": 0.2603618862170815,
      "learning_rate": 8.074709867956397e-05,
      "loss": 1.52,
      "step": 65
    },
    {
      "epoch": 0.1643192488262911,
      "grad_norm": 0.26748765076963404,
      "learning_rate": 8.071663573000018e-05,
      "loss": 1.4147,
      "step": 70
    },
    {
      "epoch": 0.176056338028169,
      "grad_norm": 0.27233224635502085,
      "learning_rate": 8.068135017294043e-05,
      "loss": 1.4277,
      "step": 75
    },
    {
      "epoch": 0.18779342723004694,
      "grad_norm": 0.27807424769416844,
      "learning_rate": 8.064124764331693e-05,
      "loss": 1.5202,
      "step": 80
    },
    {
      "epoch": 0.19953051643192488,
      "grad_norm": 0.237501219160123,
      "learning_rate": 8.059633454530899e-05,
      "loss": 1.5265,
      "step": 85
    },
    {
      "epoch": 0.2112676056338028,
      "grad_norm": 0.26393741964527623,
      "learning_rate": 8.054661805132042e-05,
      "loss": 1.4661,
      "step": 90
    },
    {
      "epoch": 0.22300469483568075,
      "grad_norm": 0.307134230448153,
      "learning_rate": 8.049210610083398e-05,
      "loss": 1.5281,
      "step": 95
    },
    {
      "epoch": 0.2347417840375587,
      "grad_norm": 0.30034381561230195,
      "learning_rate": 8.043280739914361e-05,
      "loss": 1.4272,
      "step": 100
    },
    {
      "epoch": 0.24647887323943662,
      "grad_norm": 0.25199182793601094,
      "learning_rate": 8.036873141596414e-05,
      "loss": 1.385,
      "step": 105
    },
    {
      "epoch": 0.25821596244131456,
      "grad_norm": 0.25027218831718984,
      "learning_rate": 8.029988838391911e-05,
      "loss": 1.4313,
      "step": 110
    },
    {
      "epoch": 0.2699530516431925,
      "grad_norm": 1.0436457556233485,
      "learning_rate": 8.02262892969066e-05,
      "loss": 1.4076,
      "step": 115
    },
    {
      "epoch": 0.28169014084507044,
      "grad_norm": 0.25287945938151374,
      "learning_rate": 8.014794590834359e-05,
      "loss": 1.4006,
      "step": 120
    },
    {
      "epoch": 0.2934272300469484,
      "grad_norm": 0.3426017772114307,
      "learning_rate": 8.0064870729289e-05,
      "loss": 1.3478,
      "step": 125
    },
    {
      "epoch": 0.3051643192488263,
      "grad_norm": 0.24548095078180857,
      "learning_rate": 7.997707702644572e-05,
      "loss": 1.4508,
      "step": 130
    },
    {
      "epoch": 0.31690140845070425,
      "grad_norm": 0.3257280042891666,
      "learning_rate": 7.988457882004198e-05,
      "loss": 1.3507,
      "step": 135
    },
    {
      "epoch": 0.3286384976525822,
      "grad_norm": 0.2648447775178709,
      "learning_rate": 7.978739088159242e-05,
      "loss": 1.3959,
      "step": 140
    },
    {
      "epoch": 0.3403755868544601,
      "grad_norm": 0.3253574365810539,
      "learning_rate": 7.968552873153908e-05,
      "loss": 1.5608,
      "step": 145
    },
    {
      "epoch": 0.352112676056338,
      "grad_norm": 0.2683217685759068,
      "learning_rate": 7.957900863677296e-05,
      "loss": 1.3742,
      "step": 150
    },
    {
      "epoch": 0.36384976525821594,
      "grad_norm": 0.27823469209609253,
      "learning_rate": 7.94678476080362e-05,
      "loss": 1.4693,
      "step": 155
    },
    {
      "epoch": 0.3755868544600939,
      "grad_norm": 0.31396185033997276,
      "learning_rate": 7.935206339720553e-05,
      "loss": 1.4293,
      "step": 160
    },
    {
      "epoch": 0.3873239436619718,
      "grad_norm": 0.23711923917761538,
      "learning_rate": 7.923167449445751e-05,
      "loss": 1.3614,
      "step": 165
    },
    {
      "epoch": 0.39906103286384975,
      "grad_norm": 0.23098090222216835,
      "learning_rate": 7.910670012531556e-05,
      "loss": 1.4348,
      "step": 170
    },
    {
      "epoch": 0.4107981220657277,
      "grad_norm": 0.24726241172120542,
      "learning_rate": 7.897716024757991e-05,
      "loss": 1.3991,
      "step": 175
    },
    {
      "epoch": 0.4225352112676056,
      "grad_norm": 0.2529598203448259,
      "learning_rate": 7.884307554814028e-05,
      "loss": 1.3889,
      "step": 180
    },
    {
      "epoch": 0.43427230046948356,
      "grad_norm": 0.2626838974550443,
      "learning_rate": 7.870446743967242e-05,
      "loss": 1.3164,
      "step": 185
    },
    {
      "epoch": 0.4460093896713615,
      "grad_norm": 0.22340503189330538,
      "learning_rate": 7.856135805721846e-05,
      "loss": 1.3403,
      "step": 190
    },
    {
      "epoch": 0.45774647887323944,
      "grad_norm": 0.2643635528374707,
      "learning_rate": 7.841377025465227e-05,
      "loss": 1.3128,
      "step": 195
    },
    {
      "epoch": 0.4694835680751174,
      "grad_norm": 0.23702382238947953,
      "learning_rate": 7.826172760102954e-05,
      "loss": 1.3577,
      "step": 200
    },
    {
      "epoch": 0.4812206572769953,
      "grad_norm": 0.3089050115416261,
      "learning_rate": 7.810525437682417e-05,
      "loss": 1.3627,
      "step": 205
    },
    {
      "epoch": 0.49295774647887325,
      "grad_norm": 0.2459426412330782,
      "learning_rate": 7.794437557005058e-05,
      "loss": 1.361,
      "step": 210
    },
    {
      "epoch": 0.5046948356807511,
      "grad_norm": 0.2474645086201422,
      "learning_rate": 7.777911687227339e-05,
      "loss": 1.35,
      "step": 215
    },
    {
      "epoch": 0.5164319248826291,
      "grad_norm": 0.2776269910713455,
      "learning_rate": 7.760950467450457e-05,
      "loss": 1.3038,
      "step": 220
    },
    {
      "epoch": 0.528169014084507,
      "grad_norm": 0.23093548062400274,
      "learning_rate": 7.743556606298887e-05,
      "loss": 1.2999,
      "step": 225
    },
    {
      "epoch": 0.539906103286385,
      "grad_norm": 0.21029555812416517,
      "learning_rate": 7.725732881487837e-05,
      "loss": 1.3563,
      "step": 230
    },
    {
      "epoch": 0.5516431924882629,
      "grad_norm": 0.2380837640644916,
      "learning_rate": 7.707482139379649e-05,
      "loss": 1.4117,
      "step": 235
    },
    {
      "epoch": 0.5633802816901409,
      "grad_norm": 0.2501691778817746,
      "learning_rate": 7.688807294529259e-05,
      "loss": 1.3747,
      "step": 240
    },
    {
      "epoch": 0.5751173708920188,
      "grad_norm": 0.2531422248522496,
      "learning_rate": 7.669711329218748e-05,
      "loss": 1.3142,
      "step": 245
    },
    {
      "epoch": 0.5868544600938967,
      "grad_norm": 0.3071530417375628,
      "learning_rate": 7.6501972929811e-05,
      "loss": 1.3518,
      "step": 250
    },
    {
      "epoch": 0.5985915492957746,
      "grad_norm": 0.2462933779162881,
      "learning_rate": 7.630268302113184e-05,
      "loss": 1.3381,
      "step": 255
    },
    {
      "epoch": 0.6103286384976526,
      "grad_norm": 0.231440592355059,
      "learning_rate": 7.609927539178122e-05,
      "loss": 1.2967,
      "step": 260
    },
    {
      "epoch": 0.6220657276995305,
      "grad_norm": 0.26883215485587464,
      "learning_rate": 7.589178252497026e-05,
      "loss": 1.4204,
      "step": 265
    },
    {
      "epoch": 0.6338028169014085,
      "grad_norm": 1.5499941567046591,
      "learning_rate": 7.568023755630275e-05,
      "loss": 1.2625,
      "step": 270
    },
    {
      "epoch": 0.6455399061032864,
      "grad_norm": 0.23176606216751602,
      "learning_rate": 7.546467426848345e-05,
      "loss": 1.3575,
      "step": 275
    },
    {
      "epoch": 0.6572769953051644,
      "grad_norm": 0.29787341630319236,
      "learning_rate": 7.524512708592318e-05,
      "loss": 1.3631,
      "step": 280
    },
    {
      "epoch": 0.6690140845070423,
      "grad_norm": 0.2414950159288888,
      "learning_rate": 7.502163106924143e-05,
      "loss": 1.3421,
      "step": 285
    },
    {
      "epoch": 0.6807511737089202,
      "grad_norm": 0.2609054694114428,
      "learning_rate": 7.479422190966731e-05,
      "loss": 1.2775,
      "step": 290
    },
    {
      "epoch": 0.6924882629107981,
      "grad_norm": 0.24342075714638725,
      "learning_rate": 7.456293592333991e-05,
      "loss": 1.2399,
      "step": 295
    },
    {
      "epoch": 0.704225352112676,
      "grad_norm": 0.2451126010921149,
      "learning_rate": 7.432781004550864e-05,
      "loss": 1.1849,
      "step": 300
    },
    {
      "epoch": 0.715962441314554,
      "grad_norm": 0.38381425009051096,
      "learning_rate": 7.408888182463507e-05,
      "loss": 1.2887,
      "step": 305
    },
    {
      "epoch": 0.7276995305164319,
      "grad_norm": 0.23592915827608676,
      "learning_rate": 7.38461894163964e-05,
      "loss": 1.3505,
      "step": 310
    },
    {
      "epoch": 0.7394366197183099,
      "grad_norm": 0.2698206782214951,
      "learning_rate": 7.35997715775923e-05,
      "loss": 1.3215,
      "step": 315
    },
    {
      "epoch": 0.7511737089201878,
      "grad_norm": 0.2453301887513675,
      "learning_rate": 7.334966765995569e-05,
      "loss": 1.2296,
      "step": 320
    },
    {
      "epoch": 0.7629107981220657,
      "grad_norm": 0.21952063652943296,
      "learning_rate": 7.309591760386829e-05,
      "loss": 1.3247,
      "step": 325
    },
    {
      "epoch": 0.7746478873239436,
      "grad_norm": 0.28692324866840146,
      "learning_rate": 7.28385619319825e-05,
      "loss": 1.2973,
      "step": 330
    },
    {
      "epoch": 0.7863849765258216,
      "grad_norm": 0.24556875494678596,
      "learning_rate": 7.257764174275004e-05,
      "loss": 1.1292,
      "step": 335
    },
    {
      "epoch": 0.7981220657276995,
      "grad_norm": 0.21102775812196028,
      "learning_rate": 7.231319870385874e-05,
      "loss": 1.1703,
      "step": 340
    },
    {
      "epoch": 0.8098591549295775,
      "grad_norm": 0.22043483877271167,
      "learning_rate": 7.204527504557848e-05,
      "loss": 1.1836,
      "step": 345
    },
    {
      "epoch": 0.8215962441314554,
      "grad_norm": 0.23022946831058466,
      "learning_rate": 7.177391355401707e-05,
      "loss": 1.3726,
      "step": 350
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.2246896571930163,
      "learning_rate": 7.149915756428768e-05,
      "loss": 1.3799,
      "step": 355
    },
    {
      "epoch": 0.8450704225352113,
      "grad_norm": 0.2523792044709292,
      "learning_rate": 7.122105095358836e-05,
      "loss": 1.3122,
      "step": 360
    },
    {
      "epoch": 0.8568075117370892,
      "grad_norm": 0.23559138914799244,
      "learning_rate": 7.0939638134195e-05,
      "loss": 1.2995,
      "step": 365
    },
    {
      "epoch": 0.8685446009389671,
      "grad_norm": 0.2597889434863669,
      "learning_rate": 7.065496404636905e-05,
      "loss": 1.369,
      "step": 370
    },
    {
      "epoch": 0.8802816901408451,
      "grad_norm": 0.24829748827668763,
      "learning_rate": 7.036707415118069e-05,
      "loss": 1.2793,
      "step": 375
    },
    {
      "epoch": 0.892018779342723,
      "grad_norm": 0.6851956453951893,
      "learning_rate": 7.007601442324886e-05,
      "loss": 1.3262,
      "step": 380
    },
    {
      "epoch": 0.903755868544601,
      "grad_norm": 0.24078658149556825,
      "learning_rate": 6.978183134339949e-05,
      "loss": 1.3209,
      "step": 385
    },
    {
      "epoch": 0.9154929577464789,
      "grad_norm": 0.24167241236562123,
      "learning_rate": 6.948457189124259e-05,
      "loss": 1.3779,
      "step": 390
    },
    {
      "epoch": 0.9272300469483568,
      "grad_norm": 0.24575130336431836,
      "learning_rate": 6.918428353766991e-05,
      "loss": 1.3325,
      "step": 395
    },
    {
      "epoch": 0.9389671361502347,
      "grad_norm": 0.2326601273504395,
      "learning_rate": 6.888101423727413e-05,
      "loss": 1.2331,
      "step": 400
    },
    {
      "epoch": 0.9507042253521126,
      "grad_norm": 0.22591521946130988,
      "learning_rate": 6.857481242069052e-05,
      "loss": 1.2541,
      "step": 405
    },
    {
      "epoch": 0.9624413145539906,
      "grad_norm": 0.2308511630846233,
      "learning_rate": 6.82657269868631e-05,
      "loss": 1.2298,
      "step": 410
    },
    {
      "epoch": 0.9741784037558685,
      "grad_norm": 0.25182225001294944,
      "learning_rate": 6.795380729523547e-05,
      "loss": 1.301,
      "step": 415
    },
    {
      "epoch": 0.9859154929577465,
      "grad_norm": 0.2605089795549262,
      "learning_rate": 6.76391031578685e-05,
      "loss": 1.3731,
      "step": 420
    },
    {
      "epoch": 0.9976525821596244,
      "grad_norm": 0.22892211041701857,
      "learning_rate": 6.732166483148549e-05,
      "loss": 1.1978,
      "step": 425
    },
    {
      "epoch": 0.9976525821596244,
      "eval_loss": 1.6044145822525024,
      "eval_runtime": 50.0629,
      "eval_samples_per_second": 1.458,
      "eval_steps_per_second": 0.2,
      "step": 425
    },
    {
      "epoch": 1.0093896713615023,
      "grad_norm": 0.24910638304822852,
      "learning_rate": 6.70015430094465e-05,
      "loss": 1.123,
      "step": 430
    },
    {
      "epoch": 1.0211267605633803,
      "grad_norm": 0.33357404745827773,
      "learning_rate": 6.667878881365277e-05,
      "loss": 1.1185,
      "step": 435
    },
    {
      "epoch": 1.0328638497652582,
      "grad_norm": 0.29008454147641954,
      "learning_rate": 6.635345378638292e-05,
      "loss": 1.1284,
      "step": 440
    },
    {
      "epoch": 1.0446009389671362,
      "grad_norm": 0.2670483776495452,
      "learning_rate": 6.602558988206174e-05,
      "loss": 1.2331,
      "step": 445
    },
    {
      "epoch": 1.056338028169014,
      "grad_norm": 0.2430183857087479,
      "learning_rate": 6.56952494589635e-05,
      "loss": 1.0354,
      "step": 450
    },
    {
      "epoch": 1.068075117370892,
      "grad_norm": 0.25159392522233937,
      "learning_rate": 6.536248527085044e-05,
      "loss": 1.095,
      "step": 455
    },
    {
      "epoch": 1.07981220657277,
      "grad_norm": 0.25129604646154363,
      "learning_rate": 6.502735045854836e-05,
      "loss": 1.1631,
      "step": 460
    },
    {
      "epoch": 1.091549295774648,
      "grad_norm": 0.2375727576097195,
      "learning_rate": 6.468989854146017e-05,
      "loss": 1.056,
      "step": 465
    },
    {
      "epoch": 1.1032863849765258,
      "grad_norm": 0.25705262164871134,
      "learning_rate": 6.435018340901922e-05,
      "loss": 1.1019,
      "step": 470
    },
    {
      "epoch": 1.1150234741784038,
      "grad_norm": 0.2632363761365353,
      "learning_rate": 6.400825931208336e-05,
      "loss": 1.0925,
      "step": 475
    },
    {
      "epoch": 1.1267605633802817,
      "grad_norm": 0.25529732934150606,
      "learning_rate": 6.366418085427133e-05,
      "loss": 1.0742,
      "step": 480
    },
    {
      "epoch": 1.1384976525821595,
      "grad_norm": 0.24911618052576018,
      "learning_rate": 6.331800298324286e-05,
      "loss": 1.1257,
      "step": 485
    },
    {
      "epoch": 1.1502347417840375,
      "grad_norm": 0.2713693928567898,
      "learning_rate": 6.29697809819238e-05,
      "loss": 1.1852,
      "step": 490
    },
    {
      "epoch": 1.1619718309859155,
      "grad_norm": 0.25598293691462287,
      "learning_rate": 6.26195704596776e-05,
      "loss": 1.147,
      "step": 495
    },
    {
      "epoch": 1.1737089201877935,
      "grad_norm": 0.2506209145241536,
      "learning_rate": 6.2267427343425e-05,
      "loss": 1.0602,
      "step": 500
    },
    {
      "epoch": 1.1737089201877935,
      "eval_loss": 1.5600786209106445,
      "eval_runtime": 49.8763,
      "eval_samples_per_second": 1.464,
      "eval_steps_per_second": 0.2,
      "step": 500
    },
    {
      "epoch": 1.1854460093896715,
      "grad_norm": 0.24306048622973317,
      "learning_rate": 6.19134078687125e-05,
      "loss": 1.0243,
      "step": 505
    },
    {
      "epoch": 1.1971830985915493,
      "grad_norm": 0.26647005420227804,
      "learning_rate": 6.155756857073197e-05,
      "loss": 1.1676,
      "step": 510
    },
    {
      "epoch": 1.2089201877934272,
      "grad_norm": 0.28346081980702725,
      "learning_rate": 6.119996627529222e-05,
      "loss": 1.1091,
      "step": 515
    },
    {
      "epoch": 1.2206572769953052,
      "grad_norm": 0.2523758910052274,
      "learning_rate": 6.084065808974423e-05,
      "loss": 1.1431,
      "step": 520
    },
    {
      "epoch": 1.232394366197183,
      "grad_norm": 0.24326518180467072,
      "learning_rate": 6.047970139386133e-05,
      "loss": 1.0999,
      "step": 525
    },
    {
      "epoch": 1.244131455399061,
      "grad_norm": 0.2517876789791905,
      "learning_rate": 6.011715383067593e-05,
      "loss": 1.1285,
      "step": 530
    },
    {
      "epoch": 1.255868544600939,
      "grad_norm": 0.25670486692069977,
      "learning_rate": 5.975307329727426e-05,
      "loss": 1.0623,
      "step": 535
    },
    {
      "epoch": 1.267605633802817,
      "grad_norm": 0.2831263117979734,
      "learning_rate": 5.938751793555044e-05,
      "loss": 1.1384,
      "step": 540
    },
    {
      "epoch": 1.2793427230046948,
      "grad_norm": 0.2514294700936478,
      "learning_rate": 5.902054612292151e-05,
      "loss": 1.1115,
      "step": 545
    },
    {
      "epoch": 1.2910798122065728,
      "grad_norm": 0.2830207243631822,
      "learning_rate": 5.865221646300485e-05,
      "loss": 1.1742,
      "step": 550
    },
    {
      "epoch": 1.3028169014084507,
      "grad_norm": 0.27398863839678606,
      "learning_rate": 5.828258777625944e-05,
      "loss": 1.1052,
      "step": 555
    },
    {
      "epoch": 1.3145539906103285,
      "grad_norm": 0.25895033553118535,
      "learning_rate": 5.791171909059259e-05,
      "loss": 1.2186,
      "step": 560
    },
    {
      "epoch": 1.3262910798122065,
      "grad_norm": 0.258186822849988,
      "learning_rate": 5.7539669631933385e-05,
      "loss": 1.1201,
      "step": 565
    },
    {
      "epoch": 1.3380281690140845,
      "grad_norm": 0.25129851535157616,
      "learning_rate": 5.71664988147746e-05,
      "loss": 1.1031,
      "step": 570
    },
    {
      "epoch": 1.3497652582159625,
      "grad_norm": 0.25982425225157424,
      "learning_rate": 5.6792266232684574e-05,
      "loss": 1.0259,
      "step": 575
    },
    {
      "epoch": 1.3615023474178405,
      "grad_norm": 0.26067740399731576,
      "learning_rate": 5.6417031648790324e-05,
      "loss": 1.2319,
      "step": 580
    },
    {
      "epoch": 1.3732394366197183,
      "grad_norm": 0.28203855843767645,
      "learning_rate": 5.6040854986233706e-05,
      "loss": 1.1523,
      "step": 585
    },
    {
      "epoch": 1.3849765258215962,
      "grad_norm": 0.2616034024922836,
      "learning_rate": 5.5663796318601946e-05,
      "loss": 1.0862,
      "step": 590
    },
    {
      "epoch": 1.3967136150234742,
      "grad_norm": 0.2557988750857973,
      "learning_rate": 5.528591586033421e-05,
      "loss": 1.1261,
      "step": 595
    },
    {
      "epoch": 1.408450704225352,
      "grad_norm": 0.2583387594345229,
      "learning_rate": 5.490727395710563e-05,
      "loss": 1.1638,
      "step": 600
    },
    {
      "epoch": 1.42018779342723,
      "grad_norm": 0.2824039637269599,
      "learning_rate": 5.452793107619043e-05,
      "loss": 1.1064,
      "step": 605
    },
    {
      "epoch": 1.431924882629108,
      "grad_norm": 0.30378896985216747,
      "learning_rate": 5.4147947796805546e-05,
      "loss": 1.1587,
      "step": 610
    },
    {
      "epoch": 1.443661971830986,
      "grad_norm": 0.26055491232191014,
      "learning_rate": 5.376738480043643e-05,
      "loss": 1.1737,
      "step": 615
    },
    {
      "epoch": 1.455399061032864,
      "grad_norm": 0.28775987096455746,
      "learning_rate": 5.338630286114656e-05,
      "loss": 1.0302,
      "step": 620
    },
    {
      "epoch": 1.4671361502347418,
      "grad_norm": 0.27434540717537476,
      "learning_rate": 5.300476283587205e-05,
      "loss": 1.096,
      "step": 625
    },
    {
      "epoch": 1.4788732394366197,
      "grad_norm": 0.2687633406815332,
      "learning_rate": 5.262282565470313e-05,
      "loss": 1.1784,
      "step": 630
    },
    {
      "epoch": 1.4906103286384975,
      "grad_norm": 0.30497461697387473,
      "learning_rate": 5.224055231115389e-05,
      "loss": 1.0557,
      "step": 635
    },
    {
      "epoch": 1.5023474178403755,
      "grad_norm": 0.2401646558924421,
      "learning_rate": 5.185800385242196e-05,
      "loss": 1.2208,
      "step": 640
    },
    {
      "epoch": 1.5140845070422535,
      "grad_norm": 0.28302252582641413,
      "learning_rate": 5.147524136963947e-05,
      "loss": 1.1629,
      "step": 645
    },
    {
      "epoch": 1.5258215962441315,
      "grad_norm": 0.2733981765032047,
      "learning_rate": 5.1092325988117195e-05,
      "loss": 1.1179,
      "step": 650
    },
    {
      "epoch": 1.5375586854460095,
      "grad_norm": 0.3543210495906027,
      "learning_rate": 5.0709318857583076e-05,
      "loss": 1.1171,
      "step": 655
    },
    {
      "epoch": 1.5492957746478875,
      "grad_norm": 0.2678160500016965,
      "learning_rate": 5.032628114241694e-05,
      "loss": 1.1378,
      "step": 660
    },
    {
      "epoch": 1.5610328638497653,
      "grad_norm": 0.2841104290261492,
      "learning_rate": 4.994327401188283e-05,
      "loss": 1.1317,
      "step": 665
    },
    {
      "epoch": 1.5727699530516432,
      "grad_norm": 0.23756976664123616,
      "learning_rate": 4.956035863036055e-05,
      "loss": 1.1054,
      "step": 670
    },
    {
      "epoch": 1.584507042253521,
      "grad_norm": 0.299042726296967,
      "learning_rate": 4.9177596147578067e-05,
      "loss": 1.14,
      "step": 675
    },
    {
      "epoch": 1.596244131455399,
      "grad_norm": 0.24219892354659045,
      "learning_rate": 4.879504768884612e-05,
      "loss": 1.2392,
      "step": 680
    },
    {
      "epoch": 1.607981220657277,
      "grad_norm": 0.2659120257163632,
      "learning_rate": 4.841277434529689e-05,
      "loss": 1.076,
      "step": 685
    },
    {
      "epoch": 1.619718309859155,
      "grad_norm": 0.2535490633175144,
      "learning_rate": 4.803083716412797e-05,
      "loss": 1.1433,
      "step": 690
    },
    {
      "epoch": 1.631455399061033,
      "grad_norm": 0.2601946247918616,
      "learning_rate": 4.764929713885346e-05,
      "loss": 1.1151,
      "step": 695
    },
    {
      "epoch": 1.6431924882629108,
      "grad_norm": 0.2510924947348151,
      "learning_rate": 4.726821519956359e-05,
      "loss": 1.1195,
      "step": 700
    },
    {
      "epoch": 1.6549295774647887,
      "grad_norm": 0.2717853110326184,
      "learning_rate": 4.688765220319449e-05,
      "loss": 1.0717,
      "step": 705
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.29413144596709895,
      "learning_rate": 4.65076689238096e-05,
      "loss": 1.1293,
      "step": 710
    },
    {
      "epoch": 1.6784037558685445,
      "grad_norm": 0.260973856516902,
      "learning_rate": 4.61283260428944e-05,
      "loss": 1.0877,
      "step": 715
    },
    {
      "epoch": 1.6901408450704225,
      "grad_norm": 0.24688922048601705,
      "learning_rate": 4.574968413966582e-05,
      "loss": 1.074,
      "step": 720
    },
    {
      "epoch": 1.7018779342723005,
      "grad_norm": 0.29518134961553794,
      "learning_rate": 4.537180368139808e-05,
      "loss": 1.2313,
      "step": 725
    },
    {
      "epoch": 1.7136150234741785,
      "grad_norm": 0.2789615734091057,
      "learning_rate": 4.499474501376632e-05,
      "loss": 1.0704,
      "step": 730
    },
    {
      "epoch": 1.7253521126760565,
      "grad_norm": 0.2658926149309144,
      "learning_rate": 4.461856835120969e-05,
      "loss": 1.0596,
      "step": 735
    },
    {
      "epoch": 1.7370892018779343,
      "grad_norm": 0.2572838262933699,
      "learning_rate": 4.424333376731544e-05,
      "loss": 1.1131,
      "step": 740
    },
    {
      "epoch": 1.7488262910798122,
      "grad_norm": 0.2771724916201255,
      "learning_rate": 4.386910118522542e-05,
      "loss": 1.1371,
      "step": 745
    },
    {
      "epoch": 1.76056338028169,
      "grad_norm": 0.27676486532377537,
      "learning_rate": 4.349593036806664e-05,
      "loss": 1.0967,
      "step": 750
    },
    {
      "epoch": 1.772300469483568,
      "grad_norm": 0.26672550277720775,
      "learning_rate": 4.3123880909407424e-05,
      "loss": 1.0533,
      "step": 755
    },
    {
      "epoch": 1.784037558685446,
      "grad_norm": 0.23662667298158832,
      "learning_rate": 4.275301222374057e-05,
      "loss": 1.0783,
      "step": 760
    },
    {
      "epoch": 1.795774647887324,
      "grad_norm": 0.29383896298290885,
      "learning_rate": 4.238338353699518e-05,
      "loss": 1.17,
      "step": 765
    },
    {
      "epoch": 1.807511737089202,
      "grad_norm": 0.28154071670327757,
      "learning_rate": 4.201505387707851e-05,
      "loss": 1.1453,
      "step": 770
    },
    {
      "epoch": 1.8192488262910798,
      "grad_norm": 0.25700361275731554,
      "learning_rate": 4.1648082064449574e-05,
      "loss": 1.082,
      "step": 775
    },
    {
      "epoch": 1.8309859154929577,
      "grad_norm": 0.2826122217432797,
      "learning_rate": 4.128252670272575e-05,
      "loss": 1.0474,
      "step": 780
    },
    {
      "epoch": 1.8427230046948355,
      "grad_norm": 0.2620920138050905,
      "learning_rate": 4.091844616932409e-05,
      "loss": 1.1321,
      "step": 785
    },
    {
      "epoch": 1.8544600938967135,
      "grad_norm": 0.29154525087785954,
      "learning_rate": 4.055589860613869e-05,
      "loss": 1.1328,
      "step": 790
    },
    {
      "epoch": 1.8661971830985915,
      "grad_norm": 0.2619470877476919,
      "learning_rate": 4.01949419102558e-05,
      "loss": 1.0941,
      "step": 795
    },
    {
      "epoch": 1.8779342723004695,
      "grad_norm": 0.24822033480152877,
      "learning_rate": 3.983563372470781e-05,
      "loss": 0.9679,
      "step": 800
    },
    {
      "epoch": 1.8896713615023475,
      "grad_norm": 0.28224783656544505,
      "learning_rate": 3.9478031429268066e-05,
      "loss": 1.144,
      "step": 805
    },
    {
      "epoch": 1.9014084507042255,
      "grad_norm": 0.30516183723950785,
      "learning_rate": 3.912219213128753e-05,
      "loss": 1.1058,
      "step": 810
    },
    {
      "epoch": 1.9131455399061033,
      "grad_norm": 0.30562651002624985,
      "learning_rate": 3.876817265657502e-05,
      "loss": 1.1647,
      "step": 815
    },
    {
      "epoch": 1.9248826291079812,
      "grad_norm": 0.2768262798277747,
      "learning_rate": 3.8416029540322404e-05,
      "loss": 1.1273,
      "step": 820
    },
    {
      "epoch": 1.936619718309859,
      "grad_norm": 0.2842898850002602,
      "learning_rate": 3.806581901807623e-05,
      "loss": 1.1984,
      "step": 825
    },
    {
      "epoch": 1.948356807511737,
      "grad_norm": 0.31329090267496584,
      "learning_rate": 3.771759701675715e-05,
      "loss": 1.1333,
      "step": 830
    },
    {
      "epoch": 1.960093896713615,
      "grad_norm": 0.3381345989557091,
      "learning_rate": 3.7371419145728696e-05,
      "loss": 1.0973,
      "step": 835
    },
    {
      "epoch": 1.971830985915493,
      "grad_norm": 0.2970165089739574,
      "learning_rate": 3.702734068791666e-05,
      "loss": 1.1552,
      "step": 840
    },
    {
      "epoch": 1.983568075117371,
      "grad_norm": 0.26229774431947867,
      "learning_rate": 3.6685416590980806e-05,
      "loss": 1.0432,
      "step": 845
    },
    {
      "epoch": 1.995305164319249,
      "grad_norm": 0.3020477793486203,
      "learning_rate": 3.634570145853985e-05,
      "loss": 1.1401,
      "step": 850
    },
    {
      "epoch": 1.995305164319249,
      "eval_loss": 1.5306791067123413,
      "eval_runtime": 51.0959,
      "eval_samples_per_second": 1.429,
      "eval_steps_per_second": 0.196,
      "step": 850
    },
    {
      "epoch": 2.007042253521127,
      "grad_norm": 0.24436801124041757,
      "learning_rate": 3.600824954145166e-05,
      "loss": 0.9757,
      "step": 855
    },
    {
      "epoch": 2.0187793427230045,
      "grad_norm": 0.3332860055486253,
      "learning_rate": 3.567311472914957e-05,
      "loss": 0.9992,
      "step": 860
    },
    {
      "epoch": 2.0305164319248825,
      "grad_norm": 0.2915663428907029,
      "learning_rate": 3.5340350541036516e-05,
      "loss": 0.9287,
      "step": 865
    },
    {
      "epoch": 2.0422535211267605,
      "grad_norm": 0.31218559752172226,
      "learning_rate": 3.501001011793828e-05,
      "loss": 0.8744,
      "step": 870
    },
    {
      "epoch": 2.0539906103286385,
      "grad_norm": 0.3014812239987984,
      "learning_rate": 3.46821462136171e-05,
      "loss": 0.7871,
      "step": 875
    },
    {
      "epoch": 2.0657276995305165,
      "grad_norm": 0.2919198189363655,
      "learning_rate": 3.4356811186347255e-05,
      "loss": 0.8375,
      "step": 880
    },
    {
      "epoch": 2.0774647887323945,
      "grad_norm": 0.30297421951074827,
      "learning_rate": 3.4034056990553525e-05,
      "loss": 0.9539,
      "step": 885
    },
    {
      "epoch": 2.0892018779342725,
      "grad_norm": 0.3004785475460889,
      "learning_rate": 3.3713935168514526e-05,
      "loss": 0.8916,
      "step": 890
    },
    {
      "epoch": 2.10093896713615,
      "grad_norm": 0.3063305424781025,
      "learning_rate": 3.339649684213153e-05,
      "loss": 0.8923,
      "step": 895
    },
    {
      "epoch": 2.112676056338028,
      "grad_norm": 0.3087837541879336,
      "learning_rate": 3.308179270476455e-05,
      "loss": 0.9808,
      "step": 900
    },
    {
      "epoch": 2.124413145539906,
      "grad_norm": 0.3069084480862884,
      "learning_rate": 3.276987301313693e-05,
      "loss": 0.9499,
      "step": 905
    },
    {
      "epoch": 2.136150234741784,
      "grad_norm": 0.31772408652498,
      "learning_rate": 3.246078757930949e-05,
      "loss": 0.9302,
      "step": 910
    },
    {
      "epoch": 2.147887323943662,
      "grad_norm": 0.32823640884020344,
      "learning_rate": 3.21545857627259e-05,
      "loss": 0.8977,
      "step": 915
    },
    {
      "epoch": 2.15962441314554,
      "grad_norm": 0.2986661375144512,
      "learning_rate": 3.1851316462330086e-05,
      "loss": 0.9632,
      "step": 920
    },
    {
      "epoch": 2.171361502347418,
      "grad_norm": 0.3044796130319778,
      "learning_rate": 3.155102810875744e-05,
      "loss": 0.9244,
      "step": 925
    },
    {
      "epoch": 2.183098591549296,
      "grad_norm": 0.29391562913639346,
      "learning_rate": 3.125376865660053e-05,
      "loss": 0.9065,
      "step": 930
    },
    {
      "epoch": 2.1948356807511735,
      "grad_norm": 0.33464998182357986,
      "learning_rate": 3.095958557675115e-05,
      "loss": 1.0646,
      "step": 935
    },
    {
      "epoch": 2.2065727699530515,
      "grad_norm": 0.3097450215029285,
      "learning_rate": 3.0668525848819334e-05,
      "loss": 0.909,
      "step": 940
    },
    {
      "epoch": 2.2183098591549295,
      "grad_norm": 0.31619368356156724,
      "learning_rate": 3.0380635953630954e-05,
      "loss": 0.942,
      "step": 945
    },
    {
      "epoch": 2.2300469483568075,
      "grad_norm": 0.5488021620297451,
      "learning_rate": 3.009596186580501e-05,
      "loss": 0.9037,
      "step": 950
    },
    {
      "epoch": 2.2417840375586855,
      "grad_norm": 0.3007995564402941,
      "learning_rate": 2.981454904641166e-05,
      "loss": 0.841,
      "step": 955
    },
    {
      "epoch": 2.2535211267605635,
      "grad_norm": 1.0143926976429458,
      "learning_rate": 2.953644243571233e-05,
      "loss": 0.8931,
      "step": 960
    },
    {
      "epoch": 2.2652582159624415,
      "grad_norm": 0.32847505150062767,
      "learning_rate": 2.926168644598294e-05,
      "loss": 0.8939,
      "step": 965
    },
    {
      "epoch": 2.276995305164319,
      "grad_norm": 0.29868200806169454,
      "learning_rate": 2.899032495442154e-05,
      "loss": 0.877,
      "step": 970
    },
    {
      "epoch": 2.288732394366197,
      "grad_norm": 0.3247314434075676,
      "learning_rate": 2.8722401296141272e-05,
      "loss": 0.9554,
      "step": 975
    },
    {
      "epoch": 2.300469483568075,
      "grad_norm": 0.2963994274935962,
      "learning_rate": 2.845795825724998e-05,
      "loss": 0.8366,
      "step": 980
    },
    {
      "epoch": 2.312206572769953,
      "grad_norm": 0.3018384484277062,
      "learning_rate": 2.819703806801753e-05,
      "loss": 0.9772,
      "step": 985
    },
    {
      "epoch": 2.323943661971831,
      "grad_norm": 0.3138452072040482,
      "learning_rate": 2.7939682396131733e-05,
      "loss": 0.8705,
      "step": 990
    },
    {
      "epoch": 2.335680751173709,
      "grad_norm": 0.33495040627076267,
      "learning_rate": 2.7685932340044337e-05,
      "loss": 0.9398,
      "step": 995
    },
    {
      "epoch": 2.347417840375587,
      "grad_norm": 0.3426786719758913,
      "learning_rate": 2.743582842240771e-05,
      "loss": 0.9528,
      "step": 1000
    },
    {
      "epoch": 2.347417840375587,
      "eval_loss": 1.5564570426940918,
      "eval_runtime": 49.772,
      "eval_samples_per_second": 1.467,
      "eval_steps_per_second": 0.201,
      "step": 1000
    },
    {
      "epoch": 2.359154929577465,
      "grad_norm": 0.32715481854051526,
      "learning_rate": 2.7189410583603632e-05,
      "loss": 0.9294,
      "step": 1005
    },
    {
      "epoch": 2.370892018779343,
      "grad_norm": 0.31904903206727697,
      "learning_rate": 2.6946718175364948e-05,
      "loss": 0.9377,
      "step": 1010
    },
    {
      "epoch": 2.3826291079812205,
      "grad_norm": 0.3246876100683689,
      "learning_rate": 2.6707789954491374e-05,
      "loss": 0.9389,
      "step": 1015
    },
    {
      "epoch": 2.3943661971830985,
      "grad_norm": 0.33657822136933685,
      "learning_rate": 2.647266407666012e-05,
      "loss": 0.8748,
      "step": 1020
    },
    {
      "epoch": 2.4061032863849765,
      "grad_norm": 0.33075950831011175,
      "learning_rate": 2.62413780903327e-05,
      "loss": 0.9191,
      "step": 1025
    },
    {
      "epoch": 2.4178403755868545,
      "grad_norm": 0.3091789648897354,
      "learning_rate": 2.6013968930758587e-05,
      "loss": 0.8542,
      "step": 1030
    },
    {
      "epoch": 2.4295774647887325,
      "grad_norm": 0.3017557854181004,
      "learning_rate": 2.579047291407683e-05,
      "loss": 0.9691,
      "step": 1035
    },
    {
      "epoch": 2.4413145539906105,
      "grad_norm": 0.3315662930965317,
      "learning_rate": 2.557092573151657e-05,
      "loss": 0.9335,
      "step": 1040
    },
    {
      "epoch": 2.453051643192488,
      "grad_norm": 0.28405648990544513,
      "learning_rate": 2.5355362443697268e-05,
      "loss": 0.8608,
      "step": 1045
    },
    {
      "epoch": 2.464788732394366,
      "grad_norm": 0.3272098198440713,
      "learning_rate": 2.5143817475029763e-05,
      "loss": 0.945,
      "step": 1050
    },
    {
      "epoch": 2.476525821596244,
      "grad_norm": 15.473459249206623,
      "learning_rate": 2.4936324608218807e-05,
      "loss": 0.862,
      "step": 1055
    },
    {
      "epoch": 2.488262910798122,
      "grad_norm": 0.3214221685785962,
      "learning_rate": 2.4732916978868173e-05,
      "loss": 0.8372,
      "step": 1060
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.30527745666926165,
      "learning_rate": 2.453362707018903e-05,
      "loss": 0.9094,
      "step": 1065
    },
    {
      "epoch": 2.511737089201878,
      "grad_norm": 0.337431832474658,
      "learning_rate": 2.4338486707812528e-05,
      "loss": 0.8946,
      "step": 1070
    },
    {
      "epoch": 2.523474178403756,
      "grad_norm": 0.3322583740691652,
      "learning_rate": 2.414752705470744e-05,
      "loss": 0.949,
      "step": 1075
    },
    {
      "epoch": 2.535211267605634,
      "grad_norm": 0.3183558468618594,
      "learning_rate": 2.396077860620353e-05,
      "loss": 0.8969,
      "step": 1080
    },
    {
      "epoch": 2.546948356807512,
      "grad_norm": 0.3155273530496357,
      "learning_rate": 2.377827118512165e-05,
      "loss": 0.9395,
      "step": 1085
    },
    {
      "epoch": 2.5586854460093895,
      "grad_norm": 2.4170816896900025,
      "learning_rate": 2.3600033937011134e-05,
      "loss": 0.8337,
      "step": 1090
    },
    {
      "epoch": 2.5704225352112675,
      "grad_norm": 0.31607546736128544,
      "learning_rate": 2.342609532549545e-05,
      "loss": 0.856,
      "step": 1095
    },
    {
      "epoch": 2.5821596244131455,
      "grad_norm": 0.33297406422032433,
      "learning_rate": 2.3256483127726626e-05,
      "loss": 0.8664,
      "step": 1100
    },
    {
      "epoch": 2.5938967136150235,
      "grad_norm": 0.3237147629308357,
      "learning_rate": 2.309122442994944e-05,
      "loss": 0.8719,
      "step": 1105
    },
    {
      "epoch": 2.6056338028169015,
      "grad_norm": 0.3234191361238395,
      "learning_rate": 2.2930345623175854e-05,
      "loss": 0.9487,
      "step": 1110
    },
    {
      "epoch": 2.6173708920187795,
      "grad_norm": 0.3323839887038311,
      "learning_rate": 2.277387239897046e-05,
      "loss": 0.9493,
      "step": 1115
    },
    {
      "epoch": 2.629107981220657,
      "grad_norm": 0.32228378417361714,
      "learning_rate": 2.262182974534775e-05,
      "loss": 0.8866,
      "step": 1120
    },
    {
      "epoch": 2.640845070422535,
      "grad_norm": 0.3079696625324963,
      "learning_rate": 2.247424194278155e-05,
      "loss": 0.8054,
      "step": 1125
    },
    {
      "epoch": 2.652582159624413,
      "grad_norm": 0.3097789983846638,
      "learning_rate": 2.2331132560327617e-05,
      "loss": 0.9464,
      "step": 1130
    },
    {
      "epoch": 2.664319248826291,
      "grad_norm": 0.29487479541027356,
      "learning_rate": 2.2192524451859738e-05,
      "loss": 0.9063,
      "step": 1135
    },
    {
      "epoch": 2.676056338028169,
      "grad_norm": 0.33006135046655793,
      "learning_rate": 2.205843975242011e-05,
      "loss": 0.8962,
      "step": 1140
    },
    {
      "epoch": 2.687793427230047,
      "grad_norm": 0.3529330284339669,
      "learning_rate": 2.1928899874684446e-05,
      "loss": 0.9958,
      "step": 1145
    },
    {
      "epoch": 2.699530516431925,
      "grad_norm": 0.36979938140870244,
      "learning_rate": 2.1803925505542516e-05,
      "loss": 0.9466,
      "step": 1150
    },
    {
      "epoch": 2.711267605633803,
      "grad_norm": 0.30449528011099597,
      "learning_rate": 2.168353660279449e-05,
      "loss": 0.8422,
      "step": 1155
    },
    {
      "epoch": 2.723004694835681,
      "grad_norm": 0.32449892199883407,
      "learning_rate": 2.156775239196383e-05,
      "loss": 0.9596,
      "step": 1160
    },
    {
      "epoch": 2.7347417840375585,
      "grad_norm": 0.3082491729387956,
      "learning_rate": 2.145659136322706e-05,
      "loss": 0.8846,
      "step": 1165
    },
    {
      "epoch": 2.7464788732394365,
      "grad_norm": 0.3542469454397538,
      "learning_rate": 2.1350071268460934e-05,
      "loss": 0.9162,
      "step": 1170
    },
    {
      "epoch": 2.7582159624413145,
      "grad_norm": 0.35364030378555145,
      "learning_rate": 2.12482091184076e-05,
      "loss": 0.9548,
      "step": 1175
    },
    {
      "epoch": 2.7699530516431925,
      "grad_norm": 0.534630830850989,
      "learning_rate": 2.115102117995803e-05,
      "loss": 0.9527,
      "step": 1180
    },
    {
      "epoch": 2.7816901408450705,
      "grad_norm": 0.3638700943894012,
      "learning_rate": 2.1058522973554294e-05,
      "loss": 0.8624,
      "step": 1185
    },
    {
      "epoch": 2.7934272300469485,
      "grad_norm": 0.33210932308224567,
      "learning_rate": 2.0970729270711017e-05,
      "loss": 0.8602,
      "step": 1190
    },
    {
      "epoch": 2.805164319248826,
      "grad_norm": 0.3271576610317457,
      "learning_rate": 2.0887654091656426e-05,
      "loss": 0.8537,
      "step": 1195
    },
    {
      "epoch": 2.816901408450704,
      "grad_norm": 0.3445497480228464,
      "learning_rate": 2.0809310703093418e-05,
      "loss": 0.805,
      "step": 1200
    },
    {
      "epoch": 2.828638497652582,
      "grad_norm": 0.32705975575158863,
      "learning_rate": 2.073571161608091e-05,
      "loss": 0.955,
      "step": 1205
    },
    {
      "epoch": 2.84037558685446,
      "grad_norm": 0.34134786803880485,
      "learning_rate": 2.066686858403588e-05,
      "loss": 0.907,
      "step": 1210
    },
    {
      "epoch": 2.852112676056338,
      "grad_norm": 0.31253357487176564,
      "learning_rate": 2.0602792600856414e-05,
      "loss": 0.9051,
      "step": 1215
    },
    {
      "epoch": 2.863849765258216,
      "grad_norm": 0.3325144520719963,
      "learning_rate": 2.0543493899166034e-05,
      "loss": 0.9146,
      "step": 1220
    },
    {
      "epoch": 2.875586854460094,
      "grad_norm": 0.3293615881469167,
      "learning_rate": 2.04889819486796e-05,
      "loss": 0.8378,
      "step": 1225
    },
    {
      "epoch": 2.887323943661972,
      "grad_norm": 0.33386869136878455,
      "learning_rate": 2.0439265454691023e-05,
      "loss": 0.9188,
      "step": 1230
    },
    {
      "epoch": 2.89906103286385,
      "grad_norm": 0.32510298043861213,
      "learning_rate": 2.0394352356683095e-05,
      "loss": 0.9314,
      "step": 1235
    },
    {
      "epoch": 2.910798122065728,
      "grad_norm": 0.3244639716190284,
      "learning_rate": 2.0354249827059588e-05,
      "loss": 0.8556,
      "step": 1240
    },
    {
      "epoch": 2.9225352112676055,
      "grad_norm": 0.3442224853352613,
      "learning_rate": 2.0318964269999837e-05,
      "loss": 0.8086,
      "step": 1245
    },
    {
      "epoch": 2.9342723004694835,
      "grad_norm": 0.3402662522070222,
      "learning_rate": 2.0288501320436047e-05,
      "loss": 0.8865,
      "step": 1250
    },
    {
      "epoch": 2.936619718309859,
      "eval_loss": 1.5202196836471558,
      "eval_runtime": 49.8279,
      "eval_samples_per_second": 1.465,
      "eval_steps_per_second": 0.201,
      "step": 1251
    }
  ],
  "logging_steps": 5,
  "max_steps": 1278,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1297580487081984.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}