{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.42122999157540014,
  "eval_steps": 500,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004212299915754001,
      "grad_norm": "Infinity",
      "learning_rate": 1.0425224359183675e-05,
      "loss": 9.1367,
      "step": 5
    },
    {
      "epoch": 0.008424599831508003,
      "grad_norm": "Infinity",
      "learning_rate": 2.3456754808163266e-05,
      "loss": 9.1854,
      "step": 10
    },
    {
      "epoch": 0.012636899747262006,
      "grad_norm": "Infinity",
      "learning_rate": 3.6488285257142865e-05,
      "loss": 9.2157,
      "step": 15
    },
    {
      "epoch": 0.016849199663016005,
      "grad_norm": "Infinity",
      "learning_rate": 4.951981570612245e-05,
      "loss": 9.1202,
      "step": 20
    },
    {
      "epoch": 0.02106149957877001,
      "grad_norm": "Infinity",
      "learning_rate": 6.255134615510205e-05,
      "loss": 9.0366,
      "step": 25
    },
    {
      "epoch": 0.02527379949452401,
      "grad_norm": "Infinity",
      "learning_rate": 7.558287660408165e-05,
      "loss": 8.9155,
      "step": 30
    },
    {
      "epoch": 0.02948609941027801,
      "grad_norm": "Infinity",
      "learning_rate": 8.861440705306124e-05,
      "loss": 9.0951,
      "step": 35
    },
    {
      "epoch": 0.03369839932603201,
      "grad_norm": "Infinity",
      "learning_rate": 9.122049589805829e-05,
      "loss": 9.2247,
      "step": 40
    },
    {
      "epoch": 0.037910699241786014,
      "grad_norm": "Infinity",
      "learning_rate": 9.121961334579207e-05,
      "loss": 9.1174,
      "step": 45
    },
    {
      "epoch": 0.04212299915754002,
      "grad_norm": "Infinity",
      "learning_rate": 9.121805192576025e-05,
      "loss": 9.1988,
      "step": 50
    },
    {
      "epoch": 0.04633529907329402,
      "grad_norm": "Infinity",
      "learning_rate": 9.121581166895088e-05,
      "loss": 9.1803,
      "step": 55
    },
    {
      "epoch": 0.05054759898904802,
      "grad_norm": "Infinity",
      "learning_rate": 9.121289261982425e-05,
      "loss": 9.0852,
      "step": 60
    },
    {
      "epoch": 0.05475989890480202,
      "grad_norm": "Infinity",
      "learning_rate": 9.120929483631201e-05,
      "loss": 9.161,
      "step": 65
    },
    {
      "epoch": 0.05897219882055602,
      "grad_norm": "Infinity",
      "learning_rate": 9.120501838981599e-05,
      "loss": 9.0158,
      "step": 70
    },
    {
      "epoch": 0.06318449873631002,
      "grad_norm": "Infinity",
      "learning_rate": 9.120006336520685e-05,
      "loss": 9.0559,
      "step": 75
    },
    {
      "epoch": 0.06739679865206402,
      "grad_norm": "Infinity",
      "learning_rate": 9.11944298608223e-05,
      "loss": 9.0891,
      "step": 80
    },
    {
      "epoch": 0.07160909856781802,
      "grad_norm": "Infinity",
      "learning_rate": 9.118811798846527e-05,
      "loss": 9.1733,
      "step": 85
    },
    {
      "epoch": 0.07582139848357203,
      "grad_norm": "Infinity",
      "learning_rate": 9.118112787340156e-05,
      "loss": 9.2267,
      "step": 90
    },
    {
      "epoch": 0.08003369839932603,
      "grad_norm": "Infinity",
      "learning_rate": 9.117345965435748e-05,
      "loss": 9.0607,
      "step": 95
    },
    {
      "epoch": 0.08424599831508003,
      "grad_norm": "Infinity",
      "learning_rate": 9.116511348351699e-05,
      "loss": 9.0453,
      "step": 100
    },
    {
      "epoch": 0.08845829823083404,
      "grad_norm": "Infinity",
      "learning_rate": 9.115608952651875e-05,
      "loss": 9.0472,
      "step": 105
    },
    {
      "epoch": 0.09267059814658804,
      "grad_norm": "Infinity",
      "learning_rate": 9.11463879624528e-05,
      "loss": 9.1655,
      "step": 110
    },
    {
      "epoch": 0.09688289806234204,
      "grad_norm": "Infinity",
      "learning_rate": 9.113600898385701e-05,
      "loss": 8.9799,
      "step": 115
    },
    {
      "epoch": 0.10109519797809605,
      "grad_norm": "Infinity",
      "learning_rate": 9.112495279671328e-05,
      "loss": 9.2456,
      "step": 120
    },
    {
      "epoch": 0.10530749789385004,
      "grad_norm": "Infinity",
      "learning_rate": 9.11132196204434e-05,
      "loss": 9.0901,
      "step": 125
    },
    {
      "epoch": 0.10951979780960404,
      "grad_norm": "Infinity",
      "learning_rate": 9.11008096879048e-05,
      "loss": 9.0431,
      "step": 130
    },
    {
      "epoch": 0.11373209772535804,
      "grad_norm": "Infinity",
      "learning_rate": 9.108772324538577e-05,
      "loss": 9.153,
      "step": 135
    },
    {
      "epoch": 0.11794439764111204,
      "grad_norm": "Infinity",
      "learning_rate": 9.107396055260072e-05,
      "loss": 9.2556,
      "step": 140
    },
    {
      "epoch": 0.12215669755686605,
      "grad_norm": "Infinity",
      "learning_rate": 9.105952188268497e-05,
      "loss": 8.9465,
      "step": 145
    },
    {
      "epoch": 0.12636899747262004,
      "grad_norm": "Infinity",
      "learning_rate": 9.104440752218928e-05,
      "loss": 9.0371,
      "step": 150
    },
    {
      "epoch": 0.13058129738837404,
      "grad_norm": "Infinity",
      "learning_rate": 9.102861777107424e-05,
      "loss": 9.2312,
      "step": 155
    },
    {
      "epoch": 0.13479359730412804,
      "grad_norm": "Infinity",
      "learning_rate": 9.101215294270431e-05,
      "loss": 9.2269,
      "step": 160
    },
    {
      "epoch": 0.13900589721988205,
      "grad_norm": "Infinity",
      "learning_rate": 9.099501336384152e-05,
      "loss": 9.0931,
      "step": 165
    },
    {
      "epoch": 0.14321819713563605,
      "grad_norm": "Infinity",
      "learning_rate": 9.097719937463912e-05,
      "loss": 9.072,
      "step": 170
    },
    {
      "epoch": 0.14743049705139005,
      "grad_norm": "Infinity",
      "learning_rate": 9.095871132863466e-05,
      "loss": 8.983,
      "step": 175
    },
    {
      "epoch": 0.15164279696714406,
      "grad_norm": "Infinity",
      "learning_rate": 9.093954959274312e-05,
      "loss": 9.1125,
      "step": 180
    },
    {
      "epoch": 0.15585509688289806,
      "grad_norm": "Infinity",
      "learning_rate": 9.091971454724959e-05,
      "loss": 8.8615,
      "step": 185
    },
    {
      "epoch": 0.16006739679865206,
      "grad_norm": "Infinity",
      "learning_rate": 9.089920658580165e-05,
      "loss": 9.1443,
      "step": 190
    },
    {
      "epoch": 0.16427969671440606,
      "grad_norm": "Infinity",
      "learning_rate": 9.087802611540166e-05,
      "loss": 9.2218,
      "step": 195
    },
    {
      "epoch": 0.16849199663016007,
      "grad_norm": "Infinity",
      "learning_rate": 9.085617355639865e-05,
      "loss": 9.1446,
      "step": 200
    },
    {
      "epoch": 0.17270429654591407,
      "grad_norm": "Infinity",
      "learning_rate": 9.083364934247988e-05,
      "loss": 9.2042,
      "step": 205
    },
    {
      "epoch": 0.17691659646166807,
      "grad_norm": "Infinity",
      "learning_rate": 9.081045392066242e-05,
      "loss": 9.2255,
      "step": 210
    },
    {
      "epoch": 0.18112889637742208,
      "grad_norm": "Infinity",
      "learning_rate": 9.07865877512841e-05,
      "loss": 8.9658,
      "step": 215
    },
    {
      "epoch": 0.18534119629317608,
      "grad_norm": "Infinity",
      "learning_rate": 9.076205130799451e-05,
      "loss": 9.0488,
      "step": 220
    },
    {
      "epoch": 0.18955349620893008,
      "grad_norm": "Infinity",
      "learning_rate": 9.073684507774549e-05,
      "loss": 8.9994,
      "step": 225
    },
    {
      "epoch": 0.1937657961246841,
      "grad_norm": "Infinity",
      "learning_rate": 9.071096956078153e-05,
      "loss": 9.2227,
      "step": 230
    },
    {
      "epoch": 0.1979780960404381,
      "grad_norm": "Infinity",
      "learning_rate": 9.068442527062987e-05,
      "loss": 9.0947,
      "step": 235
    },
    {
      "epoch": 0.2021903959561921,
      "grad_norm": "Infinity",
      "learning_rate": 9.065721273409019e-05,
      "loss": 8.9357,
      "step": 240
    },
    {
      "epoch": 0.2064026958719461,
      "grad_norm": "Infinity",
      "learning_rate": 9.062933249122428e-05,
      "loss": 9.1496,
      "step": 245
    },
    {
      "epoch": 0.21061499578770007,
      "grad_norm": "Infinity",
      "learning_rate": 9.060078509534528e-05,
      "loss": 9.198,
      "step": 250
    },
    {
      "epoch": 0.21482729570345407,
      "grad_norm": "Infinity",
      "learning_rate": 9.057157111300668e-05,
      "loss": 9.2222,
      "step": 255
    },
    {
      "epoch": 0.21903959561920808,
      "grad_norm": "Infinity",
      "learning_rate": 9.054169112399107e-05,
      "loss": 9.104,
      "step": 260
    },
    {
      "epoch": 0.22325189553496208,
      "grad_norm": "Infinity",
      "learning_rate": 9.051114572129868e-05,
      "loss": 9.2462,
      "step": 265
    },
    {
      "epoch": 0.22746419545071608,
      "grad_norm": "Infinity",
      "learning_rate": 9.047993551113556e-05,
      "loss": 9.2007,
      "step": 270
    },
    {
      "epoch": 0.2316764953664701,
      "grad_norm": "Infinity",
      "learning_rate": 9.044806111290159e-05,
      "loss": 9.0567,
      "step": 275
    },
    {
      "epoch": 0.2358887952822241,
      "grad_norm": "Infinity",
      "learning_rate": 9.041552315917816e-05,
      "loss": 9.3158,
      "step": 280
    },
    {
      "epoch": 0.2401010951979781,
      "grad_norm": "Infinity",
      "learning_rate": 9.038232229571564e-05,
      "loss": 9.127,
      "step": 285
    },
    {
      "epoch": 0.2443133951137321,
      "grad_norm": "Infinity",
      "learning_rate": 9.034845918142056e-05,
      "loss": 9.139,
      "step": 290
    },
    {
      "epoch": 0.2485256950294861,
      "grad_norm": "Infinity",
      "learning_rate": 9.031393448834246e-05,
      "loss": 8.9646,
      "step": 295
    },
    {
      "epoch": 0.2527379949452401,
      "grad_norm": "Infinity",
      "learning_rate": 9.027874890166069e-05,
      "loss": 9.1291,
      "step": 300
    },
    {
      "epoch": 0.2569502948609941,
      "grad_norm": "Infinity",
      "learning_rate": 9.024290311967066e-05,
      "loss": 9.1037,
      "step": 305
    },
    {
      "epoch": 0.2611625947767481,
      "grad_norm": "Infinity",
      "learning_rate": 9.020639785377019e-05,
      "loss": 9.0707,
      "step": 310
    },
    {
      "epoch": 0.2653748946925021,
      "grad_norm": "Infinity",
      "learning_rate": 9.01692338284451e-05,
      "loss": 9.2199,
      "step": 315
    },
    {
      "epoch": 0.2695871946082561,
      "grad_norm": "Infinity",
      "learning_rate": 9.013141178125513e-05,
      "loss": 9.2428,
      "step": 320
    },
    {
      "epoch": 0.2737994945240101,
      "grad_norm": "Infinity",
      "learning_rate": 9.009293246281905e-05,
      "loss": 9.163,
      "step": 325
    },
    {
      "epoch": 0.2780117944397641,
      "grad_norm": "Infinity",
      "learning_rate": 9.005379663679996e-05,
      "loss": 9.22,
      "step": 330
    },
    {
      "epoch": 0.2822240943555181,
      "grad_norm": "Infinity",
      "learning_rate": 9.001400507989004e-05,
      "loss": 9.0213,
      "step": 335
    },
    {
      "epoch": 0.2864363942712721,
      "grad_norm": "Infinity",
      "learning_rate": 8.99735585817951e-05,
      "loss": 9.1937,
      "step": 340
    },
    {
      "epoch": 0.2906486941870261,
      "grad_norm": "Infinity",
      "learning_rate": 8.993245794521902e-05,
      "loss": 9.1865,
      "step": 345
    },
    {
      "epoch": 0.2948609941027801,
      "grad_norm": "Infinity",
      "learning_rate": 8.98907039858477e-05,
      "loss": 9.2424,
      "step": 350
    },
    {
      "epoch": 0.2990732940185341,
      "grad_norm": "Infinity",
      "learning_rate": 8.984829753233298e-05,
      "loss": 9.1797,
      "step": 355
    },
    {
      "epoch": 0.3032855939342881,
      "grad_norm": "Infinity",
      "learning_rate": 8.980523942627609e-05,
      "loss": 9.127,
      "step": 360
    },
    {
      "epoch": 0.3074978938500421,
      "grad_norm": "Infinity",
      "learning_rate": 8.976153052221104e-05,
      "loss": 9.0876,
      "step": 365
    },
    {
      "epoch": 0.3117101937657961,
      "grad_norm": "Infinity",
      "learning_rate": 8.971717168758756e-05,
      "loss": 9.1596,
      "step": 370
    },
    {
      "epoch": 0.3159224936815501,
      "grad_norm": "Infinity",
      "learning_rate": 8.967216380275405e-05,
      "loss": 9.048,
      "step": 375
    },
    {
      "epoch": 0.3201347935973041,
      "grad_norm": "Infinity",
      "learning_rate": 8.962650776093989e-05,
      "loss": 9.0332,
      "step": 380
    },
    {
      "epoch": 0.3243470935130581,
      "grad_norm": "Infinity",
      "learning_rate": 8.958020446823789e-05,
      "loss": 9.1551,
      "step": 385
    },
    {
      "epoch": 0.32855939342881213,
      "grad_norm": "Infinity",
      "learning_rate": 8.953325484358625e-05,
      "loss": 8.9415,
      "step": 390
    },
    {
      "epoch": 0.33277169334456613,
      "grad_norm": "Infinity",
      "learning_rate": 8.948565981875027e-05,
      "loss": 9.0024,
      "step": 395
    },
    {
      "epoch": 0.33698399326032014,
      "grad_norm": "Infinity",
      "learning_rate": 8.943742033830394e-05,
      "loss": 9.1814,
      "step": 400
    },
    {
      "epoch": 0.34119629317607414,
      "grad_norm": "Infinity",
      "learning_rate": 8.938853735961113e-05,
      "loss": 9.2051,
      "step": 405
    },
    {
      "epoch": 0.34540859309182814,
      "grad_norm": "Infinity",
      "learning_rate": 8.933901185280665e-05,
      "loss": 9.1032,
      "step": 410
    },
    {
      "epoch": 0.34962089300758215,
      "grad_norm": "Infinity",
      "learning_rate": 8.928884480077696e-05,
      "loss": 9.0838,
      "step": 415
    },
    {
      "epoch": 0.35383319292333615,
      "grad_norm": "Infinity",
      "learning_rate": 8.923803719914063e-05,
      "loss": 9.0075,
      "step": 420
    },
    {
      "epoch": 0.35804549283909015,
      "grad_norm": "Infinity",
      "learning_rate": 8.91865900562287e-05,
      "loss": 8.9951,
      "step": 425
    },
    {
      "epoch": 0.36225779275484415,
      "grad_norm": "Infinity",
      "learning_rate": 8.913450439306448e-05,
      "loss": 9.1082,
      "step": 430
    },
    {
      "epoch": 0.36647009267059816,
      "grad_norm": "Infinity",
      "learning_rate": 8.908178124334348e-05,
      "loss": 9.2004,
      "step": 435
    },
    {
      "epoch": 0.37068239258635216,
      "grad_norm": "Infinity",
      "learning_rate": 8.902842165341278e-05,
      "loss": 8.9807,
      "step": 440
    },
    {
      "epoch": 0.37489469250210616,
      "grad_norm": "Infinity",
      "learning_rate": 8.89744266822503e-05,
      "loss": 9.043,
      "step": 445
    },
    {
      "epoch": 0.37910699241786017,
      "grad_norm": "Infinity",
      "learning_rate": 8.891979740144376e-05,
      "loss": 9.1759,
      "step": 450
    },
    {
      "epoch": 0.38331929233361417,
      "grad_norm": "Infinity",
      "learning_rate": 8.886453489516945e-05,
      "loss": 9.0236,
      "step": 455
    },
    {
      "epoch": 0.3875315922493682,
      "grad_norm": "Infinity",
      "learning_rate": 8.880864026017068e-05,
      "loss": 9.2918,
      "step": 460
    },
    {
      "epoch": 0.3917438921651222,
      "grad_norm": "Infinity",
      "learning_rate": 8.875211460573607e-05,
      "loss": 9.0641,
      "step": 465
    },
    {
      "epoch": 0.3959561920808762,
      "grad_norm": "Infinity",
      "learning_rate": 8.869495905367742e-05,
      "loss": 9.048,
      "step": 470
    },
    {
      "epoch": 0.4001684919966302,
      "grad_norm": "Infinity",
      "learning_rate": 8.863717473830758e-05,
      "loss": 9.0192,
      "step": 475
    },
    {
      "epoch": 0.4043807919123842,
      "grad_norm": "Infinity",
      "learning_rate": 8.857876280641784e-05,
      "loss": 9.0275,
      "step": 480
    },
    {
      "epoch": 0.4085930918281382,
      "grad_norm": "Infinity",
      "learning_rate": 8.851972441725522e-05,
      "loss": 8.9821,
      "step": 485
    },
    {
      "epoch": 0.4128053917438922,
      "grad_norm": "Infinity",
      "learning_rate": 8.846006074249951e-05,
      "loss": 8.9907,
      "step": 490
    },
    {
      "epoch": 0.41701769165964614,
      "grad_norm": "Infinity",
      "learning_rate": 8.839977296623983e-05,
      "loss": 9.0855,
      "step": 495
    },
    {
      "epoch": 0.42122999157540014,
      "grad_norm": "Infinity",
      "learning_rate": 8.833886228495139e-05,
      "loss": 9.2186,
      "step": 500
    },
    {
      "epoch": 0.42122999157540014,
      "eval_loss": 9.067032814025879,
      "eval_runtime": 2.0409,
      "eval_samples_per_second": 26.949,
      "eval_steps_per_second": 13.719,
      "step": 500
    }
  ],
  "logging_steps": 5,
  "max_steps": 3561,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.1956663091134464e+18,
  "train_batch_size": 48,
  "trial_name": null,
  "trial_params": null
}