{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2424,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00041254125412541255,
      "grad_norm": 25.173291317801098,
      "learning_rate": 4.1152263374485605e-08,
      "loss": 1.4315,
      "step": 1
    },
    {
      "epoch": 0.0020627062706270625,
      "grad_norm": 23.244172144374918,
      "learning_rate": 2.05761316872428e-07,
      "loss": 1.4238,
      "step": 5
    },
    {
      "epoch": 0.004125412541254125,
      "grad_norm": 15.407649804349377,
      "learning_rate": 4.11522633744856e-07,
      "loss": 1.4148,
      "step": 10
    },
    {
      "epoch": 0.006188118811881188,
      "grad_norm": 8.402022752616448,
      "learning_rate": 6.17283950617284e-07,
      "loss": 1.278,
      "step": 15
    },
    {
      "epoch": 0.00825082508250825,
      "grad_norm": 11.034069123055462,
      "learning_rate": 8.23045267489712e-07,
      "loss": 1.1772,
      "step": 20
    },
    {
      "epoch": 0.010313531353135313,
      "grad_norm": 4.332735330250593,
      "learning_rate": 1.02880658436214e-06,
      "loss": 1.0531,
      "step": 25
    },
    {
      "epoch": 0.012376237623762377,
      "grad_norm": 3.55215196003044,
      "learning_rate": 1.234567901234568e-06,
      "loss": 0.9881,
      "step": 30
    },
    {
      "epoch": 0.014438943894389438,
      "grad_norm": 3.3491262120808307,
      "learning_rate": 1.440329218106996e-06,
      "loss": 0.9604,
      "step": 35
    },
    {
      "epoch": 0.0165016501650165,
      "grad_norm": 3.022805529228879,
      "learning_rate": 1.646090534979424e-06,
      "loss": 0.9382,
      "step": 40
    },
    {
      "epoch": 0.018564356435643563,
      "grad_norm": 3.068991080972076,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 0.9276,
      "step": 45
    },
    {
      "epoch": 0.020627062706270627,
      "grad_norm": 3.1689853717520595,
      "learning_rate": 2.05761316872428e-06,
      "loss": 0.9118,
      "step": 50
    },
    {
      "epoch": 0.02268976897689769,
      "grad_norm": 3.095012272596912,
      "learning_rate": 2.263374485596708e-06,
      "loss": 0.9069,
      "step": 55
    },
    {
      "epoch": 0.024752475247524754,
      "grad_norm": 3.1980712687602972,
      "learning_rate": 2.469135802469136e-06,
      "loss": 0.9114,
      "step": 60
    },
    {
      "epoch": 0.026815181518151814,
      "grad_norm": 3.0048714944711734,
      "learning_rate": 2.674897119341564e-06,
      "loss": 0.8915,
      "step": 65
    },
    {
      "epoch": 0.028877887788778877,
      "grad_norm": 3.2438404533032807,
      "learning_rate": 2.880658436213992e-06,
      "loss": 0.9011,
      "step": 70
    },
    {
      "epoch": 0.03094059405940594,
      "grad_norm": 3.0946505652248786,
      "learning_rate": 3.08641975308642e-06,
      "loss": 0.8911,
      "step": 75
    },
    {
      "epoch": 0.033003300330033,
      "grad_norm": 2.976460498240933,
      "learning_rate": 3.292181069958848e-06,
      "loss": 0.8905,
      "step": 80
    },
    {
      "epoch": 0.03506600660066007,
      "grad_norm": 2.964486117776111,
      "learning_rate": 3.4979423868312762e-06,
      "loss": 0.8711,
      "step": 85
    },
    {
      "epoch": 0.03712871287128713,
      "grad_norm": 3.1472293237072018,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.8776,
      "step": 90
    },
    {
      "epoch": 0.039191419141914194,
      "grad_norm": 2.9491584240395547,
      "learning_rate": 3.909465020576132e-06,
      "loss": 0.8731,
      "step": 95
    },
    {
      "epoch": 0.041254125412541254,
      "grad_norm": 3.1222334377001575,
      "learning_rate": 4.11522633744856e-06,
      "loss": 0.8673,
      "step": 100
    },
    {
      "epoch": 0.043316831683168314,
      "grad_norm": 3.1224801634532744,
      "learning_rate": 4.3209876543209875e-06,
      "loss": 0.8568,
      "step": 105
    },
    {
      "epoch": 0.04537953795379538,
      "grad_norm": 3.1837554638007255,
      "learning_rate": 4.526748971193416e-06,
      "loss": 0.8641,
      "step": 110
    },
    {
      "epoch": 0.04744224422442244,
      "grad_norm": 3.1279379452438336,
      "learning_rate": 4.732510288065844e-06,
      "loss": 0.8654,
      "step": 115
    },
    {
      "epoch": 0.04950495049504951,
      "grad_norm": 3.165713359110299,
      "learning_rate": 4.938271604938272e-06,
      "loss": 0.8593,
      "step": 120
    },
    {
      "epoch": 0.05156765676567657,
      "grad_norm": 2.9608774490517282,
      "learning_rate": 5.1440329218107e-06,
      "loss": 0.8548,
      "step": 125
    },
    {
      "epoch": 0.05363036303630363,
      "grad_norm": 2.9604320712666485,
      "learning_rate": 5.349794238683128e-06,
      "loss": 0.8506,
      "step": 130
    },
    {
      "epoch": 0.055693069306930694,
      "grad_norm": 3.0433993058342765,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.8439,
      "step": 135
    },
    {
      "epoch": 0.057755775577557754,
      "grad_norm": 3.3372432183258036,
      "learning_rate": 5.761316872427984e-06,
      "loss": 0.8352,
      "step": 140
    },
    {
      "epoch": 0.05981848184818482,
      "grad_norm": 3.166742755838131,
      "learning_rate": 5.967078189300412e-06,
      "loss": 0.8584,
      "step": 145
    },
    {
      "epoch": 0.06188118811881188,
      "grad_norm": 3.2086952524615326,
      "learning_rate": 6.17283950617284e-06,
      "loss": 0.825,
      "step": 150
    },
    {
      "epoch": 0.06394389438943894,
      "grad_norm": 3.4577671140527557,
      "learning_rate": 6.3786008230452675e-06,
      "loss": 0.824,
      "step": 155
    },
    {
      "epoch": 0.066006600660066,
      "grad_norm": 3.3939747315335613,
      "learning_rate": 6.584362139917696e-06,
      "loss": 0.8412,
      "step": 160
    },
    {
      "epoch": 0.06806930693069307,
      "grad_norm": 3.1642303616218195,
      "learning_rate": 6.790123456790124e-06,
      "loss": 0.8372,
      "step": 165
    },
    {
      "epoch": 0.07013201320132013,
      "grad_norm": 3.9078331437297975,
      "learning_rate": 6.9958847736625525e-06,
      "loss": 0.8393,
      "step": 170
    },
    {
      "epoch": 0.0721947194719472,
      "grad_norm": 3.086012476362717,
      "learning_rate": 7.201646090534981e-06,
      "loss": 0.8549,
      "step": 175
    },
    {
      "epoch": 0.07425742574257425,
      "grad_norm": 3.163457682817171,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.8319,
      "step": 180
    },
    {
      "epoch": 0.07632013201320131,
      "grad_norm": 3.1200124817434607,
      "learning_rate": 7.613168724279836e-06,
      "loss": 0.8229,
      "step": 185
    },
    {
      "epoch": 0.07838283828382839,
      "grad_norm": 3.010419511128134,
      "learning_rate": 7.818930041152263e-06,
      "loss": 0.8329,
      "step": 190
    },
    {
      "epoch": 0.08044554455445545,
      "grad_norm": 3.0362752294117317,
      "learning_rate": 8.024691358024692e-06,
      "loss": 0.8288,
      "step": 195
    },
    {
      "epoch": 0.08250825082508251,
      "grad_norm": 3.1939826585064997,
      "learning_rate": 8.23045267489712e-06,
      "loss": 0.8134,
      "step": 200
    },
    {
      "epoch": 0.08457095709570957,
      "grad_norm": 2.9811252237712056,
      "learning_rate": 8.43621399176955e-06,
      "loss": 0.8293,
      "step": 205
    },
    {
      "epoch": 0.08663366336633663,
      "grad_norm": 3.2289018691242264,
      "learning_rate": 8.641975308641975e-06,
      "loss": 0.8108,
      "step": 210
    },
    {
      "epoch": 0.0886963696369637,
      "grad_norm": 2.920715386036233,
      "learning_rate": 8.847736625514404e-06,
      "loss": 0.8199,
      "step": 215
    },
    {
      "epoch": 0.09075907590759076,
      "grad_norm": 2.8888271523586835,
      "learning_rate": 9.053497942386832e-06,
      "loss": 0.831,
      "step": 220
    },
    {
      "epoch": 0.09282178217821782,
      "grad_norm": 2.8414889865764312,
      "learning_rate": 9.25925925925926e-06,
      "loss": 0.8188,
      "step": 225
    },
    {
      "epoch": 0.09488448844884488,
      "grad_norm": 3.3784549124194743,
      "learning_rate": 9.465020576131688e-06,
      "loss": 0.8083,
      "step": 230
    },
    {
      "epoch": 0.09694719471947194,
      "grad_norm": 3.2323221908817996,
      "learning_rate": 9.670781893004116e-06,
      "loss": 0.8186,
      "step": 235
    },
    {
      "epoch": 0.09900990099009901,
      "grad_norm": 2.8554149805575846,
      "learning_rate": 9.876543209876543e-06,
      "loss": 0.8035,
      "step": 240
    },
    {
      "epoch": 0.10107260726072607,
      "grad_norm": 3.027573880753027,
      "learning_rate": 9.9999792514327e-06,
      "loss": 0.8091,
      "step": 245
    },
    {
      "epoch": 0.10313531353135313,
      "grad_norm": 3.0804491286944145,
      "learning_rate": 9.999745832028163e-06,
      "loss": 0.7989,
      "step": 250
    },
    {
      "epoch": 0.1051980198019802,
      "grad_norm": 3.2290033265195848,
      "learning_rate": 9.999253069658074e-06,
      "loss": 0.8178,
      "step": 255
    },
    {
      "epoch": 0.10726072607260725,
      "grad_norm": 3.4736769564268233,
      "learning_rate": 9.998500989882627e-06,
      "loss": 0.8178,
      "step": 260
    },
    {
      "epoch": 0.10932343234323433,
      "grad_norm": 3.0428405529671263,
      "learning_rate": 9.997489631713117e-06,
      "loss": 0.8006,
      "step": 265
    },
    {
      "epoch": 0.11138613861386139,
      "grad_norm": 2.8688354965301426,
      "learning_rate": 9.996219047609943e-06,
      "loss": 0.7866,
      "step": 270
    },
    {
      "epoch": 0.11344884488448845,
      "grad_norm": 3.0556606109594644,
      "learning_rate": 9.99468930347986e-06,
      "loss": 0.8174,
      "step": 275
    },
    {
      "epoch": 0.11551155115511551,
      "grad_norm": 3.0647994656665603,
      "learning_rate": 9.99290047867258e-06,
      "loss": 0.8044,
      "step": 280
    },
    {
      "epoch": 0.11757425742574257,
      "grad_norm": 2.856082139020889,
      "learning_rate": 9.990852665976648e-06,
      "loss": 0.7871,
      "step": 285
    },
    {
      "epoch": 0.11963696369636964,
      "grad_norm": 2.894869532549818,
      "learning_rate": 9.98854597161462e-06,
      "loss": 0.7967,
      "step": 290
    },
    {
      "epoch": 0.1216996699669967,
      "grad_norm": 2.9795368246449483,
      "learning_rate": 9.98598051523758e-06,
      "loss": 0.7987,
      "step": 295
    },
    {
      "epoch": 0.12376237623762376,
      "grad_norm": 3.16205487552237,
      "learning_rate": 9.983156429918895e-06,
      "loss": 0.7675,
      "step": 300
    },
    {
      "epoch": 0.12582508250825084,
      "grad_norm": 3.245576749246031,
      "learning_rate": 9.98007386214735e-06,
      "loss": 0.7915,
      "step": 305
    },
    {
      "epoch": 0.12788778877887788,
      "grad_norm": 3.025661649853723,
      "learning_rate": 9.976732971819526e-06,
      "loss": 0.7895,
      "step": 310
    },
    {
      "epoch": 0.12995049504950495,
      "grad_norm": 2.843144690280715,
      "learning_rate": 9.973133932231514e-06,
      "loss": 0.7782,
      "step": 315
    },
    {
      "epoch": 0.132013201320132,
      "grad_norm": 3.105956078671983,
      "learning_rate": 9.96927693006992e-06,
      "loss": 0.7754,
      "step": 320
    },
    {
      "epoch": 0.13407590759075907,
      "grad_norm": 2.7999853476012877,
      "learning_rate": 9.965162165402194e-06,
      "loss": 0.7652,
      "step": 325
    },
    {
      "epoch": 0.13613861386138615,
      "grad_norm": 2.8609256056169987,
      "learning_rate": 9.960789851666237e-06,
      "loss": 0.7786,
      "step": 330
    },
    {
      "epoch": 0.1382013201320132,
      "grad_norm": 3.1630409852297454,
      "learning_rate": 9.956160215659342e-06,
      "loss": 0.7828,
      "step": 335
    },
    {
      "epoch": 0.14026402640264027,
      "grad_norm": 2.795363964969132,
      "learning_rate": 9.951273497526423e-06,
      "loss": 0.7686,
      "step": 340
    },
    {
      "epoch": 0.14232673267326731,
      "grad_norm": 2.9053928333152723,
      "learning_rate": 9.94612995074756e-06,
      "loss": 0.7635,
      "step": 345
    },
    {
      "epoch": 0.1443894389438944,
      "grad_norm": 2.7319984337656478,
      "learning_rate": 9.94072984212485e-06,
      "loss": 0.7396,
      "step": 350
    },
    {
      "epoch": 0.14645214521452146,
      "grad_norm": 2.90922485756749,
      "learning_rate": 9.935073451768567e-06,
      "loss": 0.7675,
      "step": 355
    },
    {
      "epoch": 0.1485148514851485,
      "grad_norm": 2.7539644778310852,
      "learning_rate": 9.929161073082636e-06,
      "loss": 0.7559,
      "step": 360
    },
    {
      "epoch": 0.15057755775577558,
      "grad_norm": 2.8959145263782644,
      "learning_rate": 9.922993012749413e-06,
      "loss": 0.748,
      "step": 365
    },
    {
      "epoch": 0.15264026402640263,
      "grad_norm": 2.8562652818653516,
      "learning_rate": 9.916569590713775e-06,
      "loss": 0.7412,
      "step": 370
    },
    {
      "epoch": 0.1547029702970297,
      "grad_norm": 2.860687978591301,
      "learning_rate": 9.90989114016652e-06,
      "loss": 0.7648,
      "step": 375
    },
    {
      "epoch": 0.15676567656765678,
      "grad_norm": 2.7743547846726995,
      "learning_rate": 9.902958007527092e-06,
      "loss": 0.7469,
      "step": 380
    },
    {
      "epoch": 0.15882838283828382,
      "grad_norm": 2.933130436544695,
      "learning_rate": 9.89577055242561e-06,
      "loss": 0.7415,
      "step": 385
    },
    {
      "epoch": 0.1608910891089109,
      "grad_norm": 2.777976255262721,
      "learning_rate": 9.88832914768421e-06,
      "loss": 0.719,
      "step": 390
    },
    {
      "epoch": 0.16295379537953794,
      "grad_norm": 2.8659385000525983,
      "learning_rate": 9.880634179297706e-06,
      "loss": 0.742,
      "step": 395
    },
    {
      "epoch": 0.16501650165016502,
      "grad_norm": 2.8897535873983418,
      "learning_rate": 9.872686046413575e-06,
      "loss": 0.7346,
      "step": 400
    },
    {
      "epoch": 0.1670792079207921,
      "grad_norm": 2.8576246139096892,
      "learning_rate": 9.864485161311242e-06,
      "loss": 0.7325,
      "step": 405
    },
    {
      "epoch": 0.16914191419141913,
      "grad_norm": 3.1030879684908346,
      "learning_rate": 9.856031949380707e-06,
      "loss": 0.7606,
      "step": 410
    },
    {
      "epoch": 0.1712046204620462,
      "grad_norm": 2.713632760138165,
      "learning_rate": 9.847326849100467e-06,
      "loss": 0.7379,
      "step": 415
    },
    {
      "epoch": 0.17326732673267325,
      "grad_norm": 2.80346541082337,
      "learning_rate": 9.838370312014783e-06,
      "loss": 0.7381,
      "step": 420
    },
    {
      "epoch": 0.17533003300330033,
      "grad_norm": 3.200752732606346,
      "learning_rate": 9.829162802710246e-06,
      "loss": 0.7328,
      "step": 425
    },
    {
      "epoch": 0.1773927392739274,
      "grad_norm": 3.1792821336770567,
      "learning_rate": 9.819704798791691e-06,
      "loss": 0.7305,
      "step": 430
    },
    {
      "epoch": 0.17945544554455445,
      "grad_norm": 2.8356276160053424,
      "learning_rate": 9.80999679085741e-06,
      "loss": 0.714,
      "step": 435
    },
    {
      "epoch": 0.18151815181518152,
      "grad_norm": 2.6413041173544043,
      "learning_rate": 9.800039282473719e-06,
      "loss": 0.735,
      "step": 440
    },
    {
      "epoch": 0.18358085808580857,
      "grad_norm": 2.9983638727309856,
      "learning_rate": 9.789832790148822e-06,
      "loss": 0.7148,
      "step": 445
    },
    {
      "epoch": 0.18564356435643564,
      "grad_norm": 2.995170348181315,
      "learning_rate": 9.77937784330603e-06,
      "loss": 0.7137,
      "step": 450
    },
    {
      "epoch": 0.18770627062706272,
      "grad_norm": 3.0031426949210456,
      "learning_rate": 9.768674984256292e-06,
      "loss": 0.7334,
      "step": 455
    },
    {
      "epoch": 0.18976897689768976,
      "grad_norm": 2.789041969236007,
      "learning_rate": 9.757724768170074e-06,
      "loss": 0.7161,
      "step": 460
    },
    {
      "epoch": 0.19183168316831684,
      "grad_norm": 2.65991213841404,
      "learning_rate": 9.74652776304855e-06,
      "loss": 0.709,
      "step": 465
    },
    {
      "epoch": 0.19389438943894388,
      "grad_norm": 2.7707492384242007,
      "learning_rate": 9.73508454969415e-06,
      "loss": 0.7162,
      "step": 470
    },
    {
      "epoch": 0.19595709570957096,
      "grad_norm": 2.6804566008309654,
      "learning_rate": 9.723395721680418e-06,
      "loss": 0.7173,
      "step": 475
    },
    {
      "epoch": 0.19801980198019803,
      "grad_norm": 2.8279805454985545,
      "learning_rate": 9.711461885321247e-06,
      "loss": 0.6969,
      "step": 480
    },
    {
      "epoch": 0.20008250825082508,
      "grad_norm": 3.1208245898107045,
      "learning_rate": 9.699283659639402e-06,
      "loss": 0.7091,
      "step": 485
    },
    {
      "epoch": 0.20214521452145215,
      "grad_norm": 2.6589644032154363,
      "learning_rate": 9.68686167633443e-06,
      "loss": 0.6952,
      "step": 490
    },
    {
      "epoch": 0.2042079207920792,
      "grad_norm": 2.9084930227852137,
      "learning_rate": 9.67419657974988e-06,
      "loss": 0.7134,
      "step": 495
    },
    {
      "epoch": 0.20627062706270627,
      "grad_norm": 2.8303925657999853,
      "learning_rate": 9.661289026839889e-06,
      "loss": 0.6975,
      "step": 500
    },
    {
      "epoch": 0.20833333333333334,
      "grad_norm": 2.784037458254395,
      "learning_rate": 9.648139687135106e-06,
      "loss": 0.6979,
      "step": 505
    },
    {
      "epoch": 0.2103960396039604,
      "grad_norm": 2.7696913715189897,
      "learning_rate": 9.634749242707948e-06,
      "loss": 0.6733,
      "step": 510
    },
    {
      "epoch": 0.21245874587458746,
      "grad_norm": 2.9247340254888545,
      "learning_rate": 9.62111838813724e-06,
      "loss": 0.6945,
      "step": 515
    },
    {
      "epoch": 0.2145214521452145,
      "grad_norm": 2.719893437871808,
      "learning_rate": 9.607247830472174e-06,
      "loss": 0.6761,
      "step": 520
    },
    {
      "epoch": 0.21658415841584158,
      "grad_norm": 2.8726720120792386,
      "learning_rate": 9.593138289195634e-06,
      "loss": 0.6913,
      "step": 525
    },
    {
      "epoch": 0.21864686468646866,
      "grad_norm": 2.781250153342701,
      "learning_rate": 9.578790496186879e-06,
      "loss": 0.6903,
      "step": 530
    },
    {
      "epoch": 0.2207095709570957,
      "grad_norm": 2.7701813874151244,
      "learning_rate": 9.56420519568358e-06,
      "loss": 0.6831,
      "step": 535
    },
    {
      "epoch": 0.22277227722772278,
      "grad_norm": 2.7057701780207983,
      "learning_rate": 9.549383144243213e-06,
      "loss": 0.6956,
      "step": 540
    },
    {
      "epoch": 0.22483498349834982,
      "grad_norm": 2.7687957358970112,
      "learning_rate": 9.534325110703814e-06,
      "loss": 0.6709,
      "step": 545
    },
    {
      "epoch": 0.2268976897689769,
      "grad_norm": 2.7158766270699983,
      "learning_rate": 9.519031876144106e-06,
      "loss": 0.6608,
      "step": 550
    },
    {
      "epoch": 0.22896039603960397,
      "grad_norm": 2.7406561041756095,
      "learning_rate": 9.503504233842973e-06,
      "loss": 0.6857,
      "step": 555
    },
    {
      "epoch": 0.23102310231023102,
      "grad_norm": 2.7133954058150525,
      "learning_rate": 9.487742989238318e-06,
      "loss": 0.6723,
      "step": 560
    },
    {
      "epoch": 0.2330858085808581,
      "grad_norm": 2.7907877653122797,
      "learning_rate": 9.471748959885284e-06,
      "loss": 0.6809,
      "step": 565
    },
    {
      "epoch": 0.23514851485148514,
      "grad_norm": 2.8110823806166922,
      "learning_rate": 9.455522975413846e-06,
      "loss": 0.6695,
      "step": 570
    },
    {
      "epoch": 0.2372112211221122,
      "grad_norm": 2.9142519407957543,
      "learning_rate": 9.439065877485774e-06,
      "loss": 0.6506,
      "step": 575
    },
    {
      "epoch": 0.23927392739273928,
      "grad_norm": 2.7148544728300195,
      "learning_rate": 9.422378519750978e-06,
      "loss": 0.6499,
      "step": 580
    },
    {
      "epoch": 0.24133663366336633,
      "grad_norm": 2.713997914652474,
      "learning_rate": 9.40546176780323e-06,
      "loss": 0.6525,
      "step": 585
    },
    {
      "epoch": 0.2433993399339934,
      "grad_norm": 2.692751552453192,
      "learning_rate": 9.388316499135259e-06,
      "loss": 0.6496,
      "step": 590
    },
    {
      "epoch": 0.24546204620462045,
      "grad_norm": 2.6802330864864854,
      "learning_rate": 9.370943603093235e-06,
      "loss": 0.6417,
      "step": 595
    },
    {
      "epoch": 0.24752475247524752,
      "grad_norm": 2.8231163075907437,
      "learning_rate": 9.353343980830644e-06,
      "loss": 0.652,
      "step": 600
    },
    {
      "epoch": 0.2495874587458746,
      "grad_norm": 2.667983921285145,
      "learning_rate": 9.33551854526154e-06,
      "loss": 0.6516,
      "step": 605
    },
    {
      "epoch": 0.25165016501650167,
      "grad_norm": 2.7364796827272038,
      "learning_rate": 9.31746822101319e-06,
      "loss": 0.6402,
      "step": 610
    },
    {
      "epoch": 0.2537128712871287,
      "grad_norm": 2.6289768861588256,
      "learning_rate": 9.299193944378112e-06,
      "loss": 0.651,
      "step": 615
    },
    {
      "epoch": 0.25577557755775576,
      "grad_norm": 3.115748743305885,
      "learning_rate": 9.280696663265512e-06,
      "loss": 0.6409,
      "step": 620
    },
    {
      "epoch": 0.25783828382838286,
      "grad_norm": 2.7801532039806727,
      "learning_rate": 9.261977337152107e-06,
      "loss": 0.6232,
      "step": 625
    },
    {
      "epoch": 0.2599009900990099,
      "grad_norm": 2.8268570410512415,
      "learning_rate": 9.243036937032373e-06,
      "loss": 0.652,
      "step": 630
    },
    {
      "epoch": 0.26196369636963696,
      "grad_norm": 2.734252985059684,
      "learning_rate": 9.223876445368153e-06,
      "loss": 0.6408,
      "step": 635
    },
    {
      "epoch": 0.264026402640264,
      "grad_norm": 2.8594124607696862,
      "learning_rate": 9.204496856037718e-06,
      "loss": 0.6608,
      "step": 640
    },
    {
      "epoch": 0.2660891089108911,
      "grad_norm": 2.7632847146041937,
      "learning_rate": 9.184899174284201e-06,
      "loss": 0.6435,
      "step": 645
    },
    {
      "epoch": 0.26815181518151815,
      "grad_norm": 2.7892742600413056,
      "learning_rate": 9.16508441666346e-06,
      "loss": 0.6382,
      "step": 650
    },
    {
      "epoch": 0.2702145214521452,
      "grad_norm": 2.9224506916430926,
      "learning_rate": 9.14505361099134e-06,
      "loss": 0.6249,
      "step": 655
    },
    {
      "epoch": 0.2722772277227723,
      "grad_norm": 2.916964064023966,
      "learning_rate": 9.124807796290366e-06,
      "loss": 0.6284,
      "step": 660
    },
    {
      "epoch": 0.27433993399339934,
      "grad_norm": 2.7731836971623953,
      "learning_rate": 9.104348022735853e-06,
      "loss": 0.633,
      "step": 665
    },
    {
      "epoch": 0.2764026402640264,
      "grad_norm": 2.7535471272900196,
      "learning_rate": 9.083675351601417e-06,
      "loss": 0.6194,
      "step": 670
    },
    {
      "epoch": 0.2784653465346535,
      "grad_norm": 2.856389390689046,
      "learning_rate": 9.062790855203932e-06,
      "loss": 0.6376,
      "step": 675
    },
    {
      "epoch": 0.28052805280528054,
      "grad_norm": 2.9038867407258224,
      "learning_rate": 9.041695616847915e-06,
      "loss": 0.6263,
      "step": 680
    },
    {
      "epoch": 0.2825907590759076,
      "grad_norm": 2.5804402002125415,
      "learning_rate": 9.020390730769324e-06,
      "loss": 0.6393,
      "step": 685
    },
    {
      "epoch": 0.28465346534653463,
      "grad_norm": 2.632496010078948,
      "learning_rate": 8.998877302078803e-06,
      "loss": 0.6076,
      "step": 690
    },
    {
      "epoch": 0.28671617161716173,
      "grad_norm": 4.356850125546882,
      "learning_rate": 8.97715644670436e-06,
      "loss": 0.6133,
      "step": 695
    },
    {
      "epoch": 0.2887788778877888,
      "grad_norm": 2.559920441209793,
      "learning_rate": 8.955229291333473e-06,
      "loss": 0.6167,
      "step": 700
    },
    {
      "epoch": 0.2908415841584158,
      "grad_norm": 2.7757209022740823,
      "learning_rate": 8.933096973354665e-06,
      "loss": 0.6171,
      "step": 705
    },
    {
      "epoch": 0.2929042904290429,
      "grad_norm": 2.673673023680112,
      "learning_rate": 8.910760640798487e-06,
      "loss": 0.6142,
      "step": 710
    },
    {
      "epoch": 0.29496699669966997,
      "grad_norm": 2.8111377522291856,
      "learning_rate": 8.88822145227798e-06,
      "loss": 0.609,
      "step": 715
    },
    {
      "epoch": 0.297029702970297,
      "grad_norm": 2.6340814150599017,
      "learning_rate": 8.865480576928578e-06,
      "loss": 0.6205,
      "step": 720
    },
    {
      "epoch": 0.2990924092409241,
      "grad_norm": 2.6824532363700566,
      "learning_rate": 8.842539194347448e-06,
      "loss": 0.5866,
      "step": 725
    },
    {
      "epoch": 0.30115511551155116,
      "grad_norm": 2.619983825517638,
      "learning_rate": 8.819398494532328e-06,
      "loss": 0.6133,
      "step": 730
    },
    {
      "epoch": 0.3032178217821782,
      "grad_norm": 2.686298593112456,
      "learning_rate": 8.796059677819773e-06,
      "loss": 0.6011,
      "step": 735
    },
    {
      "epoch": 0.30528052805280526,
      "grad_norm": 2.9363501744882132,
      "learning_rate": 8.77252395482291e-06,
      "loss": 0.6084,
      "step": 740
    },
    {
      "epoch": 0.30734323432343236,
      "grad_norm": 2.752287473506256,
      "learning_rate": 8.748792546368641e-06,
      "loss": 0.5966,
      "step": 745
    },
    {
      "epoch": 0.3094059405940594,
      "grad_norm": 2.71844410518512,
      "learning_rate": 8.72486668343431e-06,
      "loss": 0.614,
      "step": 750
    },
    {
      "epoch": 0.31146864686468645,
      "grad_norm": 3.145817966285669,
      "learning_rate": 8.700747607083851e-06,
      "loss": 0.5957,
      "step": 755
    },
    {
      "epoch": 0.31353135313531355,
      "grad_norm": 3.0310831048054303,
      "learning_rate": 8.676436568403422e-06,
      "loss": 0.5978,
      "step": 760
    },
    {
      "epoch": 0.3155940594059406,
      "grad_norm": 2.701739599075158,
      "learning_rate": 8.651934828436497e-06,
      "loss": 0.5923,
      "step": 765
    },
    {
      "epoch": 0.31765676567656764,
      "grad_norm": 2.8448862694962944,
      "learning_rate": 8.627243658118466e-06,
      "loss": 0.5981,
      "step": 770
    },
    {
      "epoch": 0.31971947194719474,
      "grad_norm": 2.719726931310169,
      "learning_rate": 8.602364338210699e-06,
      "loss": 0.5937,
      "step": 775
    },
    {
      "epoch": 0.3217821782178218,
      "grad_norm": 2.8019550606602652,
      "learning_rate": 8.57729815923412e-06,
      "loss": 0.6029,
      "step": 780
    },
    {
      "epoch": 0.32384488448844884,
      "grad_norm": 2.503107027258152,
      "learning_rate": 8.55204642140226e-06,
      "loss": 0.5952,
      "step": 785
    },
    {
      "epoch": 0.3259075907590759,
      "grad_norm": 2.6677149194472762,
      "learning_rate": 8.52661043455382e-06,
      "loss": 0.5917,
      "step": 790
    },
    {
      "epoch": 0.327970297029703,
      "grad_norm": 2.746672046408305,
      "learning_rate": 8.50099151808472e-06,
      "loss": 0.5973,
      "step": 795
    },
    {
      "epoch": 0.33003300330033003,
      "grad_norm": 2.6263940638611554,
      "learning_rate": 8.47519100087967e-06,
      "loss": 0.5898,
      "step": 800
    },
    {
      "epoch": 0.3320957095709571,
      "grad_norm": 2.503813501518845,
      "learning_rate": 8.449210221243225e-06,
      "loss": 0.5991,
      "step": 805
    },
    {
      "epoch": 0.3341584158415842,
      "grad_norm": 2.4979356769923573,
      "learning_rate": 8.42305052683038e-06,
      "loss": 0.5912,
      "step": 810
    },
    {
      "epoch": 0.3362211221122112,
      "grad_norm": 2.847043832115963,
      "learning_rate": 8.39671327457666e-06,
      "loss": 0.5799,
      "step": 815
    },
    {
      "epoch": 0.33828382838283827,
      "grad_norm": 2.7801453739069633,
      "learning_rate": 8.370199830627732e-06,
      "loss": 0.5823,
      "step": 820
    },
    {
      "epoch": 0.34034653465346537,
      "grad_norm": 2.627346750326928,
      "learning_rate": 8.343511570268541e-06,
      "loss": 0.5865,
      "step": 825
    },
    {
      "epoch": 0.3424092409240924,
      "grad_norm": 2.8090361015173246,
      "learning_rate": 8.316649877851977e-06,
      "loss": 0.568,
      "step": 830
    },
    {
      "epoch": 0.34447194719471946,
      "grad_norm": 2.6857687788227107,
      "learning_rate": 8.289616146727062e-06,
      "loss": 0.5811,
      "step": 835
    },
    {
      "epoch": 0.3465346534653465,
      "grad_norm": 2.7827938400617964,
      "learning_rate": 8.262411779166681e-06,
      "loss": 0.5608,
      "step": 840
    },
    {
      "epoch": 0.3485973597359736,
      "grad_norm": 2.7509505960907084,
      "learning_rate": 8.235038186294836e-06,
      "loss": 0.5888,
      "step": 845
    },
    {
      "epoch": 0.35066006600660066,
      "grad_norm": 2.6638572156656584,
      "learning_rate": 8.207496788013456e-06,
      "loss": 0.562,
      "step": 850
    },
    {
      "epoch": 0.3527227722772277,
      "grad_norm": 2.591282044159344,
      "learning_rate": 8.179789012928747e-06,
      "loss": 0.5712,
      "step": 855
    },
    {
      "epoch": 0.3547854785478548,
      "grad_norm": 2.5109540424205576,
      "learning_rate": 8.151916298277078e-06,
      "loss": 0.553,
      "step": 860
    },
    {
      "epoch": 0.35684818481848185,
      "grad_norm": 2.892814594553888,
      "learning_rate": 8.123880089850438e-06,
      "loss": 0.5686,
      "step": 865
    },
    {
      "epoch": 0.3589108910891089,
      "grad_norm": 2.5359593127713898,
      "learning_rate": 8.095681841921441e-06,
      "loss": 0.5454,
      "step": 870
    },
    {
      "epoch": 0.360973597359736,
      "grad_norm": 2.6515995361569202,
      "learning_rate": 8.06732301716789e-06,
      "loss": 0.562,
      "step": 875
    },
    {
      "epoch": 0.36303630363036304,
      "grad_norm": 2.485706938051772,
      "learning_rate": 8.038805086596903e-06,
      "loss": 0.5532,
      "step": 880
    },
    {
      "epoch": 0.3650990099009901,
      "grad_norm": 2.5183163477684576,
      "learning_rate": 8.010129529468614e-06,
      "loss": 0.5666,
      "step": 885
    },
    {
      "epoch": 0.36716171617161714,
      "grad_norm": 2.675472695073968,
      "learning_rate": 7.981297833219435e-06,
      "loss": 0.5631,
      "step": 890
    },
    {
      "epoch": 0.36922442244224424,
      "grad_norm": 2.6653612064334045,
      "learning_rate": 7.952311493384916e-06,
      "loss": 0.5681,
      "step": 895
    },
    {
      "epoch": 0.3712871287128713,
      "grad_norm": 2.57514354803963,
      "learning_rate": 7.923172013522153e-06,
      "loss": 0.5668,
      "step": 900
    },
    {
      "epoch": 0.37334983498349833,
      "grad_norm": 2.707762540885758,
      "learning_rate": 7.893880905131807e-06,
      "loss": 0.558,
      "step": 905
    },
    {
      "epoch": 0.37541254125412543,
      "grad_norm": 2.6980529523902472,
      "learning_rate": 7.864439687579695e-06,
      "loss": 0.5447,
      "step": 910
    },
    {
      "epoch": 0.3774752475247525,
      "grad_norm": 2.7666172686862267,
      "learning_rate": 7.834849888017979e-06,
      "loss": 0.555,
      "step": 915
    },
    {
      "epoch": 0.3795379537953795,
      "grad_norm": 2.8852303996201876,
      "learning_rate": 7.805113041305958e-06,
      "loss": 0.5381,
      "step": 920
    },
    {
      "epoch": 0.3816006600660066,
      "grad_norm": 2.583967025080346,
      "learning_rate": 7.775230689930445e-06,
      "loss": 0.5465,
      "step": 925
    },
    {
      "epoch": 0.38366336633663367,
      "grad_norm": 2.612939793985183,
      "learning_rate": 7.745204383925753e-06,
      "loss": 0.5542,
      "step": 930
    },
    {
      "epoch": 0.3857260726072607,
      "grad_norm": 2.779912992420415,
      "learning_rate": 7.715035680793311e-06,
      "loss": 0.5527,
      "step": 935
    },
    {
      "epoch": 0.38778877887788776,
      "grad_norm": 2.644547318860223,
      "learning_rate": 7.684726145420853e-06,
      "loss": 0.5405,
      "step": 940
    },
    {
      "epoch": 0.38985148514851486,
      "grad_norm": 2.5007394214042433,
      "learning_rate": 7.654277350001255e-06,
      "loss": 0.5635,
      "step": 945
    },
    {
      "epoch": 0.3919141914191419,
      "grad_norm": 2.806021602965884,
      "learning_rate": 7.623690873950988e-06,
      "loss": 0.5273,
      "step": 950
    },
    {
      "epoch": 0.39397689768976896,
      "grad_norm": 2.5596611924464754,
      "learning_rate": 7.592968303828181e-06,
      "loss": 0.5275,
      "step": 955
    },
    {
      "epoch": 0.39603960396039606,
      "grad_norm": 2.6100310716117994,
      "learning_rate": 7.5621112332503325e-06,
      "loss": 0.5296,
      "step": 960
    },
    {
      "epoch": 0.3981023102310231,
      "grad_norm": 2.4652574522767017,
      "learning_rate": 7.531121262811645e-06,
      "loss": 0.5327,
      "step": 965
    },
    {
      "epoch": 0.40016501650165015,
      "grad_norm": 2.5835355525531054,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.5313,
      "step": 970
    },
    {
      "epoch": 0.40222772277227725,
      "grad_norm": 2.556870458540024,
      "learning_rate": 7.468749059113578e-06,
      "loss": 0.5219,
      "step": 975
    },
    {
      "epoch": 0.4042904290429043,
      "grad_norm": 2.6560183770094166,
      "learning_rate": 7.437370061177116e-06,
      "loss": 0.529,
      "step": 980
    },
    {
      "epoch": 0.40635313531353134,
      "grad_norm": 2.6201898522482683,
      "learning_rate": 7.40586463385783e-06,
      "loss": 0.534,
      "step": 985
    },
    {
      "epoch": 0.4084158415841584,
      "grad_norm": 2.6803677560952712,
      "learning_rate": 7.374234411380987e-06,
      "loss": 0.5249,
      "step": 990
    },
    {
      "epoch": 0.4104785478547855,
      "grad_norm": 2.5303749301843586,
      "learning_rate": 7.342481034445127e-06,
      "loss": 0.5318,
      "step": 995
    },
    {
      "epoch": 0.41254125412541254,
      "grad_norm": 2.5847848779090694,
      "learning_rate": 7.310606150136965e-06,
      "loss": 0.5137,
      "step": 1000
    },
    {
      "epoch": 0.4146039603960396,
      "grad_norm": 2.624837840218052,
      "learning_rate": 7.2786114118459564e-06,
      "loss": 0.5275,
      "step": 1005
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 2.7154629758532574,
      "learning_rate": 7.246498479178523e-06,
      "loss": 0.5311,
      "step": 1010
    },
    {
      "epoch": 0.41872937293729373,
      "grad_norm": 2.7217754848899234,
      "learning_rate": 7.214269017871981e-06,
      "loss": 0.5388,
      "step": 1015
    },
    {
      "epoch": 0.4207920792079208,
      "grad_norm": 2.8374602937448956,
      "learning_rate": 7.181924699708127e-06,
      "loss": 0.5114,
      "step": 1020
    },
    {
      "epoch": 0.4228547854785479,
      "grad_norm": 2.669653807310483,
      "learning_rate": 7.149467202426525e-06,
      "loss": 0.5162,
      "step": 1025
    },
    {
      "epoch": 0.4249174917491749,
      "grad_norm": 2.8259143043104853,
      "learning_rate": 7.116898209637478e-06,
      "loss": 0.5207,
      "step": 1030
    },
    {
      "epoch": 0.42698019801980197,
      "grad_norm": 2.4487631528416034,
      "learning_rate": 7.084219410734701e-06,
      "loss": 0.5239,
      "step": 1035
    },
    {
      "epoch": 0.429042904290429,
      "grad_norm": 2.5467769274691157,
      "learning_rate": 7.051432500807682e-06,
      "loss": 0.5143,
      "step": 1040
    },
    {
      "epoch": 0.4311056105610561,
      "grad_norm": 2.674655727993759,
      "learning_rate": 7.018539180553768e-06,
      "loss": 0.5179,
      "step": 1045
    },
    {
      "epoch": 0.43316831683168316,
      "grad_norm": 2.5887768807891107,
      "learning_rate": 6.985541156189932e-06,
      "loss": 0.5084,
      "step": 1050
    },
    {
      "epoch": 0.4352310231023102,
      "grad_norm": 2.7440060414213394,
      "learning_rate": 6.952440139364286e-06,
      "loss": 0.512,
      "step": 1055
    },
    {
      "epoch": 0.4372937293729373,
      "grad_norm": 2.645722871973474,
      "learning_rate": 6.919237847067282e-06,
      "loss": 0.5199,
      "step": 1060
    },
    {
      "epoch": 0.43935643564356436,
      "grad_norm": 2.611780386705664,
      "learning_rate": 6.885936001542658e-06,
      "loss": 0.5078,
      "step": 1065
    },
    {
      "epoch": 0.4414191419141914,
      "grad_norm": 2.5086528986935703,
      "learning_rate": 6.852536330198099e-06,
      "loss": 0.5201,
      "step": 1070
    },
    {
      "epoch": 0.4434818481848185,
      "grad_norm": 2.4576048338159486,
      "learning_rate": 6.819040565515636e-06,
      "loss": 0.5126,
      "step": 1075
    },
    {
      "epoch": 0.44554455445544555,
      "grad_norm": 2.7304284435440542,
      "learning_rate": 6.785450444961783e-06,
      "loss": 0.5145,
      "step": 1080
    },
    {
      "epoch": 0.4476072607260726,
      "grad_norm": 2.6166324259831435,
      "learning_rate": 6.751767710897404e-06,
      "loss": 0.5141,
      "step": 1085
    },
    {
      "epoch": 0.44966996699669964,
      "grad_norm": 2.528382351784196,
      "learning_rate": 6.71799411048734e-06,
      "loss": 0.4858,
      "step": 1090
    },
    {
      "epoch": 0.45173267326732675,
      "grad_norm": 2.633514406168111,
      "learning_rate": 6.684131395609784e-06,
      "loss": 0.5084,
      "step": 1095
    },
    {
      "epoch": 0.4537953795379538,
      "grad_norm": 2.534482871115365,
      "learning_rate": 6.650181322765407e-06,
      "loss": 0.4981,
      "step": 1100
    },
    {
      "epoch": 0.45585808580858084,
      "grad_norm": 2.4508414675719505,
      "learning_rate": 6.61614565298624e-06,
      "loss": 0.5008,
      "step": 1105
    },
    {
      "epoch": 0.45792079207920794,
      "grad_norm": 2.4274494034766203,
      "learning_rate": 6.5820261517443365e-06,
      "loss": 0.4981,
      "step": 1110
    },
    {
      "epoch": 0.459983498349835,
      "grad_norm": 2.538858548171987,
      "learning_rate": 6.54782458886019e-06,
      "loss": 0.4891,
      "step": 1115
    },
    {
      "epoch": 0.46204620462046203,
      "grad_norm": 2.5483259090729575,
      "learning_rate": 6.5135427384109315e-06,
      "loss": 0.4999,
      "step": 1120
    },
    {
      "epoch": 0.46410891089108913,
      "grad_norm": 2.5232721697679166,
      "learning_rate": 6.479182378638308e-06,
      "loss": 0.5024,
      "step": 1125
    },
    {
      "epoch": 0.4661716171617162,
      "grad_norm": 2.591445497366531,
      "learning_rate": 6.444745291856442e-06,
      "loss": 0.4964,
      "step": 1130
    },
    {
      "epoch": 0.4682343234323432,
      "grad_norm": 2.568897490439017,
      "learning_rate": 6.410233264359379e-06,
      "loss": 0.4978,
      "step": 1135
    },
    {
      "epoch": 0.47029702970297027,
      "grad_norm": 2.494168999146032,
      "learning_rate": 6.375648086328431e-06,
      "loss": 0.5058,
      "step": 1140
    },
    {
      "epoch": 0.47235973597359737,
      "grad_norm": 2.4409245464037967,
      "learning_rate": 6.340991551739319e-06,
      "loss": 0.4885,
      "step": 1145
    },
    {
      "epoch": 0.4744224422442244,
      "grad_norm": 2.603683438326256,
      "learning_rate": 6.3062654582691175e-06,
      "loss": 0.4983,
      "step": 1150
    },
    {
      "epoch": 0.47648514851485146,
      "grad_norm": 2.5017059333187404,
      "learning_rate": 6.271471607203006e-06,
      "loss": 0.4868,
      "step": 1155
    },
    {
      "epoch": 0.47854785478547857,
      "grad_norm": 2.664734878049621,
      "learning_rate": 6.236611803340829e-06,
      "loss": 0.4946,
      "step": 1160
    },
    {
      "epoch": 0.4806105610561056,
      "grad_norm": 2.6730486310091037,
      "learning_rate": 6.201687854903492e-06,
      "loss": 0.4811,
      "step": 1165
    },
    {
      "epoch": 0.48267326732673266,
      "grad_norm": 2.8065530727044927,
      "learning_rate": 6.16670157343915e-06,
      "loss": 0.4974,
      "step": 1170
    },
    {
      "epoch": 0.48473597359735976,
      "grad_norm": 2.660140416974183,
      "learning_rate": 6.131654773729255e-06,
      "loss": 0.485,
      "step": 1175
    },
    {
      "epoch": 0.4867986798679868,
      "grad_norm": 2.494718317338255,
      "learning_rate": 6.096549273694411e-06,
      "loss": 0.4832,
      "step": 1180
    },
    {
      "epoch": 0.48886138613861385,
      "grad_norm": 2.6067794556409427,
      "learning_rate": 6.061386894300082e-06,
      "loss": 0.4822,
      "step": 1185
    },
    {
      "epoch": 0.4909240924092409,
      "grad_norm": 2.411365711902098,
      "learning_rate": 6.026169459462132e-06,
      "loss": 0.488,
      "step": 1190
    },
    {
      "epoch": 0.492986798679868,
      "grad_norm": 2.5304361708398253,
      "learning_rate": 5.990898795952225e-06,
      "loss": 0.4808,
      "step": 1195
    },
    {
      "epoch": 0.49504950495049505,
      "grad_norm": 2.542256557791119,
      "learning_rate": 5.955576733303053e-06,
      "loss": 0.4922,
      "step": 1200
    },
    {
      "epoch": 0.4971122112211221,
      "grad_norm": 2.416811649448376,
      "learning_rate": 5.920205103713449e-06,
      "loss": 0.4857,
      "step": 1205
    },
    {
      "epoch": 0.4991749174917492,
      "grad_norm": 2.4894521032789023,
      "learning_rate": 5.884785741953345e-06,
      "loss": 0.4742,
      "step": 1210
    },
    {
      "epoch": 0.5012376237623762,
      "grad_norm": 2.4053368303294995,
      "learning_rate": 5.849320485268597e-06,
      "loss": 0.4691,
      "step": 1215
    },
    {
      "epoch": 0.5033003300330033,
      "grad_norm": 2.440300312110612,
      "learning_rate": 5.8138111732856906e-06,
      "loss": 0.4794,
      "step": 1220
    },
    {
      "epoch": 0.5053630363036303,
      "grad_norm": 2.507917784275635,
      "learning_rate": 5.778259647916309e-06,
      "loss": 0.4807,
      "step": 1225
    },
    {
      "epoch": 0.5074257425742574,
      "grad_norm": 2.564227750985733,
      "learning_rate": 5.7426677532618e-06,
      "loss": 0.4623,
      "step": 1230
    },
    {
      "epoch": 0.5094884488448845,
      "grad_norm": 2.5559157486419037,
      "learning_rate": 5.707037335517514e-06,
      "loss": 0.4599,
      "step": 1235
    },
    {
      "epoch": 0.5115511551155115,
      "grad_norm": 2.4886710886810666,
      "learning_rate": 5.67137024287704e-06,
      "loss": 0.4539,
      "step": 1240
    },
    {
      "epoch": 0.5136138613861386,
      "grad_norm": 2.533851130168081,
      "learning_rate": 5.635668325436343e-06,
      "loss": 0.4762,
      "step": 1245
    },
    {
      "epoch": 0.5156765676567657,
      "grad_norm": 2.5682484148678593,
      "learning_rate": 5.599933435097791e-06,
      "loss": 0.461,
      "step": 1250
    },
    {
      "epoch": 0.5177392739273927,
      "grad_norm": 2.7619029888240334,
      "learning_rate": 5.564167425474093e-06,
      "loss": 0.4708,
      "step": 1255
    },
    {
      "epoch": 0.5198019801980198,
      "grad_norm": 2.65748720318593,
      "learning_rate": 5.528372151792161e-06,
      "loss": 0.4835,
      "step": 1260
    },
    {
      "epoch": 0.5218646864686468,
      "grad_norm": 2.343200657687914,
      "learning_rate": 5.492549470796865e-06,
      "loss": 0.4562,
      "step": 1265
    },
    {
      "epoch": 0.5239273927392739,
      "grad_norm": 2.398515835994456,
      "learning_rate": 5.456701240654726e-06,
      "loss": 0.4448,
      "step": 1270
    },
    {
      "epoch": 0.525990099009901,
      "grad_norm": 2.480816874843841,
      "learning_rate": 5.420829320857532e-06,
      "loss": 0.4599,
      "step": 1275
    },
    {
      "epoch": 0.528052805280528,
      "grad_norm": 2.5823441471809643,
      "learning_rate": 5.384935572125882e-06,
      "loss": 0.4352,
      "step": 1280
    },
    {
      "epoch": 0.5301155115511551,
      "grad_norm": 2.4908465249480853,
      "learning_rate": 5.349021856312669e-06,
      "loss": 0.4594,
      "step": 1285
    },
    {
      "epoch": 0.5321782178217822,
      "grad_norm": 2.399600740783908,
      "learning_rate": 5.3130900363065055e-06,
      "loss": 0.4466,
      "step": 1290
    },
    {
      "epoch": 0.5342409240924092,
      "grad_norm": 2.454620597807439,
      "learning_rate": 5.277141975935083e-06,
      "loss": 0.4585,
      "step": 1295
    },
    {
      "epoch": 0.5363036303630363,
      "grad_norm": 2.372249644843968,
      "learning_rate": 5.24117953986851e-06,
      "loss": 0.4512,
      "step": 1300
    },
    {
      "epoch": 0.5383663366336634,
      "grad_norm": 2.3367801961389407,
      "learning_rate": 5.2052045935225725e-06,
      "loss": 0.4568,
      "step": 1305
    },
    {
      "epoch": 0.5404290429042904,
      "grad_norm": 2.4464214105982856,
      "learning_rate": 5.169219002961987e-06,
      "loss": 0.4561,
      "step": 1310
    },
    {
      "epoch": 0.5424917491749175,
      "grad_norm": 10.09033249652104,
      "learning_rate": 5.133224634803594e-06,
      "loss": 0.4506,
      "step": 1315
    },
    {
      "epoch": 0.5445544554455446,
      "grad_norm": 2.525626316753546,
      "learning_rate": 5.097223356119538e-06,
      "loss": 0.4651,
      "step": 1320
    },
    {
      "epoch": 0.5466171617161716,
      "grad_norm": 2.521794410914058,
      "learning_rate": 5.061217034340426e-06,
      "loss": 0.4729,
      "step": 1325
    },
    {
      "epoch": 0.5486798679867987,
      "grad_norm": 2.5260338421626445,
      "learning_rate": 5.02520753715845e-06,
      "loss": 0.4525,
      "step": 1330
    },
    {
      "epoch": 0.5507425742574258,
      "grad_norm": 2.446840796366364,
      "learning_rate": 4.989196732430518e-06,
      "loss": 0.4567,
      "step": 1335
    },
    {
      "epoch": 0.5528052805280528,
      "grad_norm": 2.3859413099257893,
      "learning_rate": 4.953186488081362e-06,
      "loss": 0.4471,
      "step": 1340
    },
    {
      "epoch": 0.5548679867986799,
      "grad_norm": 2.5520983341553105,
      "learning_rate": 4.9171786720066465e-06,
      "loss": 0.4523,
      "step": 1345
    },
    {
      "epoch": 0.556930693069307,
      "grad_norm": 2.470761509016717,
      "learning_rate": 4.881175151976075e-06,
      "loss": 0.4589,
      "step": 1350
    },
    {
      "epoch": 0.558993399339934,
      "grad_norm": 2.6345528475282847,
      "learning_rate": 4.845177795536516e-06,
      "loss": 0.4429,
      "step": 1355
    },
    {
      "epoch": 0.5610561056105611,
      "grad_norm": 2.4651932232525366,
      "learning_rate": 4.809188469915121e-06,
      "loss": 0.4341,
      "step": 1360
    },
    {
      "epoch": 0.5631188118811881,
      "grad_norm": 2.423072698536209,
      "learning_rate": 4.773209041922472e-06,
      "loss": 0.4416,
      "step": 1365
    },
    {
      "epoch": 0.5651815181518152,
      "grad_norm": 2.453471069490191,
      "learning_rate": 4.737241377855751e-06,
      "loss": 0.4307,
      "step": 1370
    },
    {
      "epoch": 0.5672442244224423,
      "grad_norm": 2.373055640370215,
      "learning_rate": 4.7012873434019296e-06,
      "loss": 0.4355,
      "step": 1375
    },
    {
      "epoch": 0.5693069306930693,
      "grad_norm": 2.5437533790570646,
      "learning_rate": 4.6653488035409975e-06,
      "loss": 0.4415,
      "step": 1380
    },
    {
      "epoch": 0.5713696369636964,
      "grad_norm": 2.391498166308705,
      "learning_rate": 4.629427622449217e-06,
      "loss": 0.4313,
      "step": 1385
    },
    {
      "epoch": 0.5734323432343235,
      "grad_norm": 2.643038273859464,
      "learning_rate": 4.59352566340243e-06,
      "loss": 0.4531,
      "step": 1390
    },
    {
      "epoch": 0.5754950495049505,
      "grad_norm": 2.515338122652954,
      "learning_rate": 4.557644788679413e-06,
      "loss": 0.4372,
      "step": 1395
    },
    {
      "epoch": 0.5775577557755776,
      "grad_norm": 2.5228479375899355,
      "learning_rate": 4.521786859465263e-06,
      "loss": 0.4343,
      "step": 1400
    },
    {
      "epoch": 0.5796204620462047,
      "grad_norm": 2.526725915460239,
      "learning_rate": 4.485953735754872e-06,
      "loss": 0.4236,
      "step": 1405
    },
    {
      "epoch": 0.5816831683168316,
      "grad_norm": 2.3164427894674224,
      "learning_rate": 4.450147276256439e-06,
      "loss": 0.4164,
      "step": 1410
    },
    {
      "epoch": 0.5837458745874587,
      "grad_norm": 2.3425181209777963,
      "learning_rate": 4.414369338295056e-06,
      "loss": 0.4427,
      "step": 1415
    },
    {
      "epoch": 0.5858085808580858,
      "grad_norm": 2.450541853696667,
      "learning_rate": 4.37862177771637e-06,
      "loss": 0.4406,
      "step": 1420
    },
    {
      "epoch": 0.5878712871287128,
      "grad_norm": 2.420195141307401,
      "learning_rate": 4.342906448790315e-06,
      "loss": 0.4475,
      "step": 1425
    },
    {
      "epoch": 0.5899339933993399,
      "grad_norm": 2.4136105308611624,
      "learning_rate": 4.307225204114927e-06,
      "loss": 0.435,
      "step": 1430
    },
    {
      "epoch": 0.591996699669967,
      "grad_norm": 2.4087084238655962,
      "learning_rate": 4.271579894520254e-06,
      "loss": 0.44,
      "step": 1435
    },
    {
      "epoch": 0.594059405940594,
      "grad_norm": 2.457797382505865,
      "learning_rate": 4.235972368972343e-06,
      "loss": 0.4368,
      "step": 1440
    },
    {
      "epoch": 0.5961221122112211,
      "grad_norm": 2.3957561493714827,
      "learning_rate": 4.200404474477341e-06,
      "loss": 0.4163,
      "step": 1445
    },
    {
      "epoch": 0.5981848184818482,
      "grad_norm": 2.3457771252145587,
      "learning_rate": 4.16487805598568e-06,
      "loss": 0.4094,
      "step": 1450
    },
    {
      "epoch": 0.6002475247524752,
      "grad_norm": 2.573777931737885,
      "learning_rate": 4.12939495629638e-06,
      "loss": 0.4162,
      "step": 1455
    },
    {
      "epoch": 0.6023102310231023,
      "grad_norm": 2.348703417322529,
      "learning_rate": 4.093957015961465e-06,
      "loss": 0.4208,
      "step": 1460
    },
    {
      "epoch": 0.6043729372937293,
      "grad_norm": 2.6136735137117997,
      "learning_rate": 4.0585660731904855e-06,
      "loss": 0.4357,
      "step": 1465
    },
    {
      "epoch": 0.6064356435643564,
      "grad_norm": 2.3717596586411633,
      "learning_rate": 4.023223963755168e-06,
      "loss": 0.4111,
      "step": 1470
    },
    {
      "epoch": 0.6084983498349835,
      "grad_norm": 2.3925003782413117,
      "learning_rate": 3.987932520894201e-06,
      "loss": 0.422,
      "step": 1475
    },
    {
      "epoch": 0.6105610561056105,
      "grad_norm": 2.4066751622931086,
      "learning_rate": 3.9526935752181275e-06,
      "loss": 0.4262,
      "step": 1480
    },
    {
      "epoch": 0.6126237623762376,
      "grad_norm": 2.2489948265452435,
      "learning_rate": 3.917508954614401e-06,
      "loss": 0.4137,
      "step": 1485
    },
    {
      "epoch": 0.6146864686468647,
      "grad_norm": 2.3814132782379636,
      "learning_rate": 3.882380484152567e-06,
      "loss": 0.4114,
      "step": 1490
    },
    {
      "epoch": 0.6167491749174917,
      "grad_norm": 2.503060690023786,
      "learning_rate": 3.847309985989593e-06,
      "loss": 0.4222,
      "step": 1495
    },
    {
      "epoch": 0.6188118811881188,
      "grad_norm": 2.485844712788725,
      "learning_rate": 3.8122992792753534e-06,
      "loss": 0.4212,
      "step": 1500
    },
    {
      "epoch": 0.6208745874587459,
      "grad_norm": 2.428549383590827,
      "learning_rate": 3.777350180058264e-06,
      "loss": 0.4187,
      "step": 1505
    },
    {
      "epoch": 0.6229372937293729,
      "grad_norm": 2.6875741250581533,
      "learning_rate": 3.7424645011910847e-06,
      "loss": 0.4123,
      "step": 1510
    },
    {
      "epoch": 0.625,
      "grad_norm": 2.425981196955932,
      "learning_rate": 3.707644052236887e-06,
      "loss": 0.4096,
      "step": 1515
    },
    {
      "epoch": 0.6270627062706271,
      "grad_norm": 2.7328115668919155,
      "learning_rate": 3.672890639375184e-06,
      "loss": 0.4206,
      "step": 1520
    },
    {
      "epoch": 0.6291254125412541,
      "grad_norm": 2.3245488388421593,
      "learning_rate": 3.6382060653082434e-06,
      "loss": 0.4052,
      "step": 1525
    },
    {
      "epoch": 0.6311881188118812,
      "grad_norm": 2.384682866362113,
      "learning_rate": 3.6035921291675815e-06,
      "loss": 0.4156,
      "step": 1530
    },
    {
      "epoch": 0.6332508250825083,
      "grad_norm": 2.4129965482745486,
      "learning_rate": 3.569050626420636e-06,
      "loss": 0.4093,
      "step": 1535
    },
    {
      "epoch": 0.6353135313531353,
      "grad_norm": 2.4547665669069776,
      "learning_rate": 3.5345833487776404e-06,
      "loss": 0.425,
      "step": 1540
    },
    {
      "epoch": 0.6373762376237624,
      "grad_norm": 2.489381449919041,
      "learning_rate": 3.500192084098677e-06,
      "loss": 0.4215,
      "step": 1545
    },
    {
      "epoch": 0.6394389438943895,
      "grad_norm": 2.382148749916793,
      "learning_rate": 3.4658786163009416e-06,
      "loss": 0.4152,
      "step": 1550
    },
    {
      "epoch": 0.6415016501650165,
      "grad_norm": 2.3828897057876746,
      "learning_rate": 3.4316447252662142e-06,
      "loss": 0.4084,
      "step": 1555
    },
    {
      "epoch": 0.6435643564356436,
      "grad_norm": 2.446574644486715,
      "learning_rate": 3.3974921867485238e-06,
      "loss": 0.407,
      "step": 1560
    },
    {
      "epoch": 0.6456270627062707,
      "grad_norm": 2.3536881611551035,
      "learning_rate": 3.3634227722820496e-06,
      "loss": 0.4007,
      "step": 1565
    },
    {
      "epoch": 0.6476897689768977,
      "grad_norm": 2.48572252066864,
      "learning_rate": 3.3294382490892226e-06,
      "loss": 0.401,
      "step": 1570
    },
    {
      "epoch": 0.6497524752475248,
      "grad_norm": 2.4835672414143604,
      "learning_rate": 3.2955403799890567e-06,
      "loss": 0.4005,
      "step": 1575
    },
    {
      "epoch": 0.6518151815181518,
      "grad_norm": 2.3799907386582224,
      "learning_rate": 3.261730923305717e-06,
      "loss": 0.4026,
      "step": 1580
    },
    {
      "epoch": 0.6538778877887789,
      "grad_norm": 2.486466648614617,
      "learning_rate": 3.2280116327773028e-06,
      "loss": 0.4048,
      "step": 1585
    },
    {
      "epoch": 0.655940594059406,
      "grad_norm": 2.4042700940181523,
      "learning_rate": 3.194384257464884e-06,
      "loss": 0.3945,
      "step": 1590
    },
    {
      "epoch": 0.658003300330033,
      "grad_norm": 2.327134482890906,
      "learning_rate": 3.160850541661779e-06,
      "loss": 0.4047,
      "step": 1595
    },
    {
      "epoch": 0.6600660066006601,
      "grad_norm": 2.4535001026550844,
      "learning_rate": 3.127412224803068e-06,
      "loss": 0.3885,
      "step": 1600
    },
    {
      "epoch": 0.6621287128712872,
      "grad_norm": 2.6395091605070786,
      "learning_rate": 3.094071041375375e-06,
      "loss": 0.4085,
      "step": 1605
    },
    {
      "epoch": 0.6641914191419142,
      "grad_norm": 2.45831697505499,
      "learning_rate": 3.060828720826889e-06,
      "loss": 0.4063,
      "step": 1610
    },
    {
      "epoch": 0.6662541254125413,
      "grad_norm": 2.25651867695093,
      "learning_rate": 3.0276869874776632e-06,
      "loss": 0.3991,
      "step": 1615
    },
    {
      "epoch": 0.6683168316831684,
      "grad_norm": 2.36719278095781,
      "learning_rate": 2.994647560430167e-06,
      "loss": 0.3943,
      "step": 1620
    },
    {
      "epoch": 0.6703795379537953,
      "grad_norm": 2.4049216832251035,
      "learning_rate": 2.961712153480118e-06,
      "loss": 0.4029,
      "step": 1625
    },
    {
      "epoch": 0.6724422442244224,
      "grad_norm": 2.559007669700738,
      "learning_rate": 2.9288824750275803e-06,
      "loss": 0.4032,
      "step": 1630
    },
    {
      "epoch": 0.6745049504950495,
      "grad_norm": 2.310036214506543,
      "learning_rate": 2.896160227988357e-06,
      "loss": 0.4005,
      "step": 1635
    },
    {
      "epoch": 0.6765676567656765,
      "grad_norm": 2.43914641154883,
      "learning_rate": 2.8635471097056423e-06,
      "loss": 0.3943,
      "step": 1640
    },
    {
      "epoch": 0.6786303630363036,
      "grad_norm": 2.443819723387033,
      "learning_rate": 2.8310448118619967e-06,
      "loss": 0.407,
      "step": 1645
    },
    {
      "epoch": 0.6806930693069307,
      "grad_norm": 2.4768276955867794,
      "learning_rate": 2.7986550203915807e-06,
      "loss": 0.3909,
      "step": 1650
    },
    {
      "epoch": 0.6827557755775577,
      "grad_norm": 2.3876332218325365,
      "learning_rate": 2.7663794153927165e-06,
      "loss": 0.3989,
      "step": 1655
    },
    {
      "epoch": 0.6848184818481848,
      "grad_norm": 2.373869717825122,
      "learning_rate": 2.7342196710407337e-06,
      "loss": 0.3848,
      "step": 1660
    },
    {
      "epoch": 0.6868811881188119,
      "grad_norm": 2.2944477847209703,
      "learning_rate": 2.7021774555011214e-06,
      "loss": 0.3952,
      "step": 1665
    },
    {
      "epoch": 0.6889438943894389,
      "grad_norm": 2.4458499611784044,
      "learning_rate": 2.6702544308430122e-06,
      "loss": 0.3841,
      "step": 1670
    },
    {
      "epoch": 0.691006600660066,
      "grad_norm": 2.7433908365448443,
      "learning_rate": 2.6384522529529542e-06,
      "loss": 0.383,
      "step": 1675
    },
    {
      "epoch": 0.693069306930693,
      "grad_norm": 2.4164071285356052,
      "learning_rate": 2.6067725714490307e-06,
      "loss": 0.3841,
      "step": 1680
    },
    {
      "epoch": 0.6951320132013201,
      "grad_norm": 2.5329201914328077,
      "learning_rate": 2.5752170295952856e-06,
      "loss": 0.4024,
      "step": 1685
    },
    {
      "epoch": 0.6971947194719472,
      "grad_norm": 2.362743279232679,
      "learning_rate": 2.5437872642164818e-06,
      "loss": 0.3965,
      "step": 1690
    },
    {
      "epoch": 0.6992574257425742,
      "grad_norm": 2.3597073251243064,
      "learning_rate": 2.5124849056132094e-06,
| "loss": 0.3658, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.7013201320132013, | |
| "grad_norm": 2.645876828690864, | |
| "learning_rate": 2.4813115774773046e-06, | |
| "loss": 0.3884, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.7033828382838284, | |
| "grad_norm": 2.392821189181109, | |
| "learning_rate": 2.4502688968076416e-06, | |
| "loss": 0.3827, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.7054455445544554, | |
| "grad_norm": 2.4614918453264565, | |
| "learning_rate": 2.4193584738262426e-06, | |
| "loss": 0.3825, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.7075082508250825, | |
| "grad_norm": 2.3794205872949252, | |
| "learning_rate": 2.388581911894767e-06, | |
| "loss": 0.3825, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.7095709570957096, | |
| "grad_norm": 2.4464619731962634, | |
| "learning_rate": 2.357940807431339e-06, | |
| "loss": 0.3785, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.7116336633663366, | |
| "grad_norm": 2.3852404958442013, | |
| "learning_rate": 2.3274367498277246e-06, | |
| "loss": 0.3773, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.7136963696369637, | |
| "grad_norm": 2.376518354617449, | |
| "learning_rate": 2.2970713213669127e-06, | |
| "loss": 0.3831, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.7157590759075908, | |
| "grad_norm": 2.6079917176596448, | |
| "learning_rate": 2.266846097141026e-06, | |
| "loss": 0.3857, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.7178217821782178, | |
| "grad_norm": 2.3458483796617418, | |
| "learning_rate": 2.2367626449696168e-06, | |
| "loss": 0.3751, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.7198844884488449, | |
| "grad_norm": 2.270229411474977, | |
| "learning_rate": 2.206822525318352e-06, | |
| "loss": 0.3784, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.721947194719472, | |
| "grad_norm": 2.3820977460708077, | |
| "learning_rate": 2.1770272912180577e-06, | |
| "loss": 0.3726, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.724009900990099, | |
| "grad_norm": 2.3917376427271746, | |
| "learning_rate": 2.1473784881841753e-06, | |
| "loss": 0.3749, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.7260726072607261, | |
| "grad_norm": 2.430451456865234, | |
| "learning_rate": 2.117877654136584e-06, | |
| "loss": 0.3621, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.7281353135313532, | |
| "grad_norm": 2.3121061406028107, | |
| "learning_rate": 2.088526319319827e-06, | |
| "loss": 0.3672, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.7301980198019802, | |
| "grad_norm": 2.360101764240328, | |
| "learning_rate": 2.059326006223743e-06, | |
| "loss": 0.3717, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.7322607260726073, | |
| "grad_norm": 2.573193311702909, | |
| "learning_rate": 2.030278229504484e-06, | |
| "loss": 0.379, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.7343234323432343, | |
| "grad_norm": 2.3389637944355943, | |
| "learning_rate": 2.001384495905954e-06, | |
| "loss": 0.3752, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.7363861386138614, | |
| "grad_norm": 2.488613631746104, | |
| "learning_rate": 1.972646304181656e-06, | |
| "loss": 0.3782, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.7384488448844885, | |
| "grad_norm": 2.4022170179819096, | |
| "learning_rate": 1.944065145016935e-06, | |
| "loss": 0.3726, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.7405115511551155, | |
| "grad_norm": 2.256385150261715, | |
| "learning_rate": 1.9156425009516736e-06, | |
| "loss": 0.3688, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.7425742574257426, | |
| "grad_norm": 2.4167248024578813, | |
| "learning_rate": 1.8873798463033742e-06, | |
| "loss": 0.3684, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.7446369636963697, | |
| "grad_norm": 2.451766042702295, | |
| "learning_rate": 1.8592786470906932e-06, | |
| "loss": 0.375, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.7466996699669967, | |
| "grad_norm": 2.383004059186879, | |
| "learning_rate": 1.8313403609573976e-06, | |
| "loss": 0.3759, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.7487623762376238, | |
| "grad_norm": 2.12270311435573, | |
| "learning_rate": 1.8035664370967493e-06, | |
| "loss": 0.3763, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.7508250825082509, | |
| "grad_norm": 2.4415551404560847, | |
| "learning_rate": 1.775958316176339e-06, | |
| "loss": 0.3676, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.7528877887788779, | |
| "grad_norm": 2.381919666851577, | |
| "learning_rate": 1.7485174302633557e-06, | |
| "loss": 0.3707, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.754950495049505, | |
| "grad_norm": 2.468112725751951, | |
| "learning_rate": 1.721245202750299e-06, | |
| "loss": 0.3743, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.7570132013201321, | |
| "grad_norm": 2.4412319646605707, | |
| "learning_rate": 1.694143048281156e-06, | |
| "loss": 0.3697, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.759075907590759, | |
| "grad_norm": 2.3933092088835592, | |
| "learning_rate": 1.6672123726780083e-06, | |
| "loss": 0.362, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.7611386138613861, | |
| "grad_norm": 2.3599050961874357, | |
| "learning_rate": 1.6404545728681232e-06, | |
| "loss": 0.3621, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.7632013201320133, | |
| "grad_norm": 2.3969987242185407, | |
| "learning_rate": 1.613871036811489e-06, | |
| "loss": 0.3631, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.7652640264026402, | |
| "grad_norm": 2.3728886726584144, | |
| "learning_rate": 1.5874631434288128e-06, | |
| "loss": 0.3747, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.7673267326732673, | |
| "grad_norm": 2.334377590582883, | |
| "learning_rate": 1.5612322625300064e-06, | |
| "loss": 0.3647, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.7693894389438944, | |
| "grad_norm": 2.3804587872682865, | |
| "learning_rate": 1.5351797547431212e-06, | |
| "loss": 0.3694, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.7714521452145214, | |
| "grad_norm": 2.3600418503523444, | |
| "learning_rate": 1.5093069714437803e-06, | |
| "loss": 0.3636, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.7735148514851485, | |
| "grad_norm": 2.553398892828675, | |
| "learning_rate": 1.483615254685075e-06, | |
| "loss": 0.3611, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.7755775577557755, | |
| "grad_norm": 2.3499524569210157, | |
| "learning_rate": 1.4581059371279516e-06, | |
| "loss": 0.3674, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.7776402640264026, | |
| "grad_norm": 2.4025948209352848, | |
| "learning_rate": 1.4327803419720836e-06, | |
| "loss": 0.3609, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.7797029702970297, | |
| "grad_norm": 2.44960933153857, | |
| "learning_rate": 1.4076397828872441e-06, | |
| "loss": 0.3546, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.7817656765676567, | |
| "grad_norm": 2.453171947739372, | |
| "learning_rate": 1.3826855639451492e-06, | |
| "loss": 0.3675, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.7838283828382838, | |
| "grad_norm": 2.243857634486722, | |
| "learning_rate": 1.357918979551831e-06, | |
| "loss": 0.3655, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.7858910891089109, | |
| "grad_norm": 2.334627109038789, | |
| "learning_rate": 1.333341314380479e-06, | |
| "loss": 0.3528, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.7879537953795379, | |
| "grad_norm": 2.4450603903304367, | |
| "learning_rate": 1.308953843304816e-06, | |
| "loss": 0.3696, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.790016501650165, | |
| "grad_norm": 2.469101543725218, | |
| "learning_rate": 1.2847578313329623e-06, | |
| "loss": 0.3722, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.7920792079207921, | |
| "grad_norm": 2.347298229927237, | |
| "learning_rate": 1.2607545335418154e-06, | |
| "loss": 0.364, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.7941419141914191, | |
| "grad_norm": 2.478215187683226, | |
| "learning_rate": 1.2369451950119553e-06, | |
| "loss": 0.3634, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.7962046204620462, | |
| "grad_norm": 2.321071149582765, | |
| "learning_rate": 1.2133310507630535e-06, | |
| "loss": 0.3639, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.7982673267326733, | |
| "grad_norm": 2.363187519551145, | |
| "learning_rate": 1.189913325689816e-06, | |
| "loss": 0.3694, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.8003300330033003, | |
| "grad_norm": 2.1565024791128615, | |
| "learning_rate": 1.166693234498446e-06, | |
| "loss": 0.3444, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.8023927392739274, | |
| "grad_norm": 2.238362330023504, | |
| "learning_rate": 1.1436719816436293e-06, | |
| "loss": 0.3535, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.8044554455445545, | |
| "grad_norm": 2.682507072878571, | |
| "learning_rate": 1.120850761266068e-06, | |
| "loss": 0.3622, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.8065181518151815, | |
| "grad_norm": 2.4616448463288974, | |
| "learning_rate": 1.098230757130529e-06, | |
| "loss": 0.3521, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.8085808580858086, | |
| "grad_norm": 2.518356814157187, | |
| "learning_rate": 1.075813142564448e-06, | |
| "loss": 0.3621, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.8106435643564357, | |
| "grad_norm": 2.41717183499352, | |
| "learning_rate": 1.053599080397068e-06, | |
| "loss": 0.3534, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.8127062706270627, | |
| "grad_norm": 2.3274563555835357, | |
| "learning_rate": 1.031589722899109e-06, | |
| "loss": 0.3497, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.8147689768976898, | |
| "grad_norm": 2.41058923252155, | |
| "learning_rate": 1.0097862117230162e-06, | |
| "loss": 0.3453, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.8168316831683168, | |
| "grad_norm": 2.441792027931522, | |
| "learning_rate": 9.881896778437328e-07, | |
| "loss": 0.3444, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.8188943894389439, | |
| "grad_norm": 2.4018876695962383, | |
| "learning_rate": 9.6680124150003e-07, | |
| "loss": 0.3654, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.820957095709571, | |
| "grad_norm": 2.3575482669225902, | |
| "learning_rate": 9.456220121364091e-07, | |
| "loss": 0.348, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.823019801980198, | |
| "grad_norm": 2.525821025154249, | |
| "learning_rate": 9.24653088345544e-07, | |
| "loss": 0.3523, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.8250825082508251, | |
| "grad_norm": 2.7783807540038294, | |
| "learning_rate": 9.038955578113018e-07, | |
| "loss": 0.3582, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.8271452145214522, | |
| "grad_norm": 2.50572956814031, | |
| "learning_rate": 8.833504972523238e-07, | |
| "loss": 0.3593, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.8292079207920792, | |
| "grad_norm": 2.252557918607386, | |
| "learning_rate": 8.630189723661663e-07, | |
| "loss": 0.3536, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.8312706270627063, | |
| "grad_norm": 2.4651130321437758, | |
| "learning_rate": 8.429020377740338e-07, | |
| "loss": 0.3535, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.8333333333333334, | |
| "grad_norm": 2.400123594291822, | |
| "learning_rate": 8.230007369660636e-07, | |
| "loss": 0.3543, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.8353960396039604, | |
| "grad_norm": 2.2893414346006806, | |
| "learning_rate": 8.033161022472063e-07, | |
| "loss": 0.3633, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.8374587458745875, | |
| "grad_norm": 2.387886294048468, | |
| "learning_rate": 7.838491546836763e-07, | |
| "loss": 0.3501, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.8395214521452146, | |
| "grad_norm": 2.380305733427394, | |
| "learning_rate": 7.646009040499846e-07, | |
| "loss": 0.3396, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.8415841584158416, | |
| "grad_norm": 2.6014881264846164, | |
| "learning_rate": 7.455723487765664e-07, | |
| "loss": 0.3586, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.8436468646864687, | |
| "grad_norm": 2.4907278984166106, | |
| "learning_rate": 7.267644758979869e-07, | |
| "loss": 0.3468, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.8457095709570958, | |
| "grad_norm": 2.34019203542543, | |
| "learning_rate": 7.08178261001743e-07, | |
| "loss": 0.3514, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.8477722772277227, | |
| "grad_norm": 2.3922323702113637, | |
| "learning_rate": 6.898146681776629e-07, | |
| "loss": 0.3486, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.8498349834983498, | |
| "grad_norm": 2.3826898973626127, | |
| "learning_rate": 6.7167464996789e-07, | |
| "loss": 0.3621, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.851897689768977, | |
| "grad_norm": 2.3165319710343324, | |
| "learning_rate": 6.537591473174814e-07, | |
| "loss": 0.3413, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.8539603960396039, | |
| "grad_norm": 2.5692025408049064, | |
| "learning_rate": 6.360690895255916e-07, | |
| "loss": 0.3461, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.856023102310231, | |
| "grad_norm": 2.272769119087517, | |
| "learning_rate": 6.186053941972775e-07, | |
| "loss": 0.348, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.858085808580858, | |
| "grad_norm": 2.4550162262909447, | |
| "learning_rate": 6.013689671958944e-07, | |
| "loss": 0.3531, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.8601485148514851, | |
| "grad_norm": 2.388763908670921, | |
| "learning_rate": 5.84360702596109e-07, | |
| "loss": 0.3608, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.8622112211221122, | |
| "grad_norm": 2.5005604744641174, | |
| "learning_rate": 5.67581482637527e-07, | |
| "loss": 0.3444, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.8642739273927392, | |
| "grad_norm": 2.4560197080279234, | |
| "learning_rate": 5.510321776789213e-07, | |
| "loss": 0.3463, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.8663366336633663, | |
| "grad_norm": 2.365882695902703, | |
| "learning_rate": 5.347136461530966e-07, | |
| "loss": 0.3366, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.8683993399339934, | |
| "grad_norm": 2.4380355384519925, | |
| "learning_rate": 5.186267345223539e-07, | |
| "loss": 0.3385, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.8704620462046204, | |
| "grad_norm": 2.399986551951994, | |
| "learning_rate": 5.027722772345828e-07, | |
| "loss": 0.3448, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.8725247524752475, | |
| "grad_norm": 2.454557551769108, | |
| "learning_rate": 4.871510966799847e-07, | |
| "loss": 0.3507, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.8745874587458746, | |
| "grad_norm": 2.456742477580607, | |
| "learning_rate": 4.717640031484055e-07, | |
| "loss": 0.3525, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.8766501650165016, | |
| "grad_norm": 2.41826728768274, | |
| "learning_rate": 4.566117947873139e-07, | |
| "loss": 0.3526, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.8787128712871287, | |
| "grad_norm": 2.4671547541589183, | |
| "learning_rate": 4.4169525756039164e-07, | |
| "loss": 0.3375, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.8807755775577558, | |
| "grad_norm": 2.3142164149499793, | |
| "learning_rate": 4.2701516520677054e-07, | |
| "loss": 0.3428, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.8828382838283828, | |
| "grad_norm": 2.282591060676799, | |
| "learning_rate": 4.1257227920089684e-07, | |
| "loss": 0.315, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.8849009900990099, | |
| "grad_norm": 2.2945870317486445, | |
| "learning_rate": 3.983673487130313e-07, | |
| "loss": 0.3438, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.886963696369637, | |
| "grad_norm": 2.4023456680454003, | |
| "learning_rate": 3.8440111057038874e-07, | |
| "loss": 0.3466, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.889026402640264, | |
| "grad_norm": 2.4628827167617278, | |
| "learning_rate": 3.706742892189197e-07, | |
| "loss": 0.3578, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.8910891089108911, | |
| "grad_norm": 2.518821614068097, | |
| "learning_rate": 3.5718759668572913e-07, | |
| "loss": 0.3484, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.8931518151815182, | |
| "grad_norm": 2.4044153870357734, | |
| "learning_rate": 3.439417325421468e-07, | |
| "loss": 0.3331, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.8952145214521452, | |
| "grad_norm": 2.263400317365007, | |
| "learning_rate": 3.3093738386743734e-07, | |
| "loss": 0.3359, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.8972772277227723, | |
| "grad_norm": 2.4013680975289162, | |
| "learning_rate": 3.1817522521316034e-07, | |
| "loss": 0.3509, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.8993399339933993, | |
| "grad_norm": 2.4793048291891324, | |
| "learning_rate": 3.0565591856818236e-07, | |
| "loss": 0.3499, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.9014026402640264, | |
| "grad_norm": 2.524550570591509, | |
| "learning_rate": 2.9338011332433525e-07, | |
| "loss": 0.3314, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.9034653465346535, | |
| "grad_norm": 2.4135168075751294, | |
| "learning_rate": 2.813484462427357e-07, | |
| "loss": 0.3359, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.9055280528052805, | |
| "grad_norm": 2.4118495326051996, | |
| "learning_rate": 2.695615414207542e-07, | |
| "loss": 0.3499, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.9075907590759076, | |
| "grad_norm": 2.392533982344082, | |
| "learning_rate": 2.5802001025963917e-07, | |
| "loss": 0.3417, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.9096534653465347, | |
| "grad_norm": 2.2880229451881418, | |
| "learning_rate": 2.467244514328082e-07, | |
| "loss": 0.3493, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.9117161716171617, | |
| "grad_norm": 2.3091247857735504, | |
| "learning_rate": 2.3567545085478983e-07, | |
| "loss": 0.3383, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.9137788778877888, | |
| "grad_norm": 2.4901336136450136, | |
| "learning_rate": 2.248735816508324e-07, | |
| "loss": 0.3398, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.9158415841584159, | |
| "grad_norm": 2.3142795015199114, | |
| "learning_rate": 2.1431940412717843e-07, | |
| "loss": 0.3427, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.9179042904290429, | |
| "grad_norm": 2.4277647469057744, | |
| "learning_rate": 2.040134657419951e-07, | |
| "loss": 0.3387, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.91996699669967, | |
| "grad_norm": 2.3617745475614034, | |
| "learning_rate": 1.9395630107698293e-07, | |
| "loss": 0.3392, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.9220297029702971, | |
| "grad_norm": 2.5202449484112615, | |
| "learning_rate": 1.8414843180964316e-07, | |
| "loss": 0.3451, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.9240924092409241, | |
| "grad_norm": 2.6186027806455727, | |
| "learning_rate": 1.7459036668621586e-07, | |
| "loss": 0.345, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.9261551155115512, | |
| "grad_norm": 2.357402116923623, | |
| "learning_rate": 1.6528260149529573e-07, | |
| "loss": 0.3426, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.9282178217821783, | |
| "grad_norm": 2.7714941079884365, | |
| "learning_rate": 1.562256190421102e-07, | |
| "loss": 0.3444, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.9302805280528053, | |
| "grad_norm": 2.384372509379296, | |
| "learning_rate": 1.4741988912347848e-07, | |
| "loss": 0.349, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.9323432343234324, | |
| "grad_norm": 2.423642340690504, | |
| "learning_rate": 1.3886586850344276e-07, | |
| "loss": 0.3411, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.9344059405940595, | |
| "grad_norm": 2.4976812347623762, | |
| "learning_rate": 1.30564000889572e-07, | |
| "loss": 0.3427, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.9364686468646864, | |
| "grad_norm": 2.4409748992106244, | |
| "learning_rate": 1.225147169099511e-07, | |
| "loss": 0.3344, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.9385313531353136, | |
| "grad_norm": 2.4594177758611226, | |
| "learning_rate": 1.147184340908386e-07, | |
| "loss": 0.3466, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.9405940594059405, | |
| "grad_norm": 2.3563533473196054, | |
| "learning_rate": 1.0717555683501413e-07, | |
| "loss": 0.3437, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.9426567656765676, | |
| "grad_norm": 2.438141654343306, | |
| "learning_rate": 9.988647640079785e-08, | |
| "loss": 0.3481, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.9447194719471947, | |
| "grad_norm": 2.3653534995422465, | |
| "learning_rate": 9.285157088175678e-08, | |
| "loss": 0.3305, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.9467821782178217, | |
| "grad_norm": 2.5595084581881893, | |
| "learning_rate": 8.607120518709156e-08, | |
| "loss": 0.3447, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.9488448844884488, | |
| "grad_norm": 2.298821712031628, | |
| "learning_rate": 7.954573102271157e-08, | |
| "loss": 0.3427, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.9509075907590759, | |
| "grad_norm": 2.3999698309349786, | |
| "learning_rate": 7.327548687298625e-08, | |
| "loss": 0.3387, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.9529702970297029, | |
| "grad_norm": 2.2525707814009617, | |
| "learning_rate": 6.726079798319185e-08, | |
| "loss": 0.3561, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.95503300330033, | |
| "grad_norm": 2.3217448485216248, | |
| "learning_rate": 6.150197634263888e-08, | |
| "loss": 0.3469, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.9570957095709571, | |
| "grad_norm": 2.365101163841893, | |
| "learning_rate": 5.599932066848834e-08, | |
| "loss": 0.3367, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.9591584158415841, | |
| "grad_norm": 2.425841031506538, | |
| "learning_rate": 5.0753116390258594e-08, | |
| "loss": 0.3401, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.9612211221122112, | |
| "grad_norm": 2.563836501970768, | |
| "learning_rate": 4.576363563501718e-08, | |
| "loss": 0.3409, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.9632838283828383, | |
| "grad_norm": 2.34358481427922, | |
| "learning_rate": 4.103113721326768e-08, | |
| "loss": 0.3484, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.9653465346534653, | |
| "grad_norm": 2.402352860387557, | |
| "learning_rate": 3.655586660552324e-08, | |
| "loss": 0.3419, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.9674092409240924, | |
| "grad_norm": 2.412967620073454, | |
| "learning_rate": 3.233805594957506e-08, | |
| "loss": 0.3272, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.9694719471947195, | |
| "grad_norm": 2.3840732180526487, | |
| "learning_rate": 2.8377924028449855e-08, | |
| "loss": 0.3383, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.9715346534653465, | |
| "grad_norm": 2.508928823648118, | |
| "learning_rate": 2.4675676259059976e-08, | |
| "loss": 0.3447, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.9735973597359736, | |
| "grad_norm": 2.1993358612568317, | |
| "learning_rate": 2.123150468155144e-08, | |
| "loss": 0.3294, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.9756600660066007, | |
| "grad_norm": 2.4190680800537567, | |
| "learning_rate": 1.8045587949339637e-08, | |
| "loss": 0.3351, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.9777227722772277, | |
| "grad_norm": 2.2890929930955037, | |
| "learning_rate": 1.5118091319843985e-08, | |
| "loss": 0.34, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.9797854785478548, | |
| "grad_norm": 2.5688702019838883, | |
| "learning_rate": 1.2449166645915333e-08, | |
| "loss": 0.3304, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.9818481848184818, | |
| "grad_norm": 2.483690216637316, | |
| "learning_rate": 1.0038952367958376e-08, | |
| "loss": 0.341, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.9839108910891089, | |
| "grad_norm": 2.179239333588943, | |
| "learning_rate": 7.887573506752954e-09, | |
| "loss": 0.3384, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 0.985973597359736, | |
| "grad_norm": 2.512101769493497, | |
| "learning_rate": 5.9951416569659085e-09, | |
| "loss": 0.3348, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.988036303630363, | |
| "grad_norm": 2.4449313798457597, | |
| "learning_rate": 4.361754981365152e-09, | |
| "loss": 0.3339, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 0.9900990099009901, | |
| "grad_norm": 2.2966537435115444, | |
| "learning_rate": 2.98749820572708e-09, | |
| "loss": 0.3263, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.9921617161716172, | |
| "grad_norm": 2.5187709917417727, | |
| "learning_rate": 1.8724426144395293e-09, | |
| "loss": 0.3357, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 0.9942244224422442, | |
| "grad_norm": 2.340909896310804, | |
| "learning_rate": 1.0166460468080674e-09, | |
| "loss": 0.3426, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.9962871287128713, | |
| "grad_norm": 2.3871751186126207, | |
| "learning_rate": 4.2015289405339386e-10, | |
| "loss": 0.3369, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 0.9983498349834984, | |
| "grad_norm": 2.377304997896102, | |
| "learning_rate": 8.29940970092924e-11, | |
| "loss": 0.3308, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_runtime": 3.3729, | |
| "eval_samples_per_second": 2.965, | |
| "eval_steps_per_second": 0.889, | |
| "step": 2424 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 2424, | |
| "total_flos": 253768142684160.0, | |
| "train_loss": 0.5300087753695624, | |
| "train_runtime": 21368.6318, | |
| "train_samples_per_second": 1.814, | |
| "train_steps_per_second": 0.113 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 2424, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 253768142684160.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
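
The file above follows the layout that the Hugging Face Transformers `Trainer` writes to `trainer_state.json`: a `log_history` array of periodic records (each with `epoch`, `grad_norm`, `learning_rate`, `loss`, and `step`), followed by a final evaluation record and an aggregate training summary, plus run-level settings such as `logging_steps` and `max_steps`. As a minimal sketch of how this structure can be consumed (the file name `trainer_state.json` and the use of matplotlib are assumptions, not part of the original file), the snippet below loads the state and plots the logged training loss against the global step:

```python
# Minimal sketch, assuming the JSON above is saved as "trainer_state.json"
# and matplotlib is available; not part of the original training run.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic training records; the trailing entries carry
# eval_runtime and the aggregate train_loss rather than a per-step "loss".
records = [r for r in state["log_history"] if "loss" in r and "step" in r]
steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]

plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.title(f"Loss over {state['global_step']} steps "
          f"({state['num_train_epochs']} epoch)")
plt.show()
```

The same filtering approach works for the `learning_rate` or `grad_norm` fields, since every periodic record in `log_history` shares the same keys.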