{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2423,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0004127115146512588, "grad_norm": 24.122843960524676, "learning_rate": 4.1152263374485605e-08, "loss": 1.4137, "step": 1},
    {"epoch": 0.0020635575732562937, "grad_norm": 23.76872795123526, "learning_rate": 2.05761316872428e-07, "loss": 1.4306, "step": 5},
    {"epoch": 0.004127115146512587, "grad_norm": 15.835819961484214, "learning_rate": 4.11522633744856e-07, "loss": 1.3895, "step": 10},
    {"epoch": 0.006190672719768881, "grad_norm": 8.711444647130131, "learning_rate": 6.17283950617284e-07, "loss": 1.2741, "step": 15},
    {"epoch": 0.008254230293025175, "grad_norm": 10.763225748048312, "learning_rate": 8.23045267489712e-07, "loss": 1.1629, "step": 20},
    {"epoch": 0.010317787866281469, "grad_norm": 5.160551789043553, "learning_rate": 1.02880658436214e-06, "loss": 1.0567, "step": 25},
    {"epoch": 0.012381345439537762, "grad_norm": 3.472599934267948, "learning_rate": 1.234567901234568e-06, "loss": 0.9904, "step": 30},
    {"epoch": 0.014444903012794058, "grad_norm": 3.5380974995691528, "learning_rate": 1.440329218106996e-06, "loss": 0.9621, "step": 35},
    {"epoch": 0.01650846058605035, "grad_norm": 3.0479724307668077, "learning_rate": 1.646090534979424e-06, "loss": 0.9573, "step": 40},
    {"epoch": 0.018572018159306643, "grad_norm": 2.97801536675108, "learning_rate": 1.8518518518518519e-06, "loss": 0.9295, "step": 45},
    {"epoch": 0.020635575732562937, "grad_norm": 3.0790095562756514, "learning_rate": 2.05761316872428e-06, "loss": 0.9106, "step": 50},
    {"epoch": 0.02269913330581923, "grad_norm": 3.0873399280284626, "learning_rate": 2.263374485596708e-06, "loss": 0.9054, "step": 55},
    {"epoch": 0.024762690879075525, "grad_norm": 3.046348701050192, "learning_rate": 2.469135802469136e-06, "loss": 0.9016, "step": 60},
    {"epoch": 0.026826248452331822, "grad_norm": 3.117521509733326, "learning_rate": 2.674897119341564e-06, "loss": 0.9005, "step": 65},
    {"epoch": 0.028889806025588115, "grad_norm": 3.046385395158289, "learning_rate": 2.880658436213992e-06, "loss": 0.8882, "step": 70},
    {"epoch": 0.03095336359884441, "grad_norm": 3.1884168387342537, "learning_rate": 3.08641975308642e-06, "loss": 0.8799, "step": 75},
    {"epoch": 0.0330169211721007, "grad_norm": 3.2618306435716176, "learning_rate": 3.292181069958848e-06, "loss": 0.8907, "step": 80},
    {"epoch": 0.035080478745357, "grad_norm": 3.322364354858461, "learning_rate": 3.4979423868312762e-06, "loss": 0.8684, "step": 85},
    {"epoch": 0.03714403631861329, "grad_norm": 2.9808270129935495, "learning_rate": 3.7037037037037037e-06, "loss": 0.8604, "step": 90},
    {"epoch": 0.039207593891869584, "grad_norm": 3.1835415746195435, "learning_rate": 3.909465020576132e-06, "loss": 0.8707, "step": 95},
    {"epoch": 0.041271151465125874, "grad_norm": 3.057661991370929, "learning_rate": 4.11522633744856e-06, "loss": 0.8703, "step": 100},
    {"epoch": 0.04333470903838217, "grad_norm": 2.9074769839799277, "learning_rate": 4.3209876543209875e-06, "loss": 0.8633, "step": 105},
    {"epoch": 0.04539826661163846, "grad_norm": 2.9699831351967405, "learning_rate": 4.526748971193416e-06, "loss": 0.8759, "step": 110},
    {"epoch": 0.04746182418489476, "grad_norm": 3.219620261483576, "learning_rate": 4.732510288065844e-06, "loss": 0.8636, "step": 115},
    {"epoch": 0.04952538175815105, "grad_norm": 3.060892913438638, "learning_rate": 4.938271604938272e-06, "loss": 0.8512, "step": 120},
    {"epoch": 0.051588939331407346, "grad_norm": 3.0812348532752543, "learning_rate": 5.1440329218107e-06, "loss": 0.8412, "step": 125},
    {"epoch": 0.053652496904663644, "grad_norm": 3.208845547096666, "learning_rate": 5.349794238683128e-06, "loss": 0.8581, "step": 130},
    {"epoch": 0.055716054477919934, "grad_norm": 3.461069159058445, "learning_rate": 5.555555555555557e-06, "loss": 0.8512, "step": 135},
    {"epoch": 0.05777961205117623, "grad_norm": 3.040452060763894, "learning_rate": 5.761316872427984e-06, "loss": 0.857, "step": 140},
    {"epoch": 0.05984316962443252, "grad_norm": 2.9241553701138763, "learning_rate": 5.967078189300412e-06, "loss": 0.8556, "step": 145},
    {"epoch": 0.06190672719768882, "grad_norm": 3.22517785299545, "learning_rate": 6.17283950617284e-06, "loss": 0.8504, "step": 150},
    {"epoch": 0.06397028477094512, "grad_norm": 3.066199171323176, "learning_rate": 6.3786008230452675e-06, "loss": 0.8455, "step": 155},
    {"epoch": 0.0660338423442014, "grad_norm": 3.1076485395976436, "learning_rate": 6.584362139917696e-06, "loss": 0.8343, "step": 160},
    {"epoch": 0.0680973999174577, "grad_norm": 3.057886812615224, "learning_rate": 6.790123456790124e-06, "loss": 0.833, "step": 165},
    {"epoch": 0.070160957490714, "grad_norm": 3.12354443289718, "learning_rate": 6.9958847736625525e-06, "loss": 0.8445, "step": 170},
    {"epoch": 0.07222451506397029, "grad_norm": 3.1566533318265515, "learning_rate": 7.201646090534981e-06, "loss": 0.8336, "step": 175},
    {"epoch": 0.07428807263722657, "grad_norm": 3.0910067969250288, "learning_rate": 7.4074074074074075e-06, "loss": 0.8335, "step": 180},
    {"epoch": 0.07635163021048287, "grad_norm": 3.2341343019009754, "learning_rate": 7.613168724279836e-06, "loss": 0.8313, "step": 185},
    {"epoch": 0.07841518778373917, "grad_norm": 2.9443616643904984, "learning_rate": 7.818930041152263e-06, "loss": 0.8271, "step": 190},
    {"epoch": 0.08047874535699547, "grad_norm": 2.955202520255723, "learning_rate": 8.024691358024692e-06, "loss": 0.833, "step": 195},
    {"epoch": 0.08254230293025175, "grad_norm": 3.0004205685291896, "learning_rate": 8.23045267489712e-06, "loss": 0.8046, "step": 200},
    {"epoch": 0.08460586050350805, "grad_norm": 3.0549793246137984, "learning_rate": 8.43621399176955e-06, "loss": 0.8155, "step": 205},
    {"epoch": 0.08666941807676434, "grad_norm": 3.0535661505541896, "learning_rate": 8.641975308641975e-06, "loss": 0.8196, "step": 210},
    {"epoch": 0.08873297565002064, "grad_norm": 3.116053820452697, "learning_rate": 8.847736625514404e-06, "loss": 0.8347, "step": 215},
    {"epoch": 0.09079653322327692, "grad_norm": 3.022529098579132, "learning_rate": 9.053497942386832e-06, "loss": 0.8081, "step": 220},
    {"epoch": 0.09286009079653322, "grad_norm": 3.0433063189548792, "learning_rate": 9.25925925925926e-06, "loss": 0.8208, "step": 225},
    {"epoch": 0.09492364836978952, "grad_norm": 3.085322202490418, "learning_rate": 9.465020576131688e-06, "loss": 0.8339, "step": 230},
    {"epoch": 0.09698720594304581, "grad_norm": 3.6047031312875584, "learning_rate": 9.670781893004116e-06, "loss": 0.8181, "step": 235},
    {"epoch": 0.0990507635163021, "grad_norm": 3.0604827472581255, "learning_rate": 9.876543209876543e-06, "loss": 0.8166, "step": 240},
    {"epoch": 0.1011143210895584, "grad_norm": 3.2691636345258277, "learning_rate": 9.999979232392962e-06, "loss": 0.799, "step": 245},
    {"epoch": 0.10317787866281469, "grad_norm": 2.9570633048095964, "learning_rate": 9.999745598795033e-06, "loss": 0.8205, "step": 250},
    {"epoch": 0.10524143623607099, "grad_norm": 3.335892639195958, "learning_rate": 9.999252384260794e-06, "loss": 0.7989, "step": 255},
    {"epoch": 0.10730499380932729, "grad_norm": 2.9248399326152725, "learning_rate": 9.998499614397364e-06, "loss": 0.8063, "step": 260},
    {"epoch": 0.10936855138258357, "grad_norm": 3.0768852625514422, "learning_rate": 9.997487328287675e-06, "loss": 0.7968, "step": 265},
    {"epoch": 0.11143210895583987, "grad_norm": 3.002775395507999, "learning_rate": 9.996215578488434e-06, "loss": 0.8062, "step": 270},
    {"epoch": 0.11349566652909616, "grad_norm": 2.9801626597554174, "learning_rate": 9.994684431027407e-06, "loss": 0.7844, "step": 275},
    {"epoch": 0.11555922410235246, "grad_norm": 3.1190858706402604, "learning_rate": 9.99289396539997e-06, "loss": 0.8089, "step": 280},
    {"epoch": 0.11762278167560875, "grad_norm": 2.785366704095957, "learning_rate": 9.990844274565004e-06, "loss": 0.7997, "step": 285},
    {"epoch": 0.11968633924886504, "grad_norm": 3.175006190343879, "learning_rate": 9.98853546494006e-06, "loss": 0.8186, "step": 290},
    {"epoch": 0.12174989682212134, "grad_norm": 2.883637899845788, "learning_rate": 9.985967656395823e-06, "loss": 0.7889, "step": 295},
    {"epoch": 0.12381345439537764, "grad_norm": 2.991597043168701, "learning_rate": 9.983140982249913e-06, "loss": 0.8136, "step": 300},
    {"epoch": 0.12587701196863393, "grad_norm": 2.967721245447412, "learning_rate": 9.980055589259937e-06, "loss": 0.7804, "step": 305},
    {"epoch": 0.12794056954189023, "grad_norm": 2.925851320281598, "learning_rate": 9.976711637615886e-06, "loss": 0.7789, "step": 310},
    {"epoch": 0.1300041271151465, "grad_norm": 3.015507414806422, "learning_rate": 9.973109300931813e-06, "loss": 0.7821, "step": 315},
    {"epoch": 0.1320676846884028, "grad_norm": 3.076125191679388, "learning_rate": 9.969248766236823e-06, "loss": 0.7668, "step": 320},
    {"epoch": 0.1341312422616591, "grad_norm": 2.9816750909346137, "learning_rate": 9.965130233965353e-06, "loss": 0.7867, "step": 325},
    {"epoch": 0.1361947998349154, "grad_norm": 3.1216143549392514, "learning_rate": 9.960753917946777e-06, "loss": 0.7827, "step": 330},
    {"epoch": 0.1382583574081717, "grad_norm": 3.125152909470307, "learning_rate": 9.956120045394297e-06, "loss": 0.7683, "step": 335},
    {"epoch": 0.140321914981428, "grad_norm": 2.8419491540297694, "learning_rate": 9.951228856893152e-06, "loss": 0.7571, "step": 340},
    {"epoch": 0.14238547255468428, "grad_norm": 2.739631493281793, "learning_rate": 9.946080606388115e-06, "loss": 0.7743, "step": 345},
    {"epoch": 0.14444903012794058, "grad_norm": 2.888837107470469, "learning_rate": 9.940675561170328e-06, "loss": 0.7795, "step": 350},
    {"epoch": 0.14651258770119685, "grad_norm": 3.0512065932946886, "learning_rate": 9.935014001863405e-06, "loss": 0.7753, "step": 355},
    {"epoch": 0.14857614527445315, "grad_norm": 3.029181890310092, "learning_rate": 9.92909622240888e-06, "loss": 0.7515, "step": 360},
    {"epoch": 0.15063970284770944, "grad_norm": 2.9755768091749015, "learning_rate": 9.92292253005093e-06, "loss": 0.7564, "step": 365},
    {"epoch": 0.15270326042096574, "grad_norm": 2.8483284554348565, "learning_rate": 9.916493245320428e-06, "loss": 0.7642, "step": 370},
    {"epoch": 0.15476681799422204, "grad_norm": 3.2241540408474, "learning_rate": 9.909808702018315e-06, "loss": 0.758, "step": 375},
    {"epoch": 0.15683037556747834, "grad_norm": 2.871363873496698, "learning_rate": 9.902869247198246e-06, "loss": 0.7595, "step": 380},
    {"epoch": 0.15889393314073463, "grad_norm": 2.9539374709865607, "learning_rate": 9.895675241148588e-06, "loss": 0.7518, "step": 385},
    {"epoch": 0.16095749071399093, "grad_norm": 2.9560555474503043, "learning_rate": 9.888227057373716e-06, "loss": 0.7493, "step": 390},
    {"epoch": 0.16302104828724723, "grad_norm": 2.7957300167557926, "learning_rate": 9.880525082574604e-06, "loss": 0.7476, "step": 395},
    {"epoch": 0.1650846058605035, "grad_norm": 2.6981062487306606, "learning_rate": 9.872569716628764e-06, "loss": 0.7273, "step": 400},
    {"epoch": 0.1671481634337598, "grad_norm": 2.7734124248259366, "learning_rate": 9.86436137256948e-06, "loss": 0.7297, "step": 405},
    {"epoch": 0.1692117210070161, "grad_norm": 2.73464344090408, "learning_rate": 9.855900476564365e-06, "loss": 0.7445, "step": 410},
    {"epoch": 0.1712752785802724, "grad_norm": 2.7258763684985303, "learning_rate": 9.847187467893228e-06, "loss": 0.7343, "step": 415},
    {"epoch": 0.17333883615352869, "grad_norm": 2.6760448573907354, "learning_rate": 9.83822279892528e-06, "loss": 0.7451, "step": 420},
    {"epoch": 0.17540239372678498, "grad_norm": 2.782701927749318, "learning_rate": 9.829006935095629e-06, "loss": 0.7375, "step": 425},
    {"epoch": 0.17746595130004128, "grad_norm": 2.72035422100488, "learning_rate": 9.819540354881136e-06, "loss": 0.7385, "step": 430},
    {"epoch": 0.17952950887329758, "grad_norm": 2.928620231770996, "learning_rate": 9.809823549775559e-06, "loss": 0.7194, "step": 435},
    {"epoch": 0.18159306644655385, "grad_norm": 3.127420500265884, "learning_rate": 9.79985702426404e-06, "loss": 0.7191, "step": 440},
    {"epoch": 0.18365662401981014, "grad_norm": 3.075733323456217, "learning_rate": 9.78964129579691e-06, "loss": 0.7327, "step": 445},
    {"epoch": 0.18572018159306644, "grad_norm": 2.9108156590411336, "learning_rate": 9.779176894762833e-06, "loss": 0.7314, "step": 450},
    {"epoch": 0.18778373916632274, "grad_norm": 2.946027511832701, "learning_rate": 9.768464364461248e-06, "loss": 0.7154, "step": 455},
    {"epoch": 0.18984729673957904, "grad_norm": 2.9516974484587863, "learning_rate": 9.75750426107419e-06, "loss": 0.6993, "step": 460},
    {"epoch": 0.19191085431283533, "grad_norm": 2.8592349880443897, "learning_rate": 9.746297153637386e-06, "loss": 0.7116, "step": 465},
    {"epoch": 0.19397441188609163, "grad_norm": 2.9554239306486596, "learning_rate": 9.73484362401073e-06, "loss": 0.6998, "step": 470},
    {"epoch": 0.19603796945934793, "grad_norm": 2.780540575615214, "learning_rate": 9.723144266848073e-06, "loss": 0.7175, "step": 475},
    {"epoch": 0.1981015270326042, "grad_norm": 2.7790855329011657, "learning_rate": 9.71119968956633e-06, "loss": 0.7061, "step": 480},
    {"epoch": 0.2001650846058605, "grad_norm": 2.811087864792907, "learning_rate": 9.69901051231397e-06, "loss": 0.7239, "step": 485},
    {"epoch": 0.2022286421791168, "grad_norm": 2.7071720679814937, "learning_rate": 9.686577367938802e-06, "loss": 0.6857, "step": 490},
    {"epoch": 0.2042921997523731, "grad_norm": 2.9052904699729654, "learning_rate": 9.673900901955118e-06, "loss": 0.7026, "step": 495},
    {"epoch": 0.20635575732562939, "grad_norm": 2.7428004689795284, "learning_rate": 9.66098177251019e-06, "loss": 0.7043, "step": 500},
    {"epoch": 0.20841931489888568, "grad_norm": 2.9542233401997797, "learning_rate": 9.647820650350087e-06, "loss": 0.7008, "step": 505},
    {"epoch": 0.21048287247214198, "grad_norm": 2.8206780344207005, "learning_rate": 9.634418218784856e-06, "loss": 0.6827, "step": 510},
    {"epoch": 0.21254643004539828, "grad_norm": 2.7820042566137917, "learning_rate": 9.620775173653055e-06, "loss": 0.6905, "step": 515},
    {"epoch": 0.21460998761865457, "grad_norm": 2.8617753014266354, "learning_rate": 9.606892223285604e-06, "loss": 0.6875, "step": 520},
    {"epoch": 0.21667354519191084, "grad_norm": 2.818801617888441, "learning_rate": 9.592770088469032e-06, "loss": 0.7143, "step": 525},
    {"epoch": 0.21873710276516714, "grad_norm": 2.803399949113071, "learning_rate": 9.578409502408037e-06, "loss": 0.6596, "step": 530},
    {"epoch": 0.22080066033842344, "grad_norm": 2.832046830498785, "learning_rate": 9.563811210687433e-06, "loss": 0.6912, "step": 535},
    {"epoch": 0.22286421791167974, "grad_norm": 2.7793622312334842, "learning_rate": 9.548975971233427e-06, "loss": 0.6945, "step": 540},
    {"epoch": 0.22492777548493603, "grad_norm": 2.8345021788595073, "learning_rate": 9.53390455427428e-06, "loss": 0.6767, "step": 545},
    {"epoch": 0.22699133305819233, "grad_norm": 2.7737306964899284, "learning_rate": 9.518597742300309e-06, "loss": 0.6816, "step": 550},
    {"epoch": 0.22905489063144863, "grad_norm": 2.7181943065638388, "learning_rate": 9.503056330023267e-06, "loss": 0.6797, "step": 555},
    {"epoch": 0.23111844820470492, "grad_norm": 2.935392215628063, "learning_rate": 9.48728112433508e-06, "loss": 0.6993, "step": 560},
    {"epoch": 0.2331820057779612, "grad_norm": 3.339231195371882, "learning_rate": 9.471272944265948e-06, "loss": 0.6565, "step": 565},
    {"epoch": 0.2352455633512175, "grad_norm": 2.588226117443037, "learning_rate": 9.45503262094184e-06, "loss": 0.6685, "step": 570},
    {"epoch": 0.2373091209244738, "grad_norm": 2.7591510109484507, "learning_rate": 9.438560997541319e-06, "loss": 0.6712, "step": 575},
    {"epoch": 0.23937267849773008, "grad_norm": 2.5942405830165125, "learning_rate": 9.421858929251786e-06, "loss": 0.6595, "step": 580},
    {"epoch": 0.24143623607098638, "grad_norm": 3.1536570604359064, "learning_rate": 9.404927283225064e-06, "loss": 0.6624, "step": 585},
    {"epoch": 0.24349979364424268, "grad_norm": 2.6761790991992402, "learning_rate": 9.387766938532386e-06, "loss": 0.6481, "step": 590},
    {"epoch": 0.24556335121749898, "grad_norm": 2.559657538042446, "learning_rate": 9.370378786118755e-06, "loss": 0.6518, "step": 595},
    {"epoch": 0.24762690879075527, "grad_norm": 2.70639753951879, "learning_rate": 9.352763728756677e-06, "loss": 0.6704, "step": 600},
    {"epoch": 0.24969046636401154, "grad_norm": 2.7368008412010703, "learning_rate": 9.334922680999304e-06, "loss": 0.6621, "step": 605},
    {"epoch": 0.25175402393726787, "grad_norm": 2.852828202511116, "learning_rate": 9.316856569132942e-06, "loss": 0.6552, "step": 610},
    {"epoch": 0.25381758151052414, "grad_norm": 2.5928866487990057, "learning_rate": 9.29856633112896e-06, "loss": 0.666, "step": 615},
    {"epoch": 0.25588113908378046, "grad_norm": 2.7913637236454574, "learning_rate": 9.280052916595098e-06, "loss": 0.6661, "step": 620},
    {"epoch": 0.25794469665703673, "grad_norm": 2.5894434588677164, "learning_rate": 9.261317286726157e-06, "loss": 0.6654, "step": 625},
    {"epoch": 0.260008254230293, "grad_norm": 2.628011843241579, "learning_rate": 9.242360414254098e-06, "loss": 0.6649, "step": 630},
    {"epoch": 0.2620718118035493, "grad_norm": 2.7172847524203636, "learning_rate": 9.223183283397538e-06, "loss": 0.6662, "step": 635},
    {"epoch": 0.2641353693768056, "grad_norm": 2.659395752307675, "learning_rate": 9.203786889810655e-06, "loss": 0.6285, "step": 640},
    {"epoch": 0.2661989269500619, "grad_norm": 2.90375149194139, "learning_rate": 9.18417224053149e-06, "loss": 0.6479, "step": 645},
    {"epoch": 0.2682624845233182, "grad_norm": 2.6966774373935927, "learning_rate": 9.16434035392966e-06, "loss": 0.6397, "step": 650},
    {"epoch": 0.2703260420965745, "grad_norm": 2.7576279699971713, "learning_rate": 9.144292259653493e-06, "loss": 0.651, "step": 655},
    {"epoch": 0.2723895996698308, "grad_norm": 2.8455714787136364, "learning_rate": 9.124028998576568e-06, "loss": 0.634, "step": 660},
    {"epoch": 0.2744531572430871, "grad_norm": 2.524900004589425, "learning_rate": 9.103551622743667e-06, "loss": 0.6393, "step": 665},
    {"epoch": 0.2765167148163434, "grad_norm": 2.672726083965731, "learning_rate": 9.082861195316164e-06, "loss": 0.6321, "step": 670},
    {"epoch": 0.27858027238959965, "grad_norm": 3.0535454448794637, "learning_rate": 9.061958790516821e-06, "loss": 0.6219, "step": 675},
    {"epoch": 0.280643829962856, "grad_norm": 2.961262579490319, "learning_rate": 9.040845493574016e-06, "loss": 0.6239, "step": 680},
    {"epoch": 0.28270738753611224, "grad_norm": 2.867116023710867, "learning_rate": 9.019522400665397e-06, "loss": 0.6183, "step": 685},
    {"epoch": 0.28477094510936857, "grad_norm": 2.70507865608555, "learning_rate": 8.99799061886098e-06, "loss": 0.6119, "step": 690},
    {"epoch": 0.28683450268262484, "grad_norm": 2.7686804191515404, "learning_rate": 8.976251266065663e-06, "loss": 0.6205, "step": 695},
    {"epoch": 0.28889806025588116, "grad_norm": 2.7663678432556926, "learning_rate": 8.95430547096118e-06, "loss": 0.6197, "step": 700},
    {"epoch": 0.29096161782913743, "grad_norm": 2.6693543634519115, "learning_rate": 8.932154372947512e-06, "loss": 0.616, "step": 705},
    {"epoch": 0.2930251754023937, "grad_norm": 2.606058194100552, "learning_rate": 8.90979912208373e-06, "loss": 0.5959, "step": 710},
    {"epoch": 0.29508873297565, "grad_norm": 2.9126080020507903, "learning_rate": 8.887240879028276e-06, "loss": 0.6109, "step": 715},
    {"epoch": 0.2971522905489063, "grad_norm": 2.712632905481665, "learning_rate": 8.864480814978715e-06, "loss": 0.6176, "step": 720},
    {"epoch": 0.2992158481221626, "grad_norm": 2.92931205166235, "learning_rate": 8.841520111610914e-06, "loss": 0.6152, "step": 725},
    {"epoch": 0.3012794056954189, "grad_norm": 2.757497057768333, "learning_rate": 8.818359961017705e-06, "loss": 0.6269, "step": 730},
    {"epoch": 0.3033429632686752, "grad_norm": 2.911039679371386, "learning_rate": 8.795001565646983e-06, "loss": 0.617, "step": 735},
    {"epoch": 0.3054065208419315, "grad_norm": 2.6754121417561922, "learning_rate": 8.771446138239282e-06, "loss": 0.6079, "step": 740},
    {"epoch": 0.3074700784151878, "grad_norm": 2.9820721402144947, "learning_rate": 8.747694901764807e-06, "loss": 0.6028, "step": 745},
    {"epoch": 0.3095336359884441, "grad_norm": 2.7199956091577353, "learning_rate": 8.723749089359941e-06, "loss": 0.613, "step": 750},
    {"epoch": 0.31159719356170035, "grad_norm": 2.587929517491648, "learning_rate": 8.699609944263219e-06, "loss": 0.6113, "step": 755},
    {"epoch": 0.3136607511349567, "grad_norm": 2.7061730588333335, "learning_rate": 8.675278719750788e-06, "loss": 0.6027, "step": 760},
    {"epoch": 0.31572430870821294, "grad_norm": 2.79901699476363, "learning_rate": 8.65075667907133e-06, "loss": 0.6025, "step": 765},
    {"epoch": 0.31778786628146927, "grad_norm": 2.7737439842011193, "learning_rate": 8.626045095380477e-06, "loss": 0.6044, "step": 770},
    {"epoch": 0.31985142385472554, "grad_norm": 2.6503735236627453, "learning_rate": 8.601145251674718e-06, "loss": 0.5873, "step": 775},
    {"epoch": 0.32191498142798186, "grad_norm": 2.8347160208255078, "learning_rate": 8.576058440724777e-06, "loss": 0.5828, "step": 780},
    {"epoch": 0.32397853900123813, "grad_norm": 2.8544698266992996, "learning_rate": 8.5507859650085e-06, "loss": 0.5939, "step": 785},
    {"epoch": 0.32604209657449446, "grad_norm": 2.686873043228608, "learning_rate": 8.525329136643227e-06, "loss": 0.5821, "step": 790},
    {"epoch": 0.3281056541477507, "grad_norm": 2.8021270796404, "learning_rate": 8.499689277317675e-06, "loss": 0.5871, "step": 795},
    {"epoch": 0.330169211721007, "grad_norm": 2.5870353213265402, "learning_rate": 8.473867718223317e-06, "loss": 0.5813, "step": 800},
    {"epoch": 0.3322327692942633, "grad_norm": 2.7498697085626844, "learning_rate": 8.447865799985258e-06, "loss": 0.5937, "step": 805},
    {"epoch": 0.3342963268675196, "grad_norm": 2.6169464644713014, "learning_rate": 8.421684872592643e-06, "loss": 0.5769, "step": 810},
    {"epoch": 0.3363598844407759, "grad_norm": 2.7371393784664804, "learning_rate": 8.395326295328562e-06, "loss": 0.5806, "step": 815},
    {"epoch": 0.3384234420140322, "grad_norm": 2.6465634165278575, "learning_rate": 8.368791436699482e-06, "loss": 0.5914, "step": 820},
    {"epoch": 0.3404869995872885, "grad_norm": 2.57951699963865, "learning_rate": 8.342081674364182e-06, "loss": 0.5711, "step": 825},
    {"epoch": 0.3425505571605448, "grad_norm": 2.726507330124962, "learning_rate": 8.315198395062246e-06, "loss": 0.5789, "step": 830},
    {"epoch": 0.34461411473380105, "grad_norm": 2.6932144440323267, "learning_rate": 8.28814299454205e-06, "loss": 0.5868, "step": 835},
    {"epoch": 0.34667767230705737, "grad_norm": 2.6545019349833052, "learning_rate": 8.260916877488308e-06, "loss": 0.585, "step": 840},
    {"epoch": 0.34874122988031364, "grad_norm": 2.6267015172396544, "learning_rate": 8.233521457449131e-06, "loss": 0.5713, "step": 845},
    {"epoch": 0.35080478745356997, "grad_norm": 2.446024579716919, "learning_rate": 8.205958156762647e-06, "loss": 0.5668, "step": 850},
    {"epoch": 0.35286834502682624, "grad_norm": 2.5506551014516208, "learning_rate": 8.178228406483145e-06, "loss": 0.5683, "step": 855},
    {"epoch": 0.35493190260008256, "grad_norm": 2.5964627157082, "learning_rate": 8.150333646306787e-06, "loss": 0.5631, "step": 860},
    {"epoch": 0.35699546017333883, "grad_norm": 2.7366272191977212, "learning_rate": 8.122275324496855e-06, "loss": 0.5854, "step": 865},
    {"epoch": 0.35905901774659515, "grad_norm": 2.470623506679956, "learning_rate": 8.094054897808556e-06, "loss": 0.565, "step": 870},
    {"epoch": 0.3611225753198514, "grad_norm": 2.6937122631536496, "learning_rate": 8.065673831413396e-06, "loss": 0.5644, "step": 875},
    {"epoch": 0.3631861328931077, "grad_norm": 3.8142387103231847, "learning_rate": 8.037133598823107e-06, "loss": 0.5799, "step": 880},
    {"epoch": 0.365249690466364, "grad_norm": 2.760510177003008, "learning_rate": 8.008435681813139e-06, "loss": 0.566, "step": 885},
    {"epoch": 0.3673132480396203, "grad_norm": 2.6376344821820354, "learning_rate": 7.979581570345737e-06, "loss": 0.572, "step": 890},
    {"epoch": 0.3693768056128766, "grad_norm": 2.5348136936661043, "learning_rate": 7.950572762492577e-06, "loss": 0.5529, "step": 895},
    {"epoch": 0.3714403631861329, "grad_norm": 2.551175872300915, "learning_rate": 7.92141076435699e-06, "loss": 0.5593, "step": 900},
    {"epoch": 0.3735039207593892, "grad_norm": 2.609535679611351, "learning_rate": 7.892097089995767e-06, "loss": 0.5491, "step": 905},
    {"epoch": 0.3755674783326455, "grad_norm": 2.670033948965714, "learning_rate": 7.862633261340556e-06, "loss": 0.5446, "step": 910},
    {"epoch": 0.3776310359059018, "grad_norm": 2.406170232856436, "learning_rate": 7.83302080811883e-06, "loss": 0.535, "step": 915},
    {"epoch": 0.37969459347915807, "grad_norm": 2.5973677086378024, "learning_rate": 7.803261267774484e-06, "loss": 0.5501, "step": 920},
    {"epoch": 0.38175815105241434, "grad_norm": 2.7913389668469977, "learning_rate": 7.773356185388003e-06, "loss": 0.5358, "step": 925},
    {"epoch": 0.38382170862567067, "grad_norm": 2.395012173415671, "learning_rate": 7.74330711359624e-06, "loss": 0.5492, "step": 930},
    {"epoch": 0.38588526619892694, "grad_norm": 2.6947690399272153, "learning_rate": 7.713115612511815e-06, "loss": 0.5481, "step": 935},
    {"epoch": 0.38794882377218326, "grad_norm": 2.711638332847109, "learning_rate": 7.682783249642104e-06, "loss": 0.5533, "step": 940},
    {"epoch": 0.39001238134543953, "grad_norm": 2.616283183023565, "learning_rate": 7.652311599807868e-06, "loss": 0.5506, "step": 945},
    {"epoch": 0.39207593891869585, "grad_norm": 2.4832951603969162, "learning_rate": 7.62170224506148e-06, "loss": 0.5316, "step": 950},
    {"epoch": 0.3941394964919521, "grad_norm": 2.6721377852076804, "learning_rate": 7.590956774604791e-06, "loss": 0.5458, "step": 955},
    {"epoch": 0.3962030540652084, "grad_norm": 2.777241124185147, "learning_rate": 7.5600767847066215e-06, "loss": 0.5527, "step": 960},
    {"epoch": 0.3982666116384647, "grad_norm": 2.5914713737840924, "learning_rate": 7.529063878619886e-06, "loss": 0.5484, "step": 965},
    {"epoch": 0.400330169211721, "grad_norm": 2.573321705871262, "learning_rate": 7.497919666498346e-06, "loss": 0.5435, "step": 970},
    {"epoch": 0.4023937267849773, "grad_norm": 2.6520337314396993, "learning_rate": 7.466645765313023e-06, "loss": 0.5358, "step": 975},
    {"epoch": 0.4044572843582336, "grad_norm": 2.7056801557526486, "learning_rate": 7.435243798768242e-06, "loss": 0.5499, "step": 980},
    {"epoch": 0.4065208419314899, "grad_norm": 2.6559172388700047, "learning_rate": 7.403715397217335e-06, "loss": 0.5241, "step": 985},
    {"epoch": 0.4085843995047462, "grad_norm": 2.5216304625498647, "learning_rate": 7.3720621975779815e-06, "loss": 0.521, "step": 990},
    {"epoch": 0.4106479570780025, "grad_norm": 2.519874224942243, "learning_rate": 7.3402858432472416e-06, "loss": 0.5489, "step": 995},
    {"epoch": 0.41271151465125877, "grad_norm": 2.518659981447909, "learning_rate": 7.308387984016217e-06, "loss": 0.5266, "step": 1000},
    {"epoch": 0.41477507222451504, "grad_norm": 2.721359929344381, "learning_rate": 7.276370275984408e-06, "loss": 0.5325, "step": 1005},
    {"epoch": 0.41683862979777137, "grad_norm": 2.575184642938111, "learning_rate": 7.2442343814737135e-06, "loss": 0.5357, "step": 1010},
    {"epoch": 0.41890218737102763, "grad_norm": 2.5077385160634904, "learning_rate": 7.211981968942147e-06, "loss": 0.524, "step": 1015},
    {"epoch": 0.42096574494428396, "grad_norm": 2.453550934446932, "learning_rate": 7.179614712897195e-06, "loss": 0.5366, "step": 1020},
    {"epoch": 0.42302930251754023, "grad_norm": 2.561635348596983, "learning_rate": 7.147134293808885e-06, "loss": 0.5117, "step": 1025},
    {"epoch": 0.42509286009079655, "grad_norm": 2.501066318480838, "learning_rate": 7.114542398022539e-06, "loss": 0.524, "step": 1030},
    {"epoch": 0.4271564176640528, "grad_norm": 2.7215445820801714, "learning_rate": 7.08184071767122e-06, "loss": 0.5111, "step": 1035},
    {"epoch": 0.42921997523730915, "grad_norm": 2.6088491234313604, "learning_rate": 7.049030950587874e-06, "loss": 0.5113, "step": 1040},
    {"epoch": 0.4312835328105654, "grad_norm": 2.551009519200941, "learning_rate": 7.016114800217182e-06, "loss": 0.5136, "step": 1045},
    {"epoch": 0.4333470903838217, "grad_norm": 2.457255947220057, "learning_rate": 6.983093975527128e-06, "loss": 0.5208, "step": 1050},
    {"epoch": 0.435410647957078, "grad_norm": 2.6994910463118913, "learning_rate": 6.949970190920255e-06, "loss": 0.5212, "step": 1055},
    {"epoch": 0.4374742055303343, "grad_norm": 2.597397073975052, "learning_rate": 6.916745166144671e-06, "loss": 0.5128, "step": 1060},
    {"epoch": 0.4395377631035906, "grad_norm": 2.605039455033872, "learning_rate": 6.883420626204751e-06, "loss": 0.5081, "step": 1065},
    {"epoch": 0.4416013206768469, "grad_norm": 2.7509624680821, "learning_rate": 6.849998301271585e-06, "loss": 0.516, "step": 1070},
    {"epoch": 0.4436648782501032, "grad_norm": 2.4731078067929197, "learning_rate": 6.81647992659314e-06, "loss": 0.5144, "step": 1075},
    {"epoch": 0.44572843582335947, "grad_norm": 2.611162217133499, "learning_rate": 6.782867242404181e-06, "loss": 0.4979, "step": 1080},
    {"epoch": 0.44779199339661574, "grad_norm": 2.5398801893032474, "learning_rate": 6.749161993835901e-06, "loss": 0.5074, "step": 1085},
    {"epoch": 0.44985555096987206, "grad_norm": 2.5951566561368775, "learning_rate": 6.715365930825337e-06, "loss": 0.4975, "step": 1090},
    {"epoch": 0.45191910854312833, "grad_norm": 2.6092895827410034, "learning_rate": 6.681480808024503e-06, "loss": 0.5124, "step": 1095},
    {"epoch": 0.45398266611638466, "grad_norm": 2.5414548197736706, "learning_rate": 6.6475083847092895e-06, "loss": 0.4957, "step": 1100},
    {"epoch": 0.45604622368964093, "grad_norm": 2.8460005959716166, "learning_rate": 6.613450424688131e-06, "loss": 0.5028, "step": 1105},
    {"epoch": 0.45810978126289725, "grad_norm": 2.5601376301451753, "learning_rate": 6.579308696210428e-06, "loss": 0.4876, "step": 1110},
    {"epoch": 0.4601733388361535, "grad_norm": 2.6807233381503237, "learning_rate": 6.545084971874738e-06, "loss": 0.5155, "step": 1115},
    {"epoch": 0.46223689640940985, "grad_norm": 2.5044297534154336, "learning_rate": 6.51078102853675e-06, "loss": 0.5012, "step": 1120},
    {"epoch": 0.4643004539826661, "grad_norm": 2.5092897248073474, "learning_rate": 6.47639864721703e-06, "loss": 0.493, "step": 1125},
    {"epoch": 0.4663640115559224, "grad_norm": 2.4954489446094392, "learning_rate": 6.44193961300855e-06, "loss": 0.4868, "step": 1130},
    {"epoch": 0.4684275691291787, "grad_norm": 2.5451475237887666, "learning_rate": 6.407405714984011e-06, "loss": 0.4866, "step": 1135},
    {"epoch": 0.470491126702435, "grad_norm": 2.6098482720462552, "learning_rate": 6.3727987461029565e-06, "loss": 0.4956, "step": 1140},
    {"epoch": 0.4725546842756913, "grad_norm": 2.5923597566181296, "learning_rate": 6.338120503118684e-06, "loss": 0.4998, "step": 1145},
    {"epoch": 0.4746182418489476, "grad_norm": 2.7721513261613393, "learning_rate": 6.303372786484953e-06, "loss": 0.4948, "step": 1150},
    {"epoch": 0.4766817994222039, "grad_norm": 2.4572497473964012, "learning_rate": 6.2685574002625235e-06, "loss": 0.4744, "step": 1155},
    {"epoch": 0.47874535699546017, "grad_norm": 2.7248530281028653, "learning_rate": 6.233676152025474e-06, "loss": 0.4789, "step": 1160},
    {"epoch": 0.4808089145687165, "grad_norm": 2.510160739180536, "learning_rate": 6.198730852767364e-06, "loss": 0.4924, "step": 1165},
    {"epoch": 0.48287247214197276, "grad_norm": 2.5611750613730275, "learning_rate": 6.1637233168072066e-06, "loss": 0.4816, "step": 1170},
    {"epoch": 0.48493602971522903, "grad_norm": 2.580939509201322, "learning_rate": 6.1286553616952705e-06, "loss": 0.4826, "step": 1175},
    {"epoch": 0.48699958728848536, "grad_norm": 2.579364116128431, "learning_rate": 6.09352880811872e-06, "loss": 0.4926, "step": 1180},
    {"epoch": 0.4890631448617416, "grad_norm": 2.5855927528887745, "learning_rate": 6.058345479807077e-06, "loss": 0.4826, "step": 1185},
    {"epoch": 0.49112670243499795, "grad_norm": 2.468639972857448, "learning_rate": 6.023107203437547e-06, "loss": 0.4822, "step": 1190},
    {"epoch": 0.4931902600082542, "grad_norm": 2.534893122365119, "learning_rate": 5.987815808540169e-06, "loss": 0.4969, "step": 1195},
    {"epoch": 0.49525381758151055, "grad_norm": 2.4035384879566095, "learning_rate": 5.95247312740284e-06, "loss": 0.4942, "step": 1200},
    {"epoch": 0.4973173751547668, "grad_norm": 2.6544116089729552, "learning_rate": 5.917080994976172e-06, "loss": 0.4678, "step": 1205},
    {"epoch": 0.4993809327280231, "grad_norm": 2.6911232973443653, "learning_rate": 5.881641248778235e-06, "loss": 0.4796, "step": 1210},
    {"epoch": 0.5014444903012794, "grad_norm": 2.9098929338992177, "learning_rate": 5.8461557287991455e-06, "loss": 0.4926, "step": 1215},
    {"epoch": 0.5035080478745357, "grad_norm": 2.724813117289689, "learning_rate": 5.810626277405548e-06, "loss": 0.4837, "step": 1220},
    {"epoch": 0.505571605447792, "grad_norm": 2.5885450608927782, "learning_rate": 5.775054739244948e-06, "loss": 0.4827, "step": 1225},
    {"epoch": 0.5076351630210483, "grad_norm": 2.4385273279955677, "learning_rate": 5.739442961149949e-06, "loss": 0.4777, "step": 1230},
    {"epoch": 0.5096987205943045, "grad_norm": 9.983765788937596, "learning_rate": 5.703792792042363e-06, "loss": 0.4657, "step": 1235},
    {"epoch": 0.5117622781675609, "grad_norm": 2.5992188623557575, "learning_rate": 5.668106082837226e-06, "loss": 0.4677, "step": 1240},
    {"epoch": 0.5138258357408172, "grad_norm": 2.5248538601870916, "learning_rate": 5.632384686346682e-06, "loss": 0.4648, "step": 1245},
    {"epoch": 0.5158893933140735, "grad_norm": 2.4528442128112653, "learning_rate": 5.5966304571838084e-06, "loss": 0.468, "step": 1250},
    {"epoch": 0.5179529508873297, "grad_norm": 2.4985756922245375, "learning_rate": 5.560845251666307e-06, "loss": 0.4691, "step": 1255},
    {"epoch": 0.520016508460586, "grad_norm": 2.588098554859599, "learning_rate": 5.525030927720142e-06, "loss": 0.4843, "step": 1260},
    {"epoch": 0.5220800660338424, "grad_norm": 2.596100924626437, "learning_rate": 5.489189344783072e-06, "loss": 0.4722, "step": 1265},
    {"epoch": 0.5241436236070987, "grad_norm": 2.4880471647467526, "learning_rate": 5.4533223637081075e-06, "loss": 0.4652, "step": 1270},
    {"epoch": 0.5262071811803549, "grad_norm": 2.5470383673418926, "learning_rate": 5.417431846666903e-06, "loss": 0.4657, "step": 1275},
    {"epoch": 0.5282707387536112, "grad_norm": 2.32837647206492, "learning_rate": 5.381519657053072e-06, "loss": 0.4542, "step": 1280},
    {"epoch": 0.5303342963268676, "grad_norm": 2.5139826780698753, "learning_rate": 5.3455876593854435e-06, "loss": 0.4665, "step": 1285},
    {"epoch": 0.5323978539001238, "grad_norm": 2.4870649815664496, "learning_rate": 5.309637719211256e-06, "loss": 0.4527, "step": 1290},
    {"epoch": 0.5344614114733801, "grad_norm": 2.509973264998628, "learning_rate": 5.273671703009301e-06, "loss": 0.4507, "step": 1295},
    {"epoch": 0.5365249690466364, "grad_norm": 2.690719386362163, "learning_rate": 5.2376914780930225e-06, "loss": 0.4576, "step": 1300},
    {"epoch": 0.5385885266198926, "grad_norm": 4.083713443737158, "learning_rate": 5.201698912513561e-06, "loss": 0.4412, "step": 1305},
    {"epoch": 0.540652084193149, "grad_norm": 2.3850290751433016, "learning_rate": 5.165695874962769e-06, "loss": 0.4633, "step": 1310},
    {"epoch": 0.5427156417664053, "grad_norm": 2.383468999919073, "learning_rate": 5.129684234676195e-06, "loss": 0.4482, "step": 1315},
    {"epoch": 0.5447791993396616, "grad_norm": 2.3602215294945568, "learning_rate": 5.0936658613360304e-06, "loss": 0.4683, "step": 1320},
    {"epoch": 0.5468427569129178, "grad_norm": 2.3769603536661035, "learning_rate": 5.057642624974036e-06, "loss": 0.4587, "step": 1325},
    {"epoch": 0.5489063144861742, "grad_norm": 2.5837187422329837, "learning_rate": 5.021616395874459e-06, "loss": 0.439, "step": 1330},
    {"epoch": 0.5509698720594305, "grad_norm": 2.587958298155525, "learning_rate": 4.9855890444769226e-06, "loss": 0.4547, "step": 1335},
    {"epoch": 0.5530334296326868, "grad_norm": 2.4489991448969795, "learning_rate": 4.94956244127932e-06, "loss": 0.4435, "step": 1340},
    {"epoch": 0.555096987205943, "grad_norm": 2.70305559316515, "learning_rate": 4.9135384567407e-06, "loss": 0.4635, "step": 1345},
    {"epoch": 0.5571605447791993, "grad_norm": 2.552605702749918, "learning_rate": 4.877518961184149e-06, "loss": 0.4395, "step": 1350},
    {"epoch": 0.5592241023524557, "grad_norm": 2.4491029644956717, "learning_rate": 4.841505824699697e-06, "loss": 0.4446, "step": 1355},
    {"epoch": 0.561287659925712, "grad_norm": 2.2903925047271003, "learning_rate": 4.805500917047217e-06, "loss": 0.4398, "step": 1360},
    {"epoch": 0.5633512174989682, "grad_norm": 2.379688312715005, "learning_rate": 4.769506107559344e-06, "loss": 0.4515, "step": 1365},
    {"epoch": 0.5654147750722245, "grad_norm": 2.881348434158553, "learning_rate": 4.7335232650444376e-06, "loss": 0.4609, "step": 1370},
    {"epoch": 0.5674783326454808, "grad_norm": 2.450954066121652, "learning_rate": 4.697554257689541e-06, "loss": 0.4556, "step": 1375},
    {"epoch": 0.5695418902187371, "grad_norm": 2.418284415029761, "learning_rate": 4.661600952963391e-06, "loss": 0.4452, "step": 1380},
    {"epoch": 0.5716054477919934, "grad_norm": 2.3480748218612457, "learning_rate": 4.6256652175194675e-06, "loss": 0.461, "step": 1385},
    {"epoch": 0.5736690053652497, "grad_norm": 2.472904201761537, "learning_rate": 4.589748917099067e-06, "loss": 0.4492, "step": 1390},
    {"epoch": 0.5757325629385059, "grad_norm": 2.468262838594691, "learning_rate": 4.553853916434448e-06, "loss": 0.4403, "step": 1395},
    {"epoch": 0.5777961205117623, "grad_norm": 2.361041942268244, "learning_rate": 4.517982079152009e-06, "loss": 0.4377, "step": 1400},
    {"epoch": 0.5798596780850186, "grad_norm": 2.5500737849526423, "learning_rate": 4.482135267675529e-06, "loss": 0.45, "step": 1405},
    {"epoch": 0.5819232356582749, "grad_norm": 2.453235877539221, "learning_rate": 4.446315343129484e-06, "loss": 0.4328, "step": 1410},
    {"epoch": 0.5839867932315311, "grad_norm": 2.3281079794605355, "learning_rate": 4.410524165242407e-06, "loss": 0.4374, "step": 1415},
    {"epoch": 0.5860503508047874, "grad_norm": 2.551505972004267, "learning_rate": 4.3747635922503364e-06, "loss": 0.4432, "step": 1420},
    {"epoch": 0.5881139083780438, "grad_norm": 2.5826844758469276, "learning_rate": 4.3390354808003464e-06, "loss": 0.4402, "step": 1425},
    {"epoch": 0.5901774659513, "grad_norm": 2.3612299259996936, "learning_rate": 4.30334168585414e-06, "loss": 0.4388, "step": 1430},
    {"epoch": 0.5922410235245563, "grad_norm": 2.4199132831049246, "learning_rate": 4.26768406059175e-06, "loss": 0.4296, "step": 1435},
    {"epoch": 0.5943045810978126, "grad_norm": 2.3293439174487274, "learning_rate": 4.2320644563153244e-06, "loss": 0.4391, "step": 1440},
    {"epoch": 0.596368138671069, "grad_norm": 2.5508830684770043, "learning_rate": 4.196484722352999e-06, "loss": 0.4341, "step": 1445},
    {"epoch": 0.5984316962443252, "grad_norm": 2.459534568261172, "learning_rate": 4.160946705962895e-06, "loss": 0.4298, "step": 1450},
    {"epoch": 0.6004952538175815, "grad_norm": 2.4607489784078806, "learning_rate": 4.12545225223721e-06, "loss": 0.4238, "step": 1455},
    {"epoch": 0.6025588113908378, "grad_norm": 2.5772914418218336, "learning_rate": 4.090003204006406e-06, "loss": 0.4165, "step": 1460},
    {"epoch": 0.604622368964094, "grad_norm": 2.273824183445231, "learning_rate": 4.054601401743561e-06, "loss": 0.4366, "step": 1465},
    {"epoch": 0.6066859265373504, "grad_norm": 2.5922865720792183, "learning_rate": 4.019248683468781e-06, "loss": 0.4372, "step": 1470},
    {"epoch": 0.6087494841106067, "grad_norm": 2.520992944241331, "learning_rate": 3.983946884653804e-06, "loss": 0.4418, "step": 1475},
    {"epoch": 0.610813041683863, "grad_norm": 2.4527707525439415, "learning_rate": 3.948697838126681e-06, "loss": 0.4186, "step": 1480},
    {"epoch": 0.6128765992571192, "grad_norm": 2.3893935033867546, "learning_rate": 3.9135033739766275e-06, "loss": 0.4332, "step": 1485},
    {"epoch": 0.6149401568303756, "grad_norm": 2.303193161104287, "learning_rate": 3.8783653194590055e-06, "loss": 0.4308, "step": 1490},
    {"epoch": 0.6170037144036319, "grad_norm": 2.568914194114212, "learning_rate": 3.843285498900457e-06, "loss": 0.4238, "step": 1495},
    {"epoch": 0.6190672719768882, "grad_norm": 2.3874626733288093, "learning_rate": 3.808265733604181e-06, "loss": 0.4211, "step": 1500},
    {"epoch": 0.6211308295501444, "grad_norm": 2.397949299188709, "learning_rate": 3.7733078417553835e-06, "loss": 0.4198, "step": 1505},
    {"epoch": 0.6231943871234007, "grad_norm": 2.4304137659983223, "learning_rate": 3.7384136383268673e-06, "loss": 0.4364, "step": 1510},
    {"epoch": 0.6252579446966571, "grad_norm": 2.4027742214175727, "learning_rate": 3.70358493498481e-06, "loss": 0.431, "step": 1515},
    {"epoch": 0.6273215022699133, "grad_norm": 2.6288095509256255, "learning_rate": 3.668823539994706e-06, "loss": 0.4252, "step": 1520},
    {"epoch": 0.6293850598431696, "grad_norm": 2.386671047852444, "learning_rate": 3.634131258127467e-06, "loss": 0.428, "step": 1525},
    {"epoch": 0.6314486174164259, "grad_norm": 2.3713874902931615, "learning_rate": 3.5995098905657457e-06, "loss": 0.4286, "step": 1530},
    {"epoch": 0.6335121749896823, "grad_norm": 2.300141894247011, "learning_rate": 3.564961234810399e-06, "loss": 0.4162, "step": 1535},
    {"epoch": 0.6355757325629385, "grad_norm": 2.4635471100860933, "learning_rate": 3.530487084587173e-06, "loss": 0.4102, "step": 1540},
    {"epoch": 0.6376392901361948, "grad_norm": 2.374004467594742, "learning_rate": 3.4960892297535765e-06, "loss": 0.4144, "step": 1545},
    {"epoch": 0.6397028477094511, "grad_norm": 2.3078125248362324, "learning_rate": 3.4617694562059465e-06, "loss": 0.4076, "step": 1550},
    {"epoch": 0.6417664052827073, "grad_norm": 2.513382809869699, "learning_rate": 3.427529545786736e-06, "loss": 0.4103, "step": 1555},
    {"epoch": 0.6438299628559637, "grad_norm": 2.421843916725022, "learning_rate": 3.393371276191997e-06, "loss": 0.4151, "step": 1560},
    {"epoch": 0.64589352042922, "grad_norm": 2.469381907143848, "learning_rate": 3.3592964208790808e-06, "loss": 0.4164, "step": 1565},
    {"epoch": 0.6479570780024763, "grad_norm": 2.531906105074784, "learning_rate": 3.3253067489745716e-06, "loss": 0.4071, "step": 1570},
    {"epoch": 0.6500206355757325, "grad_norm": 2.3846085481545747, "learning_rate": 3.291404025182432e-06, "loss": 0.4075, "step": 1575},
    {"epoch": 0.6520841931489889, "grad_norm": 2.433814917027714, "learning_rate": 3.2575900096923753e-06, "loss": 0.4087, "step": 1580},
    {"epoch": 0.6541477507222452, "grad_norm": 2.4424178299468506, "learning_rate": 3.2238664580884877e-06, "loss": 0.4043, "step": 1585},
    {"epoch": 0.6562113082955014, "grad_norm": 2.3585518358327833, "learning_rate": 3.1902351212580734e-06, "loss": 0.4116, "step": 1590},
    {"epoch": 0.6582748658687577, "grad_norm": 2.4272402253853174, "learning_rate": 3.1566977453007564e-06, "loss": 0.4045, "step": 1595},
    {"epoch": 0.660338423442014, "grad_norm": 2.2735139196354184, "learning_rate": 3.1232560714378214e-06, "loss": 0.3998, "step": 1600},
    {"epoch": 0.6624019810152704, "grad_norm": 2.496149939265061, "learning_rate": 3.0899118359218104e-06, "loss": 0.3986, "step": 1605},
    {"epoch": 0.6644655385885266, "grad_norm": 2.577482137677985, "learning_rate": 3.056666769946382e-06, "loss": 0.3922, "step": 1610},
    {"epoch": 0.6665290961617829, "grad_norm": 2.432120191923341, "learning_rate": 3.0235225995564323e-06, "loss": 0.3959, "step": 1615},
    {"epoch": 0.6685926537350392, "grad_norm": 2.5315892714201507, "learning_rate": 2.9904810455584664e-06, "loss": 0.396, "step": 1620},
    {"epoch": 0.6706562113082954, "grad_norm": 2.4126722223283643, "learning_rate": 2.9575438234312807e-06, "loss": 0.4034, "step": 1625},
    {"epoch": 0.6727197688815518, "grad_norm": 2.3922710783717682, "learning_rate": 2.9247126432368685e-06, "loss": 0.3969, "step": 1630},
    {"epoch": 0.6747833264548081, "grad_norm": 2.542933247382595, "learning_rate": 2.8919892095316616e-06, "loss": 0.3914, "step": 1635},
    {"epoch": 0.6768468840280644, "grad_norm": 2.503873056742514, "learning_rate": 2.859375221278014e-06, "loss": 0.4053, "step": 1640},
    {"epoch": 0.6789104416013206, "grad_norm": 2.4704924711278275, "learning_rate": 2.826872371756e-06, "loss": 0.4171, "step": 1645},
    {"epoch": 0.680973999174577, "grad_norm": 2.2126828966128347, "learning_rate": 2.7944823484755053e-06, "loss": 0.3979, "step": 1650},
    {"epoch": 0.6830375567478333, "grad_norm": 2.452244039159894, "learning_rate": 2.762206833088608e-06, "loss": 0.3992, "step": 1655},
    {"epoch": 0.6851011143210896, "grad_norm": 2.4737732563891197, "learning_rate": 2.7300475013022666e-06, "loss": 0.4013, "step": 1660},
    {"epoch": 0.6871646718943458, "grad_norm": 2.32695849246749, "learning_rate": 2.6980060227913273e-06, "loss": 0.3984, "step": 1665},
    {"epoch": 0.6892282294676021, "grad_norm": 2.5306901436703497, "learning_rate": 2.666084061111828e-06, "loss": 0.3942, "step": 1670},
    {"epoch": 0.6912917870408585, "grad_norm": 2.40430761373431, "learning_rate": 2.6342832736146403e-06, "loss": 0.3943, "step": 1675},
    {"epoch": 0.6933553446141147, "grad_norm": 2.3692497196583955, "learning_rate": 2.6026053113594056e-06, "loss": 0.383, "step": 1680},
    {"epoch": 0.695418902187371, "grad_norm": 2.45758426227379, "learning_rate": 2.571051819028826e-06, "loss": 0.4056, "step": 1685},
    {"epoch": 0.6974824597606273, "grad_norm": 2.490132984626703, "learning_rate": 2.539624434843272e-06, "loss": 0.408, "step": 1690},
| { | |
| "epoch": 0.6995460173338837, | |
| "grad_norm": 2.316910192052694, | |
| "learning_rate": 2.508324790475731e-06, | |
| "loss": 0.3908, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.7016095749071399, | |
| "grad_norm": 2.5678586000455215, | |
| "learning_rate": 2.4771545109670746e-06, | |
| "loss": 0.3886, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.7036731324803962, | |
| "grad_norm": 2.333143658853672, | |
| "learning_rate": 2.446115214641719e-06, | |
| "loss": 0.3825, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.7057366900536525, | |
| "grad_norm": 2.421284956930726, | |
| "learning_rate": 2.415208513023574e-06, | |
| "loss": 0.378, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.7078002476269087, | |
| "grad_norm": 2.348370007461818, | |
| "learning_rate": 2.3844360107523973e-06, | |
| "loss": 0.3745, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.7098638052001651, | |
| "grad_norm": 2.2561855639412047, | |
| "learning_rate": 2.353799305500467e-06, | |
| "loss": 0.3768, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.7119273627734214, | |
| "grad_norm": 2.4144762778450364, | |
| "learning_rate": 2.3232999878896407e-06, | |
| "loss": 0.3941, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.7139909203466777, | |
| "grad_norm": 2.510836282225756, | |
| "learning_rate": 2.2929396414087737e-06, | |
| "loss": 0.4012, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.7160544779199339, | |
| "grad_norm": 2.4017944766607906, | |
| "learning_rate": 2.2627198423314988e-06, | |
| "loss": 0.3955, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.7181180354931903, | |
| "grad_norm": 2.2838737017346356, | |
| "learning_rate": 2.2326421596343923e-06, | |
| "loss": 0.3833, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.7201815930664466, | |
| "grad_norm": 2.590581157502952, | |
| "learning_rate": 2.2027081549155188e-06, | |
| "loss": 0.3912, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.7222451506397028, | |
| "grad_norm": 2.3748009695139776, | |
| "learning_rate": 2.172919382313347e-06, | |
| "loss": 0.3808, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.7243087082129591, | |
| "grad_norm": 2.4065528450546445, | |
| "learning_rate": 2.1432773884260627e-06, | |
| "loss": 0.3705, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.7263722657862154, | |
| "grad_norm": 2.4049387116398564, | |
| "learning_rate": 2.1137837122312787e-06, | |
| "loss": 0.3765, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.7284358233594718, | |
| "grad_norm": 2.5287323783127174, | |
| "learning_rate": 2.0844398850061186e-06, | |
| "loss": 0.3886, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.730499380932728, | |
| "grad_norm": 2.467020632677239, | |
| "learning_rate": 2.0552474302477327e-06, | |
| "loss": 0.3875, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.7325629385059843, | |
| "grad_norm": 2.313861208086701, | |
| "learning_rate": 2.0262078635941818e-06, | |
| "loss": 0.3889, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.7346264960792406, | |
| "grad_norm": 2.3695175969299296, | |
| "learning_rate": 1.9973226927457567e-06, | |
| "loss": 0.3763, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.736690053652497, | |
| "grad_norm": 2.4901046009056884, | |
| "learning_rate": 1.968593417386705e-06, | |
| "loss": 0.3846, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.7387536112257532, | |
| "grad_norm": 2.3483856649423585, | |
| "learning_rate": 1.940021529107356e-06, | |
| "loss": 0.3856, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.7408171687990095, | |
| "grad_norm": 2.3336460330815205, | |
| "learning_rate": 1.911608511326688e-06, | |
| "loss": 0.3796, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.7428807263722658, | |
| "grad_norm": 2.3516145493656344, | |
| "learning_rate": 1.8833558392153118e-06, | |
| "loss": 0.3857, | |
| "step": 1800 | |
| }, | |
    {
      "epoch": 0.744944283945522,
      "grad_norm": 2.3634530645626164,
      "learning_rate": 1.8552649796188765e-06,
      "loss": 0.3891,
      "step": 1805
    },
    {
      "epoch": 0.7470078415187784,
      "grad_norm": 2.249211977645891,
      "learning_rate": 1.8273373909819132e-06,
      "loss": 0.3702,
      "step": 1810
    },
    {
      "epoch": 0.7490713990920347,
      "grad_norm": 2.582157058457191,
      "learning_rate": 1.7995745232721207e-06,
      "loss": 0.3849,
      "step": 1815
    },
    {
      "epoch": 0.751134956665291,
      "grad_norm": 2.3911264718615444,
      "learning_rate": 1.771977817905075e-06,
      "loss": 0.3662,
      "step": 1820
    },
    {
      "epoch": 0.7531985142385472,
      "grad_norm": 2.279432612650543,
      "learning_rate": 1.744548707669404e-06,
      "loss": 0.3697,
      "step": 1825
    },
    {
      "epoch": 0.7552620718118036,
      "grad_norm": 2.433098384561743,
      "learning_rate": 1.717288616652386e-06,
      "loss": 0.3714,
      "step": 1830
    },
    {
      "epoch": 0.7573256293850599,
      "grad_norm": 2.3966273952677764,
      "learning_rate": 1.6901989601660224e-06,
      "loss": 0.3701,
      "step": 1835
    },
    {
      "epoch": 0.7593891869583161,
      "grad_norm": 2.3793685219467697,
      "learning_rate": 1.6632811446735553e-06,
      "loss": 0.3749,
      "step": 1840
    },
    {
      "epoch": 0.7614527445315724,
      "grad_norm": 2.4235058401467167,
      "learning_rate": 1.6365365677164413e-06,
      "loss": 0.3798,
      "step": 1845
    },
    {
      "epoch": 0.7635163021048287,
      "grad_norm": 2.425747353853211,
      "learning_rate": 1.6099666178417927e-06,
      "loss": 0.3754,
      "step": 1850
    },
    {
      "epoch": 0.7655798596780851,
      "grad_norm": 2.378675323045764,
      "learning_rate": 1.5835726745302953e-06,
      "loss": 0.3792,
      "step": 1855
    },
    {
      "epoch": 0.7676434172513413,
      "grad_norm": 2.372493229885198,
      "learning_rate": 1.5573561081245724e-06,
      "loss": 0.3835,
      "step": 1860
    },
    {
      "epoch": 0.7697069748245976,
      "grad_norm": 2.5336093680740026,
      "learning_rate": 1.531318279758055e-06,
      "loss": 0.3619,
      "step": 1865
    },
    {
      "epoch": 0.7717705323978539,
      "grad_norm": 2.4693265387678442,
      "learning_rate": 1.5054605412842915e-06,
      "loss": 0.3736,
      "step": 1870
    },
    {
      "epoch": 0.7738340899711101,
      "grad_norm": 2.303876812670922,
      "learning_rate": 1.479784235206786e-06,
      "loss": 0.3583,
      "step": 1875
    },
    {
      "epoch": 0.7758976475443665,
      "grad_norm": 2.590842168629549,
      "learning_rate": 1.4542906946092817e-06,
      "loss": 0.3736,
      "step": 1880
    },
    {
      "epoch": 0.7779612051176228,
      "grad_norm": 2.500911682432463,
      "learning_rate": 1.428981243086549e-06,
      "loss": 0.367,
      "step": 1885
    },
    {
      "epoch": 0.7800247626908791,
      "grad_norm": 2.436965476930112,
      "learning_rate": 1.4038571946756724e-06,
      "loss": 0.3742,
      "step": 1890
    },
    {
      "epoch": 0.7820883202641353,
      "grad_norm": 2.5258796423012333,
      "learning_rate": 1.3789198537878202e-06,
      "loss": 0.3642,
      "step": 1895
    },
    {
      "epoch": 0.7841518778373917,
      "grad_norm": 2.369376183964329,
      "learning_rate": 1.3541705151405304e-06,
      "loss": 0.3746,
      "step": 1900
    },
    {
      "epoch": 0.786215435410648,
      "grad_norm": 2.3567197797039015,
      "learning_rate": 1.3296104636904806e-06,
      "loss": 0.3658,
      "step": 1905
    },
    {
      "epoch": 0.7882789929839042,
      "grad_norm": 2.4167682681302645,
      "learning_rate": 1.3052409745667783e-06,
      "loss": 0.3704,
      "step": 1910
    },
    {
      "epoch": 0.7903425505571605,
      "grad_norm": 2.3250328086103234,
      "learning_rate": 1.281063313004761e-06,
      "loss": 0.3677,
      "step": 1915
    },
    {
      "epoch": 0.7924061081304168,
      "grad_norm": 2.4369843316317,
      "learning_rate": 1.2570787342803064e-06,
      "loss": 0.3709,
      "step": 1920
    },
    {
      "epoch": 0.7944696657036732,
      "grad_norm": 2.5193296767489537,
      "learning_rate": 1.2332884836446473e-06,
      "loss": 0.3719,
      "step": 1925
    },
    {
      "epoch": 0.7965332232769294,
      "grad_norm": 2.5002330071663055,
      "learning_rate": 1.2096937962597399e-06,
      "loss": 0.3677,
      "step": 1930
    },
    {
      "epoch": 0.7985967808501857,
      "grad_norm": 2.601424944526519,
      "learning_rate": 1.1862958971341199e-06,
      "loss": 0.3686,
      "step": 1935
    },
    {
      "epoch": 0.800660338423442,
      "grad_norm": 2.3874506853275363,
      "learning_rate": 1.16309600105931e-06,
      "loss": 0.3619,
      "step": 1940
    },
    {
      "epoch": 0.8027238959966984,
      "grad_norm": 2.360432193713698,
      "learning_rate": 1.1400953125467452e-06,
      "loss": 0.3598,
      "step": 1945
    },
    {
      "epoch": 0.8047874535699546,
      "grad_norm": 2.5865175614762053,
      "learning_rate": 1.117295025765235e-06,
      "loss": 0.3678,
      "step": 1950
    },
    {
      "epoch": 0.8068510111432109,
      "grad_norm": 2.386447339664475,
      "learning_rate": 1.09469632447897e-06,
      "loss": 0.3527,
      "step": 1955
    },
    {
      "epoch": 0.8089145687164672,
      "grad_norm": 2.2243473128718607,
      "learning_rate": 1.0723003819860555e-06,
      "loss": 0.3499,
      "step": 1960
    },
    {
      "epoch": 0.8109781262897234,
      "grad_norm": 2.4765043887476,
      "learning_rate": 1.0501083610575958e-06,
      "loss": 0.358,
      "step": 1965
    },
    {
      "epoch": 0.8130416838629798,
      "grad_norm": 2.4123151679318093,
      "learning_rate": 1.0281214138773315e-06,
      "loss": 0.3411,
      "step": 1970
    },
    {
      "epoch": 0.8151052414362361,
      "grad_norm": 2.4030618342504892,
      "learning_rate": 1.0063406819818106e-06,
      "loss": 0.3492,
      "step": 1975
    },
    {
      "epoch": 0.8171687990094924,
      "grad_norm": 2.3438148800354988,
      "learning_rate": 9.847672962011233e-07,
      "loss": 0.3554,
      "step": 1980
    },
    {
      "epoch": 0.8192323565827486,
      "grad_norm": 2.328120737157089,
      "learning_rate": 9.634023766001981e-07,
      "loss": 0.3562,
      "step": 1985
    },
    {
      "epoch": 0.821295914156005,
      "grad_norm": 2.5107579831834816,
      "learning_rate": 9.422470324206385e-07,
      "loss": 0.3575,
      "step": 1990
    },
    {
      "epoch": 0.8233594717292613,
      "grad_norm": 2.455991687646852,
      "learning_rate": 9.213023620231404e-07,
      "loss": 0.3508,
      "step": 1995
    },
    {
      "epoch": 0.8254230293025175,
      "grad_norm": 2.3501569587377693,
      "learning_rate": 9.005694528304621e-07,
      "loss": 0.3529,
      "step": 2000
    },
    {
      "epoch": 0.8274865868757738,
      "grad_norm": 2.3892952569776527,
      "learning_rate": 8.800493812709677e-07,
      "loss": 0.3605,
      "step": 2005
    },
    {
      "epoch": 0.8295501444490301,
      "grad_norm": 2.387038888457062,
      "learning_rate": 8.597432127227434e-07,
      "loss": 0.3593,
      "step": 2010
    },
    {
      "epoch": 0.8316137020222865,
      "grad_norm": 2.3731590066808526,
      "learning_rate": 8.396520014582798e-07,
      "loss": 0.3487,
      "step": 2015
    },
    {
      "epoch": 0.8336772595955427,
      "grad_norm": 2.576723904790806,
      "learning_rate": 8.197767905897341e-07,
      "loss": 0.3541,
      "step": 2020
    },
    {
      "epoch": 0.835740817168799,
      "grad_norm": 2.2751284706261306,
      "learning_rate": 8.001186120147825e-07,
      "loss": 0.3463,
      "step": 2025
    },
    {
      "epoch": 0.8378043747420553,
      "grad_norm": 2.9241025316172804,
      "learning_rate": 7.806784863630324e-07,
      "loss": 0.3504,
      "step": 2030
    },
    {
      "epoch": 0.8398679323153116,
      "grad_norm": 2.4559935395470034,
      "learning_rate": 7.614574229430432e-07,
      "loss": 0.3538,
      "step": 2035
    },
    {
      "epoch": 0.8419314898885679,
      "grad_norm": 2.3573162952481037,
      "learning_rate": 7.424564196899175e-07,
      "loss": 0.3472,
      "step": 2040
    },
    {
      "epoch": 0.8439950474618242,
      "grad_norm": 2.3152148511829287,
      "learning_rate": 7.236764631134907e-07,
      "loss": 0.3446,
      "step": 2045
    },
    {
      "epoch": 0.8460586050350805,
      "grad_norm": 2.328674279050691,
      "learning_rate": 7.051185282471173e-07,
      "loss": 0.3494,
      "step": 2050
    },
    {
      "epoch": 0.8481221626083367,
      "grad_norm": 2.3835140443527982,
      "learning_rate": 6.867835785970417e-07,
      "loss": 0.3443,
      "step": 2055
    },
    {
      "epoch": 0.8501857201815931,
      "grad_norm": 2.366551971108674,
      "learning_rate": 6.68672566092376e-07,
      "loss": 0.3434,
      "step": 2060
    },
    {
      "epoch": 0.8522492777548494,
      "grad_norm": 2.4362523327596257,
      "learning_rate": 6.507864310356826e-07,
      "loss": 0.3361,
      "step": 2065
    },
    {
      "epoch": 0.8543128353281056,
      "grad_norm": 2.341036835199248,
      "learning_rate": 6.331261020541462e-07,
      "loss": 0.3658,
      "step": 2070
    },
    {
      "epoch": 0.8563763929013619,
      "grad_norm": 2.5075276094389753,
      "learning_rate": 6.156924960513638e-07,
      "loss": 0.3598,
      "step": 2075
    },
    {
      "epoch": 0.8584399504746183,
      "grad_norm": 2.304765407228246,
      "learning_rate": 5.984865181597454e-07,
      "loss": 0.3528,
      "step": 2080
    },
    {
      "epoch": 0.8605035080478746,
      "grad_norm": 2.334045444451915,
      "learning_rate": 5.815090616935115e-07,
      "loss": 0.3506,
      "step": 2085
    },
    {
      "epoch": 0.8625670656211308,
      "grad_norm": 2.434017607980088,
      "learning_rate": 5.647610081023214e-07,
      "loss": 0.3524,
      "step": 2090
    },
    {
      "epoch": 0.8646306231943871,
      "grad_norm": 2.400891346368746,
      "learning_rate": 5.482432269255011e-07,
      "loss": 0.3431,
      "step": 2095
    },
    {
      "epoch": 0.8666941807676434,
      "grad_norm": 2.4286183102500027,
      "learning_rate": 5.31956575746907e-07,
      "loss": 0.3464,
      "step": 2100
    },
    {
      "epoch": 0.8687577383408998,
      "grad_norm": 2.380179963058147,
      "learning_rate": 5.159019001503962e-07,
      "loss": 0.3499,
      "step": 2105
    },
    {
      "epoch": 0.870821295914156,
      "grad_norm": 2.306679882811888,
      "learning_rate": 5.000800336759232e-07,
      "loss": 0.3437,
      "step": 2110
    },
    {
      "epoch": 0.8728848534874123,
      "grad_norm": 2.289306218382388,
      "learning_rate": 4.844917977762653e-07,
      "loss": 0.3542,
      "step": 2115
    },
    {
      "epoch": 0.8749484110606686,
      "grad_norm": 2.401006920203725,
      "learning_rate": 4.691380017743769e-07,
      "loss": 0.3508,
      "step": 2120
    },
    {
      "epoch": 0.8770119686339248,
      "grad_norm": 2.294060885815769,
      "learning_rate": 4.5401944282136454e-07,
      "loss": 0.3592,
      "step": 2125
    },
    {
      "epoch": 0.8790755262071812,
      "grad_norm": 2.5092731081270654,
      "learning_rate": 4.3913690585510427e-07,
      "loss": 0.3343,
      "step": 2130
    },
    {
      "epoch": 0.8811390837804375,
      "grad_norm": 2.4672551742312523,
      "learning_rate": 4.244911635594856e-07,
      "loss": 0.3421,
      "step": 2135
    },
    {
      "epoch": 0.8832026413536938,
      "grad_norm": 2.4349291953653234,
      "learning_rate": 4.100829763242975e-07,
      "loss": 0.3482,
      "step": 2140
    },
    {
      "epoch": 0.88526619892695,
      "grad_norm": 2.532171457134297,
      "learning_rate": 3.959130922057497e-07,
      "loss": 0.3365,
      "step": 2145
    },
    {
      "epoch": 0.8873297565002064,
      "grad_norm": 2.4156983042588647,
      "learning_rate": 3.819822468876283e-07,
      "loss": 0.3424,
      "step": 2150
    },
    {
      "epoch": 0.8893933140734627,
      "grad_norm": 2.3670827733250346,
      "learning_rate": 3.6829116364310914e-07,
      "loss": 0.3489,
      "step": 2155
    },
    {
      "epoch": 0.8914568716467189,
      "grad_norm": 2.471779388721616,
      "learning_rate": 3.548405532972016e-07,
      "loss": 0.35,
      "step": 2160
    },
    {
      "epoch": 0.8935204292199752,
      "grad_norm": 2.5818181780490184,
      "learning_rate": 3.416311141898421e-07,
      "loss": 0.3468,
      "step": 2165
    },
    {
      "epoch": 0.8955839867932315,
      "grad_norm": 2.561889277902207,
      "learning_rate": 3.286635321396392e-07,
      "loss": 0.3415,
      "step": 2170
    },
    {
      "epoch": 0.8976475443664879,
      "grad_norm": 2.3816913973065654,
      "learning_rate": 3.159384804082666e-07,
      "loss": 0.335,
      "step": 2175
    },
    {
      "epoch": 0.8997111019397441,
      "grad_norm": 2.3401913676557116,
      "learning_rate": 3.034566196655092e-07,
      "loss": 0.3438,
      "step": 2180
    },
    {
      "epoch": 0.9017746595130004,
      "grad_norm": 2.3557441169663598,
      "learning_rate": 2.912185979549587e-07,
      "loss": 0.3415,
      "step": 2185
    },
    {
      "epoch": 0.9038382170862567,
      "grad_norm": 2.5287391128485464,
      "learning_rate": 2.792250506603694e-07,
      "loss": 0.3433,
      "step": 2190
    },
    {
      "epoch": 0.905901774659513,
      "grad_norm": 2.454585929498319,
      "learning_rate": 2.67476600472672e-07,
      "loss": 0.3451,
      "step": 2195
    },
    {
      "epoch": 0.9079653322327693,
      "grad_norm": 2.596601343506189,
      "learning_rate": 2.5597385735764256e-07,
      "loss": 0.3583,
      "step": 2200
    },
    {
      "epoch": 0.9100288898060256,
      "grad_norm": 2.19805805166686,
      "learning_rate": 2.447174185242324e-07,
      "loss": 0.3461,
      "step": 2205
    },
    {
      "epoch": 0.9120924473792819,
      "grad_norm": 2.4174079650578624,
      "learning_rate": 2.3370786839356286e-07,
      "loss": 0.3396,
      "step": 2210
    },
    {
      "epoch": 0.9141560049525381,
      "grad_norm": 2.36584151995226,
      "learning_rate": 2.2294577856858236e-07,
      "loss": 0.3401,
      "step": 2215
    },
    {
      "epoch": 0.9162195625257945,
      "grad_norm": 2.434108457884742,
      "learning_rate": 2.124317078043936e-07,
      "loss": 0.3464,
      "step": 2220
    },
    {
      "epoch": 0.9182831200990508,
      "grad_norm": 2.2550641701066523,
      "learning_rate": 2.0216620197923642e-07,
      "loss": 0.3403,
      "step": 2225
    },
    {
      "epoch": 0.920346677672307,
      "grad_norm": 2.479423722525491,
      "learning_rate": 1.9214979406615097e-07,
      "loss": 0.34,
      "step": 2230
    },
    {
      "epoch": 0.9224102352455633,
      "grad_norm": 2.3709597200527965,
      "learning_rate": 1.823830041053065e-07,
      "loss": 0.3386,
      "step": 2235
    },
    {
      "epoch": 0.9244737928188197,
      "grad_norm": 2.4793227986038193,
      "learning_rate": 1.7286633917700134e-07,
      "loss": 0.3538,
      "step": 2240
    },
    {
      "epoch": 0.926537350392076,
      "grad_norm": 2.4372071315808896,
      "learning_rate": 1.6360029337533056e-07,
      "loss": 0.3399,
      "step": 2245
    },
    {
      "epoch": 0.9286009079653322,
      "grad_norm": 2.350437257669817,
      "learning_rate": 1.5458534778254319e-07,
      "loss": 0.3456,
      "step": 2250
    },
    {
      "epoch": 0.9306644655385885,
      "grad_norm": 2.3873927009091025,
      "learning_rate": 1.4582197044405556e-07,
      "loss": 0.3476,
      "step": 2255
    },
    {
      "epoch": 0.9327280231118448,
      "grad_norm": 2.3893579636725715,
      "learning_rate": 1.3731061634415787e-07,
      "loss": 0.3397,
      "step": 2260
    },
    {
      "epoch": 0.9347915806851012,
      "grad_norm": 2.384752010782048,
      "learning_rate": 1.2905172738238603e-07,
      "loss": 0.3487,
      "step": 2265
    },
    {
      "epoch": 0.9368551382583574,
      "grad_norm": 2.3497503513681552,
      "learning_rate": 1.2104573235058314e-07,
      "loss": 0.3383,
      "step": 2270
    },
    {
      "epoch": 0.9389186958316137,
      "grad_norm": 2.6245838898899687,
      "learning_rate": 1.1329304691063692e-07,
      "loss": 0.3414,
      "step": 2275
    },
    {
      "epoch": 0.94098225340487,
      "grad_norm": 2.3201420801643358,
      "learning_rate": 1.0579407357289461e-07,
      "loss": 0.3357,
      "step": 2280
    },
    {
      "epoch": 0.9430458109781263,
      "grad_norm": 2.390636102614023,
      "learning_rate": 9.854920167526983e-08,
      "loss": 0.3358,
      "step": 2285
    },
    {
      "epoch": 0.9451093685513826,
      "grad_norm": 2.6935442949745774,
      "learning_rate": 9.155880736302857e-08,
      "loss": 0.3516,
      "step": 2290
    },
    {
      "epoch": 0.9471729261246389,
      "grad_norm": 2.33174770965001,
      "learning_rate": 8.482325356925614e-08,
      "loss": 0.3383,
      "step": 2295
    },
    {
      "epoch": 0.9492364836978952,
      "grad_norm": 2.382213172213323,
      "learning_rate": 7.834288999601703e-08,
      "loss": 0.362,
      "step": 2300
    },
    {
      "epoch": 0.9513000412711514,
      "grad_norm": 2.3191616911646724,
      "learning_rate": 7.211805309620013e-08,
      "loss": 0.328,
      "step": 2305
    },
    {
      "epoch": 0.9533635988444078,
      "grad_norm": 2.469419059741572,
      "learning_rate": 6.614906605604598e-08,
      "loss": 0.3331,
      "step": 2310
    },
    {
      "epoch": 0.9554271564176641,
      "grad_norm": 2.4225879136955997,
      "learning_rate": 6.043623877837301e-08,
      "loss": 0.347,
      "step": 2315
    },
    {
      "epoch": 0.9574907139909203,
      "grad_norm": 2.3375518129030786,
      "learning_rate": 5.4979867866482015e-08,
      "loss": 0.339,
      "step": 2320
    },
    {
      "epoch": 0.9595542715641766,
      "grad_norm": 2.460240591927137,
      "learning_rate": 4.978023660876075e-08,
      "loss": 0.3409,
      "step": 2325
    },
    {
      "epoch": 0.961617829137433,
      "grad_norm": 2.5514455417806388,
      "learning_rate": 4.4837614963976205e-08,
      "loss": 0.3549,
      "step": 2330
    },
    {
      "epoch": 0.9636813867106893,
      "grad_norm": 2.4229916264085594,
      "learning_rate": 4.015225954725421e-08,
      "loss": 0.3462,
      "step": 2335
    },
    {
      "epoch": 0.9657449442839455,
      "grad_norm": 2.392552783694151,
      "learning_rate": 3.572441361676171e-08,
      "loss": 0.3423,
      "step": 2340
    },
    {
      "epoch": 0.9678085018572018,
      "grad_norm": 2.414641292682689,
      "learning_rate": 3.155430706107465e-08,
      "loss": 0.3416,
      "step": 2345
    },
    {
      "epoch": 0.9698720594304581,
      "grad_norm": 2.5378189638300164,
      "learning_rate": 2.7642156387240882e-08,
      "loss": 0.3419,
      "step": 2350
    },
    {
      "epoch": 0.9719356170037144,
      "grad_norm": 2.4175815882083502,
      "learning_rate": 2.3988164709542462e-08,
      "loss": 0.3321,
      "step": 2355
    },
    {
      "epoch": 0.9739991745769707,
      "grad_norm": 2.2974926251848227,
      "learning_rate": 2.0592521738947434e-08,
      "loss": 0.3383,
      "step": 2360
    },
    {
      "epoch": 0.976062732150227,
      "grad_norm": 2.3929562307787338,
      "learning_rate": 1.7455403773262713e-08,
      "loss": 0.3402,
      "step": 2365
    },
    {
      "epoch": 0.9781262897234833,
      "grad_norm": 2.4697653358488103,
      "learning_rate": 1.4576973687979168e-08,
      "loss": 0.3245,
      "step": 2370
    },
    {
      "epoch": 0.9801898472967395,
      "grad_norm": 2.4583519170902677,
      "learning_rate": 1.1957380927816176e-08,
      "loss": 0.3449,
      "step": 2375
    },
    {
      "epoch": 0.9822534048699959,
      "grad_norm": 2.53558630491575,
      "learning_rate": 9.596761498961715e-09,
      "loss": 0.3378,
      "step": 2380
    },
    {
      "epoch": 0.9843169624432522,
      "grad_norm": 2.4388182873283983,
      "learning_rate": 7.495237962011902e-09,
      "loss": 0.3376,
      "step": 2385
    },
    {
      "epoch": 0.9863805200165084,
      "grad_norm": 2.5307806748315764,
      "learning_rate": 5.65291942560775e-09,
      "loss": 0.3437,
      "step": 2390
    },
    {
      "epoch": 0.9884440775897647,
      "grad_norm": 2.494932764858625,
      "learning_rate": 4.0699015407702495e-09,
      "loss": 0.3466,
      "step": 2395
    },
    {
      "epoch": 0.9905076351630211,
      "grad_norm": 2.386648335012219,
      "learning_rate": 2.7462664959343465e-09,
      "loss": 0.3384,
      "step": 2400
    },
    {
      "epoch": 0.9925711927362774,
      "grad_norm": 2.7213817604169748,
      "learning_rate": 1.682083012681246e-09,
      "loss": 0.3332,
      "step": 2405
    },
    {
      "epoch": 0.9946347503095336,
      "grad_norm": 2.559632513812426,
      "learning_rate": 8.774063421712608e-10,
      "loss": 0.3494,
      "step": 2410
    },
    {
      "epoch": 0.9966983078827899,
      "grad_norm": 2.307214644489273,
      "learning_rate": 3.322782622738885e-10,
      "loss": 0.3417,
      "step": 2415
    },
    {
      "epoch": 0.9987618654560462,
      "grad_norm": 2.3274612533228907,
      "learning_rate": 4.672707540231969e-11,
      "loss": 0.3382,
      "step": 2420
    },
    {
      "epoch": 1.0,
      "eval_runtime": 3.3225,
      "eval_samples_per_second": 3.01,
      "eval_steps_per_second": 0.903,
      "step": 2423
    },
    {
      "epoch": 1.0,
      "step": 2423,
      "total_flos": 253663452856320.0,
      "train_loss": 0.5332340838443727,
      "train_runtime": 21510.4539,
      "train_samples_per_second": 1.802,
      "train_steps_per_second": 0.113
    }
  ],
  "logging_steps": 5,
  "max_steps": 2423,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 253663452856320.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}