{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.982264665757162,
  "eval_steps": 500,
  "global_step": 366,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008185538881309686,
      "grad_norm": 5.0478727782480615,
      "learning_rate": 0.0,
      "loss": 1.0078,
      "step": 1
    },
    {
      "epoch": 0.01637107776261937,
      "grad_norm": 5.297869728172454,
      "learning_rate": 2.702702702702703e-07,
      "loss": 1.0618,
      "step": 2
    },
    {
      "epoch": 0.02455661664392906,
      "grad_norm": 5.2373720826529615,
      "learning_rate": 5.405405405405406e-07,
      "loss": 0.996,
      "step": 3
    },
    {
      "epoch": 0.03274215552523874,
      "grad_norm": 5.1487966370839615,
      "learning_rate": 8.108108108108109e-07,
      "loss": 1.0566,
      "step": 4
    },
    {
      "epoch": 0.040927694406548434,
      "grad_norm": 4.889603343820506,
      "learning_rate": 1.0810810810810812e-06,
      "loss": 1.0428,
      "step": 5
    },
    {
      "epoch": 0.04911323328785812,
      "grad_norm": 5.010279773418308,
      "learning_rate": 1.3513513513513515e-06,
      "loss": 1.0379,
      "step": 6
    },
    {
      "epoch": 0.0572987721691678,
      "grad_norm": 4.110386008020255,
      "learning_rate": 1.6216216216216219e-06,
      "loss": 1.0033,
      "step": 7
    },
    {
      "epoch": 0.06548431105047749,
      "grad_norm": 3.7462198602177916,
      "learning_rate": 1.8918918918918922e-06,
      "loss": 0.968,
      "step": 8
    },
    {
      "epoch": 0.07366984993178717,
      "grad_norm": 3.595416096229768,
      "learning_rate": 2.1621621621621623e-06,
      "loss": 0.9803,
      "step": 9
    },
    {
      "epoch": 0.08185538881309687,
      "grad_norm": 2.533438078443084,
      "learning_rate": 2.432432432432433e-06,
      "loss": 0.9224,
      "step": 10
    },
    {
      "epoch": 0.09004092769440655,
      "grad_norm": 2.320695272044533,
      "learning_rate": 2.702702702702703e-06,
      "loss": 0.9266,
      "step": 11
    },
    {
      "epoch": 0.09822646657571624,
      "grad_norm": 2.2691993926294485,
      "learning_rate": 2.9729729729729736e-06,
      "loss": 0.9506,
      "step": 12
    },
    {
      "epoch": 0.10641200545702592,
      "grad_norm": 1.6484336480613677,
      "learning_rate": 3.2432432432432437e-06,
      "loss": 0.8901,
      "step": 13
    },
    {
      "epoch": 0.1145975443383356,
      "grad_norm": 1.587811768676501,
      "learning_rate": 3.513513513513514e-06,
      "loss": 0.8924,
      "step": 14
    },
    {
      "epoch": 0.12278308321964529,
      "grad_norm": 1.4428141733888997,
      "learning_rate": 3.7837837837837844e-06,
      "loss": 0.8533,
      "step": 15
    },
    {
      "epoch": 0.13096862210095497,
      "grad_norm": 1.327517776817959,
      "learning_rate": 4.0540540540540545e-06,
      "loss": 0.8403,
      "step": 16
    },
    {
      "epoch": 0.13915416098226466,
      "grad_norm": 1.1903685597921168,
      "learning_rate": 4.324324324324325e-06,
      "loss": 0.8213,
      "step": 17
    },
    {
      "epoch": 0.14733969986357434,
      "grad_norm": 1.1265141075408769,
      "learning_rate": 4.594594594594596e-06,
      "loss": 0.7893,
      "step": 18
    },
    {
      "epoch": 0.15552523874488403,
      "grad_norm": 1.0112222898955754,
      "learning_rate": 4.864864864864866e-06,
      "loss": 0.7686,
      "step": 19
    },
    {
      "epoch": 0.16371077762619374,
      "grad_norm": 1.0155475919941201,
      "learning_rate": 5.135135135135135e-06,
      "loss": 0.7895,
      "step": 20
    },
    {
      "epoch": 0.17189631650750342,
      "grad_norm": 0.9424205847898997,
      "learning_rate": 5.405405405405406e-06,
      "loss": 0.7934,
      "step": 21
    },
    {
      "epoch": 0.1800818553888131,
      "grad_norm": 0.9297919004627947,
      "learning_rate": 5.675675675675676e-06,
      "loss": 0.7704,
      "step": 22
    },
    {
      "epoch": 0.1882673942701228,
      "grad_norm": 0.8942582616046999,
      "learning_rate": 5.945945945945947e-06,
      "loss": 0.771,
      "step": 23
    },
    {
      "epoch": 0.19645293315143247,
      "grad_norm": 0.8790765346003704,
      "learning_rate": 6.2162162162162164e-06,
      "loss": 0.7519,
      "step": 24
    },
    {
      "epoch": 0.20463847203274216,
      "grad_norm": 0.8704256340257236,
      "learning_rate": 6.486486486486487e-06,
      "loss": 0.7817,
      "step": 25
    },
    {
      "epoch": 0.21282401091405184,
      "grad_norm": 0.8936039954661188,
      "learning_rate": 6.7567567567567575e-06,
      "loss": 0.7558,
      "step": 26
    },
    {
      "epoch": 0.22100954979536153,
      "grad_norm": 0.9000294604512896,
      "learning_rate": 7.027027027027028e-06,
      "loss": 0.756,
      "step": 27
    },
    {
      "epoch": 0.2291950886766712,
      "grad_norm": 0.760124422848503,
      "learning_rate": 7.297297297297298e-06,
      "loss": 0.6948,
      "step": 28
    },
    {
      "epoch": 0.2373806275579809,
      "grad_norm": 0.8051399822377988,
      "learning_rate": 7.567567567567569e-06,
      "loss": 0.7303,
      "step": 29
    },
    {
      "epoch": 0.24556616643929058,
      "grad_norm": 0.7986272722014214,
      "learning_rate": 7.837837837837838e-06,
      "loss": 0.7072,
      "step": 30
    },
    {
      "epoch": 0.25375170532060026,
      "grad_norm": 0.7581553292292164,
      "learning_rate": 8.108108108108109e-06,
      "loss": 0.7516,
      "step": 31
    },
    {
      "epoch": 0.26193724420190995,
      "grad_norm": 0.7494941699803703,
      "learning_rate": 8.378378378378378e-06,
      "loss": 0.6875,
      "step": 32
    },
    {
      "epoch": 0.27012278308321963,
      "grad_norm": 0.7035656192285054,
      "learning_rate": 8.64864864864865e-06,
      "loss": 0.6868,
      "step": 33
    },
    {
      "epoch": 0.2783083219645293,
      "grad_norm": 0.825065049654731,
      "learning_rate": 8.91891891891892e-06,
      "loss": 0.724,
      "step": 34
    },
    {
      "epoch": 0.286493860845839,
      "grad_norm": 0.7398891146832497,
      "learning_rate": 9.189189189189191e-06,
      "loss": 0.6647,
      "step": 35
    },
    {
      "epoch": 0.2946793997271487,
      "grad_norm": 0.7872339743993603,
      "learning_rate": 9.45945945945946e-06,
      "loss": 0.7306,
      "step": 36
    },
    {
      "epoch": 0.30286493860845837,
      "grad_norm": 0.802374234041993,
      "learning_rate": 9.729729729729732e-06,
      "loss": 0.7196,
      "step": 37
    },
    {
      "epoch": 0.31105047748976805,
      "grad_norm": 0.6743014958494054,
      "learning_rate": 1e-05,
      "loss": 0.6844,
      "step": 38
    },
    {
      "epoch": 0.31923601637107774,
      "grad_norm": 0.7173190441882299,
      "learning_rate": 9.999772047343259e-06,
      "loss": 0.7104,
      "step": 39
    },
    {
      "epoch": 0.3274215552523875,
      "grad_norm": 0.7316123657845623,
      "learning_rate": 9.999088210158001e-06,
      "loss": 0.7021,
      "step": 40
    },
    {
      "epoch": 0.33560709413369716,
      "grad_norm": 0.7117684019510708,
      "learning_rate": 9.997948550797227e-06,
      "loss": 0.6858,
      "step": 41
    },
    {
      "epoch": 0.34379263301500684,
      "grad_norm": 0.7188976086512107,
      "learning_rate": 9.99635317317629e-06,
      "loss": 0.7019,
      "step": 42
    },
    {
      "epoch": 0.3519781718963165,
      "grad_norm": 0.7377050581756815,
      "learning_rate": 9.994302222763415e-06,
      "loss": 0.6814,
      "step": 43
    },
    {
      "epoch": 0.3601637107776262,
      "grad_norm": 0.9135462047269537,
      "learning_rate": 9.991795886566443e-06,
      "loss": 0.6657,
      "step": 44
    },
    {
      "epoch": 0.3683492496589359,
      "grad_norm": 0.7192062851816737,
      "learning_rate": 9.988834393115768e-06,
      "loss": 0.683,
      "step": 45
    },
    {
      "epoch": 0.3765347885402456,
      "grad_norm": 0.7317307030654772,
      "learning_rate": 9.98541801244351e-06,
      "loss": 0.698,
      "step": 46
    },
    {
      "epoch": 0.38472032742155526,
      "grad_norm": 0.7370617382707109,
      "learning_rate": 9.981547056058893e-06,
      "loss": 0.6974,
      "step": 47
    },
    {
      "epoch": 0.39290586630286495,
      "grad_norm": 0.7213739361864256,
      "learning_rate": 9.977221876919833e-06,
      "loss": 0.6768,
      "step": 48
    },
    {
      "epoch": 0.40109140518417463,
      "grad_norm": 0.7364591234843338,
      "learning_rate": 9.97244286940076e-06,
      "loss": 0.681,
      "step": 49
    },
    {
      "epoch": 0.4092769440654843,
      "grad_norm": 0.6967769200295396,
      "learning_rate": 9.967210469256657e-06,
      "loss": 0.6871,
      "step": 50
    },
    {
      "epoch": 0.417462482946794,
      "grad_norm": 0.7133269832692841,
      "learning_rate": 9.961525153583327e-06,
      "loss": 0.6727,
      "step": 51
    },
    {
      "epoch": 0.4256480218281037,
      "grad_norm": 0.7593591165695961,
      "learning_rate": 9.955387440773902e-06,
      "loss": 0.6613,
      "step": 52
    },
    {
      "epoch": 0.43383356070941337,
      "grad_norm": 0.7629742272484056,
      "learning_rate": 9.948797890471552e-06,
      "loss": 0.7059,
      "step": 53
    },
    {
      "epoch": 0.44201909959072305,
      "grad_norm": 0.7869395014265814,
      "learning_rate": 9.94175710351848e-06,
      "loss": 0.6809,
      "step": 54
    },
    {
      "epoch": 0.45020463847203274,
      "grad_norm": 0.7557966627645061,
      "learning_rate": 9.93426572190112e-06,
      "loss": 0.6888,
      "step": 55
    },
    {
      "epoch": 0.4583901773533424,
      "grad_norm": 0.7627089251776806,
      "learning_rate": 9.926324428691612e-06,
      "loss": 0.6749,
      "step": 56
    },
    {
      "epoch": 0.4665757162346521,
      "grad_norm": 0.7647324761458698,
      "learning_rate": 9.917933947985508e-06,
      "loss": 0.651,
      "step": 57
    },
    {
      "epoch": 0.4747612551159618,
      "grad_norm": 0.7522615995282188,
      "learning_rate": 9.909095044835755e-06,
      "loss": 0.6634,
      "step": 58
    },
    {
      "epoch": 0.4829467939972715,
      "grad_norm": 0.6975083371701567,
      "learning_rate": 9.899808525182935e-06,
      "loss": 0.673,
      "step": 59
    },
    {
      "epoch": 0.49113233287858116,
      "grad_norm": 0.7388297177443647,
      "learning_rate": 9.89007523578178e-06,
      "loss": 0.672,
      "step": 60
    },
    {
      "epoch": 0.49931787175989084,
      "grad_norm": 0.7399455004229146,
      "learning_rate": 9.879896064123961e-06,
      "loss": 0.6869,
      "step": 61
    },
    {
      "epoch": 0.5075034106412005,
      "grad_norm": 0.641768950781225,
      "learning_rate": 9.869271938357168e-06,
      "loss": 0.6656,
      "step": 62
    },
    {
      "epoch": 0.5156889495225102,
      "grad_norm": 0.6747503842637269,
      "learning_rate": 9.858203827200477e-06,
      "loss": 0.6681,
      "step": 63
    },
    {
      "epoch": 0.5238744884038199,
      "grad_norm": 0.7083166062792678,
      "learning_rate": 9.846692739856023e-06,
      "loss": 0.6862,
      "step": 64
    },
    {
      "epoch": 0.5320600272851296,
      "grad_norm": 0.6630698780804426,
      "learning_rate": 9.834739725916988e-06,
      "loss": 0.6975,
      "step": 65
    },
    {
      "epoch": 0.5402455661664393,
      "grad_norm": 0.681533225253199,
      "learning_rate": 9.822345875271884e-06,
      "loss": 0.686,
      "step": 66
    },
    {
      "epoch": 0.548431105047749,
      "grad_norm": 0.6840648475760192,
      "learning_rate": 9.80951231800518e-06,
      "loss": 0.6657,
      "step": 67
    },
    {
      "epoch": 0.5566166439290586,
      "grad_norm": 0.6882516854997278,
      "learning_rate": 9.79624022429427e-06,
      "loss": 0.6674,
      "step": 68
    },
    {
      "epoch": 0.5648021828103683,
      "grad_norm": 0.6673399727161186,
      "learning_rate": 9.782530804302763e-06,
      "loss": 0.6712,
      "step": 69
    },
    {
      "epoch": 0.572987721691678,
      "grad_norm": 0.6523633450588768,
      "learning_rate": 9.768385308070139e-06,
      "loss": 0.6499,
      "step": 70
    },
    {
      "epoch": 0.5811732605729877,
      "grad_norm": 0.6402501529233903,
      "learning_rate": 9.75380502539778e-06,
      "loss": 0.6376,
      "step": 71
    },
    {
      "epoch": 0.5893587994542974,
      "grad_norm": 0.6636314846823285,
      "learning_rate": 9.738791285731353e-06,
      "loss": 0.6278,
      "step": 72
    },
    {
      "epoch": 0.597544338335607,
      "grad_norm": 0.8003245675376227,
      "learning_rate": 9.723345458039595e-06,
      "loss": 0.6538,
      "step": 73
    },
    {
      "epoch": 0.6057298772169167,
      "grad_norm": 0.7185853457714209,
      "learning_rate": 9.70746895068949e-06,
      "loss": 0.6439,
      "step": 74
    },
    {
      "epoch": 0.6139154160982264,
      "grad_norm": 0.6699308724912499,
      "learning_rate": 9.691163211317853e-06,
      "loss": 0.6968,
      "step": 75
    },
    {
      "epoch": 0.6221009549795361,
      "grad_norm": 0.6797018354628909,
      "learning_rate": 9.674429726699324e-06,
      "loss": 0.6633,
      "step": 76
    },
    {
      "epoch": 0.6302864938608458,
      "grad_norm": 0.666380912419756,
      "learning_rate": 9.657270022610814e-06,
      "loss": 0.6652,
      "step": 77
    },
    {
      "epoch": 0.6384720327421555,
      "grad_norm": 0.6433578757501699,
      "learning_rate": 9.63968566369238e-06,
      "loss": 0.6231,
      "step": 78
    },
    {
      "epoch": 0.6466575716234653,
      "grad_norm": 0.697083409509591,
      "learning_rate": 9.62167825330455e-06,
      "loss": 0.6617,
      "step": 79
    },
    {
      "epoch": 0.654843110504775,
      "grad_norm": 0.6824957117453533,
      "learning_rate": 9.603249433382145e-06,
      "loss": 0.6845,
      "step": 80
    },
    {
      "epoch": 0.6630286493860846,
      "grad_norm": 0.6293701917757308,
      "learning_rate": 9.584400884284546e-06,
      "loss": 0.6451,
      "step": 81
    },
    {
      "epoch": 0.6712141882673943,
      "grad_norm": 0.6562401782569836,
      "learning_rate": 9.565134324642491e-06,
      "loss": 0.6687,
      "step": 82
    },
    {
      "epoch": 0.679399727148704,
      "grad_norm": 0.668101435033017,
      "learning_rate": 9.545451511201365e-06,
      "loss": 0.6653,
      "step": 83
    },
    {
      "epoch": 0.6875852660300137,
      "grad_norm": 0.6897068006081355,
      "learning_rate": 9.52535423866101e-06,
      "loss": 0.6769,
      "step": 84
    },
    {
      "epoch": 0.6957708049113234,
      "grad_norm": 0.6444378609992228,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.6255,
      "step": 85
    },
    {
      "epoch": 0.703956343792633,
      "grad_norm": 0.6988896735645694,
      "learning_rate": 9.483923683869025e-06,
      "loss": 0.6273,
      "step": 86
    },
    {
      "epoch": 0.7121418826739427,
      "grad_norm": 0.7291838388829389,
      "learning_rate": 9.462594179299408e-06,
      "loss": 0.6644,
      "step": 87
    },
    {
      "epoch": 0.7203274215552524,
      "grad_norm": 0.6690961584546128,
      "learning_rate": 9.440857770650139e-06,
      "loss": 0.7172,
      "step": 88
    },
    {
      "epoch": 0.7285129604365621,
      "grad_norm": 0.9091183830996429,
      "learning_rate": 9.418716439870056e-06,
      "loss": 0.6236,
      "step": 89
    },
    {
      "epoch": 0.7366984993178718,
      "grad_norm": 0.7033410685474086,
      "learning_rate": 9.396172205829235e-06,
      "loss": 0.6735,
      "step": 90
    },
    {
      "epoch": 0.7448840381991815,
      "grad_norm": 0.6845856558187914,
      "learning_rate": 9.373227124134888e-06,
      "loss": 0.6464,
      "step": 91
    },
    {
      "epoch": 0.7530695770804912,
      "grad_norm": 0.6765703675484731,
      "learning_rate": 9.349883286943951e-06,
      "loss": 0.6328,
      "step": 92
    },
    {
      "epoch": 0.7612551159618008,
      "grad_norm": 0.6753232110728814,
      "learning_rate": 9.326142822772301e-06,
      "loss": 0.6653,
      "step": 93
    },
    {
      "epoch": 0.7694406548431105,
      "grad_norm": 0.6604394934259564,
      "learning_rate": 9.302007896300697e-06,
      "loss": 0.6174,
      "step": 94
    },
    {
      "epoch": 0.7776261937244202,
      "grad_norm": 0.6607499407814651,
      "learning_rate": 9.27748070817738e-06,
      "loss": 0.6667,
      "step": 95
    },
    {
      "epoch": 0.7858117326057299,
      "grad_norm": 0.6177046884395893,
      "learning_rate": 9.252563494817426e-06,
      "loss": 0.6644,
      "step": 96
    },
    {
      "epoch": 0.7939972714870396,
      "grad_norm": 0.6532085389117603,
      "learning_rate": 9.227258528198832e-06,
      "loss": 0.6554,
      "step": 97
    },
    {
      "epoch": 0.8021828103683493,
      "grad_norm": 0.6531878007459695,
      "learning_rate": 9.201568115655343e-06,
      "loss": 0.6624,
      "step": 98
    },
    {
      "epoch": 0.810368349249659,
      "grad_norm": 0.7244203842003859,
      "learning_rate": 9.175494599666078e-06,
      "loss": 0.6599,
      "step": 99
    },
    {
      "epoch": 0.8185538881309686,
      "grad_norm": 0.6581466542372403,
      "learning_rate": 9.14904035764193e-06,
      "loss": 0.6409,
      "step": 100
    },
    {
      "epoch": 0.8267394270122783,
      "grad_norm": 0.6590221337516023,
      "learning_rate": 9.122207801708802e-06,
      "loss": 0.6955,
      "step": 101
    },
    {
      "epoch": 0.834924965893588,
      "grad_norm": 0.6461062189640926,
      "learning_rate": 9.094999378487659e-06,
      "loss": 0.6558,
      "step": 102
    },
    {
      "epoch": 0.8431105047748977,
      "grad_norm": 0.6574652022208926,
      "learning_rate": 9.067417568871444e-06,
      "loss": 0.6355,
      "step": 103
    },
    {
      "epoch": 0.8512960436562074,
      "grad_norm": 0.6733446230502218,
      "learning_rate": 9.03946488779887e-06,
      "loss": 0.6442,
      "step": 104
    },
    {
      "epoch": 0.859481582537517,
      "grad_norm": 0.6644073928901805,
      "learning_rate": 9.0111438840251e-06,
      "loss": 0.6401,
      "step": 105
    },
    {
      "epoch": 0.8676671214188267,
      "grad_norm": 0.6258042538207641,
      "learning_rate": 8.982457139889358e-06,
      "loss": 0.623,
      "step": 106
    },
    {
      "epoch": 0.8758526603001364,
      "grad_norm": 0.6262353607723671,
      "learning_rate": 8.953407271079456e-06,
      "loss": 0.6215,
      "step": 107
    },
    {
      "epoch": 0.8840381991814461,
      "grad_norm": 0.7145148499205656,
      "learning_rate": 8.923996926393306e-06,
      "loss": 0.6434,
      "step": 108
    },
    {
      "epoch": 0.8922237380627558,
      "grad_norm": 0.6514835061195294,
      "learning_rate": 8.894228787497389e-06,
      "loss": 0.6487,
      "step": 109
    },
    {
      "epoch": 0.9004092769440655,
      "grad_norm": 0.6478318245257795,
      "learning_rate": 8.864105568682245e-06,
      "loss": 0.6165,
      "step": 110
    },
    {
      "epoch": 0.9085948158253752,
      "grad_norm": 0.635966960589811,
      "learning_rate": 8.833630016614976e-06,
      "loss": 0.6593,
      "step": 111
    },
    {
      "epoch": 0.9167803547066848,
      "grad_norm": 0.6188382437651978,
      "learning_rate": 8.80280491008881e-06,
      "loss": 0.6539,
      "step": 112
    },
    {
      "epoch": 0.9249658935879945,
      "grad_norm": 0.6533555870938427,
      "learning_rate": 8.771633059769712e-06,
      "loss": 0.6471,
      "step": 113
    },
    {
      "epoch": 0.9331514324693042,
      "grad_norm": 0.6172503829228955,
      "learning_rate": 8.740117307940123e-06,
      "loss": 0.6363,
      "step": 114
    },
    {
      "epoch": 0.9413369713506139,
      "grad_norm": 0.6690820221849542,
      "learning_rate": 8.708260528239788e-06,
      "loss": 0.6833,
      "step": 115
    },
    {
      "epoch": 0.9495225102319236,
      "grad_norm": 0.63570538453503,
      "learning_rate": 8.676065625403733e-06,
      "loss": 0.655,
      "step": 116
    },
    {
      "epoch": 0.9577080491132333,
      "grad_norm": 0.6481910046193415,
      "learning_rate": 8.64353553499741e-06,
      "loss": 0.6274,
      "step": 117
    },
    {
      "epoch": 0.965893587994543,
      "grad_norm": 0.6226429114821875,
      "learning_rate": 8.610673223149036e-06,
      "loss": 0.637,
      "step": 118
    },
    {
      "epoch": 0.9740791268758526,
      "grad_norm": 0.6252222904571513,
      "learning_rate": 8.577481686279123e-06,
      "loss": 0.648,
      "step": 119
    },
    {
      "epoch": 0.9822646657571623,
      "grad_norm": 0.6580747113143244,
      "learning_rate": 8.543963950827279e-06,
      "loss": 0.6461,
      "step": 120
    },
    {
      "epoch": 0.990450204638472,
      "grad_norm": 0.6540805705810817,
      "learning_rate": 8.51012307297624e-06,
      "loss": 0.6625,
      "step": 121
    },
    {
      "epoch": 0.9986357435197817,
      "grad_norm": 0.6802947249507815,
      "learning_rate": 8.475962138373212e-06,
      "loss": 0.6561,
      "step": 122
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.6802947249507815,
      "learning_rate": 8.441484261848514e-06,
      "loss": 0.6048,
      "step": 123
    },
    {
      "epoch": 1.0081855388813097,
      "grad_norm": 1.4690540777621837,
      "learning_rate": 8.406692587131569e-06,
      "loss": 0.5364,
      "step": 124
    },
    {
      "epoch": 1.0163710777626194,
      "grad_norm": 0.6668502929153187,
      "learning_rate": 8.371590286564247e-06,
      "loss": 0.5698,
      "step": 125
    },
    {
      "epoch": 1.024556616643929,
      "grad_norm": 0.6630075245165461,
      "learning_rate": 8.336180560811619e-06,
      "loss": 0.5295,
      "step": 126
    },
    {
      "epoch": 1.0327421555252387,
      "grad_norm": 0.628047334528465,
      "learning_rate": 8.30046663857011e-06,
      "loss": 0.5522,
      "step": 127
    },
    {
      "epoch": 1.0409276944065484,
      "grad_norm": 0.6321367262570028,
      "learning_rate": 8.264451776273104e-06,
      "loss": 0.5328,
      "step": 128
    },
    {
      "epoch": 1.049113233287858,
      "grad_norm": 0.6509232625662688,
      "learning_rate": 8.228139257794012e-06,
      "loss": 0.5293,
      "step": 129
    },
    {
      "epoch": 1.0572987721691678,
      "grad_norm": 0.6653995825660356,
      "learning_rate": 8.191532394146865e-06,
      "loss": 0.5321,
      "step": 130
    },
    {
      "epoch": 1.0654843110504775,
      "grad_norm": 0.6753798862378513,
      "learning_rate": 8.154634523184389e-06,
      "loss": 0.5501,
      "step": 131
    },
    {
      "epoch": 1.0736698499317872,
      "grad_norm": 0.6925676306857319,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.5588,
      "step": 132
    },
    {
      "epoch": 1.0818553888130968,
      "grad_norm": 0.6875121153461677,
      "learning_rate": 8.07997924308938e-06,
      "loss": 0.5169,
      "step": 133
    },
    {
      "epoch": 1.0900409276944065,
      "grad_norm": 0.6635596925394057,
      "learning_rate": 8.042228641104622e-06,
      "loss": 0.5288,
      "step": 134
    },
    {
      "epoch": 1.0982264665757162,
      "grad_norm": 0.659706003479899,
      "learning_rate": 8.004200645479403e-06,
      "loss": 0.5431,
      "step": 135
    },
    {
      "epoch": 1.106412005457026,
      "grad_norm": 0.63943734644461,
      "learning_rate": 7.965898723646777e-06,
      "loss": 0.502,
      "step": 136
    },
    {
      "epoch": 1.1145975443383356,
      "grad_norm": 0.6648591862403953,
      "learning_rate": 7.927326368016677e-06,
      "loss": 0.5207,
      "step": 137
    },
    {
      "epoch": 1.1227830832196453,
      "grad_norm": 0.7024194556815612,
      "learning_rate": 7.888487095657484e-06,
      "loss": 0.5611,
      "step": 138
    },
    {
      "epoch": 1.130968622100955,
      "grad_norm": 0.7739952225143985,
      "learning_rate": 7.849384447975322e-06,
      "loss": 0.5366,
      "step": 139
    },
    {
      "epoch": 1.1391541609822646,
      "grad_norm": 0.6733083982037561,
      "learning_rate": 7.810021990391163e-06,
      "loss": 0.5338,
      "step": 140
    },
    {
      "epoch": 1.1473396998635743,
      "grad_norm": 0.7631285514542114,
      "learning_rate": 7.77040331201572e-06,
      "loss": 0.5433,
      "step": 141
    },
    {
      "epoch": 1.155525238744884,
      "grad_norm": 0.7358222036858854,
      "learning_rate": 7.73053202532219e-06,
      "loss": 0.5157,
      "step": 142
    },
    {
      "epoch": 1.1637107776261937,
      "grad_norm": 0.7221523031273229,
      "learning_rate": 7.690411765816864e-06,
      "loss": 0.5195,
      "step": 143
    },
    {
      "epoch": 1.1718963165075034,
      "grad_norm": 0.7073093802159716,
      "learning_rate": 7.650046191707641e-06,
      "loss": 0.52,
      "step": 144
    },
    {
      "epoch": 1.180081855388813,
      "grad_norm": 0.6973266687976701,
      "learning_rate": 7.609438983570461e-06,
      "loss": 0.5377,
      "step": 145
    },
    {
      "epoch": 1.1882673942701227,
      "grad_norm": 0.6972778402889085,
      "learning_rate": 7.5685938440137185e-06,
      "loss": 0.5331,
      "step": 146
    },
    {
      "epoch": 1.1964529331514324,
      "grad_norm": 0.7015895074267741,
      "learning_rate": 7.527514497340642e-06,
      "loss": 0.5009,
      "step": 147
    },
    {
      "epoch": 1.204638472032742,
      "grad_norm": 0.6741755282012447,
      "learning_rate": 7.486204689209719e-06,
      "loss": 0.5084,
      "step": 148
    },
    {
      "epoch": 1.2128240109140518,
      "grad_norm": 0.6897917137599813,
      "learning_rate": 7.444668186293153e-06,
      "loss": 0.5325,
      "step": 149
    },
    {
      "epoch": 1.2210095497953615,
      "grad_norm": 0.7009173085054421,
      "learning_rate": 7.402908775933419e-06,
      "loss": 0.5427,
      "step": 150
    },
    {
      "epoch": 1.2291950886766712,
      "grad_norm": 0.6955840979269092,
      "learning_rate": 7.360930265797934e-06,
      "loss": 0.5142,
      "step": 151
    },
    {
      "epoch": 1.2373806275579808,
      "grad_norm": 0.6959419263618837,
      "learning_rate": 7.318736483531861e-06,
      "loss": 0.503,
      "step": 152
    },
    {
      "epoch": 1.2455661664392905,
      "grad_norm": 0.7795131376962834,
      "learning_rate": 7.2763312764091055e-06,
      "loss": 0.5145,
      "step": 153
    },
    {
      "epoch": 1.2537517053206002,
      "grad_norm": 0.7429337505244488,
      "learning_rate": 7.23371851098152e-06,
      "loss": 0.5349,
      "step": 154
    },
    {
      "epoch": 1.26193724420191,
      "grad_norm": 0.6965919792231839,
      "learning_rate": 7.190902072726336e-06,
      "loss": 0.5057,
      "step": 155
    },
    {
      "epoch": 1.2701227830832196,
      "grad_norm": 0.6591537697874793,
      "learning_rate": 7.147885865691899e-06,
      "loss": 0.5061,
      "step": 156
    },
    {
      "epoch": 1.2783083219645293,
      "grad_norm": 0.7634419632836582,
      "learning_rate": 7.104673812141676e-06,
      "loss": 0.5328,
      "step": 157
    },
    {
      "epoch": 1.286493860845839,
      "grad_norm": 0.7183717648332919,
      "learning_rate": 7.061269852196633e-06,
      "loss": 0.5068,
      "step": 158
    },
    {
      "epoch": 1.2946793997271486,
      "grad_norm": 0.7590687460497102,
      "learning_rate": 7.017677943475962e-06,
      "loss": 0.5506,
      "step": 159
    },
    {
      "epoch": 1.3028649386084583,
      "grad_norm": 0.6686899677756686,
      "learning_rate": 6.973902060736226e-06,
      "loss": 0.5391,
      "step": 160
    },
    {
      "epoch": 1.311050477489768,
      "grad_norm": 0.7109155867540288,
      "learning_rate": 6.929946195508933e-06,
      "loss": 0.5052,
      "step": 161
    },
    {
      "epoch": 1.3192360163710777,
      "grad_norm": 0.7331433020230873,
      "learning_rate": 6.8858143557365865e-06,
      "loss": 0.527,
      "step": 162
    },
    {
      "epoch": 1.3274215552523874,
      "grad_norm": 0.6475968556383218,
      "learning_rate": 6.841510565407235e-06,
      "loss": 0.5509,
      "step": 163
    },
    {
      "epoch": 1.3356070941336973,
      "grad_norm": 0.6833703206105858,
      "learning_rate": 6.797038864187564e-06,
      "loss": 0.5464,
      "step": 164
    },
    {
      "epoch": 1.3437926330150067,
      "grad_norm": 0.8467686230501152,
      "learning_rate": 6.752403307054549e-06,
      "loss": 0.5268,
      "step": 165
    },
    {
      "epoch": 1.3519781718963166,
      "grad_norm": 0.7310142599724203,
      "learning_rate": 6.707607963925725e-06,
      "loss": 0.5025,
      "step": 166
    },
    {
      "epoch": 1.360163710777626,
      "grad_norm": 0.6809585209898018,
      "learning_rate": 6.66265691928808e-06,
      "loss": 0.531,
      "step": 167
    },
    {
      "epoch": 1.368349249658936,
      "grad_norm": 0.6815435370183068,
      "learning_rate": 6.617554271825636e-06,
      "loss": 0.4991,
      "step": 168
    },
    {
      "epoch": 1.3765347885402455,
      "grad_norm": 0.6756734543791449,
      "learning_rate": 6.5723041340457175e-06,
      "loss": 0.5595,
      "step": 169
    },
    {
      "epoch": 1.3847203274215554,
      "grad_norm": 0.6708184208199593,
      "learning_rate": 6.526910631903973e-06,
      "loss": 0.5183,
      "step": 170
    },
    {
      "epoch": 1.3929058663028648,
      "grad_norm": 1.0088094436813815,
      "learning_rate": 6.481377904428171e-06,
      "loss": 0.544,
      "step": 171
    },
    {
      "epoch": 1.4010914051841747,
      "grad_norm": 0.6756865297821381,
      "learning_rate": 6.435710103340787e-06,
      "loss": 0.5268,
      "step": 172
    },
    {
      "epoch": 1.4092769440654842,
      "grad_norm": 0.6886939982043838,
      "learning_rate": 6.3899113926804565e-06,
      "loss": 0.5284,
      "step": 173
    },
    {
      "epoch": 1.4174624829467941,
      "grad_norm": 0.6615078806376291,
      "learning_rate": 6.3439859484222874e-06,
      "loss": 0.5013,
      "step": 174
    },
    {
      "epoch": 1.4256480218281036,
      "grad_norm": 0.695006380687812,
      "learning_rate": 6.297937958097094e-06,
      "loss": 0.5378,
      "step": 175
    },
    {
      "epoch": 1.4338335607094135,
      "grad_norm": 0.6757415493057738,
      "learning_rate": 6.251771620409563e-06,
      "loss": 0.4894,
      "step": 176
    },
    {
      "epoch": 1.442019099590723,
      "grad_norm": 0.9523879204893828,
      "learning_rate": 6.205491144855432e-06,
      "loss": 0.5159,
      "step": 177
    },
    {
      "epoch": 1.4502046384720328,
      "grad_norm": 0.6817012112883398,
      "learning_rate": 6.1591007513376425e-06,
      "loss": 0.5389,
      "step": 178
    },
    {
      "epoch": 1.4583901773533423,
      "grad_norm": 0.7378659676716955,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.5106,
      "step": 179
    },
    {
      "epoch": 1.4665757162346522,
      "grad_norm": 0.70634706188791,
      "learning_rate": 6.066007139749351e-06,
      "loss": 0.5053,
      "step": 180
    },
    {
      "epoch": 1.4747612551159617,
      "grad_norm": 0.6840751935196304,
      "learning_rate": 6.019312410053286e-06,
      "loss": 0.5361,
      "step": 181
    },
    {
      "epoch": 1.4829467939972716,
      "grad_norm": 0.7055601714057114,
      "learning_rate": 5.972524738368452e-06,
      "loss": 0.5342,
      "step": 182
    },
    {
      "epoch": 1.491132332878581,
      "grad_norm": 0.6549263427404644,
      "learning_rate": 5.925648390844476e-06,
      "loss": 0.524,
      "step": 183
    },
    {
      "epoch": 1.499317871759891,
      "grad_norm": 0.7016637634242648,
      "learning_rate": 5.878687641716539e-06,
      "loss": 0.5413,
      "step": 184
    },
    {
      "epoch": 1.5075034106412004,
      "grad_norm": 0.6615116373276964,
      "learning_rate": 5.831646772915651e-06,
      "loss": 0.5294,
      "step": 185
    },
    {
      "epoch": 1.5156889495225103,
      "grad_norm": 0.7070316346639182,
      "learning_rate": 5.7845300736782205e-06,
      "loss": 0.5594,
      "step": 186
    },
    {
      "epoch": 1.5238744884038198,
      "grad_norm": 0.7195577910961622,
      "learning_rate": 5.7373418401549565e-06,
      "loss": 0.5132,
      "step": 187
    },
    {
      "epoch": 1.5320600272851297,
      "grad_norm": 0.6883209321690876,
      "learning_rate": 5.690086375019135e-06,
      "loss": 0.5635,
      "step": 188
    },
    {
      "epoch": 1.5402455661664392,
      "grad_norm": 0.6985266479941304,
      "learning_rate": 5.642767987074288e-06,
      "loss": 0.5112,
      "step": 189
    },
    {
      "epoch": 1.548431105047749,
      "grad_norm": 0.674690351672937,
      "learning_rate": 5.595390990861311e-06,
      "loss": 0.5269,
      "step": 190
    },
    {
      "epoch": 1.5566166439290585,
      "grad_norm": 0.6818863454446653,
      "learning_rate": 5.547959706265068e-06,
      "loss": 0.5097,
      "step": 191
    },
    {
      "epoch": 1.5648021828103684,
      "grad_norm": 0.6629891853139933,
      "learning_rate": 5.500478458120493e-06,
      "loss": 0.5223,
      "step": 192
    },
    {
      "epoch": 1.5729877216916779,
      "grad_norm": 0.6356895042570937,
      "learning_rate": 5.45295157581825e-06,
      "loss": 0.4993,
      "step": 193
    },
    {
      "epoch": 1.5811732605729878,
      "grad_norm": 0.6628390830246884,
      "learning_rate": 5.405383392909973e-06,
      "loss": 0.5341,
      "step": 194
    },
    {
      "epoch": 1.5893587994542973,
      "grad_norm": 0.6864305683806672,
      "learning_rate": 5.357778246713131e-06,
      "loss": 0.5005,
      "step": 195
    },
    {
      "epoch": 1.5975443383356072,
      "grad_norm": 0.7040608166410719,
      "learning_rate": 5.310140477915544e-06,
      "loss": 0.5314,
      "step": 196
    },
    {
      "epoch": 1.6057298772169166,
      "grad_norm": 0.6820604818315129,
      "learning_rate": 5.262474430179597e-06,
      "loss": 0.5104,
      "step": 197
    },
    {
      "epoch": 1.6139154160982265,
      "grad_norm": 0.7090671749176974,
      "learning_rate": 5.2147844497461745e-06,
      "loss": 0.5229,
      "step": 198
    },
    {
      "epoch": 1.622100954979536,
      "grad_norm": 0.7515147237804626,
      "learning_rate": 5.1670748850383734e-06,
      "loss": 0.531,
      "step": 199
    },
    {
      "epoch": 1.630286493860846,
      "grad_norm": 0.6893223377834798,
      "learning_rate": 5.1193500862650045e-06,
      "loss": 0.5304,
      "step": 200
    },
    {
      "epoch": 1.6384720327421554,
      "grad_norm": 0.7125759934041315,
      "learning_rate": 5.071614405023938e-06,
      "loss": 0.4967,
      "step": 201
    },
    {
      "epoch": 1.6466575716234653,
      "grad_norm": 0.6732968194331714,
      "learning_rate": 5.023872193905316e-06,
      "loss": 0.5213,
      "step": 202
    },
    {
      "epoch": 1.654843110504775,
      "grad_norm": 0.6741087890783836,
      "learning_rate": 4.976127806094685e-06,
      "loss": 0.5002,
      "step": 203
    },
    {
      "epoch": 1.6630286493860846,
      "grad_norm": 0.6668306133525173,
      "learning_rate": 4.928385594976063e-06,
      "loss": 0.4974,
      "step": 204
    },
    {
      "epoch": 1.6712141882673943,
      "grad_norm": 0.6678265510054164,
      "learning_rate": 4.880649913734996e-06,
      "loss": 0.5409,
      "step": 205
    },
    {
      "epoch": 1.679399727148704,
      "grad_norm": 0.7139916457668897,
      "learning_rate": 4.832925114961629e-06,
      "loss": 0.5423,
      "step": 206
    },
    {
      "epoch": 1.6875852660300137,
      "grad_norm": 0.6452652740953143,
      "learning_rate": 4.785215550253826e-06,
      "loss": 0.4958,
      "step": 207
    },
    {
      "epoch": 1.6957708049113234,
      "grad_norm": 0.6573148425831011,
      "learning_rate": 4.737525569820405e-06,
      "loss": 0.5131,
      "step": 208
    },
    {
      "epoch": 1.703956343792633,
      "grad_norm": 0.6505041626036248,
      "learning_rate": 4.689859522084457e-06,
      "loss": 0.5177,
      "step": 209
    },
    {
      "epoch": 1.7121418826739427,
      "grad_norm": 0.6837935893705414,
      "learning_rate": 4.64222175328687e-06,
      "loss": 0.5103,
      "step": 210
    },
    {
      "epoch": 1.7203274215552524,
      "grad_norm": 0.7126882310799999,
      "learning_rate": 4.594616607090028e-06,
      "loss": 0.5332,
      "step": 211
    },
    {
      "epoch": 1.728512960436562,
      "grad_norm": 0.7721004643182724,
      "learning_rate": 4.547048424181751e-06,
      "loss": 0.5015,
      "step": 212
    },
    {
      "epoch": 1.7366984993178718,
      "grad_norm": 0.6678185442268906,
      "learning_rate": 4.499521541879508e-06,
      "loss": 0.5343,
      "step": 213
    },
    {
      "epoch": 1.7448840381991815,
      "grad_norm": 0.6752033479335326,
      "learning_rate": 4.452040293734934e-06,
      "loss": 0.5206,
      "step": 214
    },
    {
      "epoch": 1.7530695770804912,
      "grad_norm": 0.9626993188200799,
      "learning_rate": 4.40460900913869e-06,
      "loss": 0.5419,
      "step": 215
    },
    {
      "epoch": 1.7612551159618008,
      "grad_norm": 0.6806062145986467,
      "learning_rate": 4.357232012925714e-06,
      "loss": 0.5214,
      "step": 216
    },
    {
      "epoch": 1.7694406548431105,
      "grad_norm": 0.6837895407788251,
      "learning_rate": 4.309913624980866e-06,
      "loss": 0.5125,
      "step": 217
    },
    {
      "epoch": 1.7776261937244202,
      "grad_norm": 0.6886701406202698,
      "learning_rate": 4.262658159845046e-06,
      "loss": 0.5562,
      "step": 218
    },
    {
      "epoch": 1.78581173260573,
      "grad_norm": 0.6605928299509395,
      "learning_rate": 4.2154699263217794e-06,
      "loss": 0.5372,
      "step": 219
    },
    {
      "epoch": 1.7939972714870396,
      "grad_norm": 1.0325353508365365,
      "learning_rate": 4.1683532270843505e-06,
      "loss": 0.5242,
      "step": 220
    },
    {
      "epoch": 1.8021828103683493,
      "grad_norm": 1.2024682114815595,
      "learning_rate": 4.121312358283464e-06,
      "loss": 0.5593,
      "step": 221
    },
    {
      "epoch": 1.810368349249659,
      "grad_norm": 0.9054405970578053,
      "learning_rate": 4.074351609155527e-06,
      "loss": 0.5261,
      "step": 222
    },
    {
      "epoch": 1.8185538881309686,
      "grad_norm": 0.6758636455040393,
      "learning_rate": 4.0274752616315485e-06,
      "loss": 0.5112,
      "step": 223
    },
    {
      "epoch": 1.8267394270122783,
      "grad_norm": 0.6825912498379267,
      "learning_rate": 3.980687589946715e-06,
      "loss": 0.5445,
      "step": 224
    },
    {
      "epoch": 1.834924965893588,
      "grad_norm": 0.6602675170538472,
      "learning_rate": 3.9339928602506505e-06,
      "loss": 0.5305,
      "step": 225
    },
    {
      "epoch": 1.8431105047748977,
      "grad_norm": 0.6312647757728843,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.4989,
      "step": 226
    },
    {
      "epoch": 1.8512960436562074,
      "grad_norm": 0.679901929082235,
      "learning_rate": 3.840899248662358e-06,
      "loss": 0.5445,
      "step": 227
    },
    {
      "epoch": 1.859481582537517,
      "grad_norm": 0.6682843496384612,
      "learning_rate": 3.7945088551445698e-06,
      "loss": 0.5413,
      "step": 228
    },
    {
      "epoch": 1.8676671214188267,
      "grad_norm": 0.6492016487295222,
      "learning_rate": 3.748228379590438e-06,
      "loss": 0.528,
      "step": 229
    },
    {
      "epoch": 1.8758526603001364,
      "grad_norm": 0.6684595059784664,
      "learning_rate": 3.7020620419029095e-06,
      "loss": 0.5079,
      "step": 230
    },
    {
      "epoch": 1.884038199181446,
      "grad_norm": 0.6925041008811655,
      "learning_rate": 3.656014051577713e-06,
      "loss": 0.4863,
      "step": 231
    },
    {
      "epoch": 1.8922237380627558,
      "grad_norm": 0.6934722383156499,
      "learning_rate": 3.610088607319544e-06,
      "loss": 0.5311,
      "step": 232
    },
    {
      "epoch": 1.9004092769440655,
      "grad_norm": 0.6700713862638494,
      "learning_rate": 3.5642898966592145e-06,
      "loss": 0.5267,
      "step": 233
    },
    {
      "epoch": 1.9085948158253752,
      "grad_norm": 0.6675855588895423,
      "learning_rate": 3.518622095571831e-06,
      "loss": 0.5157,
      "step": 234
    },
    {
      "epoch": 1.9167803547066848,
      "grad_norm": 0.7287813948503299,
      "learning_rate": 3.4730893680960267e-06,
      "loss": 0.5167,
      "step": 235
    },
    {
      "epoch": 1.9249658935879945,
      "grad_norm": 0.6983347879001167,
      "learning_rate": 3.4276958659542838e-06,
      "loss": 0.5032,
      "step": 236
    },
    {
      "epoch": 1.9331514324693042,
      "grad_norm": 0.6875622079627788,
      "learning_rate": 3.382445728174365e-06,
      "loss": 0.5354,
      "step": 237
    },
    {
      "epoch": 1.941336971350614,
      "grad_norm": 0.7213008732695624,
      "learning_rate": 3.3373430807119212e-06,
      "loss": 0.5246,
      "step": 238
    },
    {
      "epoch": 1.9495225102319236,
      "grad_norm": 0.707171568997416,
      "learning_rate": 3.292392036074277e-06,
      "loss": 0.5572,
      "step": 239
    },
    {
      "epoch": 1.9577080491132333,
      "grad_norm": 0.6831140788449497,
      "learning_rate": 3.2475966929454505e-06,
      "loss": 0.525,
      "step": 240
    },
    {
      "epoch": 1.965893587994543,
      "grad_norm": 0.6709288897249476,
      "learning_rate": 3.202961135812437e-06,
      "loss": 0.5047,
      "step": 241
    },
    {
      "epoch": 1.9740791268758526,
      "grad_norm": 0.630389852839246,
      "learning_rate": 3.1584894345927663e-06,
      "loss": 0.4997,
      "step": 242
    },
    {
      "epoch": 1.9822646657571623,
      "grad_norm": 0.6597569020078831,
      "learning_rate": 3.114185644263415e-06,
      "loss": 0.5359,
      "step": 243
    },
    {
      "epoch": 1.990450204638472,
      "grad_norm": 0.7340456758824426,
      "learning_rate": 3.0700538044910684e-06,
      "loss": 0.5239,
      "step": 244
    },
    {
      "epoch": 1.9986357435197817,
      "grad_norm": 0.6785372793547849,
      "learning_rate": 3.0260979392637753e-06,
      "loss": 0.5093,
      "step": 245
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.6785372793547849,
      "learning_rate": 2.9823220565240396e-06,
      "loss": 0.4999,
      "step": 246
    },
    {
      "epoch": 2.00818553888131,
      "grad_norm": 1.8279474055803957,
      "learning_rate": 2.9387301478033694e-06,
      "loss": 0.4133,
      "step": 247
    },
    {
      "epoch": 2.0163710777626194,
      "grad_norm": 0.7778576282194246,
      "learning_rate": 2.8953261878583263e-06,
      "loss": 0.4113,
      "step": 248
    },
    {
      "epoch": 2.0245566166439293,
      "grad_norm": 0.7947084402075207,
      "learning_rate": 2.852114134308104e-06,
      "loss": 0.4277,
      "step": 249
    },
    {
      "epoch": 2.0327421555252387,
      "grad_norm": 0.8407711968632838,
      "learning_rate": 2.8090979272736663e-06,
      "loss": 0.4181,
      "step": 250
    },
    {
      "epoch": 2.0409276944065486,
      "grad_norm": 0.8070071079726969,
      "learning_rate": 2.766281489018482e-06,
      "loss": 0.414,
      "step": 251
    },
    {
      "epoch": 2.049113233287858,
      "grad_norm": 0.7504845165785325,
      "learning_rate": 2.7236687235908953e-06,
      "loss": 0.4264,
      "step": 252
    },
    {
      "epoch": 2.057298772169168,
      "grad_norm": 0.7484637498779095,
      "learning_rate": 2.681263516468139e-06,
      "loss": 0.424,
      "step": 253
    },
    {
      "epoch": 2.0654843110504775,
      "grad_norm": 0.7672301449846941,
      "learning_rate": 2.6390697342020665e-06,
      "loss": 0.418,
      "step": 254
    },
    {
      "epoch": 2.0736698499317874,
      "grad_norm": 0.8252088950713669,
      "learning_rate": 2.5970912240665815e-06,
      "loss": 0.4351,
      "step": 255
    },
    {
      "epoch": 2.081855388813097,
      "grad_norm": 0.9368985866664803,
      "learning_rate": 2.5553318137068473e-06,
      "loss": 0.4047,
      "step": 256
    },
    {
      "epoch": 2.0900409276944067,
      "grad_norm": 0.892666476951023,
      "learning_rate": 2.5137953107902814e-06,
      "loss": 0.4103,
      "step": 257
    },
    {
      "epoch": 2.098226466575716,
      "grad_norm": 0.81654627837115,
      "learning_rate": 2.472485502659358e-06,
      "loss": 0.4098,
      "step": 258
    },
    {
      "epoch": 2.106412005457026,
      "grad_norm": 0.7640263710507721,
      "learning_rate": 2.4314061559862836e-06,
      "loss": 0.3979,
      "step": 259
    },
    {
      "epoch": 2.1145975443383356,
      "grad_norm": 0.7707988225465455,
      "learning_rate": 2.3905610164295394e-06,
      "loss": 0.4001,
      "step": 260
    },
    {
      "epoch": 2.1227830832196455,
      "grad_norm": 0.7384332255225988,
      "learning_rate": 2.3499538082923607e-06,
      "loss": 0.424,
      "step": 261
    },
    {
      "epoch": 2.130968622100955,
      "grad_norm": 0.7717039023517804,
      "learning_rate": 2.309588234183137e-06,
      "loss": 0.4116,
      "step": 262
    },
    {
      "epoch": 2.139154160982265,
      "grad_norm": 0.774162944001744,
      "learning_rate": 2.2694679746778116e-06,
      "loss": 0.3698,
      "step": 263
    },
    {
      "epoch": 2.1473396998635743,
      "grad_norm": 0.7788823100928985,
      "learning_rate": 2.22959668798428e-06,
      "loss": 0.3919,
      "step": 264
    },
    {
      "epoch": 2.155525238744884,
      "grad_norm": 0.8632537243107956,
      "learning_rate": 2.1899780096088375e-06,
      "loss": 0.407,
      "step": 265
    },
    {
      "epoch": 2.1637107776261937,
      "grad_norm": 0.7677472904066729,
      "learning_rate": 2.1506155520246795e-06,
      "loss": 0.4097,
      "step": 266
    },
    {
      "epoch": 2.1718963165075036,
      "grad_norm": 0.8292963201693136,
      "learning_rate": 2.1115129043425188e-06,
      "loss": 0.4161,
      "step": 267
    },
    {
      "epoch": 2.180081855388813,
      "grad_norm": 0.7630554646262787,
      "learning_rate": 2.072673631983323e-06,
      "loss": 0.3763,
      "step": 268
    },
    {
      "epoch": 2.188267394270123,
      "grad_norm": 0.7431688030641092,
      "learning_rate": 2.0341012763532243e-06,
      "loss": 0.379,
      "step": 269
    },
    {
      "epoch": 2.1964529331514324,
      "grad_norm": 0.8283857532938996,
      "learning_rate": 1.995799354520598e-06,
      "loss": 0.3986,
      "step": 270
    },
    {
      "epoch": 2.2046384720327423,
      "grad_norm": 0.7664654913892277,
      "learning_rate": 1.9577713588953797e-06,
      "loss": 0.3874,
      "step": 271
    },
    {
      "epoch": 2.212824010914052,
      "grad_norm": 0.7821218387875882,
      "learning_rate": 1.9200207569106216e-06,
      "loss": 0.3946,
      "step": 272
    },
    {
      "epoch": 2.2210095497953617,
      "grad_norm": 0.8359718127109106,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.4007,
      "step": 273
    },
    {
      "epoch": 2.229195088676671,
      "grad_norm": 0.7827350042496607,
      "learning_rate": 1.8453654768156138e-06,
      "loss": 0.4269,
      "step": 274
    },
    {
      "epoch": 2.237380627557981,
      "grad_norm": 0.841679648376414,
      "learning_rate": 1.8084676058531376e-06,
      "loss": 0.3932,
      "step": 275
    },
    {
      "epoch": 2.2455661664392905,
      "grad_norm": 0.839943238676734,
      "learning_rate": 1.771860742205988e-06,
      "loss": 0.4322,
      "step": 276
    },
    {
      "epoch": 2.2537517053206004,
      "grad_norm": 0.829532308903955,
      "learning_rate": 1.7355482237268983e-06,
      "loss": 0.436,
      "step": 277
    },
    {
      "epoch": 2.26193724420191,
      "grad_norm": 0.8372210579762943,
      "learning_rate": 1.6995333614298908e-06,
      "loss": 0.4093,
      "step": 278
    },
    {
      "epoch": 2.27012278308322,
      "grad_norm": 0.8081527770715485,
      "learning_rate": 1.6638194391883822e-06,
      "loss": 0.3858,
      "step": 279
    },
    {
      "epoch": 2.2783083219645293,
      "grad_norm": 0.769954474277968,
      "learning_rate": 1.6284097134357535e-06,
      "loss": 0.3954,
      "step": 280
    },
    {
      "epoch": 2.286493860845839,
      "grad_norm": 0.8260850385707194,
      "learning_rate": 1.5933074128684333e-06,
      "loss": 0.4086,
      "step": 281
    },
    {
      "epoch": 2.2946793997271486,
      "grad_norm": 0.7953598123135504,
      "learning_rate": 1.5585157381514875e-06,
      "loss": 0.4159,
      "step": 282
    },
    {
      "epoch": 2.3028649386084585,
      "grad_norm": 0.786859054444193,
      "learning_rate": 1.5240378616267887e-06,
      "loss": 0.4176,
      "step": 283
    },
    {
      "epoch": 2.311050477489768,
      "grad_norm": 0.7969101629502529,
      "learning_rate": 1.4898769270237611e-06,
      "loss": 0.389,
      "step": 284
    },
    {
      "epoch": 2.319236016371078,
      "grad_norm": 0.833189518702729,
      "learning_rate": 1.4560360491727233e-06,
      "loss": 0.3908,
      "step": 285
    },
    {
      "epoch": 2.3274215552523874,
      "grad_norm": 0.7732573237876273,
      "learning_rate": 1.4225183137208775e-06,
      "loss": 0.4133,
      "step": 286
    },
    {
      "epoch": 2.3356070941336973,
      "grad_norm": 0.7473413525545317,
      "learning_rate": 1.389326776850966e-06,
      "loss": 0.4233,
      "step": 287
    },
    {
      "epoch": 2.3437926330150067,
      "grad_norm": 0.7491139940557698,
      "learning_rate": 1.3564644650025894e-06,
      "loss": 0.4123,
      "step": 288
    },
    {
      "epoch": 2.3519781718963166,
      "grad_norm": 0.7874918542020864,
      "learning_rate": 1.323934374596268e-06,
      "loss": 0.3924,
      "step": 289
    },
    {
      "epoch": 2.360163710777626,
      "grad_norm": 0.7090630129353084,
      "learning_rate": 1.2917394717602123e-06,
      "loss": 0.3871,
      "step": 290
    },
    {
      "epoch": 2.368349249658936,
      "grad_norm": 0.7464575367696425,
      "learning_rate": 1.2598826920598773e-06,
      "loss": 0.3864,
      "step": 291
    },
    {
      "epoch": 2.3765347885402455,
      "grad_norm": 0.7631727170097402,
      "learning_rate": 1.2283669402302878e-06,
      "loss": 0.4317,
      "step": 292
    },
    {
      "epoch": 2.3847203274215554,
      "grad_norm": 0.766457447194422,
      "learning_rate": 1.197195089911191e-06,
      "loss": 0.3923,
      "step": 293
    },
    {
      "epoch": 2.392905866302865,
      "grad_norm": 0.7813349442160811,
      "learning_rate": 1.166369983385024e-06,
      "loss": 0.3893,
      "step": 294
    },
    {
      "epoch": 2.4010914051841747,
      "grad_norm": 0.7760077699058369,
      "learning_rate": 1.1358944313177566e-06,
      "loss": 0.419,
      "step": 295
    },
    {
      "epoch": 2.409276944065484,
      "grad_norm": 0.8611771806504117,
      "learning_rate": 1.1057712125026116e-06,
      "loss": 0.4134,
      "step": 296
    },
    {
      "epoch": 2.417462482946794,
      "grad_norm": 0.8715690874271717,
      "learning_rate": 1.0760030736066952e-06,
      "loss": 0.4299,
      "step": 297
    },
    {
      "epoch": 2.4256480218281036,
      "grad_norm": 0.738527625183462,
      "learning_rate": 1.0465927289205452e-06,
      "loss": 0.3721,
      "step": 298
    },
    {
      "epoch": 2.4338335607094135,
      "grad_norm": 0.7364008623286723,
      "learning_rate": 1.0175428601106441e-06,
      "loss": 0.4164,
      "step": 299
    },
    {
      "epoch": 2.442019099590723,
      "grad_norm": 0.7606943953483646,
      "learning_rate": 9.888561159748995e-07,
      "loss": 0.3968,
      "step": 300
    },
    {
      "epoch": 2.450204638472033,
      "grad_norm": 0.7936868365174442,
      "learning_rate": 9.605351122011308e-07,
      "loss": 0.417,
      "step": 301
    },
    {
      "epoch": 2.4583901773533423,
      "grad_norm": 0.9111596760359036,
      "learning_rate": 9.325824311285564e-07,
      "loss": 0.3973,
      "step": 302
    },
    {
      "epoch": 2.466575716234652,
      "grad_norm": 0.7528178925904793,
      "learning_rate": 9.050006215123419e-07,
      "loss": 0.4261,
      "step": 303
    },
    {
      "epoch": 2.4747612551159617,
      "grad_norm": 0.7830814196247444,
      "learning_rate": 8.777921982911996e-07,
      "loss": 0.4053,
      "step": 304
    },
    {
      "epoch": 2.4829467939972716,
      "grad_norm": 0.7602680477272743,
      "learning_rate": 8.509596423580712e-07,
      "loss": 0.4035,
      "step": 305
    },
    {
      "epoch": 2.491132332878581,
      "grad_norm": 0.7415953605335074,
      "learning_rate": 8.245054003339247e-07,
      "loss": 0.3895,
      "step": 306
    },
    {
      "epoch": 2.499317871759891,
      "grad_norm": 0.7570773863255101,
      "learning_rate": 7.984318843446593e-07,
      "loss": 0.4015,
      "step": 307
    },
    {
      "epoch": 2.5075034106412004,
      "grad_norm": 0.7948263347472059,
      "learning_rate": 7.727414718011706e-07,
      "loss": 0.4006,
      "step": 308
    },
    {
      "epoch": 2.5156889495225103,
      "grad_norm": 0.7441713828948021,
      "learning_rate": 7.474365051825749e-07,
      "loss": 0.4164,
      "step": 309
    },
    {
      "epoch": 2.52387448840382,
      "grad_norm": 0.8318405222511415,
      "learning_rate": 7.225192918226215e-07,
      "loss": 0.3882,
      "step": 310
    },
    {
      "epoch": 2.5320600272851297,
      "grad_norm": 0.7576816109290357,
      "learning_rate": 6.979921036993042e-07,
      "loss": 0.4149,
      "step": 311
    },
    {
      "epoch": 2.540245566166439,
      "grad_norm": 0.7368181687339158,
      "learning_rate": 6.738571772276997e-07,
      "loss": 0.4117,
      "step": 312
    },
    {
      "epoch": 2.548431105047749,
      "grad_norm": 0.7336446538957092,
      "learning_rate": 6.501167130560515e-07,
      "loss": 0.3916,
      "step": 313
    },
    {
      "epoch": 2.5566166439290585,
      "grad_norm": 0.7740723418782292,
      "learning_rate": 6.267728758651131e-07,
      "loss": 0.411,
      "step": 314
    },
    {
      "epoch": 2.5648021828103684,
      "grad_norm": 0.7461817145842243,
      "learning_rate": 6.038277941707671e-07,
      "loss": 0.3826,
      "step": 315
    },
    {
      "epoch": 2.572987721691678,
      "grad_norm": 0.7518750830999247,
      "learning_rate": 5.812835601299438e-07,
      "loss": 0.4027,
      "step": 316
    },
    {
      "epoch": 2.581173260572988,
      "grad_norm": 0.7838442075363712,
      "learning_rate": 5.591422293498633e-07,
      "loss": 0.4123,
      "step": 317
    },
    {
      "epoch": 2.5893587994542973,
      "grad_norm": 0.8197901275460071,
      "learning_rate": 5.374058207005945e-07,
      "loss": 0.3929,
      "step": 318
    },
    {
      "epoch": 2.597544338335607,
      "grad_norm": 0.756093939796975,
      "learning_rate": 5.160763161309768e-07,
      "loss": 0.3874,
      "step": 319
    },
    {
      "epoch": 2.6057298772169166,
      "grad_norm": 0.7750985666461455,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.4153,
      "step": 320
    },
    {
      "epoch": 2.6139154160982265,
      "grad_norm": 0.7232020878395763,
      "learning_rate": 4.7464576133899043e-07,
      "loss": 0.3866,
      "step": 321
    },
    {
      "epoch": 2.622100954979536,
      "grad_norm": 0.7528175373532525,
      "learning_rate": 4.545484887986368e-07,
      "loss": 0.4131,
      "step": 322
    },
    {
      "epoch": 2.630286493860846,
      "grad_norm": 0.7641457571529617,
      "learning_rate": 4.348656753575092e-07,
      "loss": 0.4046,
      "step": 323
    },
    {
      "epoch": 2.6384720327421554,
      "grad_norm": 0.7621065360883466,
      "learning_rate": 4.1559911571545544e-07,
      "loss": 0.4142,
      "step": 324
    },
    {
      "epoch": 2.6466575716234653,
      "grad_norm": 0.7958121777976367,
      "learning_rate": 3.9675056661785563e-07,
      "loss": 0.405,
      "step": 325
    },
    {
      "epoch": 2.6548431105047747,
      "grad_norm": 0.7837478689185601,
      "learning_rate": 3.783217466954503e-07,
      "loss": 0.3924,
      "step": 326
    },
    {
      "epoch": 2.6630286493860846,
      "grad_norm": 0.7818466172691447,
      "learning_rate": 3.603143363076217e-07,
      "loss": 0.405,
      "step": 327
    },
    {
      "epoch": 2.6712141882673945,
      "grad_norm": 0.737120843251674,
      "learning_rate": 3.427299773891868e-07,
      "loss": 0.3855,
      "step": 328
    },
    {
      "epoch": 2.679399727148704,
      "grad_norm": 0.7930563359029547,
      "learning_rate": 3.255702733006766e-07,
      "loss": 0.3987,
      "step": 329
    },
    {
      "epoch": 2.6875852660300135,
      "grad_norm": 0.8366985596667617,
      "learning_rate": 3.088367886821481e-07,
      "loss": 0.3946,
      "step": 330
    },
    {
      "epoch": 2.6957708049113234,
      "grad_norm": 0.8193841638700755,
      "learning_rate": 2.925310493105099e-07,
      "loss": 0.4186,
      "step": 331
    },
    {
      "epoch": 2.7039563437926333,
      "grad_norm": 0.7544220340235186,
      "learning_rate": 2.7665454196040665e-07,
      "loss": 0.4052,
      "step": 332
    },
    {
      "epoch": 2.7121418826739427,
      "grad_norm": 0.8023433638285234,
      "learning_rate": 2.6120871426864866e-07,
      "loss": 0.3807,
      "step": 333
    },
    {
      "epoch": 2.720327421555252,
      "grad_norm": 0.7406196915706266,
      "learning_rate": 2.4619497460222184e-07,
      "loss": 0.4101,
      "step": 334
    },
    {
      "epoch": 2.728512960436562,
      "grad_norm": 0.737077976937547,
      "learning_rate": 2.316146919298623e-07,
      "loss": 0.3873,
      "step": 335
    },
    {
      "epoch": 2.736698499317872,
      "grad_norm": 0.8083843749329572,
      "learning_rate": 2.1746919569723858e-07,
      "loss": 0.4159,
      "step": 336
    },
    {
      "epoch": 2.7448840381991815,
      "grad_norm": 0.7932339569895047,
      "learning_rate": 2.037597757057297e-07,
      "loss": 0.4098,
      "step": 337
    },
    {
      "epoch": 2.753069577080491,
      "grad_norm": 0.8580661081839333,
      "learning_rate": 1.9048768199481983e-07,
      "loss": 0.3932,
      "step": 338
    },
    {
      "epoch": 2.761255115961801,
      "grad_norm": 0.8216483122178869,
      "learning_rate": 1.776541247281177e-07,
      "loss": 0.4107,
      "step": 339
    },
    {
      "epoch": 2.7694406548431107,
      "grad_norm": 0.8116256278850041,
      "learning_rate": 1.6526027408301227e-07,
      "loss": 0.4037,
      "step": 340
    },
    {
      "epoch": 2.77762619372442,
      "grad_norm": 0.8129785704080104,
      "learning_rate": 1.5330726014397668e-07,
      "loss": 0.4109,
      "step": 341
    },
    {
      "epoch": 2.7858117326057297,
| "grad_norm": 0.756821439289195, | |
| "learning_rate": 1.417961727995254e-07, | |
| "loss": 0.411, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 2.7939972714870396, | |
| "grad_norm": 0.7776940434532348, | |
| "learning_rate": 1.307280616428336e-07, | |
| "loss": 0.4278, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 2.8021828103683495, | |
| "grad_norm": 0.7541646575501004, | |
| "learning_rate": 1.2010393587603975e-07, | |
| "loss": 0.4309, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 2.810368349249659, | |
| "grad_norm": 0.7615569057804531, | |
| "learning_rate": 1.0992476421822052e-07, | |
| "loss": 0.4018, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 2.8185538881309684, | |
| "grad_norm": 0.7326601732112479, | |
| "learning_rate": 1.0019147481706626e-07, | |
| "loss": 0.3798, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 2.8267394270122783, | |
| "grad_norm": 0.7558224410956776, | |
| "learning_rate": 9.090495516424713e-08, | |
| "loss": 0.3934, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 2.8349249658935882, | |
| "grad_norm": 0.7586566867615607, | |
| "learning_rate": 8.206605201449447e-08, | |
| "loss": 0.4083, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 2.8431105047748977, | |
| "grad_norm": 0.7756404145344101, | |
| "learning_rate": 7.367557130838921e-08, | |
| "loss": 0.4198, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 2.851296043656207, | |
| "grad_norm": 0.7619932836793645, | |
| "learning_rate": 6.573427809888067e-08, | |
| "loss": 0.4052, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.859481582537517, | |
| "grad_norm": 0.7864285751411372, | |
| "learning_rate": 5.824289648152126e-08, | |
| "loss": 0.4049, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 2.867667121418827, | |
| "grad_norm": 0.7724732501336614, | |
| "learning_rate": 5.120210952844873e-08, | |
| "loss": 0.4082, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 2.8758526603001364, | |
| "grad_norm": 0.7448742382267542, | |
| "learning_rate": 4.461255922609986e-08, | |
| "loss": 0.4097, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 2.884038199181446, | |
| "grad_norm": 0.7752438252600893, | |
| "learning_rate": 3.8474846416672874e-08, | |
| "loss": 0.399, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 2.892223738062756, | |
| "grad_norm": 0.8642432702205011, | |
| "learning_rate": 3.278953074334512e-08, | |
| "loss": 0.3937, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 2.9004092769440657, | |
| "grad_norm": 0.7501677664181151, | |
| "learning_rate": 2.75571305992417e-08, | |
| "loss": 0.3895, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 2.908594815825375, | |
| "grad_norm": 0.7336645310315806, | |
| "learning_rate": 2.2778123080167136e-08, | |
| "loss": 0.4154, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 2.9167803547066846, | |
| "grad_norm": 0.8016612426903582, | |
| "learning_rate": 1.845294394110686e-08, | |
| "loss": 0.3989, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 2.9249658935879945, | |
| "grad_norm": 0.7453209695510444, | |
| "learning_rate": 1.4581987556490095e-08, | |
| "loss": 0.4218, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 2.9331514324693044, | |
| "grad_norm": 0.7376596618566437, | |
| "learning_rate": 1.1165606884234182e-08, | |
| "loss": 0.4093, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 2.941336971350614, | |
| "grad_norm": 0.7581298522735148, | |
| "learning_rate": 8.204113433559202e-09, | |
| "loss": 0.3715, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 2.9495225102319234, | |
| "grad_norm": 0.7252182853236371, | |
| "learning_rate": 5.6977772365857105e-09, | |
| "loss": 0.3921, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 2.9577080491132333, | |
| "grad_norm": 0.7594004110808722, | |
| "learning_rate": 3.6468268237105364e-09, | |
| "loss": 0.4069, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 2.965893587994543, | |
| "grad_norm": 0.7885350379055207, | |
| "learning_rate": 2.0514492027728928e-09, | |
| "loss": 0.4078, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 2.9740791268758526, | |
| "grad_norm": 0.762337453867713, | |
| "learning_rate": 9.117898419991333e-10, | |
| "loss": 0.3989, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 2.982264665757162, | |
| "grad_norm": 0.7984650139098143, | |
| "learning_rate": 2.2795265674113721e-10, | |
| "loss": 0.3826, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.982264665757162, | |
| "step": 366, | |
| "total_flos": 5.395725452134318e+17, | |
| "train_loss": 0.5496462623441154, | |
| "train_runtime": 28753.9424, | |
| "train_samples_per_second": 0.612, | |
| "train_steps_per_second": 0.013 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 366, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.395725452134318e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |