| { |
| "best_global_step": 500, |
| "best_metric": 0.78649825, |
| "best_model_checkpoint": "/root/ms-swift/examples/train/liger/output/Qwen4SFTDeepResearch/v2-20250930-212021/checkpoint-400", |
| "epoch": 4.316546762589928, |
| "eval_steps": 100, |
| "global_step": 1200, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0035971223021582736, |
| "grad_norm": 15.756150320288057, |
| "learning_rate": 9.999995011498398e-06, |
| "loss": 1.5632460117340088, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.007194244604316547, |
| "grad_norm": 7.517349932297116, |
| "learning_rate": 9.999980046003546e-06, |
| "loss": 1.343105435371399, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.01079136690647482, |
| "grad_norm": 3.1958830788674173, |
| "learning_rate": 9.999955103545307e-06, |
| "loss": 1.1570754051208496, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.014388489208633094, |
| "grad_norm": 2.315374404024555, |
| "learning_rate": 9.999920184173449e-06, |
| "loss": 1.1298246383666992, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.017985611510791366, |
| "grad_norm": 2.941589170867705, |
| "learning_rate": 9.999875287957652e-06, |
| "loss": 1.1169365644454956, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.02158273381294964, |
| "grad_norm": 2.515381306131075, |
| "learning_rate": 9.999820414987502e-06, |
| "loss": 1.0570898056030273, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.025179856115107913, |
| "grad_norm": 2.2826473676552625, |
| "learning_rate": 9.999755565372492e-06, |
| "loss": 1.0654137134552002, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.02877697841726619, |
| "grad_norm": 1.5778221040103755, |
| "learning_rate": 9.999680739242022e-06, |
| "loss": 1.010119915008545, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.03237410071942446, |
| "grad_norm": 1.2932029633476676, |
| "learning_rate": 9.999595936745401e-06, |
| "loss": 1.0208889245986938, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.03597122302158273, |
| "grad_norm": 1.0284808012699043, |
| "learning_rate": 9.999501158051846e-06, |
| "loss": 0.9741397500038147, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.039568345323741004, |
| "grad_norm": 0.9153376375520705, |
| "learning_rate": 9.999396403350476e-06, |
| "loss": 0.9674245119094849, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.04316546762589928, |
| "grad_norm": 0.8405459688436054, |
| "learning_rate": 9.999281672850317e-06, |
| "loss": 0.956840991973877, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.046762589928057555, |
| "grad_norm": 0.7954149229636902, |
| "learning_rate": 9.999156966780306e-06, |
| "loss": 0.9293943643569946, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.050359712230215826, |
| "grad_norm": 0.7264144566060944, |
| "learning_rate": 9.99902228538928e-06, |
| "loss": 0.9339001774787903, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.0539568345323741, |
| "grad_norm": 0.6989667251925004, |
| "learning_rate": 9.998877628945982e-06, |
| "loss": 0.9139963984489441, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.05755395683453238, |
| "grad_norm": 0.7146203444147694, |
| "learning_rate": 9.99872299773906e-06, |
| "loss": 0.9200505018234253, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.06115107913669065, |
| "grad_norm": 0.6965354357010114, |
| "learning_rate": 9.998558392077065e-06, |
| "loss": 0.9260781407356262, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.06474820143884892, |
| "grad_norm": 0.7526497086024109, |
| "learning_rate": 9.998383812288451e-06, |
| "loss": 0.9045039415359497, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.0683453237410072, |
| "grad_norm": 0.6111014847522879, |
| "learning_rate": 9.998199258721576e-06, |
| "loss": 0.8944211602210999, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.07194244604316546, |
| "grad_norm": 0.5854801434764001, |
| "learning_rate": 9.998004731744696e-06, |
| "loss": 0.901360034942627, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.07553956834532374, |
| "grad_norm": 0.5912075388434902, |
| "learning_rate": 9.997800231745973e-06, |
| "loss": 0.8743350505828857, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.07913669064748201, |
| "grad_norm": 0.5644564808326691, |
| "learning_rate": 9.997585759133463e-06, |
| "loss": 0.8950492143630981, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.08273381294964029, |
| "grad_norm": 0.6193963516298768, |
| "learning_rate": 9.99736131433513e-06, |
| "loss": 0.8587119579315186, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.08633093525179857, |
| "grad_norm": 0.6319747604029503, |
| "learning_rate": 9.997126897798826e-06, |
| "loss": 0.8936506509780884, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.08992805755395683, |
| "grad_norm": 0.5786372791735689, |
| "learning_rate": 9.996882509992307e-06, |
| "loss": 0.8633454442024231, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.09352517985611511, |
| "grad_norm": 0.5786725523056919, |
| "learning_rate": 9.996628151403226e-06, |
| "loss": 0.8897263407707214, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.09712230215827339, |
| "grad_norm": 0.5664869834871367, |
| "learning_rate": 9.99636382253913e-06, |
| "loss": 0.8681536316871643, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.10071942446043165, |
| "grad_norm": 0.5515477526139622, |
| "learning_rate": 9.996089523927461e-06, |
| "loss": 0.8630425930023193, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.10431654676258993, |
| "grad_norm": 0.5315269272765065, |
| "learning_rate": 9.995805256115555e-06, |
| "loss": 0.8772830367088318, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.1079136690647482, |
| "grad_norm": 0.5437185163066915, |
| "learning_rate": 9.995511019670639e-06, |
| "loss": 0.8837330341339111, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.11151079136690648, |
| "grad_norm": 0.5549255962064505, |
| "learning_rate": 9.995206815179833e-06, |
| "loss": 0.870895266532898, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.11510791366906475, |
| "grad_norm": 0.5591701123937822, |
| "learning_rate": 9.994892643250147e-06, |
| "loss": 0.8615888953208923, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.11870503597122302, |
| "grad_norm": 0.5452402301752459, |
| "learning_rate": 9.99456850450848e-06, |
| "loss": 0.8485289812088013, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.1223021582733813, |
| "grad_norm": 0.5271304889146575, |
| "learning_rate": 9.99423439960162e-06, |
| "loss": 0.8544217944145203, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.12589928057553956, |
| "grad_norm": 0.5465089936521315, |
| "learning_rate": 9.993890329196236e-06, |
| "loss": 0.8891148567199707, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.12949640287769784, |
| "grad_norm": 0.545309807427878, |
| "learning_rate": 9.993536293978892e-06, |
| "loss": 0.8729881644248962, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.13309352517985612, |
| "grad_norm": 0.5522501928569817, |
| "learning_rate": 9.993172294656024e-06, |
| "loss": 0.8602065443992615, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.1366906474820144, |
| "grad_norm": 0.5384827461098874, |
| "learning_rate": 9.992798331953962e-06, |
| "loss": 0.8517411351203918, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.14028776978417265, |
| "grad_norm": 0.5297608942892242, |
| "learning_rate": 9.99241440661891e-06, |
| "loss": 0.8619793653488159, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.14388489208633093, |
| "grad_norm": 0.5347497959937362, |
| "learning_rate": 9.99202051941695e-06, |
| "loss": 0.8757240772247314, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.1474820143884892, |
| "grad_norm": 0.5302181279741001, |
| "learning_rate": 9.991616671134045e-06, |
| "loss": 0.8662016987800598, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.1510791366906475, |
| "grad_norm": 0.5110611514487727, |
| "learning_rate": 9.991202862576036e-06, |
| "loss": 0.8414884209632874, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.15467625899280577, |
| "grad_norm": 0.5318960632271792, |
| "learning_rate": 9.99077909456864e-06, |
| "loss": 0.8459748029708862, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.15827338129496402, |
| "grad_norm": 0.5509590436828911, |
| "learning_rate": 9.99034536795744e-06, |
| "loss": 0.8320192098617554, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.1618705035971223, |
| "grad_norm": 0.5244253719817727, |
| "learning_rate": 9.989901683607893e-06, |
| "loss": 0.8553211688995361, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.16546762589928057, |
| "grad_norm": 0.5323576490946048, |
| "learning_rate": 9.989448042405328e-06, |
| "loss": 0.8586495518684387, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.16906474820143885, |
| "grad_norm": 0.5877802964746905, |
| "learning_rate": 9.988984445254944e-06, |
| "loss": 0.8607613444328308, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.17266187050359713, |
| "grad_norm": 0.5495938798715749, |
| "learning_rate": 9.9885108930818e-06, |
| "loss": 0.8512550592422485, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.17625899280575538, |
| "grad_norm": 0.5540112993478788, |
| "learning_rate": 9.988027386830825e-06, |
| "loss": 0.8515195846557617, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.17985611510791366, |
| "grad_norm": 0.5806816109293668, |
| "learning_rate": 9.987533927466804e-06, |
| "loss": 0.8428958654403687, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.18345323741007194, |
| "grad_norm": 0.5679037465356196, |
| "learning_rate": 9.987030515974389e-06, |
| "loss": 0.8490457534790039, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.18705035971223022, |
| "grad_norm": 0.5649574806363405, |
| "learning_rate": 9.986517153358086e-06, |
| "loss": 0.8424092531204224, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.1906474820143885, |
| "grad_norm": 0.5107378637113733, |
| "learning_rate": 9.985993840642261e-06, |
| "loss": 0.8451225161552429, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.19424460431654678, |
| "grad_norm": 0.5266148919393265, |
| "learning_rate": 9.98546057887113e-06, |
| "loss": 0.8574127554893494, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.19784172661870503, |
| "grad_norm": 0.4968321264322306, |
| "learning_rate": 9.984917369108767e-06, |
| "loss": 0.8196417093276978, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.2014388489208633, |
| "grad_norm": 0.5301227993296292, |
| "learning_rate": 9.984364212439089e-06, |
| "loss": 0.8178601264953613, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.20503597122302158, |
| "grad_norm": 0.5322541177201829, |
| "learning_rate": 9.98380110996587e-06, |
| "loss": 0.8680044412612915, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.20863309352517986, |
| "grad_norm": 0.5617210521859881, |
| "learning_rate": 9.98322806281272e-06, |
| "loss": 0.8366754055023193, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.21223021582733814, |
| "grad_norm": 0.5333079804740503, |
| "learning_rate": 9.9826450721231e-06, |
| "loss": 0.8472665548324585, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.2158273381294964, |
| "grad_norm": 0.5670784495454859, |
| "learning_rate": 9.982052139060312e-06, |
| "loss": 0.8183034658432007, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.21942446043165467, |
| "grad_norm": 0.522473762554814, |
| "learning_rate": 9.981449264807493e-06, |
| "loss": 0.8251825571060181, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.22302158273381295, |
| "grad_norm": 0.5359962882882059, |
| "learning_rate": 9.980836450567619e-06, |
| "loss": 0.8035885095596313, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.22661870503597123, |
| "grad_norm": 0.5452200763440471, |
| "learning_rate": 9.9802136975635e-06, |
| "loss": 0.850425124168396, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.2302158273381295, |
| "grad_norm": 0.505908979785123, |
| "learning_rate": 9.979581007037776e-06, |
| "loss": 0.8475151062011719, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.23381294964028776, |
| "grad_norm": 0.4973287153261601, |
| "learning_rate": 9.97893838025292e-06, |
| "loss": 0.8509107828140259, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.23741007194244604, |
| "grad_norm": 0.5916298246777657, |
| "learning_rate": 9.978285818491232e-06, |
| "loss": 0.8484339714050293, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.24100719424460432, |
| "grad_norm": 0.5448094767840429, |
| "learning_rate": 9.977623323054828e-06, |
| "loss": 0.8285648822784424, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.2446043165467626, |
| "grad_norm": 0.5924423321400274, |
| "learning_rate": 9.976950895265657e-06, |
| "loss": 0.8395726680755615, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.24820143884892087, |
| "grad_norm": 0.5664782512788527, |
| "learning_rate": 9.976268536465482e-06, |
| "loss": 0.8284764289855957, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.2517985611510791, |
| "grad_norm": 0.5692368646880401, |
| "learning_rate": 9.975576248015878e-06, |
| "loss": 0.8533764481544495, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.25539568345323743, |
| "grad_norm": 0.5573191956981842, |
| "learning_rate": 9.974874031298242e-06, |
| "loss": 0.8336201906204224, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.2589928057553957, |
| "grad_norm": 0.5819733970676213, |
| "learning_rate": 9.974161887713775e-06, |
| "loss": 0.8206812143325806, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.26258992805755393, |
| "grad_norm": 0.6358219302706564, |
| "learning_rate": 9.97343981868349e-06, |
| "loss": 0.8437105417251587, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.26618705035971224, |
| "grad_norm": 0.5478398959411097, |
| "learning_rate": 9.972707825648204e-06, |
| "loss": 0.8165892362594604, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.2697841726618705, |
| "grad_norm": 0.5383050403703055, |
| "learning_rate": 9.971965910068536e-06, |
| "loss": 0.8145566582679749, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.2733812949640288, |
| "grad_norm": 0.5787407471208639, |
| "learning_rate": 9.971214073424906e-06, |
| "loss": 0.8369835019111633, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.27697841726618705, |
| "grad_norm": 0.6060564743412846, |
| "learning_rate": 9.970452317217528e-06, |
| "loss": 0.8166014552116394, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.2805755395683453, |
| "grad_norm": 0.5626076159082846, |
| "learning_rate": 9.969680642966409e-06, |
| "loss": 0.8157432079315186, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.2841726618705036, |
| "grad_norm": 0.5633091616428877, |
| "learning_rate": 9.96889905221135e-06, |
| "loss": 0.8347904086112976, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.28776978417266186, |
| "grad_norm": 0.5768068832703904, |
| "learning_rate": 9.968107546511942e-06, |
| "loss": 0.8339566588401794, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.29136690647482016, |
| "grad_norm": 0.5328471089077459, |
| "learning_rate": 9.96730612744755e-06, |
| "loss": 0.8300585746765137, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.2949640287769784, |
| "grad_norm": 0.5259481084278995, |
| "learning_rate": 9.966494796617328e-06, |
| "loss": 0.8587024211883545, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.29856115107913667, |
| "grad_norm": 0.6263217869688971, |
| "learning_rate": 9.965673555640208e-06, |
| "loss": 0.7940714955329895, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.302158273381295, |
| "grad_norm": 0.5672018351537288, |
| "learning_rate": 9.964842406154892e-06, |
| "loss": 0.8141801357269287, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.3057553956834532, |
| "grad_norm": 0.534278403371781, |
| "learning_rate": 9.964001349819856e-06, |
| "loss": 0.8463040590286255, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.30935251798561153, |
| "grad_norm": 0.5640353678708808, |
| "learning_rate": 9.963150388313347e-06, |
| "loss": 0.817227840423584, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.3129496402877698, |
| "grad_norm": 0.5402262875034433, |
| "learning_rate": 9.962289523333372e-06, |
| "loss": 0.8320227861404419, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.31654676258992803, |
| "grad_norm": 0.5559951550582068, |
| "learning_rate": 9.961418756597703e-06, |
| "loss": 0.8306981325149536, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.32014388489208634, |
| "grad_norm": 0.575668186919706, |
| "learning_rate": 9.960538089843868e-06, |
| "loss": 0.8308272361755371, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.3237410071942446, |
| "grad_norm": 0.5382001607426908, |
| "learning_rate": 9.959647524829148e-06, |
| "loss": 0.8604013323783875, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.3273381294964029, |
| "grad_norm": 0.5604808001643791, |
| "learning_rate": 9.95874706333058e-06, |
| "loss": 0.8057097792625427, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.33093525179856115, |
| "grad_norm": 0.5619174670637143, |
| "learning_rate": 9.957836707144943e-06, |
| "loss": 0.8586455583572388, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.3345323741007194, |
| "grad_norm": 0.5431824517780087, |
| "learning_rate": 9.956916458088765e-06, |
| "loss": 0.8322749137878418, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.3381294964028777, |
| "grad_norm": 0.5304521035196852, |
| "learning_rate": 9.95598631799831e-06, |
| "loss": 0.8248855471611023, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.34172661870503596, |
| "grad_norm": 0.5606065890967215, |
| "learning_rate": 9.95504628872958e-06, |
| "loss": 0.8336918950080872, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.34532374100719426, |
| "grad_norm": 0.5383743438248503, |
| "learning_rate": 9.95409637215831e-06, |
| "loss": 0.7947922945022583, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.3489208633093525, |
| "grad_norm": 0.5318998589457373, |
| "learning_rate": 9.953136570179966e-06, |
| "loss": 0.8381302356719971, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.35251798561151076, |
| "grad_norm": 0.5019982029325636, |
| "learning_rate": 9.952166884709735e-06, |
| "loss": 0.7833576202392578, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.35611510791366907, |
| "grad_norm": 0.5382418331396808, |
| "learning_rate": 9.951187317682527e-06, |
| "loss": 0.8222751021385193, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.3597122302158273, |
| "grad_norm": 0.5170694830246451, |
| "learning_rate": 9.950197871052974e-06, |
| "loss": 0.8138470649719238, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.3597122302158273, |
| "eval_loss": 0.816231906414032, |
| "eval_runtime": 7.507, |
| "eval_samples_per_second": 11.722, |
| "eval_steps_per_second": 0.4, |
| "eval_token_acc": 0.74746085109322, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.36330935251798563, |
| "grad_norm": 0.5673295296992458, |
| "learning_rate": 9.949198546795418e-06, |
| "loss": 0.8289770483970642, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.3669064748201439, |
| "grad_norm": 0.563998064338557, |
| "learning_rate": 9.94818934690391e-06, |
| "loss": 0.8491591811180115, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.37050359712230213, |
| "grad_norm": 0.5269425479847079, |
| "learning_rate": 9.947170273392206e-06, |
| "loss": 0.8284671902656555, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.37410071942446044, |
| "grad_norm": 0.5473532877664629, |
| "learning_rate": 9.94614132829377e-06, |
| "loss": 0.8391440510749817, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.3776978417266187, |
| "grad_norm": 0.5584379208820623, |
| "learning_rate": 9.945102513661758e-06, |
| "loss": 0.8177752494812012, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.381294964028777, |
| "grad_norm": 0.5660179757557332, |
| "learning_rate": 9.94405383156902e-06, |
| "loss": 0.8083318471908569, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.38489208633093525, |
| "grad_norm": 0.5366634741683651, |
| "learning_rate": 9.9429952841081e-06, |
| "loss": 0.826050877571106, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.38848920863309355, |
| "grad_norm": 0.5282446109645809, |
| "learning_rate": 9.941926873391223e-06, |
| "loss": 0.8216806650161743, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.3920863309352518, |
| "grad_norm": 0.5386018738453927, |
| "learning_rate": 9.940848601550295e-06, |
| "loss": 0.788651704788208, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.39568345323741005, |
| "grad_norm": 0.5483359069173901, |
| "learning_rate": 9.939760470736902e-06, |
| "loss": 0.8089826107025146, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.39928057553956836, |
| "grad_norm": 0.5617901623159829, |
| "learning_rate": 9.938662483122299e-06, |
| "loss": 0.7851020097732544, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.4028776978417266, |
| "grad_norm": 0.591579269733334, |
| "learning_rate": 9.937554640897414e-06, |
| "loss": 0.8273585438728333, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.4064748201438849, |
| "grad_norm": 0.557557042763066, |
| "learning_rate": 9.936436946272834e-06, |
| "loss": 0.8208383917808533, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.41007194244604317, |
| "grad_norm": 0.548113632755496, |
| "learning_rate": 9.935309401478808e-06, |
| "loss": 0.8339937925338745, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.4136690647482014, |
| "grad_norm": 0.5492991264279992, |
| "learning_rate": 9.93417200876524e-06, |
| "loss": 0.8161983489990234, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.4172661870503597, |
| "grad_norm": 0.5883988065819238, |
| "learning_rate": 9.933024770401682e-06, |
| "loss": 0.8346230983734131, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.420863309352518, |
| "grad_norm": 0.574684102157635, |
| "learning_rate": 9.931867688677338e-06, |
| "loss": 0.8311742544174194, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.4244604316546763, |
| "grad_norm": 0.5340788781452219, |
| "learning_rate": 9.930700765901046e-06, |
| "loss": 0.8129891157150269, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.42805755395683454, |
| "grad_norm": 0.5336075844649993, |
| "learning_rate": 9.92952400440129e-06, |
| "loss": 0.8301393985748291, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.4316546762589928, |
| "grad_norm": 0.5416549418922793, |
| "learning_rate": 9.928337406526172e-06, |
| "loss": 0.8299385905265808, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.4352517985611511, |
| "grad_norm": 0.5807637671155456, |
| "learning_rate": 9.927140974643438e-06, |
| "loss": 0.8278335332870483, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.43884892086330934, |
| "grad_norm": 0.5707236799451327, |
| "learning_rate": 9.925934711140444e-06, |
| "loss": 0.8232138156890869, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.44244604316546765, |
| "grad_norm": 0.5567248659200839, |
| "learning_rate": 9.924718618424172e-06, |
| "loss": 0.8311301469802856, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.4460431654676259, |
| "grad_norm": 0.5594513308954804, |
| "learning_rate": 9.923492698921214e-06, |
| "loss": 0.8267166614532471, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.44964028776978415, |
| "grad_norm": 0.546018461439583, |
| "learning_rate": 9.922256955077767e-06, |
| "loss": 0.8282893896102905, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.45323741007194246, |
| "grad_norm": 0.5325942679065744, |
| "learning_rate": 9.921011389359638e-06, |
| "loss": 0.8170527219772339, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.4568345323741007, |
| "grad_norm": 0.5480251944916799, |
| "learning_rate": 9.91975600425223e-06, |
| "loss": 0.8059794902801514, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.460431654676259, |
| "grad_norm": 0.5395204540816534, |
| "learning_rate": 9.918490802260538e-06, |
| "loss": 0.81952303647995, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.46402877697841727, |
| "grad_norm": 0.5392455596536625, |
| "learning_rate": 9.917215785909149e-06, |
| "loss": 0.8166015148162842, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.4676258992805755, |
| "grad_norm": 0.5359787934095424, |
| "learning_rate": 9.915930957742228e-06, |
| "loss": 0.80816251039505, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.4712230215827338, |
| "grad_norm": 0.5119849543094582, |
| "learning_rate": 9.914636320323525e-06, |
| "loss": 0.8249898552894592, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.4748201438848921, |
| "grad_norm": 0.5921026203775647, |
| "learning_rate": 9.913331876236358e-06, |
| "loss": 0.826164186000824, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.4784172661870504, |
| "grad_norm": 0.5533992224690237, |
| "learning_rate": 9.912017628083618e-06, |
| "loss": 0.808993935585022, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.48201438848920863, |
| "grad_norm": 0.5509377136584458, |
| "learning_rate": 9.910693578487755e-06, |
| "loss": 0.8223838210105896, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.4856115107913669, |
| "grad_norm": 0.5177279997027195, |
| "learning_rate": 9.909359730090777e-06, |
| "loss": 0.8290198445320129, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.4892086330935252, |
| "grad_norm": 0.5583013977579655, |
| "learning_rate": 9.90801608555425e-06, |
| "loss": 0.8074214458465576, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.49280575539568344, |
| "grad_norm": 0.5227084946155521, |
| "learning_rate": 9.906662647559279e-06, |
| "loss": 0.8390957713127136, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.49640287769784175, |
| "grad_norm": 0.5406546058934267, |
| "learning_rate": 9.905299418806517e-06, |
| "loss": 0.7853333950042725, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 0.5476922356088839, |
| "learning_rate": 9.903926402016153e-06, |
| "loss": 0.8493127822875977, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.5035971223021583, |
| "grad_norm": 0.5005184529391463, |
| "learning_rate": 9.902543599927903e-06, |
| "loss": 0.8202897906303406, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.5071942446043165, |
| "grad_norm": 0.5958907753430173, |
| "learning_rate": 9.901151015301012e-06, |
| "loss": 0.8216853737831116, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.5107913669064749, |
| "grad_norm": 0.5599001635444122, |
| "learning_rate": 9.899748650914245e-06, |
| "loss": 0.8264304399490356, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.5143884892086331, |
| "grad_norm": 0.5215334397603238, |
| "learning_rate": 9.89833650956588e-06, |
| "loss": 0.8092686533927917, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.5179856115107914, |
| "grad_norm": 0.5542935554492459, |
| "learning_rate": 9.896914594073703e-06, |
| "loss": 0.798918604850769, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.5215827338129496, |
| "grad_norm": 0.528969503072376, |
| "learning_rate": 9.895482907275009e-06, |
| "loss": 0.793848991394043, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.5251798561151079, |
| "grad_norm": 0.5063983054470111, |
| "learning_rate": 9.894041452026584e-06, |
| "loss": 0.8367159962654114, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.5287769784172662, |
| "grad_norm": 0.5543247794065501, |
| "learning_rate": 9.892590231204709e-06, |
| "loss": 0.8097944259643555, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.5323741007194245, |
| "grad_norm": 0.539616362600845, |
| "learning_rate": 9.891129247705153e-06, |
| "loss": 0.809989333152771, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.5359712230215827, |
| "grad_norm": 0.5113476170140943, |
| "learning_rate": 9.88965850444316e-06, |
| "loss": 0.8000381588935852, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.539568345323741, |
| "grad_norm": 0.5424905593254367, |
| "learning_rate": 9.888178004353456e-06, |
| "loss": 0.7990270256996155, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.5431654676258992, |
| "grad_norm": 0.5388457802161849, |
| "learning_rate": 9.88668775039023e-06, |
| "loss": 0.8318261504173279, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.5467625899280576, |
| "grad_norm": 0.5485418764664756, |
| "learning_rate": 9.885187745527132e-06, |
| "loss": 0.8133857250213623, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.5503597122302158, |
| "grad_norm": 0.544940799274823, |
| "learning_rate": 9.883677992757278e-06, |
| "loss": 0.8147492408752441, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.5539568345323741, |
| "grad_norm": 0.5709632152106743, |
| "learning_rate": 9.88215849509323e-06, |
| "loss": 0.8083347082138062, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.5575539568345323, |
| "grad_norm": 0.58028630349183, |
| "learning_rate": 9.880629255566988e-06, |
| "loss": 0.80954909324646, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.5611510791366906, |
| "grad_norm": 0.5566497809950424, |
| "learning_rate": 9.879090277230005e-06, |
| "loss": 0.8241982460021973, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.564748201438849, |
| "grad_norm": 0.5518771004831493, |
| "learning_rate": 9.877541563153157e-06, |
| "loss": 0.7945687174797058, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.5683453237410072, |
| "grad_norm": 0.5367710065256509, |
| "learning_rate": 9.875983116426745e-06, |
| "loss": 0.8111982941627502, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.5719424460431655, |
| "grad_norm": 0.5486988063810168, |
| "learning_rate": 9.874414940160501e-06, |
| "loss": 0.8155021667480469, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.5755395683453237, |
| "grad_norm": 0.5480126518439215, |
| "learning_rate": 9.87283703748356e-06, |
| "loss": 0.7951115369796753, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.579136690647482, |
| "grad_norm": 0.5717071424611518, |
| "learning_rate": 9.871249411544473e-06, |
| "loss": 0.8322063088417053, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.5827338129496403, |
| "grad_norm": 0.528060657804702, |
| "learning_rate": 9.869652065511188e-06, |
| "loss": 0.8200956583023071, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.5863309352517986, |
| "grad_norm": 0.5244238381398686, |
| "learning_rate": 9.868045002571052e-06, |
| "loss": 0.8096636533737183, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.5899280575539568, |
| "grad_norm": 0.49919420997101305, |
| "learning_rate": 9.866428225930798e-06, |
| "loss": 0.7951469421386719, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.5935251798561151, |
| "grad_norm": 0.535446981889461, |
| "learning_rate": 9.864801738816543e-06, |
| "loss": 0.8106690049171448, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.5971223021582733, |
| "grad_norm": 0.5333985451516444, |
| "learning_rate": 9.863165544473781e-06, |
| "loss": 0.8486871719360352, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.6007194244604317, |
| "grad_norm": 0.5458606068076399, |
| "learning_rate": 9.861519646167375e-06, |
| "loss": 0.7913973331451416, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.60431654676259, |
| "grad_norm": 0.5489824115884749, |
| "learning_rate": 9.859864047181551e-06, |
| "loss": 0.8237329125404358, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.6079136690647482, |
| "grad_norm": 0.5525766076515911, |
| "learning_rate": 9.858198750819896e-06, |
| "loss": 0.7945156097412109, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.6115107913669064, |
| "grad_norm": 0.5434672326912037, |
| "learning_rate": 9.856523760405339e-06, |
| "loss": 0.826322078704834, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.6151079136690647, |
| "grad_norm": 0.5272432795596584, |
| "learning_rate": 9.854839079280157e-06, |
| "loss": 0.8193082213401794, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.6187050359712231, |
| "grad_norm": 0.5424514802280374, |
| "learning_rate": 9.853144710805966e-06, |
| "loss": 0.7865622639656067, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.6223021582733813, |
| "grad_norm": 0.5354805511996885, |
| "learning_rate": 9.851440658363708e-06, |
| "loss": 0.8112363815307617, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.6258992805755396, |
| "grad_norm": 0.5386838343839405, |
| "learning_rate": 9.849726925353655e-06, |
| "loss": 0.8229953646659851, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.6294964028776978, |
| "grad_norm": 0.5531212837926246, |
| "learning_rate": 9.848003515195384e-06, |
| "loss": 0.8074018955230713, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.6330935251798561, |
| "grad_norm": 0.5708673804346629, |
| "learning_rate": 9.846270431327793e-06, |
| "loss": 0.8243955373764038, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.6366906474820144, |
| "grad_norm": 0.5576031502036335, |
| "learning_rate": 9.84452767720908e-06, |
| "loss": 0.8192329406738281, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.6402877697841727, |
| "grad_norm": 0.5436357529700776, |
| "learning_rate": 9.842775256316732e-06, |
| "loss": 0.8275960683822632, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.6438848920863309, |
| "grad_norm": 0.5913335099756953, |
| "learning_rate": 9.841013172147537e-06, |
| "loss": 0.8125853538513184, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.6474820143884892, |
| "grad_norm": 0.5813220515343052, |
| "learning_rate": 9.839241428217553e-06, |
| "loss": 0.8220874071121216, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.6510791366906474, |
| "grad_norm": 0.5298380673365016, |
| "learning_rate": 9.837460028062126e-06, |
| "loss": 0.8092227578163147, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.6546762589928058, |
| "grad_norm": 0.5772475049484589, |
| "learning_rate": 9.835668975235857e-06, |
| "loss": 0.8138964176177979, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.658273381294964, |
| "grad_norm": 0.5786451222409693, |
| "learning_rate": 9.833868273312617e-06, |
| "loss": 0.7935299277305603, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.6618705035971223, |
| "grad_norm": 0.5552310441780026, |
| "learning_rate": 9.832057925885526e-06, |
| "loss": 0.7891409397125244, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.6654676258992805, |
| "grad_norm": 0.5734949837391681, |
| "learning_rate": 9.830237936566953e-06, |
| "loss": 0.7957472801208496, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.6690647482014388, |
| "grad_norm": 0.599694217341625, |
| "learning_rate": 9.828408308988506e-06, |
| "loss": 0.8263557553291321, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.6726618705035972, |
| "grad_norm": 0.5799493702311006, |
| "learning_rate": 9.826569046801027e-06, |
| "loss": 0.7941974401473999, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.6762589928057554, |
| "grad_norm": 0.5563225911482126, |
| "learning_rate": 9.824720153674578e-06, |
| "loss": 0.829971194267273, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.6798561151079137, |
| "grad_norm": 0.5351288370080682, |
| "learning_rate": 9.822861633298442e-06, |
| "loss": 0.8155908584594727, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.6834532374100719, |
| "grad_norm": 0.5331482850465583, |
| "learning_rate": 9.820993489381114e-06, |
| "loss": 0.8068544864654541, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.6870503597122302, |
| "grad_norm": 0.552140047325023, |
| "learning_rate": 9.819115725650287e-06, |
| "loss": 0.8251328468322754, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.6906474820143885, |
| "grad_norm": 0.5565602616626132, |
| "learning_rate": 9.817228345852853e-06, |
| "loss": 0.8075183629989624, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.6942446043165468, |
| "grad_norm": 0.5424847223472784, |
| "learning_rate": 9.81533135375489e-06, |
| "loss": 0.7979018092155457, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.697841726618705, |
| "grad_norm": 0.5341753227747238, |
| "learning_rate": 9.813424753141658e-06, |
| "loss": 0.8171260356903076, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.7014388489208633, |
| "grad_norm": 0.5804615507655949, |
| "learning_rate": 9.81150854781759e-06, |
| "loss": 0.8082928657531738, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.7050359712230215, |
| "grad_norm": 0.5509094640235682, |
| "learning_rate": 9.809582741606283e-06, |
| "loss": 0.8074817657470703, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.7086330935251799, |
| "grad_norm": 0.5228610158009157, |
| "learning_rate": 9.80764733835049e-06, |
| "loss": 0.8101227283477783, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.7122302158273381, |
| "grad_norm": 0.5359888946429793, |
| "learning_rate": 9.805702341912117e-06, |
| "loss": 0.8064497709274292, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.7158273381294964, |
| "grad_norm": 0.5280348608576221, |
| "learning_rate": 9.803747756172214e-06, |
| "loss": 0.8218724727630615, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.7194244604316546, |
| "grad_norm": 0.5264312906235269, |
| "learning_rate": 9.801783585030959e-06, |
| "loss": 0.7979246377944946, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.7194244604316546, |
| "eval_loss": 0.7993502616882324, |
| "eval_runtime": 7.4862, |
| "eval_samples_per_second": 11.755, |
| "eval_steps_per_second": 0.401, |
| "eval_token_acc": 0.7509832726974058, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.7230215827338129, |
| "grad_norm": 0.5604566185186979, |
| "learning_rate": 9.79980983240766e-06, |
| "loss": 0.8044528961181641, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.7266187050359713, |
| "grad_norm": 0.5690323675931319, |
| "learning_rate": 9.797826502240746e-06, |
| "loss": 0.8124678134918213, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.7302158273381295, |
| "grad_norm": 0.5295911758422164, |
| "learning_rate": 9.795833598487757e-06, |
| "loss": 0.7948049306869507, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.7338129496402878, |
| "grad_norm": 0.5420116184769913, |
| "learning_rate": 9.79383112512533e-06, |
| "loss": 0.8087724447250366, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.737410071942446, |
| "grad_norm": 0.52000926600773, |
| "learning_rate": 9.791819086149204e-06, |
| "loss": 0.7969048023223877, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.7410071942446043, |
| "grad_norm": 0.5439164932565841, |
| "learning_rate": 9.789797485574204e-06, |
| "loss": 0.8051252365112305, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.7446043165467626, |
| "grad_norm": 0.527493581148968, |
| "learning_rate": 9.787766327434231e-06, |
| "loss": 0.8121972680091858, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.7482014388489209, |
| "grad_norm": 0.5316866063486113, |
| "learning_rate": 9.785725615782262e-06, |
| "loss": 0.7998046278953552, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.7517985611510791, |
| "grad_norm": 0.5529173658462174, |
| "learning_rate": 9.783675354690331e-06, |
| "loss": 0.7912847399711609, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.7553956834532374, |
| "grad_norm": 0.5434220395363016, |
| "learning_rate": 9.781615548249533e-06, |
| "loss": 0.8146765232086182, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.7589928057553957, |
| "grad_norm": 0.5174228032873612, |
| "learning_rate": 9.779546200570005e-06, |
| "loss": 0.8111634850502014, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.762589928057554, |
| "grad_norm": 0.5196942606957381, |
| "learning_rate": 9.777467315780926e-06, |
| "loss": 0.815482497215271, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.7661870503597122, |
| "grad_norm": 0.5341280558565928, |
| "learning_rate": 9.775378898030503e-06, |
| "loss": 0.8034203052520752, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.7697841726618705, |
| "grad_norm": 0.5284120510650635, |
| "learning_rate": 9.773280951485967e-06, |
| "loss": 0.8246862292289734, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.7733812949640287, |
| "grad_norm": 0.5544183956098815, |
| "learning_rate": 9.771173480333562e-06, |
| "loss": 0.8020129799842834, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.7769784172661871, |
| "grad_norm": 0.5229832751207113, |
| "learning_rate": 9.769056488778538e-06, |
| "loss": 0.8137690424919128, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.7805755395683454, |
| "grad_norm": 0.5699698782617088, |
| "learning_rate": 9.766929981045139e-06, |
| "loss": 0.7960363030433655, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.7841726618705036, |
| "grad_norm": 0.5417245877819069, |
| "learning_rate": 9.764793961376602e-06, |
| "loss": 0.8026304244995117, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.7877697841726619, |
| "grad_norm": 0.5541975108855247, |
| "learning_rate": 9.76264843403514e-06, |
| "loss": 0.820999026298523, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.7913669064748201, |
| "grad_norm": 0.5646050091020177, |
| "learning_rate": 9.760493403301941e-06, |
| "loss": 0.7787805199623108, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.7949640287769785, |
| "grad_norm": 0.5632008003186489, |
| "learning_rate": 9.758328873477154e-06, |
| "loss": 0.805914044380188, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.7985611510791367, |
| "grad_norm": 0.5508175428854032, |
| "learning_rate": 9.756154848879885e-06, |
| "loss": 0.8235931992530823, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.802158273381295, |
| "grad_norm": 0.5655349496395068, |
| "learning_rate": 9.753971333848183e-06, |
| "loss": 0.8301273584365845, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.8057553956834532, |
| "grad_norm": 0.5507470566009575, |
| "learning_rate": 9.751778332739033e-06, |
| "loss": 0.7969081401824951, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.8093525179856115, |
| "grad_norm": 0.5581369046139822, |
| "learning_rate": 9.749575849928354e-06, |
| "loss": 0.802039384841919, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.8129496402877698, |
| "grad_norm": 0.5481550959696141, |
| "learning_rate": 9.74736388981098e-06, |
| "loss": 0.7913751602172852, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.8165467625899281, |
| "grad_norm": 0.5654210262128606, |
| "learning_rate": 9.74514245680066e-06, |
| "loss": 0.7987425327301025, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.8201438848920863, |
| "grad_norm": 0.5265619857022084, |
| "learning_rate": 9.74291155533004e-06, |
| "loss": 0.8149737119674683, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.8237410071942446, |
| "grad_norm": 0.5501005539202152, |
| "learning_rate": 9.740671189850664e-06, |
| "loss": 0.8153947591781616, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.8273381294964028, |
| "grad_norm": 0.512562612677006, |
| "learning_rate": 9.738421364832956e-06, |
| "loss": 0.8032717704772949, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.8309352517985612, |
| "grad_norm": 0.56695996273869, |
| "learning_rate": 9.73616208476622e-06, |
| "loss": 0.8103581070899963, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.8345323741007195, |
| "grad_norm": 0.5176504269302804, |
| "learning_rate": 9.733893354158628e-06, |
| "loss": 0.797197699546814, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.8381294964028777, |
| "grad_norm": 0.568000922091697, |
| "learning_rate": 9.7316151775372e-06, |
| "loss": 0.83544921875, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.841726618705036, |
| "grad_norm": 0.5386142282078233, |
| "learning_rate": 9.729327559447817e-06, |
| "loss": 0.782428503036499, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.8453237410071942, |
| "grad_norm": 0.520703942967087, |
| "learning_rate": 9.727030504455192e-06, |
| "loss": 0.8059707283973694, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.8489208633093526, |
| "grad_norm": 0.5632290536976243, |
| "learning_rate": 9.724724017142869e-06, |
| "loss": 0.8086649179458618, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.8525179856115108, |
| "grad_norm": 0.5422450922661384, |
| "learning_rate": 9.722408102113212e-06, |
| "loss": 0.7960709929466248, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.8561151079136691, |
| "grad_norm": 0.5303594727396739, |
| "learning_rate": 9.720082763987404e-06, |
| "loss": 0.802277684211731, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.8597122302158273, |
| "grad_norm": 0.5607106547711301, |
| "learning_rate": 9.717748007405422e-06, |
| "loss": 0.7961342930793762, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.8633093525179856, |
| "grad_norm": 0.5600486650731337, |
| "learning_rate": 9.715403837026046e-06, |
| "loss": 0.7967619895935059, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.8669064748201439, |
| "grad_norm": 0.5516198506118594, |
| "learning_rate": 9.71305025752683e-06, |
| "loss": 0.8042537569999695, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.8705035971223022, |
| "grad_norm": 0.5402147757719346, |
| "learning_rate": 9.710687273604111e-06, |
| "loss": 0.8142298460006714, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.8741007194244604, |
| "grad_norm": 0.5690473240624642, |
| "learning_rate": 9.708314889972988e-06, |
| "loss": 0.7934916019439697, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.8776978417266187, |
| "grad_norm": 0.5277029008553127, |
| "learning_rate": 9.705933111367314e-06, |
| "loss": 0.7867487668991089, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.8812949640287769, |
| "grad_norm": 0.540333896423015, |
| "learning_rate": 9.703541942539697e-06, |
| "loss": 0.8125045299530029, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.8848920863309353, |
| "grad_norm": 0.5250035223657058, |
| "learning_rate": 9.701141388261471e-06, |
| "loss": 0.8113247156143188, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.8884892086330936, |
| "grad_norm": 0.5215338961621239, |
| "learning_rate": 9.698731453322706e-06, |
| "loss": 0.8007864952087402, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.8920863309352518, |
| "grad_norm": 0.5310159690708876, |
| "learning_rate": 9.69631214253219e-06, |
| "loss": 0.812530517578125, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.89568345323741, |
| "grad_norm": 0.5340141074169955, |
| "learning_rate": 9.693883460717418e-06, |
| "loss": 0.7936503887176514, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.8992805755395683, |
| "grad_norm": 0.5374886150627688, |
| "learning_rate": 9.691445412724576e-06, |
| "loss": 0.7722389101982117, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.9028776978417267, |
| "grad_norm": 0.541501770110855, |
| "learning_rate": 9.688998003418553e-06, |
| "loss": 0.8209556937217712, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.9064748201438849, |
| "grad_norm": 0.5240271320265909, |
| "learning_rate": 9.68654123768291e-06, |
| "loss": 0.7845938801765442, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.9100719424460432, |
| "grad_norm": 0.5224977136818818, |
| "learning_rate": 9.684075120419876e-06, |
| "loss": 0.7874033451080322, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.9136690647482014, |
| "grad_norm": 0.5310749021933068, |
| "learning_rate": 9.681599656550346e-06, |
| "loss": 0.8098745346069336, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.9172661870503597, |
| "grad_norm": 0.5611962784041806, |
| "learning_rate": 9.679114851013862e-06, |
| "loss": 0.7997758388519287, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.920863309352518, |
| "grad_norm": 0.552250838905918, |
| "learning_rate": 9.676620708768608e-06, |
| "loss": 0.7942588329315186, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.9244604316546763, |
| "grad_norm": 0.4881842864225739, |
| "learning_rate": 9.674117234791391e-06, |
| "loss": 0.8118790984153748, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.9280575539568345, |
| "grad_norm": 0.512794242246418, |
| "learning_rate": 9.67160443407765e-06, |
| "loss": 0.7766081094741821, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.9316546762589928, |
| "grad_norm": 0.5200931171118809, |
| "learning_rate": 9.669082311641427e-06, |
| "loss": 0.7934583425521851, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.935251798561151, |
| "grad_norm": 0.5033254279172306, |
| "learning_rate": 9.666550872515367e-06, |
| "loss": 0.8074806928634644, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.9388489208633094, |
| "grad_norm": 0.5524080696367791, |
| "learning_rate": 9.664010121750708e-06, |
| "loss": 0.812640368938446, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.9424460431654677, |
| "grad_norm": 0.5522305688677573, |
| "learning_rate": 9.661460064417263e-06, |
| "loss": 0.8016558885574341, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.9460431654676259, |
| "grad_norm": 0.5023947284632377, |
| "learning_rate": 9.658900705603417e-06, |
| "loss": 0.7996955513954163, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.9496402877697842, |
| "grad_norm": 0.5316226094004911, |
| "learning_rate": 9.656332050416118e-06, |
| "loss": 0.8242018818855286, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.9532374100719424, |
| "grad_norm": 0.5432189986932476, |
| "learning_rate": 9.653754103980861e-06, |
| "loss": 0.80191969871521, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.9568345323741008, |
| "grad_norm": 0.5391216754238493, |
| "learning_rate": 9.651166871441684e-06, |
| "loss": 0.8091660737991333, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.960431654676259, |
| "grad_norm": 0.5053265004385964, |
| "learning_rate": 9.64857035796115e-06, |
| "loss": 0.8128654360771179, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.9640287769784173, |
| "grad_norm": 0.5474674002836111, |
| "learning_rate": 9.645964568720345e-06, |
| "loss": 0.8065285682678223, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.9676258992805755, |
| "grad_norm": 0.5546363834942746, |
| "learning_rate": 9.643349508918864e-06, |
| "loss": 0.8089965581893921, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.9712230215827338, |
| "grad_norm": 0.5235854155432896, |
| "learning_rate": 9.640725183774797e-06, |
| "loss": 0.7981188297271729, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.9748201438848921, |
| "grad_norm": 0.5097113985031559, |
| "learning_rate": 9.638091598524723e-06, |
| "loss": 0.7769266366958618, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.9784172661870504, |
| "grad_norm": 0.5407878481684559, |
| "learning_rate": 9.635448758423703e-06, |
| "loss": 0.7945355176925659, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.9820143884892086, |
| "grad_norm": 0.5292720741687515, |
| "learning_rate": 9.632796668745259e-06, |
| "loss": 0.7883850932121277, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.9856115107913669, |
| "grad_norm": 0.5447108091170464, |
| "learning_rate": 9.630135334781373e-06, |
| "loss": 0.8052664995193481, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.9892086330935251, |
| "grad_norm": 0.5034511826735798, |
| "learning_rate": 9.627464761842475e-06, |
| "loss": 0.7900129556655884, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.9928057553956835, |
| "grad_norm": 0.5324839406585946, |
| "learning_rate": 9.624784955257423e-06, |
| "loss": 0.813730001449585, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.9964028776978417, |
| "grad_norm": 0.5860885589822529, |
| "learning_rate": 9.62209592037351e-06, |
| "loss": 0.7950912714004517, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.7374379136630071, |
| "learning_rate": 9.619397662556434e-06, |
| "loss": 0.7994446158409119, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.0035971223021583, |
| "grad_norm": 0.6776588146804744, |
| "learning_rate": 9.616690187190304e-06, |
| "loss": 0.7538776397705078, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.0071942446043165, |
| "grad_norm": 0.5879953650944638, |
| "learning_rate": 9.613973499677613e-06, |
| "loss": 0.7604464888572693, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.0107913669064748, |
| "grad_norm": 0.5939581051805073, |
| "learning_rate": 9.611247605439246e-06, |
| "loss": 0.7662951946258545, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.014388489208633, |
| "grad_norm": 0.6157303231020771, |
| "learning_rate": 9.608512509914453e-06, |
| "loss": 0.7262904644012451, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.0179856115107913, |
| "grad_norm": 0.5893791108583288, |
| "learning_rate": 9.605768218560844e-06, |
| "loss": 0.7444915175437927, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.0215827338129497, |
| "grad_norm": 0.5873809938610391, |
| "learning_rate": 9.60301473685438e-06, |
| "loss": 0.7208749055862427, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.025179856115108, |
| "grad_norm": 0.6108756409032599, |
| "learning_rate": 9.60025207028936e-06, |
| "loss": 0.7390925884246826, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.0287769784172662, |
| "grad_norm": 0.6137049233321895, |
| "learning_rate": 9.597480224378412e-06, |
| "loss": 0.7449308633804321, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.0323741007194245, |
| "grad_norm": 0.5613813157036074, |
| "learning_rate": 9.594699204652478e-06, |
| "loss": 0.7346478700637817, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.0359712230215827, |
| "grad_norm": 0.5939425307104599, |
| "learning_rate": 9.591909016660806e-06, |
| "loss": 0.7614048719406128, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.039568345323741, |
| "grad_norm": 0.5610799009674011, |
| "learning_rate": 9.589109665970941e-06, |
| "loss": 0.7561317682266235, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.0431654676258992, |
| "grad_norm": 0.6115021111353046, |
| "learning_rate": 9.586301158168706e-06, |
| "loss": 0.738190233707428, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.0467625899280575, |
| "grad_norm": 0.6234487490114379, |
| "learning_rate": 9.583483498858202e-06, |
| "loss": 0.7404968738555908, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.0503597122302157, |
| "grad_norm": 0.5779335573279631, |
| "learning_rate": 9.580656693661787e-06, |
| "loss": 0.7546930909156799, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.0539568345323742, |
| "grad_norm": 0.5563644509280081, |
| "learning_rate": 9.57782074822007e-06, |
| "loss": 0.7298511266708374, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.0575539568345325, |
| "grad_norm": 0.5403469768323708, |
| "learning_rate": 9.574975668191899e-06, |
| "loss": 0.7354592084884644, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.0611510791366907, |
| "grad_norm": 0.5712568902968939, |
| "learning_rate": 9.572121459254349e-06, |
| "loss": 0.7321658134460449, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.064748201438849, |
| "grad_norm": 0.5822296978334045, |
| "learning_rate": 9.569258127102708e-06, |
| "loss": 0.7498965263366699, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.0683453237410072, |
| "grad_norm": 0.5189814814100225, |
| "learning_rate": 9.56638567745047e-06, |
| "loss": 0.7498589158058167, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.0719424460431655, |
| "grad_norm": 0.5809627924078522, |
| "learning_rate": 9.56350411602933e-06, |
| "loss": 0.717056393623352, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.0755395683453237, |
| "grad_norm": 0.6022908196879311, |
| "learning_rate": 9.560613448589149e-06, |
| "loss": 0.7413934469223022, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.079136690647482, |
| "grad_norm": 0.546409548066647, |
| "learning_rate": 9.55771368089797e-06, |
| "loss": 0.7399191856384277, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.079136690647482, |
| "eval_loss": 0.7952051162719727, |
| "eval_runtime": 7.7175, |
| "eval_samples_per_second": 11.403, |
| "eval_steps_per_second": 0.389, |
| "eval_token_acc": 0.7517150105187311, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.0827338129496402, |
| "grad_norm": 0.5970255750138046, |
| "learning_rate": 9.554804818741994e-06, |
| "loss": 0.7446647882461548, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.0863309352517985, |
| "grad_norm": 0.5541334916798, |
| "learning_rate": 9.551886867925565e-06, |
| "loss": 0.7263193130493164, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.0899280575539567, |
| "grad_norm": 0.6369779920740517, |
| "learning_rate": 9.548959834271158e-06, |
| "loss": 0.7311091423034668, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.0935251798561152, |
| "grad_norm": 0.5385079083621497, |
| "learning_rate": 9.546023723619387e-06, |
| "loss": 0.7392459511756897, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.0971223021582734, |
| "grad_norm": 0.5523777755477719, |
| "learning_rate": 9.543078541828962e-06, |
| "loss": 0.7363039255142212, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.1007194244604317, |
| "grad_norm": 0.61129080591158, |
| "learning_rate": 9.540124294776704e-06, |
| "loss": 0.7463387846946716, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.10431654676259, |
| "grad_norm": 0.5704927319446069, |
| "learning_rate": 9.53716098835752e-06, |
| "loss": 0.7332433462142944, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.1079136690647482, |
| "grad_norm": 0.5647463745104909, |
| "learning_rate": 9.534188628484391e-06, |
| "loss": 0.7626124620437622, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.1115107913669064, |
| "grad_norm": 0.5634890166624024, |
| "learning_rate": 9.531207221088368e-06, |
| "loss": 0.7278907895088196, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.1151079136690647, |
| "grad_norm": 0.5684506907730069, |
| "learning_rate": 9.528216772118553e-06, |
| "loss": 0.7007638812065125, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.118705035971223, |
| "grad_norm": 0.569487505785454, |
| "learning_rate": 9.525217287542088e-06, |
| "loss": 0.73263019323349, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.1223021582733812, |
| "grad_norm": 0.531149414984143, |
| "learning_rate": 9.522208773344147e-06, |
| "loss": 0.726309061050415, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.1258992805755397, |
| "grad_norm": 0.5730769528941625, |
| "learning_rate": 9.519191235527923e-06, |
| "loss": 0.740322470664978, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.129496402877698, |
| "grad_norm": 0.7272415428533405, |
| "learning_rate": 9.516164680114612e-06, |
| "loss": 0.7354894280433655, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.1330935251798562, |
| "grad_norm": 0.5406965572808433, |
| "learning_rate": 9.513129113143405e-06, |
| "loss": 0.7542805671691895, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.1366906474820144, |
| "grad_norm": 0.5384238572794032, |
| "learning_rate": 9.510084540671471e-06, |
| "loss": 0.7344615459442139, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.1402877697841727, |
| "grad_norm": 0.5692959473422989, |
| "learning_rate": 9.507030968773956e-06, |
| "loss": 0.7373325824737549, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.143884892086331, |
| "grad_norm": 0.5354506821538485, |
| "learning_rate": 9.50396840354396e-06, |
| "loss": 0.7316826581954956, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.1474820143884892, |
| "grad_norm": 0.5262961196184975, |
| "learning_rate": 9.500896851092522e-06, |
| "loss": 0.7321020364761353, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.1510791366906474, |
| "grad_norm": 0.5206041056150756, |
| "learning_rate": 9.497816317548625e-06, |
| "loss": 0.7204076051712036, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.1546762589928057, |
| "grad_norm": 0.5198471869309446, |
| "learning_rate": 9.494726809059164e-06, |
| "loss": 0.7209657430648804, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.158273381294964, |
| "grad_norm": 0.539020727608202, |
| "learning_rate": 9.491628331788949e-06, |
| "loss": 0.7441370487213135, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.1618705035971222, |
| "grad_norm": 0.53720351049394, |
| "learning_rate": 9.488520891920683e-06, |
| "loss": 0.7360929250717163, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.1654676258992807, |
| "grad_norm": 0.5307529143698696, |
| "learning_rate": 9.485404495654953e-06, |
| "loss": 0.7320318222045898, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.169064748201439, |
| "grad_norm": 0.5586408968190051, |
| "learning_rate": 9.482279149210217e-06, |
| "loss": 0.7476272583007812, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.1726618705035972, |
| "grad_norm": 0.5466847193799189, |
| "learning_rate": 9.479144858822795e-06, |
| "loss": 0.7348689436912537, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.1762589928057554, |
| "grad_norm": 0.51531963600872, |
| "learning_rate": 9.476001630746852e-06, |
| "loss": 0.7275116443634033, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.1798561151079137, |
| "grad_norm": 0.5483263021729351, |
| "learning_rate": 9.472849471254386e-06, |
| "loss": 0.7249739766120911, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.183453237410072, |
| "grad_norm": 0.5392307946676005, |
| "learning_rate": 9.469688386635218e-06, |
| "loss": 0.7299447059631348, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.1870503597122302, |
| "grad_norm": 0.54831249804049, |
| "learning_rate": 9.466518383196981e-06, |
| "loss": 0.7359820604324341, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.1906474820143884, |
| "grad_norm": 0.5238958451400837, |
| "learning_rate": 9.4633394672651e-06, |
| "loss": 0.7344633340835571, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.1942446043165469, |
| "grad_norm": 0.5489024189498876, |
| "learning_rate": 9.460151645182784e-06, |
| "loss": 0.749465823173523, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.1978417266187051, |
| "grad_norm": 0.5493718850190237, |
| "learning_rate": 9.456954923311017e-06, |
| "loss": 0.7115287780761719, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.2014388489208634, |
| "grad_norm": 0.5381363309369281, |
| "learning_rate": 9.453749308028542e-06, |
| "loss": 0.7070856690406799, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.2050359712230216, |
| "grad_norm": 0.6270654982496251, |
| "learning_rate": 9.450534805731843e-06, |
| "loss": 0.7269983291625977, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.20863309352518, |
| "grad_norm": 0.5892033078226898, |
| "learning_rate": 9.447311422835141e-06, |
| "loss": 0.7400103807449341, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.2122302158273381, |
| "grad_norm": 0.5118100086223286, |
| "learning_rate": 9.444079165770376e-06, |
| "loss": 0.7445212602615356, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.2158273381294964, |
| "grad_norm": 0.6003610021860912, |
| "learning_rate": 9.440838040987194e-06, |
| "loss": 0.7496727705001831, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.2194244604316546, |
| "grad_norm": 0.6014272158963897, |
| "learning_rate": 9.437588054952943e-06, |
| "loss": 0.7463854551315308, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.223021582733813, |
| "grad_norm": 0.5622909415736467, |
| "learning_rate": 9.43432921415264e-06, |
| "loss": 0.735248327255249, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.2266187050359711, |
| "grad_norm": 0.5330204356289486, |
| "learning_rate": 9.431061525088982e-06, |
| "loss": 0.7185267806053162, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.2302158273381294, |
| "grad_norm": 0.5744485810048889, |
| "learning_rate": 9.427784994282318e-06, |
| "loss": 0.7163181900978088, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.2338129496402876, |
| "grad_norm": 0.6027911826984552, |
| "learning_rate": 9.42449962827064e-06, |
| "loss": 0.748116672039032, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.2374100719424461, |
| "grad_norm": 0.5121609550128294, |
| "learning_rate": 9.421205433609568e-06, |
| "loss": 0.7265034914016724, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.2410071942446044, |
| "grad_norm": 0.5503931962639481, |
| "learning_rate": 9.41790241687234e-06, |
| "loss": 0.7382141351699829, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.2446043165467626, |
| "grad_norm": 0.5310640074137198, |
| "learning_rate": 9.414590584649798e-06, |
| "loss": 0.7352370023727417, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.2482014388489209, |
| "grad_norm": 0.5583912500613908, |
| "learning_rate": 9.411269943550376e-06, |
| "loss": 0.7385387420654297, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.2517985611510791, |
| "grad_norm": 0.5386372550063233, |
| "learning_rate": 9.407940500200082e-06, |
| "loss": 0.7346241474151611, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.2553956834532374, |
| "grad_norm": 0.5725708975617698, |
| "learning_rate": 9.404602261242488e-06, |
| "loss": 0.7429782152175903, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.2589928057553956, |
| "grad_norm": 0.5071301743038213, |
| "learning_rate": 9.40125523333872e-06, |
| "loss": 0.7370673418045044, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.2625899280575539, |
| "grad_norm": 0.5927884076224349, |
| "learning_rate": 9.397899423167438e-06, |
| "loss": 0.7469853162765503, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.2661870503597124, |
| "grad_norm": 0.548296598654736, |
| "learning_rate": 9.39453483742483e-06, |
| "loss": 0.7487526535987854, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.2697841726618706, |
| "grad_norm": 0.5097163098936354, |
| "learning_rate": 9.391161482824591e-06, |
| "loss": 0.7192838191986084, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.2733812949640289, |
| "grad_norm": 0.5552628434665424, |
| "learning_rate": 9.387779366097915e-06, |
| "loss": 0.7359743118286133, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.276978417266187, |
| "grad_norm": 0.5419514485186101, |
| "learning_rate": 9.38438849399348e-06, |
| "loss": 0.7057188749313354, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.2805755395683454, |
| "grad_norm": 0.5621822924024781, |
| "learning_rate": 9.380988873277436e-06, |
| "loss": 0.7457002401351929, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.2841726618705036, |
| "grad_norm": 0.523519240727049, |
| "learning_rate": 9.377580510733386e-06, |
| "loss": 0.7411536574363708, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.2877697841726619, |
| "grad_norm": 0.5491854800918223, |
| "learning_rate": 9.37416341316238e-06, |
| "loss": 0.7158464193344116, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.29136690647482, |
| "grad_norm": 0.666484368385392, |
| "learning_rate": 9.370737587382896e-06, |
| "loss": 0.7320539951324463, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.2949640287769784, |
| "grad_norm": 0.5365848280521327, |
| "learning_rate": 9.367303040230828e-06, |
| "loss": 0.7353171110153198, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.2985611510791366, |
| "grad_norm": 0.5617185149633399, |
| "learning_rate": 9.363859778559476e-06, |
| "loss": 0.7434992790222168, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.3021582733812949, |
| "grad_norm": 0.5274326997351031, |
| "learning_rate": 9.360407809239527e-06, |
| "loss": 0.7784279584884644, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.3057553956834531, |
| "grad_norm": 0.5215842842673377, |
| "learning_rate": 9.35694713915904e-06, |
| "loss": 0.7144894599914551, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.3093525179856116, |
| "grad_norm": 0.5382925072863687, |
| "learning_rate": 9.35347777522344e-06, |
| "loss": 0.7470029592514038, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.3129496402877698, |
| "grad_norm": 0.5362900701795816, |
| "learning_rate": 9.349999724355496e-06, |
| "loss": 0.7410938739776611, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.316546762589928, |
| "grad_norm": 0.5791880585481317, |
| "learning_rate": 9.346512993495315e-06, |
| "loss": 0.740675151348114, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.3201438848920863, |
| "grad_norm": 0.5328457297104043, |
| "learning_rate": 9.343017589600322e-06, |
| "loss": 0.7332954406738281, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.3237410071942446, |
| "grad_norm": 0.5270359471036749, |
| "learning_rate": 9.33951351964525e-06, |
| "loss": 0.7480515241622925, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.3273381294964028, |
| "grad_norm": 0.5541952398439698, |
| "learning_rate": 9.336000790622117e-06, |
| "loss": 0.7403005361557007, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.330935251798561, |
| "grad_norm": 0.5248778268205648, |
| "learning_rate": 9.33247940954023e-06, |
| "loss": 0.7062946557998657, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.3345323741007193, |
| "grad_norm": 0.5341904855512976, |
| "learning_rate": 9.32894938342615e-06, |
| "loss": 0.7464907169342041, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.3381294964028778, |
| "grad_norm": 0.5245639415447767, |
| "learning_rate": 9.3254107193237e-06, |
| "loss": 0.7266008853912354, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.341726618705036, |
| "grad_norm": 0.5220543007037212, |
| "learning_rate": 9.321863424293926e-06, |
| "loss": 0.7336535453796387, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.3453237410071943, |
| "grad_norm": 0.5504538542775073, |
| "learning_rate": 9.318307505415108e-06, |
| "loss": 0.7529866695404053, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.3489208633093526, |
| "grad_norm": 0.5138351742769647, |
| "learning_rate": 9.314742969782725e-06, |
| "loss": 0.766657829284668, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.3525179856115108, |
| "grad_norm": 0.5615878942041659, |
| "learning_rate": 9.311169824509454e-06, |
| "loss": 0.7512322664260864, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.356115107913669, |
| "grad_norm": 0.5276166142542134, |
| "learning_rate": 9.307588076725153e-06, |
| "loss": 0.7312484979629517, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.3597122302158273, |
| "grad_norm": 0.5728321595101409, |
| "learning_rate": 9.303997733576846e-06, |
| "loss": 0.759013831615448, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.3633093525179856, |
| "grad_norm": 0.5903085328443332, |
| "learning_rate": 9.3003988022287e-06, |
| "loss": 0.7296818494796753, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.3669064748201438, |
| "grad_norm": 0.5359969819862859, |
| "learning_rate": 9.296791289862031e-06, |
| "loss": 0.7522664666175842, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.370503597122302, |
| "grad_norm": 0.5588759989046174, |
| "learning_rate": 9.293175203675268e-06, |
| "loss": 0.726894736289978, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.3741007194244603, |
| "grad_norm": 0.577877679626679, |
| "learning_rate": 9.289550550883952e-06, |
| "loss": 0.7532353401184082, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.3776978417266186, |
| "grad_norm": 0.5697600504827269, |
| "learning_rate": 9.285917338720719e-06, |
| "loss": 0.7286536693572998, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.381294964028777, |
| "grad_norm": 0.5365518724533357, |
| "learning_rate": 9.28227557443528e-06, |
| "loss": 0.7433767318725586, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.3848920863309353, |
| "grad_norm": 0.5739665857401817, |
| "learning_rate": 9.278625265294417e-06, |
| "loss": 0.7427285313606262, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.3884892086330936, |
| "grad_norm": 0.5891308782340648, |
| "learning_rate": 9.274966418581958e-06, |
| "loss": 0.755325436592102, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.3920863309352518, |
| "grad_norm": 0.5221581630154059, |
| "learning_rate": 9.271299041598768e-06, |
| "loss": 0.7211991548538208, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.39568345323741, |
| "grad_norm": 0.5591826784203816, |
| "learning_rate": 9.267623141662734e-06, |
| "loss": 0.742611289024353, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.3992805755395683, |
| "grad_norm": 0.6031525882590488, |
| "learning_rate": 9.263938726108747e-06, |
| "loss": 0.7558526992797852, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.4028776978417266, |
| "grad_norm": 0.5434043181383015, |
| "learning_rate": 9.260245802288694e-06, |
| "loss": 0.7408391833305359, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.406474820143885, |
| "grad_norm": 0.5318047677201455, |
| "learning_rate": 9.256544377571437e-06, |
| "loss": 0.7365950345993042, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.4100719424460433, |
| "grad_norm": 0.5998492183556869, |
| "learning_rate": 9.252834459342801e-06, |
| "loss": 0.7364234924316406, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.4136690647482015, |
| "grad_norm": 0.5498503102293683, |
| "learning_rate": 9.24911605500556e-06, |
| "loss": 0.7535672187805176, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.4172661870503598, |
| "grad_norm": 0.5503794189317693, |
| "learning_rate": 9.24538917197942e-06, |
| "loss": 0.7519177198410034, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.420863309352518, |
| "grad_norm": 0.6162738039028262, |
| "learning_rate": 9.241653817701006e-06, |
| "loss": 0.7295320630073547, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.4244604316546763, |
| "grad_norm": 0.5555563314128995, |
| "learning_rate": 9.237909999623847e-06, |
| "loss": 0.7336751222610474, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.4280575539568345, |
| "grad_norm": 0.5259597744806965, |
| "learning_rate": 9.234157725218355e-06, |
| "loss": 0.7189645171165466, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.4316546762589928, |
| "grad_norm": 0.5630698969687603, |
| "learning_rate": 9.230397001971829e-06, |
| "loss": 0.73908531665802, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.435251798561151, |
| "grad_norm": 0.5276648720445877, |
| "learning_rate": 9.226627837388413e-06, |
| "loss": 0.7388709783554077, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.4388489208633093, |
| "grad_norm": 0.5536664986659428, |
| "learning_rate": 9.222850238989104e-06, |
| "loss": 0.7502380013465881, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.4388489208633093, |
| "eval_loss": 0.7909448742866516, |
| "eval_runtime": 7.5526, |
| "eval_samples_per_second": 11.652, |
| "eval_steps_per_second": 0.397, |
| "eval_token_acc": 0.7528686175941879, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.4424460431654675, |
| "grad_norm": 0.5311490565632518, |
| "learning_rate": 9.219064214311719e-06, |
| "loss": 0.7382062673568726, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.4460431654676258, |
| "grad_norm": 0.5370721218852971, |
| "learning_rate": 9.215269770910897e-06, |
| "loss": 0.7413097620010376, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.449640287769784, |
| "grad_norm": 0.5465408436928648, |
| "learning_rate": 9.211466916358074e-06, |
| "loss": 0.7206458449363708, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.4532374100719425, |
| "grad_norm": 0.5656672911765269, |
| "learning_rate": 9.207655658241469e-06, |
| "loss": 0.7349211573600769, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.4568345323741008, |
| "grad_norm": 0.5335944635099276, |
| "learning_rate": 9.203836004166064e-06, |
| "loss": 0.7555948495864868, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.460431654676259, |
| "grad_norm": 0.5549021913248626, |
| "learning_rate": 9.200007961753605e-06, |
| "loss": 0.7555477619171143, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.4640287769784173, |
| "grad_norm": 0.614098701080317, |
| "learning_rate": 9.196171538642567e-06, |
| "loss": 0.7401256561279297, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.4676258992805755, |
| "grad_norm": 0.5418307481265139, |
| "learning_rate": 9.192326742488153e-06, |
| "loss": 0.7573046088218689, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.4712230215827338, |
| "grad_norm": 0.5956224565062808, |
| "learning_rate": 9.18847358096227e-06, |
| "loss": 0.7176140546798706, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.474820143884892, |
| "grad_norm": 0.570779754662452, |
| "learning_rate": 9.184612061753518e-06, |
| "loss": 0.7395877242088318, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.4784172661870505, |
| "grad_norm": 0.5388215450729462, |
| "learning_rate": 9.180742192567177e-06, |
| "loss": 0.7351829409599304, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.4820143884892087, |
| "grad_norm": 0.5727588774150603, |
| "learning_rate": 9.176863981125185e-06, |
| "loss": 0.7495085597038269, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.485611510791367, |
| "grad_norm": 0.5697617446092769, |
| "learning_rate": 9.172977435166132e-06, |
| "loss": 0.7626926898956299, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.4892086330935252, |
| "grad_norm": 0.5675953440673872, |
| "learning_rate": 9.169082562445228e-06, |
| "loss": 0.7373538017272949, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.4928057553956835, |
| "grad_norm": 0.5479509775604315, |
| "learning_rate": 9.165179370734308e-06, |
| "loss": 0.7284923195838928, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.4964028776978417, |
| "grad_norm": 0.5814710062165914, |
| "learning_rate": 9.161267867821802e-06, |
| "loss": 0.7396392822265625, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.5, |
| "grad_norm": 0.5857709804866665, |
| "learning_rate": 9.157348061512728e-06, |
| "loss": 0.7299116849899292, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.5035971223021583, |
| "grad_norm": 0.5637392300431893, |
| "learning_rate": 9.153419959628666e-06, |
| "loss": 0.7442845106124878, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.5071942446043165, |
| "grad_norm": 0.5477432560373958, |
| "learning_rate": 9.149483570007755e-06, |
| "loss": 0.7370480895042419, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.5107913669064748, |
| "grad_norm": 0.5761456074550545, |
| "learning_rate": 9.14553890050467e-06, |
| "loss": 0.7441853284835815, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.514388489208633, |
| "grad_norm": 0.5357942602104266, |
| "learning_rate": 9.141585958990606e-06, |
| "loss": 0.6985485553741455, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.5179856115107913, |
| "grad_norm": 0.5689984573039134, |
| "learning_rate": 9.137624753353268e-06, |
| "loss": 0.726935625076294, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.5215827338129495, |
| "grad_norm": 0.5402664519943788, |
| "learning_rate": 9.133655291496842e-06, |
| "loss": 0.7158384919166565, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.5251798561151078, |
| "grad_norm": 0.5393049283880241, |
| "learning_rate": 9.129677581342e-06, |
| "loss": 0.7156991958618164, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.5287769784172662, |
| "grad_norm": 0.5417741371232614, |
| "learning_rate": 9.125691630825867e-06, |
| "loss": 0.7445224523544312, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.5323741007194245, |
| "grad_norm": 0.537883938976634, |
| "learning_rate": 9.121697447902006e-06, |
| "loss": 0.734693169593811, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.5359712230215827, |
| "grad_norm": 0.5276159043279869, |
| "learning_rate": 9.117695040540422e-06, |
| "loss": 0.7173066735267639, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.539568345323741, |
| "grad_norm": 0.5675455525239093, |
| "learning_rate": 9.113684416727511e-06, |
| "loss": 0.7307520508766174, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.5431654676258992, |
| "grad_norm": 0.4933193822920981, |
| "learning_rate": 9.10966558446608e-06, |
| "loss": 0.7307450175285339, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.5467625899280577, |
| "grad_norm": 0.485898122283688, |
| "learning_rate": 9.10563855177531e-06, |
| "loss": 0.6748644113540649, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.550359712230216, |
| "grad_norm": 0.5745646730466706, |
| "learning_rate": 9.101603326690742e-06, |
| "loss": 0.7469466924667358, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.5539568345323742, |
| "grad_norm": 0.534327855901563, |
| "learning_rate": 9.097559917264268e-06, |
| "loss": 0.7524317502975464, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.5575539568345325, |
| "grad_norm": 0.5412259006260789, |
| "learning_rate": 9.093508331564107e-06, |
| "loss": 0.7445288300514221, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.5611510791366907, |
| "grad_norm": 0.5058320181775204, |
| "learning_rate": 9.089448577674799e-06, |
| "loss": 0.7400709390640259, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.564748201438849, |
| "grad_norm": 0.5193454182825687, |
| "learning_rate": 9.08538066369718e-06, |
| "loss": 0.7423480749130249, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.5683453237410072, |
| "grad_norm": 0.5207804383383916, |
| "learning_rate": 9.081304597748366e-06, |
| "loss": 0.7210599184036255, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.5719424460431655, |
| "grad_norm": 0.5175530796698581, |
| "learning_rate": 9.077220387961742e-06, |
| "loss": 0.6935808658599854, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.5755395683453237, |
| "grad_norm": 0.515905293322507, |
| "learning_rate": 9.073128042486945e-06, |
| "loss": 0.7411532402038574, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.579136690647482, |
| "grad_norm": 0.5425545076894603, |
| "learning_rate": 9.06902756948984e-06, |
| "loss": 0.735691249370575, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.5827338129496402, |
| "grad_norm": 0.599077809988531, |
| "learning_rate": 9.064918977152517e-06, |
| "loss": 0.7355791926383972, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.5863309352517985, |
| "grad_norm": 0.6082129897806806, |
| "learning_rate": 9.060802273673259e-06, |
| "loss": 0.7199162244796753, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.5899280575539567, |
| "grad_norm": 0.5103649601151766, |
| "learning_rate": 9.056677467266543e-06, |
| "loss": 0.7381383180618286, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.593525179856115, |
| "grad_norm": 0.5627707086300117, |
| "learning_rate": 9.052544566163011e-06, |
| "loss": 0.7414311766624451, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.5971223021582732, |
| "grad_norm": 0.5505980295381908, |
| "learning_rate": 9.048403578609454e-06, |
| "loss": 0.7309621572494507, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.6007194244604317, |
| "grad_norm": 0.5456865652811913, |
| "learning_rate": 9.044254512868802e-06, |
| "loss": 0.7410526871681213, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.60431654676259, |
| "grad_norm": 0.528770536586938, |
| "learning_rate": 9.040097377220103e-06, |
| "loss": 0.7214845418930054, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.6079136690647482, |
| "grad_norm": 0.5102313482024892, |
| "learning_rate": 9.03593217995851e-06, |
| "loss": 0.7267544269561768, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.6115107913669064, |
| "grad_norm": 0.5357842837733169, |
| "learning_rate": 9.031758929395259e-06, |
| "loss": 0.749438464641571, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.6151079136690647, |
| "grad_norm": 0.5183117043379116, |
| "learning_rate": 9.027577633857653e-06, |
| "loss": 0.7412010431289673, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.6187050359712232, |
| "grad_norm": 0.544498698607748, |
| "learning_rate": 9.02338830168906e-06, |
| "loss": 0.7321810722351074, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.6223021582733814, |
| "grad_norm": 0.5591180344098369, |
| "learning_rate": 9.019190941248868e-06, |
| "loss": 0.7417110800743103, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.6258992805755397, |
| "grad_norm": 0.5356391251910433, |
| "learning_rate": 9.014985560912499e-06, |
| "loss": 0.7386839985847473, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.629496402877698, |
| "grad_norm": 0.5220619414688078, |
| "learning_rate": 9.010772169071366e-06, |
| "loss": 0.734441876411438, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.6330935251798562, |
| "grad_norm": 0.5601301117263874, |
| "learning_rate": 9.00655077413288e-06, |
| "loss": 0.7375533580780029, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.6366906474820144, |
| "grad_norm": 0.6271941176180186, |
| "learning_rate": 9.00232138452041e-06, |
| "loss": 0.7252668142318726, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.6402877697841727, |
| "grad_norm": 0.500079390514242, |
| "learning_rate": 8.998084008673284e-06, |
| "loss": 0.751512885093689, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.643884892086331, |
| "grad_norm": 0.5295943834414766, |
| "learning_rate": 8.993838655046768e-06, |
| "loss": 0.7241981029510498, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.6474820143884892, |
| "grad_norm": 0.5128645411360977, |
| "learning_rate": 8.989585332112039e-06, |
| "loss": 0.7336317300796509, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.6510791366906474, |
| "grad_norm": 0.520422971407478, |
| "learning_rate": 8.985324048356182e-06, |
| "loss": 0.7330301403999329, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.6546762589928057, |
| "grad_norm": 0.5486117483299725, |
| "learning_rate": 8.981054812282162e-06, |
| "loss": 0.7439688444137573, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.658273381294964, |
| "grad_norm": 0.5474240792690704, |
| "learning_rate": 8.976777632408823e-06, |
| "loss": 0.7569353580474854, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.6618705035971222, |
| "grad_norm": 0.5537282539324976, |
| "learning_rate": 8.972492517270845e-06, |
| "loss": 0.7264491319656372, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.6654676258992804, |
| "grad_norm": 0.5210887026164476, |
| "learning_rate": 8.968199475418753e-06, |
| "loss": 0.7550468444824219, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.6690647482014387, |
| "grad_norm": 0.5227914073582502, |
| "learning_rate": 8.963898515418885e-06, |
| "loss": 0.7275254130363464, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.6726618705035972, |
| "grad_norm": 0.547248636298651, |
| "learning_rate": 8.959589645853379e-06, |
| "loss": 0.7321031093597412, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.6762589928057554, |
| "grad_norm": 0.5635636922781485, |
| "learning_rate": 8.955272875320159e-06, |
| "loss": 0.7457851767539978, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.6798561151079137, |
| "grad_norm": 0.5260209843292031, |
| "learning_rate": 8.950948212432904e-06, |
| "loss": 0.7447621822357178, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.683453237410072, |
| "grad_norm": 0.5809358369922651, |
| "learning_rate": 8.946615665821059e-06, |
| "loss": 0.7280055284500122, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.6870503597122302, |
| "grad_norm": 0.5551594345768393, |
| "learning_rate": 8.942275244129784e-06, |
| "loss": 0.7527322769165039, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.6906474820143886, |
| "grad_norm": 0.5617975071761427, |
| "learning_rate": 8.937926956019957e-06, |
| "loss": 0.7513195276260376, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.6942446043165469, |
| "grad_norm": 0.5112275243890037, |
| "learning_rate": 8.933570810168164e-06, |
| "loss": 0.6966029405593872, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.6978417266187051, |
| "grad_norm": 0.5375351381393408, |
| "learning_rate": 8.929206815266653e-06, |
| "loss": 0.73940110206604, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.7014388489208634, |
| "grad_norm": 0.554451465074595, |
| "learning_rate": 8.924834980023345e-06, |
| "loss": 0.7569516897201538, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.7050359712230216, |
| "grad_norm": 0.5383238373862146, |
| "learning_rate": 8.920455313161801e-06, |
| "loss": 0.7401361465454102, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.70863309352518, |
| "grad_norm": 0.5104340245269192, |
| "learning_rate": 8.916067823421217e-06, |
| "loss": 0.7461481094360352, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.7122302158273381, |
| "grad_norm": 0.53560752921969, |
| "learning_rate": 8.911672519556386e-06, |
| "loss": 0.7368592619895935, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.7158273381294964, |
| "grad_norm": 0.536053839213111, |
| "learning_rate": 8.907269410337704e-06, |
| "loss": 0.7240039706230164, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.7194244604316546, |
| "grad_norm": 0.5478965091566458, |
| "learning_rate": 8.902858504551136e-06, |
| "loss": 0.7458293437957764, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.723021582733813, |
| "grad_norm": 0.5564378779508917, |
| "learning_rate": 8.898439810998207e-06, |
| "loss": 0.7189388871192932, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.7266187050359711, |
| "grad_norm": 0.5174245932797992, |
| "learning_rate": 8.89401333849598e-06, |
| "loss": 0.735579252243042, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.7302158273381294, |
| "grad_norm": 0.5695227424690098, |
| "learning_rate": 8.889579095877045e-06, |
| "loss": 0.7450761795043945, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.7338129496402876, |
| "grad_norm": 0.5787345149604068, |
| "learning_rate": 8.885137091989488e-06, |
| "loss": 0.7268454432487488, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.737410071942446, |
| "grad_norm": 0.5394179165975326, |
| "learning_rate": 8.880687335696888e-06, |
| "loss": 0.7514413595199585, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.7410071942446042, |
| "grad_norm": 0.5303547723039538, |
| "learning_rate": 8.87622983587829e-06, |
| "loss": 0.7547081708908081, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.7446043165467626, |
| "grad_norm": 0.514637981124783, |
| "learning_rate": 8.871764601428197e-06, |
| "loss": 0.734933078289032, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.7482014388489209, |
| "grad_norm": 0.5586343580104267, |
| "learning_rate": 8.867291641256534e-06, |
| "loss": 0.7437984347343445, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.7517985611510791, |
| "grad_norm": 0.5998511954854915, |
| "learning_rate": 8.862810964288653e-06, |
| "loss": 0.7622708082199097, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.7553956834532374, |
| "grad_norm": 0.49612184572197465, |
| "learning_rate": 8.8583225794653e-06, |
| "loss": 0.723395586013794, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.7589928057553958, |
| "grad_norm": 0.528640028638296, |
| "learning_rate": 8.853826495742598e-06, |
| "loss": 0.7272807359695435, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.762589928057554, |
| "grad_norm": 0.524042782338471, |
| "learning_rate": 8.849322722092037e-06, |
| "loss": 0.7300413846969604, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.7661870503597124, |
| "grad_norm": 0.5318202821857035, |
| "learning_rate": 8.84481126750045e-06, |
| "loss": 0.7286820411682129, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.7697841726618706, |
| "grad_norm": 0.5651665747593855, |
| "learning_rate": 8.840292140969995e-06, |
| "loss": 0.7338886857032776, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.7733812949640289, |
| "grad_norm": 0.5205303749173067, |
| "learning_rate": 8.835765351518141e-06, |
| "loss": 0.7232467532157898, |
| "step": 493 |
| }, |
| { |
| "epoch": 1.776978417266187, |
| "grad_norm": 0.5577630630822902, |
| "learning_rate": 8.83123090817765e-06, |
| "loss": 0.7327965497970581, |
| "step": 494 |
| }, |
| { |
| "epoch": 1.7805755395683454, |
| "grad_norm": 0.5453661781921937, |
| "learning_rate": 8.826688819996547e-06, |
| "loss": 0.7516458034515381, |
| "step": 495 |
| }, |
| { |
| "epoch": 1.7841726618705036, |
| "grad_norm": 0.5303678797009448, |
| "learning_rate": 8.82213909603812e-06, |
| "loss": 0.7440846562385559, |
| "step": 496 |
| }, |
| { |
| "epoch": 1.7877697841726619, |
| "grad_norm": 0.5342645964789604, |
| "learning_rate": 8.817581745380892e-06, |
| "loss": 0.7308763265609741, |
| "step": 497 |
| }, |
| { |
| "epoch": 1.79136690647482, |
| "grad_norm": 0.5495259593847395, |
| "learning_rate": 8.813016777118604e-06, |
| "loss": 0.751781702041626, |
| "step": 498 |
| }, |
| { |
| "epoch": 1.7949640287769784, |
| "grad_norm": 0.5663289088779772, |
| "learning_rate": 8.808444200360193e-06, |
| "loss": 0.7341732978820801, |
| "step": 499 |
| }, |
| { |
| "epoch": 1.7985611510791366, |
| "grad_norm": 0.5581078677091702, |
| "learning_rate": 8.803864024229786e-06, |
| "loss": 0.7606270909309387, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.7985611510791366, |
| "eval_loss": 0.7864982485771179, |
| "eval_runtime": 7.7709, |
| "eval_samples_per_second": 11.324, |
| "eval_steps_per_second": 0.386, |
| "eval_token_acc": 0.753477154659882, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.8021582733812949, |
| "grad_norm": 0.5531324796536057, |
| "learning_rate": 8.799276257866668e-06, |
| "loss": 0.7583173513412476, |
| "step": 501 |
| }, |
| { |
| "epoch": 1.8057553956834531, |
| "grad_norm": 0.5395790483494515, |
| "learning_rate": 8.79468091042527e-06, |
| "loss": 0.7361318469047546, |
| "step": 502 |
| }, |
| { |
| "epoch": 1.8093525179856114, |
| "grad_norm": 0.5517392006475015, |
| "learning_rate": 8.790077991075152e-06, |
| "loss": 0.722428560256958, |
| "step": 503 |
| }, |
| { |
| "epoch": 1.8129496402877698, |
| "grad_norm": 0.5929611636500313, |
| "learning_rate": 8.78546750900098e-06, |
| "loss": 0.730574369430542, |
| "step": 504 |
| }, |
| { |
| "epoch": 1.816546762589928, |
| "grad_norm": 0.5448615482316708, |
| "learning_rate": 8.780849473402516e-06, |
| "loss": 0.7352193593978882, |
| "step": 505 |
| }, |
| { |
| "epoch": 1.8201438848920863, |
| "grad_norm": 0.5253273778616157, |
| "learning_rate": 8.77622389349459e-06, |
| "loss": 0.7241075038909912, |
| "step": 506 |
| }, |
| { |
| "epoch": 1.8237410071942446, |
| "grad_norm": 0.5257045541245243, |
| "learning_rate": 8.77159077850709e-06, |
| "loss": 0.7681543827056885, |
| "step": 507 |
| }, |
| { |
| "epoch": 1.8273381294964028, |
| "grad_norm": 0.6002720552753918, |
| "learning_rate": 8.766950137684929e-06, |
| "loss": 0.7483848929405212, |
| "step": 508 |
| }, |
| { |
| "epoch": 1.8309352517985613, |
| "grad_norm": 0.5756325346480011, |
| "learning_rate": 8.762301980288052e-06, |
| "loss": 0.7401927709579468, |
| "step": 509 |
| }, |
| { |
| "epoch": 1.8345323741007196, |
| "grad_norm": 0.5405458899100752, |
| "learning_rate": 8.757646315591391e-06, |
| "loss": 0.7275959849357605, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.8381294964028778, |
| "grad_norm": 0.5055345910881511, |
| "learning_rate": 8.752983152884865e-06, |
| "loss": 0.7402202486991882, |
| "step": 511 |
| }, |
| { |
| "epoch": 1.841726618705036, |
| "grad_norm": 0.5294594291530128, |
| "learning_rate": 8.748312501473351e-06, |
| "loss": 0.7236826419830322, |
| "step": 512 |
| }, |
| { |
| "epoch": 1.8453237410071943, |
| "grad_norm": 0.525511680520969, |
| "learning_rate": 8.743634370676668e-06, |
| "loss": 0.7717915773391724, |
| "step": 513 |
| }, |
| { |
| "epoch": 1.8489208633093526, |
| "grad_norm": 0.5238277780917114, |
| "learning_rate": 8.738948769829565e-06, |
| "loss": 0.7489827871322632, |
| "step": 514 |
| }, |
| { |
| "epoch": 1.8525179856115108, |
| "grad_norm": 0.541734141362648, |
| "learning_rate": 8.734255708281689e-06, |
| "loss": 0.7424919605255127, |
| "step": 515 |
| }, |
| { |
| "epoch": 1.856115107913669, |
| "grad_norm": 0.5469220011947715, |
| "learning_rate": 8.72955519539758e-06, |
| "loss": 0.7435829639434814, |
| "step": 516 |
| }, |
| { |
| "epoch": 1.8597122302158273, |
| "grad_norm": 0.5189466053231159, |
| "learning_rate": 8.724847240556646e-06, |
| "loss": 0.7290617227554321, |
| "step": 517 |
| }, |
| { |
| "epoch": 1.8633093525179856, |
| "grad_norm": 0.5480359007192052, |
| "learning_rate": 8.72013185315314e-06, |
| "loss": 0.7334051132202148, |
| "step": 518 |
| }, |
| { |
| "epoch": 1.8669064748201438, |
| "grad_norm": 0.5417268446962726, |
| "learning_rate": 8.71540904259615e-06, |
| "loss": 0.7550539374351501, |
| "step": 519 |
| }, |
| { |
| "epoch": 1.870503597122302, |
| "grad_norm": 0.5167623466664647, |
| "learning_rate": 8.710678818309576e-06, |
| "loss": 0.7526887059211731, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.8741007194244603, |
| "grad_norm": 0.5225951181805961, |
| "learning_rate": 8.705941189732112e-06, |
| "loss": 0.7233267426490784, |
| "step": 521 |
| }, |
| { |
| "epoch": 1.8776978417266186, |
| "grad_norm": 0.5083543003377556, |
| "learning_rate": 8.701196166317221e-06, |
| "loss": 0.7155008912086487, |
| "step": 522 |
| }, |
| { |
| "epoch": 1.8812949640287768, |
| "grad_norm": 0.5184841428097599, |
| "learning_rate": 8.696443757533128e-06, |
| "loss": 0.7537486553192139, |
| "step": 523 |
| }, |
| { |
| "epoch": 1.8848920863309353, |
| "grad_norm": 0.5355064843039317, |
| "learning_rate": 8.691683972862792e-06, |
| "loss": 0.7314133644104004, |
| "step": 524 |
| }, |
| { |
| "epoch": 1.8884892086330936, |
| "grad_norm": 0.5274575652182439, |
| "learning_rate": 8.686916821803892e-06, |
| "loss": 0.7118090391159058, |
| "step": 525 |
| }, |
| { |
| "epoch": 1.8920863309352518, |
| "grad_norm": 0.543918653349304, |
| "learning_rate": 8.682142313868802e-06, |
| "loss": 0.7229712605476379, |
| "step": 526 |
| }, |
| { |
| "epoch": 1.89568345323741, |
| "grad_norm": 0.5411862870781207, |
| "learning_rate": 8.677360458584581e-06, |
| "loss": 0.7486415505409241, |
| "step": 527 |
| }, |
| { |
| "epoch": 1.8992805755395683, |
| "grad_norm": 0.513452919974691, |
| "learning_rate": 8.672571265492944e-06, |
| "loss": 0.7225480079650879, |
| "step": 528 |
| }, |
| { |
| "epoch": 1.9028776978417268, |
| "grad_norm": 0.5490215040066515, |
| "learning_rate": 8.667774744150251e-06, |
| "loss": 0.7476856708526611, |
| "step": 529 |
| }, |
| { |
| "epoch": 1.906474820143885, |
| "grad_norm": 0.5169106694290708, |
| "learning_rate": 8.66297090412748e-06, |
| "loss": 0.7361697554588318, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.9100719424460433, |
| "grad_norm": 0.5268875864918143, |
| "learning_rate": 8.658159755010223e-06, |
| "loss": 0.7248069047927856, |
| "step": 531 |
| }, |
| { |
| "epoch": 1.9136690647482015, |
| "grad_norm": 0.5403639981809885, |
| "learning_rate": 8.653341306398645e-06, |
| "loss": 0.7208993434906006, |
| "step": 532 |
| }, |
| { |
| "epoch": 1.9172661870503598, |
| "grad_norm": 0.513303533346121, |
| "learning_rate": 8.648515567907485e-06, |
| "loss": 0.7428863644599915, |
| "step": 533 |
| }, |
| { |
| "epoch": 1.920863309352518, |
| "grad_norm": 0.5531793218051347, |
| "learning_rate": 8.643682549166021e-06, |
| "loss": 0.7477810382843018, |
| "step": 534 |
| }, |
| { |
| "epoch": 1.9244604316546763, |
| "grad_norm": 0.5286725859333752, |
| "learning_rate": 8.638842259818064e-06, |
| "loss": 0.7352632284164429, |
| "step": 535 |
| }, |
| { |
| "epoch": 1.9280575539568345, |
| "grad_norm": 0.528661274771883, |
| "learning_rate": 8.63399470952193e-06, |
| "loss": 0.7406404614448547, |
| "step": 536 |
| }, |
| { |
| "epoch": 1.9316546762589928, |
| "grad_norm": 0.5541977935906626, |
| "learning_rate": 8.629139907950424e-06, |
| "loss": 0.7357466220855713, |
| "step": 537 |
| }, |
| { |
| "epoch": 1.935251798561151, |
| "grad_norm": 0.5491288214982604, |
| "learning_rate": 8.62427786479082e-06, |
| "loss": 0.7417570948600769, |
| "step": 538 |
| }, |
| { |
| "epoch": 1.9388489208633093, |
| "grad_norm": 0.5348837842686455, |
| "learning_rate": 8.619408589744842e-06, |
| "loss": 0.7317093014717102, |
| "step": 539 |
| }, |
| { |
| "epoch": 1.9424460431654675, |
| "grad_norm": 0.540958863951335, |
| "learning_rate": 8.614532092528645e-06, |
| "loss": 0.7405022382736206, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.9460431654676258, |
| "grad_norm": 0.5163812629918846, |
| "learning_rate": 8.609648382872795e-06, |
| "loss": 0.741442084312439, |
| "step": 541 |
| }, |
| { |
| "epoch": 1.949640287769784, |
| "grad_norm": 0.5442615603715865, |
| "learning_rate": 8.604757470522247e-06, |
| "loss": 0.763242244720459, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.9532374100719423, |
| "grad_norm": 0.566199733420676, |
| "learning_rate": 8.599859365236334e-06, |
| "loss": 0.7346137762069702, |
| "step": 543 |
| }, |
| { |
| "epoch": 1.9568345323741008, |
| "grad_norm": 0.5354204680310805, |
| "learning_rate": 8.594954076788736e-06, |
| "loss": 0.7331790924072266, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.960431654676259, |
| "grad_norm": 0.537266301641817, |
| "learning_rate": 8.590041614967471e-06, |
| "loss": 0.7450471520423889, |
| "step": 545 |
| }, |
| { |
| "epoch": 1.9640287769784173, |
| "grad_norm": 0.5595286110764061, |
| "learning_rate": 8.585121989574865e-06, |
| "loss": 0.71414715051651, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.9676258992805755, |
| "grad_norm": 0.5434065388365801, |
| "learning_rate": 8.580195210427544e-06, |
| "loss": 0.7267420887947083, |
| "step": 547 |
| }, |
| { |
| "epoch": 1.9712230215827338, |
| "grad_norm": 0.5823483147181996, |
| "learning_rate": 8.575261287356407e-06, |
| "loss": 0.7253061532974243, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.9748201438848922, |
| "grad_norm": 0.5663754055488593, |
| "learning_rate": 8.570320230206606e-06, |
| "loss": 0.7432679533958435, |
| "step": 549 |
| }, |
| { |
| "epoch": 1.9784172661870505, |
| "grad_norm": 0.5301593265031658, |
| "learning_rate": 8.565372048837529e-06, |
| "loss": 0.72602379322052, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.9820143884892087, |
| "grad_norm": 0.5919650898951195, |
| "learning_rate": 8.56041675312278e-06, |
| "loss": 0.7479984760284424, |
| "step": 551 |
| }, |
| { |
| "epoch": 1.985611510791367, |
| "grad_norm": 0.5725881545065112, |
| "learning_rate": 8.555454352950161e-06, |
| "loss": 0.7247596979141235, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.9892086330935252, |
| "grad_norm": 0.545104484483576, |
| "learning_rate": 8.550484858221648e-06, |
| "loss": 0.7440117001533508, |
| "step": 553 |
| }, |
| { |
| "epoch": 1.9928057553956835, |
| "grad_norm": 0.5046621526349823, |
| "learning_rate": 8.545508278853373e-06, |
| "loss": 0.7102154493331909, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.9964028776978417, |
| "grad_norm": 0.5827148240841841, |
| "learning_rate": 8.54052462477561e-06, |
| "loss": 0.7293152809143066, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.6796164196944195, |
| "learning_rate": 8.535533905932739e-06, |
| "loss": 0.7514892816543579, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.0035971223021583, |
| "grad_norm": 0.6397885740084429, |
| "learning_rate": 8.530536132283247e-06, |
| "loss": 0.671186089515686, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.0071942446043165, |
| "grad_norm": 0.5966614415471234, |
| "learning_rate": 8.525531313799693e-06, |
| "loss": 0.6722039580345154, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.0107913669064748, |
| "grad_norm": 0.5579282766077329, |
| "learning_rate": 8.520519460468697e-06, |
| "loss": 0.690311074256897, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.014388489208633, |
| "grad_norm": 0.5433122510327065, |
| "learning_rate": 8.515500582290914e-06, |
| "loss": 0.6836049556732178, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.0179856115107913, |
| "grad_norm": 0.5538529733134517, |
| "learning_rate": 8.510474689281016e-06, |
| "loss": 0.6595236659049988, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.0215827338129495, |
| "grad_norm": 0.5422413975220873, |
| "learning_rate": 8.505441791467674e-06, |
| "loss": 0.6806621551513672, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.0251798561151078, |
| "grad_norm": 0.5578089222740708, |
| "learning_rate": 8.500401898893536e-06, |
| "loss": 0.6593084335327148, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.028776978417266, |
| "grad_norm": 0.565177209250898, |
| "learning_rate": 8.495355021615204e-06, |
| "loss": 0.6911220550537109, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.0323741007194243, |
| "grad_norm": 0.5119808699742511, |
| "learning_rate": 8.490301169703224e-06, |
| "loss": 0.6811946630477905, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.0359712230215825, |
| "grad_norm": 0.5731400065818258, |
| "learning_rate": 8.48524035324205e-06, |
| "loss": 0.6667191982269287, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.039568345323741, |
| "grad_norm": 0.5998221151013243, |
| "learning_rate": 8.480172582330044e-06, |
| "loss": 0.6908724904060364, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.0431654676258995, |
| "grad_norm": 0.5527881634506817, |
| "learning_rate": 8.475097867079437e-06, |
| "loss": 0.6831552982330322, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.0467625899280577, |
| "grad_norm": 0.5385272898977095, |
| "learning_rate": 8.470016217616319e-06, |
| "loss": 0.6840323209762573, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.050359712230216, |
| "grad_norm": 0.5758971391452382, |
| "learning_rate": 8.464927644080617e-06, |
| "loss": 0.6829456090927124, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.053956834532374, |
| "grad_norm": 0.5722853086808806, |
| "learning_rate": 8.459832156626073e-06, |
| "loss": 0.6708418130874634, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.0575539568345325, |
| "grad_norm": 0.5267935272022233, |
| "learning_rate": 8.454729765420228e-06, |
| "loss": 0.6527084112167358, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.0611510791366907, |
| "grad_norm": 0.5433510959719299, |
| "learning_rate": 8.449620480644393e-06, |
| "loss": 0.6686661243438721, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.064748201438849, |
| "grad_norm": 0.5333455600158562, |
| "learning_rate": 8.444504312493642e-06, |
| "loss": 0.6683956384658813, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.068345323741007, |
| "grad_norm": 0.5559059316065588, |
| "learning_rate": 8.439381271176779e-06, |
| "loss": 0.6839472055435181, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.0719424460431655, |
| "grad_norm": 0.5686589857247856, |
| "learning_rate": 8.434251366916323e-06, |
| "loss": 0.677751898765564, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.0755395683453237, |
| "grad_norm": 0.581140831766113, |
| "learning_rate": 8.429114609948488e-06, |
| "loss": 0.6839741468429565, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.079136690647482, |
| "grad_norm": 0.6030439117251434, |
| "learning_rate": 8.423971010523165e-06, |
| "loss": 0.6848390698432922, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.08273381294964, |
| "grad_norm": 0.5462427914093523, |
| "learning_rate": 8.418820578903891e-06, |
| "loss": 0.6663479804992676, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.0863309352517985, |
| "grad_norm": 0.5542619430609473, |
| "learning_rate": 8.413663325367845e-06, |
| "loss": 0.6420421004295349, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.0899280575539567, |
| "grad_norm": 0.565911591941745, |
| "learning_rate": 8.40849926020581e-06, |
| "loss": 0.6627936959266663, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.093525179856115, |
| "grad_norm": 0.5222447784122286, |
| "learning_rate": 8.403328393722169e-06, |
| "loss": 0.675050675868988, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.097122302158273, |
| "grad_norm": 0.5564681058103657, |
| "learning_rate": 8.39815073623487e-06, |
| "loss": 0.6631220579147339, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.1007194244604315, |
| "grad_norm": 0.5701117496605299, |
| "learning_rate": 8.392966298075413e-06, |
| "loss": 0.6832946538925171, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.1043165467625897, |
| "grad_norm": 0.5567281046765565, |
| "learning_rate": 8.387775089588833e-06, |
| "loss": 0.6763100624084473, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.1079136690647484, |
| "grad_norm": 0.5141405571972344, |
| "learning_rate": 8.382577121133664e-06, |
| "loss": 0.6828013062477112, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.1115107913669067, |
| "grad_norm": 0.5690795804417714, |
| "learning_rate": 8.377372403081943e-06, |
| "loss": 0.6641282439231873, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.115107913669065, |
| "grad_norm": 0.5877720850231689, |
| "learning_rate": 8.372160945819164e-06, |
| "loss": 0.6777174472808838, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.118705035971223, |
| "grad_norm": 0.5971069404183997, |
| "learning_rate": 8.366942759744274e-06, |
| "loss": 0.6761478185653687, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.1223021582733814, |
| "grad_norm": 0.5374508547318486, |
| "learning_rate": 8.361717855269643e-06, |
| "loss": 0.6704797744750977, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.1258992805755397, |
| "grad_norm": 0.575014664172825, |
| "learning_rate": 8.356486242821048e-06, |
| "loss": 0.6633048057556152, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.129496402877698, |
| "grad_norm": 0.5630765161146606, |
| "learning_rate": 8.351247932837655e-06, |
| "loss": 0.6555532217025757, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.133093525179856, |
| "grad_norm": 0.5540501457675682, |
| "learning_rate": 8.346002935771988e-06, |
| "loss": 0.6675060987472534, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.1366906474820144, |
| "grad_norm": 0.5771881310936134, |
| "learning_rate": 8.34075126208992e-06, |
| "loss": 0.6720588803291321, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.1402877697841727, |
| "grad_norm": 0.5516169446446759, |
| "learning_rate": 8.335492922270642e-06, |
| "loss": 0.6864827871322632, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.143884892086331, |
| "grad_norm": 0.5537872437146788, |
| "learning_rate": 8.330227926806652e-06, |
| "loss": 0.675635814666748, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.147482014388489, |
| "grad_norm": 0.5399664246709232, |
| "learning_rate": 8.324956286203717e-06, |
| "loss": 0.6826367378234863, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.1510791366906474, |
| "grad_norm": 0.5763658067111991, |
| "learning_rate": 8.319678010980883e-06, |
| "loss": 0.6901789903640747, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.1546762589928057, |
| "grad_norm": 0.4974273454797143, |
| "learning_rate": 8.314393111670419e-06, |
| "loss": 0.6468210220336914, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.158273381294964, |
| "grad_norm": 0.5859989003860766, |
| "learning_rate": 8.309101598817812e-06, |
| "loss": 0.6841794848442078, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.158273381294964, |
| "eval_loss": 0.7964679598808289, |
| "eval_runtime": 7.6714, |
| "eval_samples_per_second": 11.471, |
| "eval_steps_per_second": 0.391, |
| "eval_token_acc": 0.7527902171133316, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.161870503597122, |
| "grad_norm": 0.5935282145282037, |
| "learning_rate": 8.303803482981758e-06, |
| "loss": 0.6777781248092651, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.1654676258992804, |
| "grad_norm": 0.5646029208074659, |
| "learning_rate": 8.298498774734114e-06, |
| "loss": 0.6669676303863525, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.1690647482014387, |
| "grad_norm": 0.5488370688552963, |
| "learning_rate": 8.293187484659904e-06, |
| "loss": 0.6794064044952393, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.172661870503597, |
| "grad_norm": 0.5567754831392528, |
| "learning_rate": 8.287869623357275e-06, |
| "loss": 0.6805890798568726, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.176258992805755, |
| "grad_norm": 0.5112276480668334, |
| "learning_rate": 8.282545201437493e-06, |
| "loss": 0.6340111494064331, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.1798561151079134, |
| "grad_norm": 0.5892481231313883, |
| "learning_rate": 8.277214229524913e-06, |
| "loss": 0.6796945333480835, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.183453237410072, |
| "grad_norm": 0.5771549281819284, |
| "learning_rate": 8.271876718256958e-06, |
| "loss": 0.6738216280937195, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.1870503597122304, |
| "grad_norm": 0.5416360126424467, |
| "learning_rate": 8.266532678284103e-06, |
| "loss": 0.6745922565460205, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.1906474820143886, |
| "grad_norm": 0.5232225408923397, |
| "learning_rate": 8.261182120269851e-06, |
| "loss": 0.6739056706428528, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.194244604316547, |
| "grad_norm": 0.5868675716566435, |
| "learning_rate": 8.255825054890705e-06, |
| "loss": 0.6711397171020508, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.197841726618705, |
| "grad_norm": 0.5539408109487034, |
| "learning_rate": 8.250461492836158e-06, |
| "loss": 0.6721957921981812, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.2014388489208634, |
| "grad_norm": 0.5576072951558111, |
| "learning_rate": 8.245091444808663e-06, |
| "loss": 0.6724331378936768, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.2050359712230216, |
| "grad_norm": 0.5583937132116154, |
| "learning_rate": 8.239714921523623e-06, |
| "loss": 0.6844789981842041, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.20863309352518, |
| "grad_norm": 0.5596163368588887, |
| "learning_rate": 8.234331933709353e-06, |
| "loss": 0.6690089702606201, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.212230215827338, |
| "grad_norm": 0.5537700483756164, |
| "learning_rate": 8.228942492107065e-06, |
| "loss": 0.6785811185836792, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.2158273381294964, |
| "grad_norm": 0.5596429509210736, |
| "learning_rate": 8.223546607470863e-06, |
| "loss": 0.6860724687576294, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.2194244604316546, |
| "grad_norm": 0.5615860198473355, |
| "learning_rate": 8.218144290567694e-06, |
| "loss": 0.6716651916503906, |
| "step": 617 |
| }, |
| { |
| "epoch": 2.223021582733813, |
| "grad_norm": 0.5828702845645434, |
| "learning_rate": 8.212735552177347e-06, |
| "loss": 0.6850781440734863, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.226618705035971, |
| "grad_norm": 0.5414999227771777, |
| "learning_rate": 8.207320403092419e-06, |
| "loss": 0.6482763290405273, |
| "step": 619 |
| }, |
| { |
| "epoch": 2.2302158273381294, |
| "grad_norm": 0.5932037908152304, |
| "learning_rate": 8.201898854118301e-06, |
| "loss": 0.6657392382621765, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.2338129496402876, |
| "grad_norm": 0.5342906139121061, |
| "learning_rate": 8.196470916073161e-06, |
| "loss": 0.6627857685089111, |
| "step": 621 |
| }, |
| { |
| "epoch": 2.237410071942446, |
| "grad_norm": 0.6026520242202834, |
| "learning_rate": 8.191036599787908e-06, |
| "loss": 0.6858589053153992, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.241007194244604, |
| "grad_norm": 0.5530261201191066, |
| "learning_rate": 8.185595916106175e-06, |
| "loss": 0.6939041018486023, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.2446043165467624, |
| "grad_norm": 0.5223090539726519, |
| "learning_rate": 8.18014887588431e-06, |
| "loss": 0.6830934286117554, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.2482014388489207, |
| "grad_norm": 0.5211039805430082, |
| "learning_rate": 8.174695489991343e-06, |
| "loss": 0.6655054092407227, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.2517985611510793, |
| "grad_norm": 0.5394362084185865, |
| "learning_rate": 8.169235769308959e-06, |
| "loss": 0.6696172952651978, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.2553956834532376, |
| "grad_norm": 0.5667420557087554, |
| "learning_rate": 8.163769724731491e-06, |
| "loss": 0.6671138405799866, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.258992805755396, |
| "grad_norm": 0.545382220641246, |
| "learning_rate": 8.158297367165885e-06, |
| "loss": 0.6894287467002869, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.262589928057554, |
| "grad_norm": 0.5417774154601794, |
| "learning_rate": 8.152818707531691e-06, |
| "loss": 0.6864540576934814, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.2661870503597124, |
| "grad_norm": 0.5614615667910734, |
| "learning_rate": 8.147333756761027e-06, |
| "loss": 0.668978214263916, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.2697841726618706, |
| "grad_norm": 0.5488039597992234, |
| "learning_rate": 8.141842525798567e-06, |
| "loss": 0.6848398447036743, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.273381294964029, |
| "grad_norm": 0.5723936696701672, |
| "learning_rate": 8.13634502560152e-06, |
| "loss": 0.6667437553405762, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.276978417266187, |
| "grad_norm": 0.5733329856292084, |
| "learning_rate": 8.130841267139598e-06, |
| "loss": 0.6772172451019287, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.2805755395683454, |
| "grad_norm": 0.5176511941575904, |
| "learning_rate": 8.125331261395004e-06, |
| "loss": 0.6722500324249268, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.2841726618705036, |
| "grad_norm": 0.5418827338877102, |
| "learning_rate": 8.11981501936241e-06, |
| "loss": 0.6875349283218384, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.287769784172662, |
| "grad_norm": 0.5714231681415783, |
| "learning_rate": 8.114292552048925e-06, |
| "loss": 0.6613736152648926, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.29136690647482, |
| "grad_norm": 0.5938180697661647, |
| "learning_rate": 8.108763870474088e-06, |
| "loss": 0.6840851306915283, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.2949640287769784, |
| "grad_norm": 0.5260002035466227, |
| "learning_rate": 8.10322898566983e-06, |
| "loss": 0.6638031601905823, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.2985611510791366, |
| "grad_norm": 0.591475324476344, |
| "learning_rate": 8.097687908680467e-06, |
| "loss": 0.6879281401634216, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.302158273381295, |
| "grad_norm": 0.5331036790988722, |
| "learning_rate": 8.092140650562665e-06, |
| "loss": 0.6734105348587036, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.305755395683453, |
| "grad_norm": 0.5172033041212313, |
| "learning_rate": 8.086587222385428e-06, |
| "loss": 0.6683194041252136, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.3093525179856114, |
| "grad_norm": 0.5473876959056185, |
| "learning_rate": 8.08102763523007e-06, |
| "loss": 0.6741974353790283, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.3129496402877696, |
| "grad_norm": 0.5324084730756219, |
| "learning_rate": 8.075461900190193e-06, |
| "loss": 0.6834567785263062, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.316546762589928, |
| "grad_norm": 0.5181323608798409, |
| "learning_rate": 8.069890028371672e-06, |
| "loss": 0.6760212182998657, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.3201438848920866, |
| "grad_norm": 0.5435693010138409, |
| "learning_rate": 8.064312030892618e-06, |
| "loss": 0.6685901880264282, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.3237410071942444, |
| "grad_norm": 0.545294047023693, |
| "learning_rate": 8.058727918883376e-06, |
| "loss": 0.6555389165878296, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.327338129496403, |
| "grad_norm": 0.5410247736328141, |
| "learning_rate": 8.053137703486482e-06, |
| "loss": 0.6900992393493652, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.3309352517985613, |
| "grad_norm": 0.5466463022120158, |
| "learning_rate": 8.047541395856661e-06, |
| "loss": 0.6867014765739441, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.3345323741007196, |
| "grad_norm": 0.550847927547804, |
| "learning_rate": 8.041939007160784e-06, |
| "loss": 0.666527509689331, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.338129496402878, |
| "grad_norm": 0.5421989902212895, |
| "learning_rate": 8.036330548577866e-06, |
| "loss": 0.6644178628921509, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.341726618705036, |
| "grad_norm": 0.5236348296663158, |
| "learning_rate": 8.030716031299022e-06, |
| "loss": 0.685157299041748, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.3453237410071943, |
| "grad_norm": 0.5176525641905038, |
| "learning_rate": 8.025095466527468e-06, |
| "loss": 0.6704069375991821, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.3489208633093526, |
| "grad_norm": 0.5892074323237977, |
| "learning_rate": 8.019468865478483e-06, |
| "loss": 0.6835877895355225, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.352517985611511, |
| "grad_norm": 0.5652692671595871, |
| "learning_rate": 8.013836239379388e-06, |
| "loss": 0.6817514896392822, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.356115107913669, |
| "grad_norm": 0.5620911489748651, |
| "learning_rate": 8.008197599469529e-06, |
| "loss": 0.6783486604690552, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.3597122302158273, |
| "grad_norm": 0.5936476741299256, |
| "learning_rate": 8.002552957000254e-06, |
| "loss": 0.6681350469589233, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.3633093525179856, |
| "grad_norm": 0.5443626063476964, |
| "learning_rate": 7.996902323234883e-06, |
| "loss": 0.6636416912078857, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.366906474820144, |
| "grad_norm": 0.6165820617546384, |
| "learning_rate": 7.991245709448697e-06, |
| "loss": 0.6662098169326782, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.370503597122302, |
| "grad_norm": 0.5954448294054072, |
| "learning_rate": 7.985583126928904e-06, |
| "loss": 0.671536386013031, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.3741007194244603, |
| "grad_norm": 0.5699739721187903, |
| "learning_rate": 7.979914586974628e-06, |
| "loss": 0.678837776184082, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.3776978417266186, |
| "grad_norm": 0.5714872752111637, |
| "learning_rate": 7.974240100896874e-06, |
| "loss": 0.6746315956115723, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.381294964028777, |
| "grad_norm": 0.8033103327542886, |
| "learning_rate": 7.968559680018518e-06, |
| "loss": 0.6854047775268555, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.384892086330935, |
| "grad_norm": 0.5422166443828278, |
| "learning_rate": 7.962873335674272e-06, |
| "loss": 0.6693898439407349, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.3884892086330938, |
| "grad_norm": 0.5495434573450082, |
| "learning_rate": 7.957181079210676e-06, |
| "loss": 0.656349778175354, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.3920863309352516, |
| "grad_norm": 0.5929225873029843, |
| "learning_rate": 7.951482921986058e-06, |
| "loss": 0.6681583523750305, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.3956834532374103, |
| "grad_norm": 0.530361021028339, |
| "learning_rate": 7.945778875370527e-06, |
| "loss": 0.6552359461784363, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.3992805755395685, |
| "grad_norm": 0.5756796589055374, |
| "learning_rate": 7.940068950745941e-06, |
| "loss": 0.6794135570526123, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.402877697841727, |
| "grad_norm": 0.564306119284853, |
| "learning_rate": 7.934353159505885e-06, |
| "loss": 0.6614569425582886, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.406474820143885, |
| "grad_norm": 0.551021041442497, |
| "learning_rate": 7.928631513055656e-06, |
| "loss": 0.6986576318740845, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.4100719424460433, |
| "grad_norm": 0.5624604572287012, |
| "learning_rate": 7.922904022812228e-06, |
| "loss": 0.6947466135025024, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.4136690647482015, |
| "grad_norm": 0.5443760240676394, |
| "learning_rate": 7.91717070020424e-06, |
| "loss": 0.6702431440353394, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.41726618705036, |
| "grad_norm": 0.556358152638375, |
| "learning_rate": 7.911431556671967e-06, |
| "loss": 0.6885404586791992, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.420863309352518, |
| "grad_norm": 0.5278486204625064, |
| "learning_rate": 7.905686603667301e-06, |
| "loss": 0.7020502090454102, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.4244604316546763, |
| "grad_norm": 0.6140900556871232, |
| "learning_rate": 7.899935852653723e-06, |
| "loss": 0.6749525666236877, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.4280575539568345, |
| "grad_norm": 0.5175983027819278, |
| "learning_rate": 7.89417931510629e-06, |
| "loss": 0.6766486167907715, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.431654676258993, |
| "grad_norm": 0.6029093317105096, |
| "learning_rate": 7.888417002511592e-06, |
| "loss": 0.6930079460144043, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.435251798561151, |
| "grad_norm": 0.5675850568410075, |
| "learning_rate": 7.88264892636776e-06, |
| "loss": 0.6770628690719604, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.4388489208633093, |
| "grad_norm": 0.5692480264597963, |
| "learning_rate": 7.876875098184411e-06, |
| "loss": 0.6888518333435059, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.4424460431654675, |
| "grad_norm": 0.5297351357365194, |
| "learning_rate": 7.871095529482648e-06, |
| "loss": 0.6821136474609375, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.446043165467626, |
| "grad_norm": 0.5529634859522956, |
| "learning_rate": 7.865310231795026e-06, |
| "loss": 0.6747450828552246, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.449640287769784, |
| "grad_norm": 0.5932486024596301, |
| "learning_rate": 7.859519216665531e-06, |
| "loss": 0.691978931427002, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.4532374100719423, |
| "grad_norm": 0.5514442445967912, |
| "learning_rate": 7.853722495649558e-06, |
| "loss": 0.6781147718429565, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.4568345323741005, |
| "grad_norm": 0.5447933548480308, |
| "learning_rate": 7.84792008031389e-06, |
| "loss": 0.6920583844184875, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.460431654676259, |
| "grad_norm": 0.5079458959170539, |
| "learning_rate": 7.842111982236669e-06, |
| "loss": 0.6551939249038696, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.4640287769784175, |
| "grad_norm": 0.5459392427484495, |
| "learning_rate": 7.836298213007376e-06, |
| "loss": 0.6810588836669922, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.4676258992805753, |
| "grad_norm": 0.5979754426371551, |
| "learning_rate": 7.83047878422681e-06, |
| "loss": 0.6689721941947937, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.471223021582734, |
| "grad_norm": 0.5536269344910104, |
| "learning_rate": 7.824653707507066e-06, |
| "loss": 0.6804069876670837, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.4748201438848922, |
| "grad_norm": 0.5379884636295093, |
| "learning_rate": 7.818822994471504e-06, |
| "loss": 0.6857604384422302, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.4784172661870505, |
| "grad_norm": 0.6094243016024037, |
| "learning_rate": 7.812986656754734e-06, |
| "loss": 0.6841216087341309, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.4820143884892087, |
| "grad_norm": 0.569511803474814, |
| "learning_rate": 7.807144706002582e-06, |
| "loss": 0.6696672439575195, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.485611510791367, |
| "grad_norm": 0.5526697959007867, |
| "learning_rate": 7.80129715387209e-06, |
| "loss": 0.6752933263778687, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.4892086330935252, |
| "grad_norm": 0.580107961423166, |
| "learning_rate": 7.79544401203146e-06, |
| "loss": 0.6760281920433044, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.4928057553956835, |
| "grad_norm": 0.5861334677021053, |
| "learning_rate": 7.789585292160056e-06, |
| "loss": 0.6881128549575806, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.4964028776978417, |
| "grad_norm": 0.547004214562634, |
| "learning_rate": 7.783721005948374e-06, |
| "loss": 0.6737272143363953, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.5, |
| "grad_norm": 0.560582389214819, |
| "learning_rate": 7.777851165098012e-06, |
| "loss": 0.6740151643753052, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.5035971223021583, |
| "grad_norm": 0.5498672869869043, |
| "learning_rate": 7.771975781321655e-06, |
| "loss": 0.6854318380355835, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.5071942446043165, |
| "grad_norm": 0.5913867717651166, |
| "learning_rate": 7.766094866343047e-06, |
| "loss": 0.6820253133773804, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.5107913669064748, |
| "grad_norm": 0.5526401475935052, |
| "learning_rate": 7.760208431896971e-06, |
| "loss": 0.7144240736961365, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.514388489208633, |
| "grad_norm": 0.551040934868349, |
| "learning_rate": 7.754316489729224e-06, |
| "loss": 0.6829532384872437, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.5179856115107913, |
| "grad_norm": 0.5353368003261932, |
| "learning_rate": 7.748419051596586e-06, |
| "loss": 0.6781253814697266, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.5179856115107913, |
| "eval_loss": 0.7946798205375671, |
| "eval_runtime": 7.5195, |
| "eval_samples_per_second": 11.703, |
| "eval_steps_per_second": 0.399, |
| "eval_token_acc": 0.7532400865391975, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.5215827338129495, |
| "grad_norm": 0.5429962157842656, |
| "learning_rate": 7.74251612926681e-06, |
| "loss": 0.6926379203796387, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.5251798561151078, |
| "grad_norm": 0.5529607867258931, |
| "learning_rate": 7.736607734518593e-06, |
| "loss": 0.6826739311218262, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.528776978417266, |
| "grad_norm": 0.5709854889740096, |
| "learning_rate": 7.730693879141548e-06, |
| "loss": 0.6736966967582703, |
| "step": 703 |
| }, |
| { |
| "epoch": 2.5323741007194247, |
| "grad_norm": 0.5405832962269905, |
| "learning_rate": 7.72477457493619e-06, |
| "loss": 0.6709224581718445, |
| "step": 704 |
| }, |
| { |
| "epoch": 2.5359712230215825, |
| "grad_norm": 0.5470431841631396, |
| "learning_rate": 7.718849833713895e-06, |
| "loss": 0.6827516555786133, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.539568345323741, |
| "grad_norm": 0.5587267021136716, |
| "learning_rate": 7.712919667296902e-06, |
| "loss": 0.6617273688316345, |
| "step": 706 |
| }, |
| { |
| "epoch": 2.543165467625899, |
| "grad_norm": 0.6011679551118603, |
| "learning_rate": 7.706984087518265e-06, |
| "loss": 0.6764623522758484, |
| "step": 707 |
| }, |
| { |
| "epoch": 2.5467625899280577, |
| "grad_norm": 0.5491196843814833, |
| "learning_rate": 7.701043106221847e-06, |
| "loss": 0.6807689070701599, |
| "step": 708 |
| }, |
| { |
| "epoch": 2.550359712230216, |
| "grad_norm": 0.5551355195436328, |
| "learning_rate": 7.695096735262284e-06, |
| "loss": 0.6934317350387573, |
| "step": 709 |
| }, |
| { |
| "epoch": 2.553956834532374, |
| "grad_norm": 0.5412804571062837, |
| "learning_rate": 7.689144986504966e-06, |
| "loss": 0.6880820989608765, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.5575539568345325, |
| "grad_norm": 0.5104975508357923, |
| "learning_rate": 7.68318787182602e-06, |
| "loss": 0.6632246971130371, |
| "step": 711 |
| }, |
| { |
| "epoch": 2.5611510791366907, |
| "grad_norm": 0.5588315359311713, |
| "learning_rate": 7.677225403112277e-06, |
| "loss": 0.6966925263404846, |
| "step": 712 |
| }, |
| { |
| "epoch": 2.564748201438849, |
| "grad_norm": 0.5583593320200122, |
| "learning_rate": 7.671257592261247e-06, |
| "loss": 0.6910549402236938, |
| "step": 713 |
| }, |
| { |
| "epoch": 2.568345323741007, |
| "grad_norm": 0.5726244981625573, |
| "learning_rate": 7.665284451181106e-06, |
| "loss": 0.6583359241485596, |
| "step": 714 |
| }, |
| { |
| "epoch": 2.5719424460431655, |
| "grad_norm": 0.575772820092244, |
| "learning_rate": 7.659305991790663e-06, |
| "loss": 0.6736561059951782, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.5755395683453237, |
| "grad_norm": 0.5446990409778754, |
| "learning_rate": 7.653322226019341e-06, |
| "loss": 0.6787956953048706, |
| "step": 716 |
| }, |
| { |
| "epoch": 2.579136690647482, |
| "grad_norm": 0.6357696348428653, |
| "learning_rate": 7.647333165807147e-06, |
| "loss": 0.673660159111023, |
| "step": 717 |
| }, |
| { |
| "epoch": 2.58273381294964, |
| "grad_norm": 0.5689245629147284, |
| "learning_rate": 7.64133882310466e-06, |
| "loss": 0.6633349657058716, |
| "step": 718 |
| }, |
| { |
| "epoch": 2.5863309352517985, |
| "grad_norm": 0.553093137270363, |
| "learning_rate": 7.63533920987299e-06, |
| "loss": 0.6687748432159424, |
| "step": 719 |
| }, |
| { |
| "epoch": 2.5899280575539567, |
| "grad_norm": 0.512706946176256, |
| "learning_rate": 7.629334338083774e-06, |
| "loss": 0.6984813809394836, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.593525179856115, |
| "grad_norm": 0.5253240659400193, |
| "learning_rate": 7.623324219719134e-06, |
| "loss": 0.6692329049110413, |
| "step": 721 |
| }, |
| { |
| "epoch": 2.597122302158273, |
| "grad_norm": 0.5540251715820621, |
| "learning_rate": 7.617308866771667e-06, |
| "loss": 0.682288646697998, |
| "step": 722 |
| }, |
| { |
| "epoch": 2.600719424460432, |
| "grad_norm": 0.5415999732946063, |
| "learning_rate": 7.611288291244407e-06, |
| "loss": 0.6738604307174683, |
| "step": 723 |
| }, |
| { |
| "epoch": 2.6043165467625897, |
| "grad_norm": 0.5398258033309221, |
| "learning_rate": 7.605262505150819e-06, |
| "loss": 0.6791110634803772, |
| "step": 724 |
| }, |
| { |
| "epoch": 2.6079136690647484, |
| "grad_norm": 0.5870193785417733, |
| "learning_rate": 7.599231520514756e-06, |
| "loss": 0.6960330009460449, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.6115107913669062, |
| "grad_norm": 0.5478681078185549, |
| "learning_rate": 7.5931953493704545e-06, |
| "loss": 0.6817760467529297, |
| "step": 726 |
| }, |
| { |
| "epoch": 2.615107913669065, |
| "grad_norm": 0.5643881705522068, |
| "learning_rate": 7.58715400376249e-06, |
| "loss": 0.6764886379241943, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.618705035971223, |
| "grad_norm": 0.5311796813237344, |
| "learning_rate": 7.58110749574577e-06, |
| "loss": 0.681626558303833, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.6223021582733814, |
| "grad_norm": 0.593290297861787, |
| "learning_rate": 7.575055837385497e-06, |
| "loss": 0.6835793256759644, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.6258992805755397, |
| "grad_norm": 0.5433885872721661, |
| "learning_rate": 7.568999040757157e-06, |
| "loss": 0.692463219165802, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.629496402877698, |
| "grad_norm": 0.566347769689796, |
| "learning_rate": 7.562937117946484e-06, |
| "loss": 0.6714706420898438, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.633093525179856, |
| "grad_norm": 0.5788236428878424, |
| "learning_rate": 7.556870081049444e-06, |
| "loss": 0.6920543909072876, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.6366906474820144, |
| "grad_norm": 0.5629361850310614, |
| "learning_rate": 7.550797942172207e-06, |
| "loss": 0.6735811233520508, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.6402877697841727, |
| "grad_norm": 0.5238564692093962, |
| "learning_rate": 7.5447207134311195e-06, |
| "loss": 0.6693782210350037, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.643884892086331, |
| "grad_norm": 0.5202480993459492, |
| "learning_rate": 7.538638406952692e-06, |
| "loss": 0.6865730285644531, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.647482014388489, |
| "grad_norm": 0.5398235199330227, |
| "learning_rate": 7.532551034873558e-06, |
| "loss": 0.6806303262710571, |
| "step": 736 |
| }, |
| { |
| "epoch": 2.6510791366906474, |
| "grad_norm": 0.5169653355837797, |
| "learning_rate": 7.526458609340468e-06, |
| "loss": 0.6690386533737183, |
| "step": 737 |
| }, |
| { |
| "epoch": 2.6546762589928057, |
| "grad_norm": 0.5466767225574092, |
| "learning_rate": 7.520361142510248e-06, |
| "loss": 0.6830763816833496, |
| "step": 738 |
| }, |
| { |
| "epoch": 2.658273381294964, |
| "grad_norm": 0.5566750563025337, |
| "learning_rate": 7.5142586465497925e-06, |
| "loss": 0.6852663159370422, |
| "step": 739 |
| }, |
| { |
| "epoch": 2.661870503597122, |
| "grad_norm": 0.5280240519640393, |
| "learning_rate": 7.50815113363602e-06, |
| "loss": 0.6713100671768188, |
| "step": 740 |
| }, |
| { |
| "epoch": 2.6654676258992804, |
| "grad_norm": 0.5334063560685919, |
| "learning_rate": 7.5020386159558685e-06, |
| "loss": 0.6669318675994873, |
| "step": 741 |
| }, |
| { |
| "epoch": 2.6690647482014387, |
| "grad_norm": 0.5410901797634948, |
| "learning_rate": 7.49592110570626e-06, |
| "loss": 0.6787617206573486, |
| "step": 742 |
| }, |
| { |
| "epoch": 2.672661870503597, |
| "grad_norm": 0.5035182125942762, |
| "learning_rate": 7.489798615094077e-06, |
| "loss": 0.6858699917793274, |
| "step": 743 |
| }, |
| { |
| "epoch": 2.6762589928057556, |
| "grad_norm": 0.5272103112959701, |
| "learning_rate": 7.483671156336142e-06, |
| "loss": 0.6731346249580383, |
| "step": 744 |
| }, |
| { |
| "epoch": 2.6798561151079134, |
| "grad_norm": 0.5466411879626102, |
| "learning_rate": 7.4775387416591905e-06, |
| "loss": 0.6715824604034424, |
| "step": 745 |
| }, |
| { |
| "epoch": 2.683453237410072, |
| "grad_norm": 0.556601396124224, |
| "learning_rate": 7.471401383299847e-06, |
| "loss": 0.6648485660552979, |
| "step": 746 |
| }, |
| { |
| "epoch": 2.68705035971223, |
| "grad_norm": 0.5171685221567268, |
| "learning_rate": 7.465259093504598e-06, |
| "loss": 0.6769684553146362, |
| "step": 747 |
| }, |
| { |
| "epoch": 2.6906474820143886, |
| "grad_norm": 0.5606435302484213, |
| "learning_rate": 7.459111884529775e-06, |
| "loss": 0.6720460653305054, |
| "step": 748 |
| }, |
| { |
| "epoch": 2.694244604316547, |
| "grad_norm": 0.5842110032032478, |
| "learning_rate": 7.452959768641521e-06, |
| "loss": 0.677690863609314, |
| "step": 749 |
| }, |
| { |
| "epoch": 2.697841726618705, |
| "grad_norm": 0.5701430080753563, |
| "learning_rate": 7.446802758115775e-06, |
| "loss": 0.6708799600601196, |
| "step": 750 |
| }, |
| { |
| "epoch": 2.7014388489208634, |
| "grad_norm": 0.5581963729995891, |
| "learning_rate": 7.4406408652382355e-06, |
| "loss": 0.6710215210914612, |
| "step": 751 |
| }, |
| { |
| "epoch": 2.7050359712230216, |
| "grad_norm": 0.5658077696960466, |
| "learning_rate": 7.43447410230435e-06, |
| "loss": 0.6781923770904541, |
| "step": 752 |
| }, |
| { |
| "epoch": 2.70863309352518, |
| "grad_norm": 0.518577960318417, |
| "learning_rate": 7.428302481619281e-06, |
| "loss": 0.6796495318412781, |
| "step": 753 |
| }, |
| { |
| "epoch": 2.712230215827338, |
| "grad_norm": 0.5688235971525768, |
| "learning_rate": 7.422126015497884e-06, |
| "loss": 0.6821467280387878, |
| "step": 754 |
| }, |
| { |
| "epoch": 2.7158273381294964, |
| "grad_norm": 0.5480902682706095, |
| "learning_rate": 7.415944716264682e-06, |
| "loss": 0.6760743856430054, |
| "step": 755 |
| }, |
| { |
| "epoch": 2.7194244604316546, |
| "grad_norm": 0.6283165937878773, |
| "learning_rate": 7.409758596253849e-06, |
| "loss": 0.689350962638855, |
| "step": 756 |
| }, |
| { |
| "epoch": 2.723021582733813, |
| "grad_norm": 0.5425030285263174, |
| "learning_rate": 7.403567667809165e-06, |
| "loss": 0.6779059171676636, |
| "step": 757 |
| }, |
| { |
| "epoch": 2.726618705035971, |
| "grad_norm": 0.5682122278337587, |
| "learning_rate": 7.397371943284017e-06, |
| "loss": 0.6475615501403809, |
| "step": 758 |
| }, |
| { |
| "epoch": 2.7302158273381294, |
| "grad_norm": 0.547877297052162, |
| "learning_rate": 7.391171435041358e-06, |
| "loss": 0.6795694231987, |
| "step": 759 |
| }, |
| { |
| "epoch": 2.7338129496402876, |
| "grad_norm": 0.5533186641373539, |
| "learning_rate": 7.384966155453686e-06, |
| "loss": 0.6737759113311768, |
| "step": 760 |
| }, |
| { |
| "epoch": 2.737410071942446, |
| "grad_norm": 0.542436921949499, |
| "learning_rate": 7.378756116903018e-06, |
| "loss": 0.6761566996574402, |
| "step": 761 |
| }, |
| { |
| "epoch": 2.741007194244604, |
| "grad_norm": 0.5668917886571787, |
| "learning_rate": 7.372541331780871e-06, |
| "loss": 0.6677440404891968, |
| "step": 762 |
| }, |
| { |
| "epoch": 2.744604316546763, |
| "grad_norm": 0.5216933811477933, |
| "learning_rate": 7.366321812488228e-06, |
| "loss": 0.6678866147994995, |
| "step": 763 |
| }, |
| { |
| "epoch": 2.7482014388489207, |
| "grad_norm": 0.5317153536757551, |
| "learning_rate": 7.360097571435527e-06, |
| "loss": 0.6877514123916626, |
| "step": 764 |
| }, |
| { |
| "epoch": 2.7517985611510793, |
| "grad_norm": 0.5612769192367274, |
| "learning_rate": 7.353868621042617e-06, |
| "loss": 0.6856654286384583, |
| "step": 765 |
| }, |
| { |
| "epoch": 2.755395683453237, |
| "grad_norm": 0.5757162734183815, |
| "learning_rate": 7.347634973738753e-06, |
| "loss": 0.6839314699172974, |
| "step": 766 |
| }, |
| { |
| "epoch": 2.758992805755396, |
| "grad_norm": 0.5107418889568749, |
| "learning_rate": 7.3413966419625595e-06, |
| "loss": 0.6695917844772339, |
| "step": 767 |
| }, |
| { |
| "epoch": 2.762589928057554, |
| "grad_norm": 0.598400476477111, |
| "learning_rate": 7.335153638162005e-06, |
| "loss": 0.6794536113739014, |
| "step": 768 |
| }, |
| { |
| "epoch": 2.7661870503597124, |
| "grad_norm": 0.5997392739164393, |
| "learning_rate": 7.328905974794383e-06, |
| "loss": 0.6909158229827881, |
| "step": 769 |
| }, |
| { |
| "epoch": 2.7697841726618706, |
| "grad_norm": 0.5335666892122978, |
| "learning_rate": 7.322653664326289e-06, |
| "loss": 0.6530051827430725, |
| "step": 770 |
| }, |
| { |
| "epoch": 2.773381294964029, |
| "grad_norm": 0.5459019178889832, |
| "learning_rate": 7.3163967192335825e-06, |
| "loss": 0.6856040358543396, |
| "step": 771 |
| }, |
| { |
| "epoch": 2.776978417266187, |
| "grad_norm": 0.5439989869401481, |
| "learning_rate": 7.310135152001381e-06, |
| "loss": 0.6689339876174927, |
| "step": 772 |
| }, |
| { |
| "epoch": 2.7805755395683454, |
| "grad_norm": 0.5477532967369428, |
| "learning_rate": 7.303868975124014e-06, |
| "loss": 0.6802787780761719, |
| "step": 773 |
| }, |
| { |
| "epoch": 2.7841726618705036, |
| "grad_norm": 0.5874456113809587, |
| "learning_rate": 7.29759820110502e-06, |
| "loss": 0.7009991407394409, |
| "step": 774 |
| }, |
| { |
| "epoch": 2.787769784172662, |
| "grad_norm": 0.551919806400329, |
| "learning_rate": 7.291322842457103e-06, |
| "loss": 0.6680014133453369, |
| "step": 775 |
| }, |
| { |
| "epoch": 2.79136690647482, |
| "grad_norm": 0.5800724322668104, |
| "learning_rate": 7.285042911702116e-06, |
| "loss": 0.6827471852302551, |
| "step": 776 |
| }, |
| { |
| "epoch": 2.7949640287769784, |
| "grad_norm": 0.550034880832541, |
| "learning_rate": 7.278758421371041e-06, |
| "loss": 0.6836577653884888, |
| "step": 777 |
| }, |
| { |
| "epoch": 2.7985611510791366, |
| "grad_norm": 0.5525819839003926, |
| "learning_rate": 7.27246938400395e-06, |
| "loss": 0.6785211563110352, |
| "step": 778 |
| }, |
| { |
| "epoch": 2.802158273381295, |
| "grad_norm": 0.5439718572691205, |
| "learning_rate": 7.266175812149993e-06, |
| "loss": 0.6937276124954224, |
| "step": 779 |
| }, |
| { |
| "epoch": 2.805755395683453, |
| "grad_norm": 0.5432443235974502, |
| "learning_rate": 7.259877718367372e-06, |
| "loss": 0.6906551122665405, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.8093525179856114, |
| "grad_norm": 0.5604293839707127, |
| "learning_rate": 7.2535751152233015e-06, |
| "loss": 0.6693239808082581, |
| "step": 781 |
| }, |
| { |
| "epoch": 2.81294964028777, |
| "grad_norm": 0.5334401786784696, |
| "learning_rate": 7.2472680152940015e-06, |
| "loss": 0.6608414649963379, |
| "step": 782 |
| }, |
| { |
| "epoch": 2.816546762589928, |
| "grad_norm": 0.5556108924839624, |
| "learning_rate": 7.240956431164664e-06, |
| "loss": 0.6791374087333679, |
| "step": 783 |
| }, |
| { |
| "epoch": 2.8201438848920866, |
| "grad_norm": 0.5595296433734097, |
| "learning_rate": 7.234640375429427e-06, |
| "loss": 0.6910173892974854, |
| "step": 784 |
| }, |
| { |
| "epoch": 2.8237410071942444, |
| "grad_norm": 0.6026988319042906, |
| "learning_rate": 7.228319860691354e-06, |
| "loss": 0.6745973825454712, |
| "step": 785 |
| }, |
| { |
| "epoch": 2.827338129496403, |
| "grad_norm": 0.5537401684593308, |
| "learning_rate": 7.2219948995624035e-06, |
| "loss": 0.6714953184127808, |
| "step": 786 |
| }, |
| { |
| "epoch": 2.8309352517985613, |
| "grad_norm": 0.5159995510628995, |
| "learning_rate": 7.215665504663405e-06, |
| "loss": 0.6651762127876282, |
| "step": 787 |
| }, |
| { |
| "epoch": 2.8345323741007196, |
| "grad_norm": 0.5478561040721687, |
| "learning_rate": 7.209331688624039e-06, |
| "loss": 0.6660327911376953, |
| "step": 788 |
| }, |
| { |
| "epoch": 2.838129496402878, |
| "grad_norm": 0.5379987836741197, |
| "learning_rate": 7.202993464082807e-06, |
| "loss": 0.6914957761764526, |
| "step": 789 |
| }, |
| { |
| "epoch": 2.841726618705036, |
| "grad_norm": 0.5253645674454258, |
| "learning_rate": 7.1966508436870044e-06, |
| "loss": 0.6743172407150269, |
| "step": 790 |
| }, |
| { |
| "epoch": 2.8453237410071943, |
| "grad_norm": 0.5598208338037115, |
| "learning_rate": 7.190303840092701e-06, |
| "loss": 0.6961389780044556, |
| "step": 791 |
| }, |
| { |
| "epoch": 2.8489208633093526, |
| "grad_norm": 0.5390983976675429, |
| "learning_rate": 7.183952465964711e-06, |
| "loss": 0.6708844900131226, |
| "step": 792 |
| }, |
| { |
| "epoch": 2.852517985611511, |
| "grad_norm": 0.5442089855190607, |
| "learning_rate": 7.177596733976571e-06, |
| "loss": 0.6630173921585083, |
| "step": 793 |
| }, |
| { |
| "epoch": 2.856115107913669, |
| "grad_norm": 0.671153831312209, |
| "learning_rate": 7.171236656810513e-06, |
| "loss": 0.6860195994377136, |
| "step": 794 |
| }, |
| { |
| "epoch": 2.8597122302158273, |
| "grad_norm": 0.5780688024521513, |
| "learning_rate": 7.1648722471574385e-06, |
| "loss": 0.6743713617324829, |
| "step": 795 |
| }, |
| { |
| "epoch": 2.8633093525179856, |
| "grad_norm": 0.5966815162498091, |
| "learning_rate": 7.158503517716894e-06, |
| "loss": 0.6660425066947937, |
| "step": 796 |
| }, |
| { |
| "epoch": 2.866906474820144, |
| "grad_norm": 0.5220025188437087, |
| "learning_rate": 7.152130481197048e-06, |
| "loss": 0.677868127822876, |
| "step": 797 |
| }, |
| { |
| "epoch": 2.870503597122302, |
| "grad_norm": 0.6530859738063051, |
| "learning_rate": 7.145753150314661e-06, |
| "loss": 0.6992630362510681, |
| "step": 798 |
| }, |
| { |
| "epoch": 2.8741007194244603, |
| "grad_norm": 0.5372141806985119, |
| "learning_rate": 7.139371537795063e-06, |
| "loss": 0.6782932281494141, |
| "step": 799 |
| }, |
| { |
| "epoch": 2.8776978417266186, |
| "grad_norm": 0.5594739478699063, |
| "learning_rate": 7.132985656372126e-06, |
| "loss": 0.6552035212516785, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.8776978417266186, |
| "eval_loss": 0.7926490902900696, |
| "eval_runtime": 7.5343, |
| "eval_samples_per_second": 11.68, |
| "eval_steps_per_second": 0.398, |
| "eval_token_acc": 0.7537646230896883, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.881294964028777, |
| "grad_norm": 0.4969988859148219, |
| "learning_rate": 7.126595518788244e-06, |
| "loss": 0.6519225835800171, |
| "step": 801 |
| }, |
| { |
| "epoch": 2.884892086330935, |
| "grad_norm": 0.5345813451985074, |
| "learning_rate": 7.120201137794301e-06, |
| "loss": 0.665630578994751, |
| "step": 802 |
| }, |
| { |
| "epoch": 2.8884892086330938, |
| "grad_norm": 0.5027605576333997, |
| "learning_rate": 7.11380252614965e-06, |
| "loss": 0.6709873676300049, |
| "step": 803 |
| }, |
| { |
| "epoch": 2.8920863309352516, |
| "grad_norm": 0.5884864926353124, |
| "learning_rate": 7.1073996966220835e-06, |
| "loss": 0.6509050130844116, |
| "step": 804 |
| }, |
| { |
| "epoch": 2.8956834532374103, |
| "grad_norm": 0.5531162956721329, |
| "learning_rate": 7.10099266198781e-06, |
| "loss": 0.6866740584373474, |
| "step": 805 |
| }, |
| { |
| "epoch": 2.899280575539568, |
| "grad_norm": 0.5162955485980054, |
| "learning_rate": 7.0945814350314346e-06, |
| "loss": 0.6785730123519897, |
| "step": 806 |
| }, |
| { |
| "epoch": 2.902877697841727, |
| "grad_norm": 0.520461442041789, |
| "learning_rate": 7.0881660285459205e-06, |
| "loss": 0.6749669313430786, |
| "step": 807 |
| }, |
| { |
| "epoch": 2.906474820143885, |
| "grad_norm": 0.5301790568209777, |
| "learning_rate": 7.0817464553325764e-06, |
| "loss": 0.6701935529708862, |
| "step": 808 |
| }, |
| { |
| "epoch": 2.9100719424460433, |
| "grad_norm": 0.5630040213851715, |
| "learning_rate": 7.07532272820102e-06, |
| "loss": 0.678869366645813, |
| "step": 809 |
| }, |
| { |
| "epoch": 2.9136690647482015, |
| "grad_norm": 0.5390394232246156, |
| "learning_rate": 7.068894859969162e-06, |
| "loss": 0.6616356372833252, |
| "step": 810 |
| }, |
| { |
| "epoch": 2.91726618705036, |
| "grad_norm": 0.5666314327761091, |
| "learning_rate": 7.062462863463176e-06, |
| "loss": 0.6806893348693848, |
| "step": 811 |
| }, |
| { |
| "epoch": 2.920863309352518, |
| "grad_norm": 0.5302133065489669, |
| "learning_rate": 7.0560267515174685e-06, |
| "loss": 0.669838547706604, |
| "step": 812 |
| }, |
| { |
| "epoch": 2.9244604316546763, |
| "grad_norm": 0.5765934775556447, |
| "learning_rate": 7.049586536974664e-06, |
| "loss": 0.6633042097091675, |
| "step": 813 |
| }, |
| { |
| "epoch": 2.9280575539568345, |
| "grad_norm": 0.5277877946096216, |
| "learning_rate": 7.043142232685572e-06, |
| "loss": 0.6631823778152466, |
| "step": 814 |
| }, |
| { |
| "epoch": 2.931654676258993, |
| "grad_norm": 0.5388691932839309, |
| "learning_rate": 7.036693851509158e-06, |
| "loss": 0.6837793588638306, |
| "step": 815 |
| }, |
| { |
| "epoch": 2.935251798561151, |
| "grad_norm": 0.5504190884085625, |
| "learning_rate": 7.030241406312528e-06, |
| "loss": 0.6746753454208374, |
| "step": 816 |
| }, |
| { |
| "epoch": 2.9388489208633093, |
| "grad_norm": 0.5366808043734594, |
| "learning_rate": 7.023784909970894e-06, |
| "loss": 0.6726278066635132, |
| "step": 817 |
| }, |
| { |
| "epoch": 2.9424460431654675, |
| "grad_norm": 0.5435443879201644, |
| "learning_rate": 7.0173243753675544e-06, |
| "loss": 0.6841650009155273, |
| "step": 818 |
| }, |
| { |
| "epoch": 2.946043165467626, |
| "grad_norm": 0.5542852746597902, |
| "learning_rate": 7.010859815393863e-06, |
| "loss": 0.6923326253890991, |
| "step": 819 |
| }, |
| { |
| "epoch": 2.949640287769784, |
| "grad_norm": 0.5289515792429416, |
| "learning_rate": 7.004391242949209e-06, |
| "loss": 0.6755713224411011, |
| "step": 820 |
| }, |
| { |
| "epoch": 2.9532374100719423, |
| "grad_norm": 0.537949591506382, |
| "learning_rate": 6.997918670940984e-06, |
| "loss": 0.6711938381195068, |
| "step": 821 |
| }, |
| { |
| "epoch": 2.956834532374101, |
| "grad_norm": 0.5728469119954346, |
| "learning_rate": 6.991442112284561e-06, |
| "loss": 0.6789706945419312, |
| "step": 822 |
| }, |
| { |
| "epoch": 2.960431654676259, |
| "grad_norm": 0.5356838759841633, |
| "learning_rate": 6.984961579903273e-06, |
| "loss": 0.6805518865585327, |
| "step": 823 |
| }, |
| { |
| "epoch": 2.9640287769784175, |
| "grad_norm": 0.5526149559188349, |
| "learning_rate": 6.978477086728375e-06, |
| "loss": 0.6938677430152893, |
| "step": 824 |
| }, |
| { |
| "epoch": 2.9676258992805753, |
| "grad_norm": 0.5717927051941512, |
| "learning_rate": 6.9719886456990306e-06, |
| "loss": 0.681894838809967, |
| "step": 825 |
| }, |
| { |
| "epoch": 2.971223021582734, |
| "grad_norm": 0.537402386834528, |
| "learning_rate": 6.96549626976228e-06, |
| "loss": 0.6860501766204834, |
| "step": 826 |
| }, |
| { |
| "epoch": 2.9748201438848922, |
| "grad_norm": 0.5826694738153739, |
| "learning_rate": 6.958999971873012e-06, |
| "loss": 0.6714417934417725, |
| "step": 827 |
| }, |
| { |
| "epoch": 2.9784172661870505, |
| "grad_norm": 0.6983420638788042, |
| "learning_rate": 6.952499764993945e-06, |
| "loss": 0.6649173498153687, |
| "step": 828 |
| }, |
| { |
| "epoch": 2.9820143884892087, |
| "grad_norm": 0.5186299633485753, |
| "learning_rate": 6.945995662095596e-06, |
| "loss": 0.6727434396743774, |
| "step": 829 |
| }, |
| { |
| "epoch": 2.985611510791367, |
| "grad_norm": 0.543769678807827, |
| "learning_rate": 6.9394876761562555e-06, |
| "loss": 0.6850245594978333, |
| "step": 830 |
| }, |
| { |
| "epoch": 2.9892086330935252, |
| "grad_norm": 0.5438520208221167, |
| "learning_rate": 6.932975820161965e-06, |
| "loss": 0.6802777051925659, |
| "step": 831 |
| }, |
| { |
| "epoch": 2.9928057553956835, |
| "grad_norm": 0.5462075560169861, |
| "learning_rate": 6.926460107106483e-06, |
| "loss": 0.690509021282196, |
| "step": 832 |
| }, |
| { |
| "epoch": 2.9964028776978417, |
| "grad_norm": 0.5333472735304643, |
| "learning_rate": 6.9199405499912675e-06, |
| "loss": 0.690028190612793, |
| "step": 833 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.6699724818892617, |
| "learning_rate": 6.913417161825449e-06, |
| "loss": 0.6984052658081055, |
| "step": 834 |
| }, |
| { |
| "epoch": 3.0035971223021583, |
| "grad_norm": 0.714752211223029, |
| "learning_rate": 6.906889955625801e-06, |
| "loss": 0.6158499121665955, |
| "step": 835 |
| }, |
| { |
| "epoch": 3.0071942446043165, |
| "grad_norm": 0.6514192010389133, |
| "learning_rate": 6.900358944416712e-06, |
| "loss": 0.6168891191482544, |
| "step": 836 |
| }, |
| { |
| "epoch": 3.0107913669064748, |
| "grad_norm": 0.64522746175661, |
| "learning_rate": 6.893824141230168e-06, |
| "loss": 0.6089640855789185, |
| "step": 837 |
| }, |
| { |
| "epoch": 3.014388489208633, |
| "grad_norm": 0.5626906528595018, |
| "learning_rate": 6.887285559105721e-06, |
| "loss": 0.6088428497314453, |
| "step": 838 |
| }, |
| { |
| "epoch": 3.0179856115107913, |
| "grad_norm": 0.5952893215965329, |
| "learning_rate": 6.880743211090457e-06, |
| "loss": 0.6181361079216003, |
| "step": 839 |
| }, |
| { |
| "epoch": 3.0215827338129495, |
| "grad_norm": 0.6112525299111327, |
| "learning_rate": 6.874197110238986e-06, |
| "loss": 0.6280969381332397, |
| "step": 840 |
| }, |
| { |
| "epoch": 3.0251798561151078, |
| "grad_norm": 0.6255439774080229, |
| "learning_rate": 6.8676472696134e-06, |
| "loss": 0.6228752732276917, |
| "step": 841 |
| }, |
| { |
| "epoch": 3.028776978417266, |
| "grad_norm": 0.5893354763922161, |
| "learning_rate": 6.861093702283254e-06, |
| "loss": 0.6126219630241394, |
| "step": 842 |
| }, |
| { |
| "epoch": 3.0323741007194243, |
| "grad_norm": 0.5519668238089376, |
| "learning_rate": 6.854536421325544e-06, |
| "loss": 0.6280915141105652, |
| "step": 843 |
| }, |
| { |
| "epoch": 3.0359712230215825, |
| "grad_norm": 0.5744905423808728, |
| "learning_rate": 6.847975439824669e-06, |
| "loss": 0.6058259606361389, |
| "step": 844 |
| }, |
| { |
| "epoch": 3.039568345323741, |
| "grad_norm": 0.6867335751974597, |
| "learning_rate": 6.841410770872418e-06, |
| "loss": 0.6044293642044067, |
| "step": 845 |
| }, |
| { |
| "epoch": 3.0431654676258995, |
| "grad_norm": 0.6004411853436984, |
| "learning_rate": 6.834842427567934e-06, |
| "loss": 0.6123199462890625, |
| "step": 846 |
| }, |
| { |
| "epoch": 3.0467625899280577, |
| "grad_norm": 0.6205986433996288, |
| "learning_rate": 6.828270423017694e-06, |
| "loss": 0.6331131458282471, |
| "step": 847 |
| }, |
| { |
| "epoch": 3.050359712230216, |
| "grad_norm": 0.584969288502554, |
| "learning_rate": 6.8216947703354815e-06, |
| "loss": 0.6207732558250427, |
| "step": 848 |
| }, |
| { |
| "epoch": 3.053956834532374, |
| "grad_norm": 0.5646862308680043, |
| "learning_rate": 6.815115482642358e-06, |
| "loss": 0.6176056265830994, |
| "step": 849 |
| }, |
| { |
| "epoch": 3.0575539568345325, |
| "grad_norm": 0.6211660655189948, |
| "learning_rate": 6.808532573066635e-06, |
| "loss": 0.6335552930831909, |
| "step": 850 |
| }, |
| { |
| "epoch": 3.0611510791366907, |
| "grad_norm": 0.5444740509963648, |
| "learning_rate": 6.801946054743858e-06, |
| "loss": 0.5941608548164368, |
| "step": 851 |
| }, |
| { |
| "epoch": 3.064748201438849, |
| "grad_norm": 0.6099555103213884, |
| "learning_rate": 6.795355940816768e-06, |
| "loss": 0.612288236618042, |
| "step": 852 |
| }, |
| { |
| "epoch": 3.068345323741007, |
| "grad_norm": 0.5819148236716216, |
| "learning_rate": 6.7887622444352826e-06, |
| "loss": 0.6323422193527222, |
| "step": 853 |
| }, |
| { |
| "epoch": 3.0719424460431655, |
| "grad_norm": 0.5943964905724924, |
| "learning_rate": 6.7821649787564706e-06, |
| "loss": 0.6132618188858032, |
| "step": 854 |
| }, |
| { |
| "epoch": 3.0755395683453237, |
| "grad_norm": 0.5760335005664527, |
| "learning_rate": 6.775564156944517e-06, |
| "loss": 0.6210411787033081, |
| "step": 855 |
| }, |
| { |
| "epoch": 3.079136690647482, |
| "grad_norm": 0.7508771443385528, |
| "learning_rate": 6.7689597921707065e-06, |
| "loss": 0.6110687255859375, |
| "step": 856 |
| }, |
| { |
| "epoch": 3.08273381294964, |
| "grad_norm": 0.565618301653177, |
| "learning_rate": 6.762351897613393e-06, |
| "loss": 0.6275750398635864, |
| "step": 857 |
| }, |
| { |
| "epoch": 3.0863309352517985, |
| "grad_norm": 0.5746230797567085, |
| "learning_rate": 6.755740486457973e-06, |
| "loss": 0.6221362352371216, |
| "step": 858 |
| }, |
| { |
| "epoch": 3.0899280575539567, |
| "grad_norm": 0.5958569000961971, |
| "learning_rate": 6.749125571896863e-06, |
| "loss": 0.6043376922607422, |
| "step": 859 |
| }, |
| { |
| "epoch": 3.093525179856115, |
| "grad_norm": 0.6241765945103184, |
| "learning_rate": 6.742507167129465e-06, |
| "loss": 0.6196264624595642, |
| "step": 860 |
| }, |
| { |
| "epoch": 3.097122302158273, |
| "grad_norm": 0.5663996849434653, |
| "learning_rate": 6.735885285362151e-06, |
| "loss": 0.608167290687561, |
| "step": 861 |
| }, |
| { |
| "epoch": 3.1007194244604315, |
| "grad_norm": 0.6078428622732656, |
| "learning_rate": 6.729259939808223e-06, |
| "loss": 0.6227314472198486, |
| "step": 862 |
| }, |
| { |
| "epoch": 3.1043165467625897, |
| "grad_norm": 0.5866739065637034, |
| "learning_rate": 6.722631143687904e-06, |
| "loss": 0.6130132079124451, |
| "step": 863 |
| }, |
| { |
| "epoch": 3.1079136690647484, |
| "grad_norm": 0.6096843816859564, |
| "learning_rate": 6.715998910228296e-06, |
| "loss": 0.6262279748916626, |
| "step": 864 |
| }, |
| { |
| "epoch": 3.1115107913669067, |
| "grad_norm": 0.5731998321265244, |
| "learning_rate": 6.7093632526633655e-06, |
| "loss": 0.6277258396148682, |
| "step": 865 |
| }, |
| { |
| "epoch": 3.115107913669065, |
| "grad_norm": 0.5829645067278316, |
| "learning_rate": 6.702724184233904e-06, |
| "loss": 0.6140638589859009, |
| "step": 866 |
| }, |
| { |
| "epoch": 3.118705035971223, |
| "grad_norm": 0.7914211484234219, |
| "learning_rate": 6.6960817181875146e-06, |
| "loss": 0.6318271160125732, |
| "step": 867 |
| }, |
| { |
| "epoch": 3.1223021582733814, |
| "grad_norm": 0.5308246774286106, |
| "learning_rate": 6.689435867778578e-06, |
| "loss": 0.6080624461174011, |
| "step": 868 |
| }, |
| { |
| "epoch": 3.1258992805755397, |
| "grad_norm": 0.5842607361037628, |
| "learning_rate": 6.682786646268226e-06, |
| "loss": 0.6224777698516846, |
| "step": 869 |
| }, |
| { |
| "epoch": 3.129496402877698, |
| "grad_norm": 0.5788476300864046, |
| "learning_rate": 6.676134066924325e-06, |
| "loss": 0.6108511090278625, |
| "step": 870 |
| }, |
| { |
| "epoch": 3.133093525179856, |
| "grad_norm": 0.6005172650285331, |
| "learning_rate": 6.66947814302143e-06, |
| "loss": 0.6167119741439819, |
| "step": 871 |
| }, |
| { |
| "epoch": 3.1366906474820144, |
| "grad_norm": 0.54491315413818, |
| "learning_rate": 6.6628188878407806e-06, |
| "loss": 0.6260868310928345, |
| "step": 872 |
| }, |
| { |
| "epoch": 3.1402877697841727, |
| "grad_norm": 0.5905361649715649, |
| "learning_rate": 6.656156314670257e-06, |
| "loss": 0.6211916208267212, |
| "step": 873 |
| }, |
| { |
| "epoch": 3.143884892086331, |
| "grad_norm": 0.5576927601793549, |
| "learning_rate": 6.64949043680436e-06, |
| "loss": 0.6212013363838196, |
| "step": 874 |
| }, |
| { |
| "epoch": 3.147482014388489, |
| "grad_norm": 0.5761586070224683, |
| "learning_rate": 6.642821267544189e-06, |
| "loss": 0.6145411729812622, |
| "step": 875 |
| }, |
| { |
| "epoch": 3.1510791366906474, |
| "grad_norm": 0.5805295146865893, |
| "learning_rate": 6.636148820197409e-06, |
| "loss": 0.627646803855896, |
| "step": 876 |
| }, |
| { |
| "epoch": 3.1546762589928057, |
| "grad_norm": 0.5660419934936849, |
| "learning_rate": 6.6294731080782236e-06, |
| "loss": 0.6196659803390503, |
| "step": 877 |
| }, |
| { |
| "epoch": 3.158273381294964, |
| "grad_norm": 0.5794080453638514, |
| "learning_rate": 6.622794144507356e-06, |
| "loss": 0.6331967115402222, |
| "step": 878 |
| }, |
| { |
| "epoch": 3.161870503597122, |
| "grad_norm": 0.57141434057609, |
| "learning_rate": 6.616111942812011e-06, |
| "loss": 0.6022119522094727, |
| "step": 879 |
| }, |
| { |
| "epoch": 3.1654676258992804, |
| "grad_norm": 0.570784712886501, |
| "learning_rate": 6.609426516325859e-06, |
| "loss": 0.6375267505645752, |
| "step": 880 |
| }, |
| { |
| "epoch": 3.1690647482014387, |
| "grad_norm": 0.5479852387476096, |
| "learning_rate": 6.6027378783890065e-06, |
| "loss": 0.6149478554725647, |
| "step": 881 |
| }, |
| { |
| "epoch": 3.172661870503597, |
| "grad_norm": 0.5384725050590596, |
| "learning_rate": 6.596046042347964e-06, |
| "loss": 0.604001522064209, |
| "step": 882 |
| }, |
| { |
| "epoch": 3.176258992805755, |
| "grad_norm": 0.5946710979785322, |
| "learning_rate": 6.589351021555626e-06, |
| "loss": 0.61385178565979, |
| "step": 883 |
| }, |
| { |
| "epoch": 3.1798561151079134, |
| "grad_norm": 0.5397188512181753, |
| "learning_rate": 6.58265282937124e-06, |
| "loss": 0.6094223856925964, |
| "step": 884 |
| }, |
| { |
| "epoch": 3.183453237410072, |
| "grad_norm": 0.5599086808031262, |
| "learning_rate": 6.575951479160385e-06, |
| "loss": 0.6294345855712891, |
| "step": 885 |
| }, |
| { |
| "epoch": 3.1870503597122304, |
| "grad_norm": 0.58204300377972, |
| "learning_rate": 6.569246984294938e-06, |
| "loss": 0.6208990812301636, |
| "step": 886 |
| }, |
| { |
| "epoch": 3.1906474820143886, |
| "grad_norm": 0.5531860200808557, |
| "learning_rate": 6.562539358153053e-06, |
| "loss": 0.6135451197624207, |
| "step": 887 |
| }, |
| { |
| "epoch": 3.194244604316547, |
| "grad_norm": 0.5471226603588754, |
| "learning_rate": 6.555828614119132e-06, |
| "loss": 0.6110199689865112, |
| "step": 888 |
| }, |
| { |
| "epoch": 3.197841726618705, |
| "grad_norm": 0.5562073314211422, |
| "learning_rate": 6.549114765583799e-06, |
| "loss": 0.6199094653129578, |
| "step": 889 |
| }, |
| { |
| "epoch": 3.2014388489208634, |
| "grad_norm": 0.584420712338533, |
| "learning_rate": 6.542397825943867e-06, |
| "loss": 0.602266252040863, |
| "step": 890 |
| }, |
| { |
| "epoch": 3.2050359712230216, |
| "grad_norm": 0.5664900474746994, |
| "learning_rate": 6.535677808602327e-06, |
| "loss": 0.634172797203064, |
| "step": 891 |
| }, |
| { |
| "epoch": 3.20863309352518, |
| "grad_norm": 0.5574653576860307, |
| "learning_rate": 6.528954726968302e-06, |
| "loss": 0.6266528367996216, |
| "step": 892 |
| }, |
| { |
| "epoch": 3.212230215827338, |
| "grad_norm": 0.6214080021118603, |
| "learning_rate": 6.522228594457036e-06, |
| "loss": 0.6186414957046509, |
| "step": 893 |
| }, |
| { |
| "epoch": 3.2158273381294964, |
| "grad_norm": 0.6278160972504896, |
| "learning_rate": 6.515499424489857e-06, |
| "loss": 0.6303264498710632, |
| "step": 894 |
| }, |
| { |
| "epoch": 3.2194244604316546, |
| "grad_norm": 0.5700913834752749, |
| "learning_rate": 6.508767230494155e-06, |
| "loss": 0.636367917060852, |
| "step": 895 |
| }, |
| { |
| "epoch": 3.223021582733813, |
| "grad_norm": 0.5539077159835792, |
| "learning_rate": 6.502032025903356e-06, |
| "loss": 0.6246867179870605, |
| "step": 896 |
| }, |
| { |
| "epoch": 3.226618705035971, |
| "grad_norm": 0.5722974248170735, |
| "learning_rate": 6.495293824156887e-06, |
| "loss": 0.6357770562171936, |
| "step": 897 |
| }, |
| { |
| "epoch": 3.2302158273381294, |
| "grad_norm": 0.55706090318583, |
| "learning_rate": 6.4885526387001654e-06, |
| "loss": 0.6066106557846069, |
| "step": 898 |
| }, |
| { |
| "epoch": 3.2338129496402876, |
| "grad_norm": 0.6139163839279302, |
| "learning_rate": 6.481808482984554e-06, |
| "loss": 0.6301413774490356, |
| "step": 899 |
| }, |
| { |
| "epoch": 3.237410071942446, |
| "grad_norm": 0.5821754793482845, |
| "learning_rate": 6.475061370467346e-06, |
| "loss": 0.6320218443870544, |
| "step": 900 |
| }, |
| { |
| "epoch": 3.237410071942446, |
| "eval_loss": 0.8133171200752258, |
| "eval_runtime": 7.8053, |
| "eval_samples_per_second": 11.274, |
| "eval_steps_per_second": 0.384, |
| "eval_token_acc": 0.7516627435314936, |
| "step": 900 |
| }, |
| { |
| "epoch": 3.241007194244604, |
| "grad_norm": 0.5433359774163814, |
| "learning_rate": 6.468311314611734e-06, |
| "loss": 0.6361285448074341, |
| "step": 901 |
| }, |
| { |
| "epoch": 3.2446043165467624, |
| "grad_norm": 0.5663609802203171, |
| "learning_rate": 6.461558328886786e-06, |
| "loss": 0.6213710308074951, |
| "step": 902 |
| }, |
| { |
| "epoch": 3.2482014388489207, |
| "grad_norm": 0.5485700818172271, |
| "learning_rate": 6.454802426767409e-06, |
| "loss": 0.6224983930587769, |
| "step": 903 |
| }, |
| { |
| "epoch": 3.2517985611510793, |
| "grad_norm": 0.6075827214980387, |
| "learning_rate": 6.4480436217343366e-06, |
| "loss": 0.6264669895172119, |
| "step": 904 |
| }, |
| { |
| "epoch": 3.2553956834532376, |
| "grad_norm": 0.5688295864017505, |
| "learning_rate": 6.4412819272740955e-06, |
| "loss": 0.6276412606239319, |
| "step": 905 |
| }, |
| { |
| "epoch": 3.258992805755396, |
| "grad_norm": 0.5275825790352916, |
| "learning_rate": 6.434517356878974e-06, |
| "loss": 0.6151010990142822, |
| "step": 906 |
| }, |
| { |
| "epoch": 3.262589928057554, |
| "grad_norm": 0.6096221075109286, |
| "learning_rate": 6.427749924046999e-06, |
| "loss": 0.6230264902114868, |
| "step": 907 |
| }, |
| { |
| "epoch": 3.2661870503597124, |
| "grad_norm": 0.612072472195744, |
| "learning_rate": 6.420979642281909e-06, |
| "loss": 0.6169232726097107, |
| "step": 908 |
| }, |
| { |
| "epoch": 3.2697841726618706, |
| "grad_norm": 0.5645657602010562, |
| "learning_rate": 6.4142065250931315e-06, |
| "loss": 0.6280378699302673, |
| "step": 909 |
| }, |
| { |
| "epoch": 3.273381294964029, |
| "grad_norm": 0.5445126890364774, |
| "learning_rate": 6.4074305859957475e-06, |
| "loss": 0.6251436471939087, |
| "step": 910 |
| }, |
| { |
| "epoch": 3.276978417266187, |
| "grad_norm": 0.6226293432430507, |
| "learning_rate": 6.40065183851047e-06, |
| "loss": 0.6066725850105286, |
| "step": 911 |
| }, |
| { |
| "epoch": 3.2805755395683454, |
| "grad_norm": 0.5532512858779444, |
| "learning_rate": 6.393870296163616e-06, |
| "loss": 0.6277509927749634, |
| "step": 912 |
| }, |
| { |
| "epoch": 3.2841726618705036, |
| "grad_norm": 0.5484197426412167, |
| "learning_rate": 6.387085972487082e-06, |
| "loss": 0.6050516366958618, |
| "step": 913 |
| }, |
| { |
| "epoch": 3.287769784172662, |
| "grad_norm": 0.553188339774859, |
| "learning_rate": 6.380298881018307e-06, |
| "loss": 0.6248925924301147, |
| "step": 914 |
| }, |
| { |
| "epoch": 3.29136690647482, |
| "grad_norm": 0.5838918969845017, |
| "learning_rate": 6.373509035300261e-06, |
| "loss": 0.6234239339828491, |
| "step": 915 |
| }, |
| { |
| "epoch": 3.2949640287769784, |
| "grad_norm": 0.6761408326081915, |
| "learning_rate": 6.366716448881407e-06, |
| "loss": 0.6067996025085449, |
| "step": 916 |
| }, |
| { |
| "epoch": 3.2985611510791366, |
| "grad_norm": 0.5789294408382947, |
| "learning_rate": 6.359921135315673e-06, |
| "loss": 0.6355147957801819, |
| "step": 917 |
| }, |
| { |
| "epoch": 3.302158273381295, |
| "grad_norm": 0.5703657815163174, |
| "learning_rate": 6.353123108162435e-06, |
| "loss": 0.6251250505447388, |
| "step": 918 |
| }, |
| { |
| "epoch": 3.305755395683453, |
| "grad_norm": 0.5705102674833985, |
| "learning_rate": 6.346322380986482e-06, |
| "loss": 0.6201918125152588, |
| "step": 919 |
| }, |
| { |
| "epoch": 3.3093525179856114, |
| "grad_norm": 0.5182484237658441, |
| "learning_rate": 6.339518967357985e-06, |
| "loss": 0.5968239307403564, |
| "step": 920 |
| }, |
| { |
| "epoch": 3.3129496402877696, |
| "grad_norm": 0.579106482961709, |
| "learning_rate": 6.332712880852483e-06, |
| "loss": 0.6315656900405884, |
| "step": 921 |
| }, |
| { |
| "epoch": 3.316546762589928, |
| "grad_norm": 0.5490619934499182, |
| "learning_rate": 6.325904135050844e-06, |
| "loss": 0.6207892298698425, |
| "step": 922 |
| }, |
| { |
| "epoch": 3.3201438848920866, |
| "grad_norm": 0.551984663606509, |
| "learning_rate": 6.319092743539244e-06, |
| "loss": 0.6102540493011475, |
| "step": 923 |
| }, |
| { |
| "epoch": 3.3237410071942444, |
| "grad_norm": 0.5709695659026294, |
| "learning_rate": 6.312278719909138e-06, |
| "loss": 0.6253349781036377, |
| "step": 924 |
| }, |
| { |
| "epoch": 3.327338129496403, |
| "grad_norm": 0.5903086917501752, |
| "learning_rate": 6.305462077757235e-06, |
| "loss": 0.6325236558914185, |
| "step": 925 |
| }, |
| { |
| "epoch": 3.3309352517985613, |
| "grad_norm": 0.5898347793539362, |
| "learning_rate": 6.298642830685464e-06, |
| "loss": 0.6274194717407227, |
| "step": 926 |
| }, |
| { |
| "epoch": 3.3345323741007196, |
| "grad_norm": 0.5419411991976139, |
| "learning_rate": 6.291820992300956e-06, |
| "loss": 0.6277546286582947, |
| "step": 927 |
| }, |
| { |
| "epoch": 3.338129496402878, |
| "grad_norm": 0.5990610317077253, |
| "learning_rate": 6.284996576216014e-06, |
| "loss": 0.6244072318077087, |
| "step": 928 |
| }, |
| { |
| "epoch": 3.341726618705036, |
| "grad_norm": 0.5900745326399537, |
| "learning_rate": 6.278169596048081e-06, |
| "loss": 0.628463625907898, |
| "step": 929 |
| }, |
| { |
| "epoch": 3.3453237410071943, |
| "grad_norm": 0.5480932820904987, |
| "learning_rate": 6.271340065419715e-06, |
| "loss": 0.6218065023422241, |
| "step": 930 |
| }, |
| { |
| "epoch": 3.3489208633093526, |
| "grad_norm": 0.6125479999978816, |
| "learning_rate": 6.2645079979585686e-06, |
| "loss": 0.6407708525657654, |
| "step": 931 |
| }, |
| { |
| "epoch": 3.352517985611511, |
| "grad_norm": 0.5531587910215395, |
| "learning_rate": 6.257673407297352e-06, |
| "loss": 0.6171718835830688, |
| "step": 932 |
| }, |
| { |
| "epoch": 3.356115107913669, |
| "grad_norm": 0.5486944909683307, |
| "learning_rate": 6.250836307073813e-06, |
| "loss": 0.6350491642951965, |
| "step": 933 |
| }, |
| { |
| "epoch": 3.3597122302158273, |
| "grad_norm": 0.5693874304619684, |
| "learning_rate": 6.243996710930705e-06, |
| "loss": 0.6411446332931519, |
| "step": 934 |
| }, |
| { |
| "epoch": 3.3633093525179856, |
| "grad_norm": 0.5756735464434138, |
| "learning_rate": 6.237154632515764e-06, |
| "loss": 0.6317604780197144, |
| "step": 935 |
| }, |
| { |
| "epoch": 3.366906474820144, |
| "grad_norm": 0.5792345382019256, |
| "learning_rate": 6.230310085481677e-06, |
| "loss": 0.6155068278312683, |
| "step": 936 |
| }, |
| { |
| "epoch": 3.370503597122302, |
| "grad_norm": 0.580886016903391, |
| "learning_rate": 6.223463083486055e-06, |
| "loss": 0.6173931360244751, |
| "step": 937 |
| }, |
| { |
| "epoch": 3.3741007194244603, |
| "grad_norm": 0.5970064971945658, |
| "learning_rate": 6.216613640191414e-06, |
| "loss": 0.6314873695373535, |
| "step": 938 |
| }, |
| { |
| "epoch": 3.3776978417266186, |
| "grad_norm": 0.5650813171755603, |
| "learning_rate": 6.209761769265133e-06, |
| "loss": 0.6283479332923889, |
| "step": 939 |
| }, |
| { |
| "epoch": 3.381294964028777, |
| "grad_norm": 0.5998949151653221, |
| "learning_rate": 6.2029074843794445e-06, |
| "loss": 0.6180216670036316, |
| "step": 940 |
| }, |
| { |
| "epoch": 3.384892086330935, |
| "grad_norm": 0.5913243467686229, |
| "learning_rate": 6.19605079921139e-06, |
| "loss": 0.621105432510376, |
| "step": 941 |
| }, |
| { |
| "epoch": 3.3884892086330938, |
| "grad_norm": 0.6160120518012295, |
| "learning_rate": 6.189191727442807e-06, |
| "loss": 0.639343798160553, |
| "step": 942 |
| }, |
| { |
| "epoch": 3.3920863309352516, |
| "grad_norm": 0.5522125574974834, |
| "learning_rate": 6.182330282760286e-06, |
| "loss": 0.651248037815094, |
| "step": 943 |
| }, |
| { |
| "epoch": 3.3956834532374103, |
| "grad_norm": 0.567072359350329, |
| "learning_rate": 6.175466478855161e-06, |
| "loss": 0.6345071792602539, |
| "step": 944 |
| }, |
| { |
| "epoch": 3.3992805755395685, |
| "grad_norm": 0.5859261607861899, |
| "learning_rate": 6.168600329423472e-06, |
| "loss": 0.6209615468978882, |
| "step": 945 |
| }, |
| { |
| "epoch": 3.402877697841727, |
| "grad_norm": 0.607665412086887, |
| "learning_rate": 6.1617318481659364e-06, |
| "loss": 0.6264888644218445, |
| "step": 946 |
| }, |
| { |
| "epoch": 3.406474820143885, |
| "grad_norm": 0.5896905998280686, |
| "learning_rate": 6.154861048787928e-06, |
| "loss": 0.6273084878921509, |
| "step": 947 |
| }, |
| { |
| "epoch": 3.4100719424460433, |
| "grad_norm": 0.5186817083485703, |
| "learning_rate": 6.147987944999441e-06, |
| "loss": 0.610866904258728, |
| "step": 948 |
| }, |
| { |
| "epoch": 3.4136690647482015, |
| "grad_norm": 0.6000730221149679, |
| "learning_rate": 6.1411125505150735e-06, |
| "loss": 0.6253103017807007, |
| "step": 949 |
| }, |
| { |
| "epoch": 3.41726618705036, |
| "grad_norm": 0.60799932528241, |
| "learning_rate": 6.13423487905399e-06, |
| "loss": 0.6207418441772461, |
| "step": 950 |
| }, |
| { |
| "epoch": 3.420863309352518, |
| "grad_norm": 0.5555330946593812, |
| "learning_rate": 6.127354944339905e-06, |
| "loss": 0.6155734658241272, |
| "step": 951 |
| }, |
| { |
| "epoch": 3.4244604316546763, |
| "grad_norm": 0.5767982008584605, |
| "learning_rate": 6.1204727601010396e-06, |
| "loss": 0.6276605725288391, |
| "step": 952 |
| }, |
| { |
| "epoch": 3.4280575539568345, |
| "grad_norm": 0.5748993900038213, |
| "learning_rate": 6.113588340070112e-06, |
| "loss": 0.6277233362197876, |
| "step": 953 |
| }, |
| { |
| "epoch": 3.431654676258993, |
| "grad_norm": 0.5772809250644908, |
| "learning_rate": 6.106701697984296e-06, |
| "loss": 0.6388263702392578, |
| "step": 954 |
| }, |
| { |
| "epoch": 3.435251798561151, |
| "grad_norm": 0.5769783761628331, |
| "learning_rate": 6.0998128475852025e-06, |
| "loss": 0.6354755163192749, |
| "step": 955 |
| }, |
| { |
| "epoch": 3.4388489208633093, |
| "grad_norm": 0.5424758329996289, |
| "learning_rate": 6.092921802618849e-06, |
| "loss": 0.6111853122711182, |
| "step": 956 |
| }, |
| { |
| "epoch": 3.4424460431654675, |
| "grad_norm": 0.5611679378503189, |
| "learning_rate": 6.0860285768356295e-06, |
| "loss": 0.6185018420219421, |
| "step": 957 |
| }, |
| { |
| "epoch": 3.446043165467626, |
| "grad_norm": 0.588002247832766, |
| "learning_rate": 6.079133183990292e-06, |
| "loss": 0.6287339925765991, |
| "step": 958 |
| }, |
| { |
| "epoch": 3.449640287769784, |
| "grad_norm": 0.7443287565839711, |
| "learning_rate": 6.072235637841909e-06, |
| "loss": 0.5987130403518677, |
| "step": 959 |
| }, |
| { |
| "epoch": 3.4532374100719423, |
| "grad_norm": 0.5583210492633316, |
| "learning_rate": 6.065335952153846e-06, |
| "loss": 0.6217409372329712, |
| "step": 960 |
| }, |
| { |
| "epoch": 3.4568345323741005, |
| "grad_norm": 0.5325265812852955, |
| "learning_rate": 6.058434140693741e-06, |
| "loss": 0.6196964979171753, |
| "step": 961 |
| }, |
| { |
| "epoch": 3.460431654676259, |
| "grad_norm": 0.5606450235116687, |
| "learning_rate": 6.051530217233473e-06, |
| "loss": 0.6245044469833374, |
| "step": 962 |
| }, |
| { |
| "epoch": 3.4640287769784175, |
| "grad_norm": 0.6416629534030027, |
| "learning_rate": 6.044624195549137e-06, |
| "loss": 0.6353148818016052, |
| "step": 963 |
| }, |
| { |
| "epoch": 3.4676258992805753, |
| "grad_norm": 0.549414675909668, |
| "learning_rate": 6.037716089421011e-06, |
| "loss": 0.6427045464515686, |
| "step": 964 |
| }, |
| { |
| "epoch": 3.471223021582734, |
| "grad_norm": 0.5653728079544905, |
| "learning_rate": 6.030805912633535e-06, |
| "loss": 0.6173840165138245, |
| "step": 965 |
| }, |
| { |
| "epoch": 3.4748201438848922, |
| "grad_norm": 0.5750387905226975, |
| "learning_rate": 6.023893678975281e-06, |
| "loss": 0.6136468052864075, |
| "step": 966 |
| }, |
| { |
| "epoch": 3.4784172661870505, |
| "grad_norm": 0.5562192543103848, |
| "learning_rate": 6.016979402238922e-06, |
| "loss": 0.6289895176887512, |
| "step": 967 |
| }, |
| { |
| "epoch": 3.4820143884892087, |
| "grad_norm": 0.6071855904365742, |
| "learning_rate": 6.010063096221215e-06, |
| "loss": 0.6055720448493958, |
| "step": 968 |
| }, |
| { |
| "epoch": 3.485611510791367, |
| "grad_norm": 0.5853664881319613, |
| "learning_rate": 6.003144774722956e-06, |
| "loss": 0.6191238164901733, |
| "step": 969 |
| }, |
| { |
| "epoch": 3.4892086330935252, |
| "grad_norm": 0.5836022611985129, |
| "learning_rate": 5.996224451548974e-06, |
| "loss": 0.6153513193130493, |
| "step": 970 |
| }, |
| { |
| "epoch": 3.4928057553956835, |
| "grad_norm": 0.5501626906244823, |
| "learning_rate": 5.989302140508081e-06, |
| "loss": 0.6205925941467285, |
| "step": 971 |
| }, |
| { |
| "epoch": 3.4964028776978417, |
| "grad_norm": 0.561858415194521, |
| "learning_rate": 5.982377855413063e-06, |
| "loss": 0.618821382522583, |
| "step": 972 |
| }, |
| { |
| "epoch": 3.5, |
| "grad_norm": 0.9366644791621612, |
| "learning_rate": 5.975451610080643e-06, |
| "loss": 0.6133970618247986, |
| "step": 973 |
| }, |
| { |
| "epoch": 3.5035971223021583, |
| "grad_norm": 0.5477008824327935, |
| "learning_rate": 5.968523418331453e-06, |
| "loss": 0.6296148300170898, |
| "step": 974 |
| }, |
| { |
| "epoch": 3.5071942446043165, |
| "grad_norm": 0.5208248633143249, |
| "learning_rate": 5.9615932939900154e-06, |
| "loss": 0.5915785431861877, |
| "step": 975 |
| }, |
| { |
| "epoch": 3.5107913669064748, |
| "grad_norm": 0.578508865290858, |
| "learning_rate": 5.954661250884704e-06, |
| "loss": 0.6324668526649475, |
| "step": 976 |
| }, |
| { |
| "epoch": 3.514388489208633, |
| "grad_norm": 0.5461540530255063, |
| "learning_rate": 5.947727302847719e-06, |
| "loss": 0.6380304098129272, |
| "step": 977 |
| }, |
| { |
| "epoch": 3.5179856115107913, |
| "grad_norm": 0.5362843251572532, |
| "learning_rate": 5.940791463715068e-06, |
| "loss": 0.6359459161758423, |
| "step": 978 |
| }, |
| { |
| "epoch": 3.5215827338129495, |
| "grad_norm": 0.5526432685264635, |
| "learning_rate": 5.933853747326525e-06, |
| "loss": 0.6332674026489258, |
| "step": 979 |
| }, |
| { |
| "epoch": 3.5251798561151078, |
| "grad_norm": 0.5128905428703581, |
| "learning_rate": 5.926914167525618e-06, |
| "loss": 0.6182379722595215, |
| "step": 980 |
| }, |
| { |
| "epoch": 3.528776978417266, |
| "grad_norm": 0.5869091984847182, |
| "learning_rate": 5.919972738159587e-06, |
| "loss": 0.6357277631759644, |
| "step": 981 |
| }, |
| { |
| "epoch": 3.5323741007194247, |
| "grad_norm": 0.5717959873717039, |
| "learning_rate": 5.913029473079365e-06, |
| "loss": 0.6151210069656372, |
| "step": 982 |
| }, |
| { |
| "epoch": 3.5359712230215825, |
| "grad_norm": 0.5775579121789572, |
| "learning_rate": 5.906084386139545e-06, |
| "loss": 0.6470330953598022, |
| "step": 983 |
| }, |
| { |
| "epoch": 3.539568345323741, |
| "grad_norm": 0.5474266720437985, |
| "learning_rate": 5.899137491198364e-06, |
| "loss": 0.6377556324005127, |
| "step": 984 |
| }, |
| { |
| "epoch": 3.543165467625899, |
| "grad_norm": 0.6664511062730858, |
| "learning_rate": 5.8921888021176545e-06, |
| "loss": 0.632561445236206, |
| "step": 985 |
| }, |
| { |
| "epoch": 3.5467625899280577, |
| "grad_norm": 0.5982625641511464, |
| "learning_rate": 5.885238332762837e-06, |
| "loss": 0.6204080581665039, |
| "step": 986 |
| }, |
| { |
| "epoch": 3.550359712230216, |
| "grad_norm": 0.5419504378083249, |
| "learning_rate": 5.878286097002884e-06, |
| "loss": 0.6141122579574585, |
| "step": 987 |
| }, |
| { |
| "epoch": 3.553956834532374, |
| "grad_norm": 0.526761312287397, |
| "learning_rate": 5.871332108710292e-06, |
| "loss": 0.6195293068885803, |
| "step": 988 |
| }, |
| { |
| "epoch": 3.5575539568345325, |
| "grad_norm": 0.5560617796758003, |
| "learning_rate": 5.86437638176105e-06, |
| "loss": 0.6226100921630859, |
| "step": 989 |
| }, |
| { |
| "epoch": 3.5611510791366907, |
| "grad_norm": 0.5147824798221119, |
| "learning_rate": 5.8574189300346215e-06, |
| "loss": 0.5921021103858948, |
| "step": 990 |
| }, |
| { |
| "epoch": 3.564748201438849, |
| "grad_norm": 0.5976606416916541, |
| "learning_rate": 5.8504597674139115e-06, |
| "loss": 0.6294490098953247, |
| "step": 991 |
| }, |
| { |
| "epoch": 3.568345323741007, |
| "grad_norm": 0.5632826758618952, |
| "learning_rate": 5.843498907785236e-06, |
| "loss": 0.6351437568664551, |
| "step": 992 |
| }, |
| { |
| "epoch": 3.5719424460431655, |
| "grad_norm": 0.5509722038236922, |
| "learning_rate": 5.8365363650383e-06, |
| "loss": 0.6352639198303223, |
| "step": 993 |
| }, |
| { |
| "epoch": 3.5755395683453237, |
| "grad_norm": 0.5524669379370264, |
| "learning_rate": 5.829572153066166e-06, |
| "loss": 0.6275777816772461, |
| "step": 994 |
| }, |
| { |
| "epoch": 3.579136690647482, |
| "grad_norm": 0.5675598136381055, |
| "learning_rate": 5.8226062857652254e-06, |
| "loss": 0.622998833656311, |
| "step": 995 |
| }, |
| { |
| "epoch": 3.58273381294964, |
| "grad_norm": 0.5104403543711602, |
| "learning_rate": 5.815638777035175e-06, |
| "loss": 0.6314291954040527, |
| "step": 996 |
| }, |
| { |
| "epoch": 3.5863309352517985, |
| "grad_norm": 0.5122673876452001, |
| "learning_rate": 5.808669640778986e-06, |
| "loss": 0.6079280376434326, |
| "step": 997 |
| }, |
| { |
| "epoch": 3.5899280575539567, |
| "grad_norm": 0.5581467016360934, |
| "learning_rate": 5.801698890902877e-06, |
| "loss": 0.6032590866088867, |
| "step": 998 |
| }, |
| { |
| "epoch": 3.593525179856115, |
| "grad_norm": 0.5438226946507215, |
| "learning_rate": 5.794726541316288e-06, |
| "loss": 0.6497535109519958, |
| "step": 999 |
| }, |
| { |
| "epoch": 3.597122302158273, |
| "grad_norm": 0.5832843430696965, |
| "learning_rate": 5.78775260593185e-06, |
| "loss": 0.6055078506469727, |
| "step": 1000 |
| }, |
| { |
| "epoch": 3.597122302158273, |
| "eval_loss": 0.8125540614128113, |
| "eval_runtime": 7.8509, |
| "eval_samples_per_second": 11.209, |
| "eval_steps_per_second": 0.382, |
| "eval_token_acc": 0.7516720769220718, |
| "step": 1000 |
| }, |
| { |
| "epoch": 3.600719424460432, |
| "grad_norm": 0.5387037791518224, |
| "learning_rate": 5.780777098665357e-06, |
| "loss": 0.6361275911331177, |
| "step": 1001 |
| }, |
| { |
| "epoch": 3.6043165467625897, |
| "grad_norm": 0.5931750766720936, |
| "learning_rate": 5.77380003343574e-06, |
| "loss": 0.6325686573982239, |
| "step": 1002 |
| }, |
| { |
| "epoch": 3.6079136690647484, |
| "grad_norm": 0.5683590800729738, |
| "learning_rate": 5.766821424165039e-06, |
| "loss": 0.6415737271308899, |
| "step": 1003 |
| }, |
| { |
| "epoch": 3.6115107913669062, |
| "grad_norm": 0.5261138010028708, |
| "learning_rate": 5.759841284778379e-06, |
| "loss": 0.6264912486076355, |
| "step": 1004 |
| }, |
| { |
| "epoch": 3.615107913669065, |
| "grad_norm": 0.544748136565225, |
| "learning_rate": 5.752859629203933e-06, |
| "loss": 0.6192924380302429, |
| "step": 1005 |
| }, |
| { |
| "epoch": 3.618705035971223, |
| "grad_norm": 0.6841535562931874, |
| "learning_rate": 5.7458764713729e-06, |
| "loss": 0.6135982275009155, |
| "step": 1006 |
| }, |
| { |
| "epoch": 3.6223021582733814, |
| "grad_norm": 0.5757825111921732, |
| "learning_rate": 5.738891825219477e-06, |
| "loss": 0.6414120197296143, |
| "step": 1007 |
| }, |
| { |
| "epoch": 3.6258992805755397, |
| "grad_norm": 0.834185194411395, |
| "learning_rate": 5.731905704680834e-06, |
| "loss": 0.6300562620162964, |
| "step": 1008 |
| }, |
| { |
| "epoch": 3.629496402877698, |
| "grad_norm": 0.5914648938614046, |
| "learning_rate": 5.7249181236970774e-06, |
| "loss": 0.6344118714332581, |
| "step": 1009 |
| }, |
| { |
| "epoch": 3.633093525179856, |
| "grad_norm": 0.5837957271398493, |
| "learning_rate": 5.717929096211232e-06, |
| "loss": 0.6232997179031372, |
| "step": 1010 |
| }, |
| { |
| "epoch": 3.6366906474820144, |
| "grad_norm": 0.6949256014787403, |
| "learning_rate": 5.710938636169211e-06, |
| "loss": 0.6331793069839478, |
| "step": 1011 |
| }, |
| { |
| "epoch": 3.6402877697841727, |
| "grad_norm": 0.533156956369847, |
| "learning_rate": 5.703946757519777e-06, |
| "loss": 0.6080916523933411, |
| "step": 1012 |
| }, |
| { |
| "epoch": 3.643884892086331, |
| "grad_norm": 0.571944227267364, |
| "learning_rate": 5.6969534742145325e-06, |
| "loss": 0.6211006045341492, |
| "step": 1013 |
| }, |
| { |
| "epoch": 3.647482014388489, |
| "grad_norm": 0.6397571875041864, |
| "learning_rate": 5.689958800207879e-06, |
| "loss": 0.6273400187492371, |
| "step": 1014 |
| }, |
| { |
| "epoch": 3.6510791366906474, |
| "grad_norm": 0.5689459230735638, |
| "learning_rate": 5.682962749456994e-06, |
| "loss": 0.6134950518608093, |
| "step": 1015 |
| }, |
| { |
| "epoch": 3.6546762589928057, |
| "grad_norm": 0.5569625906726352, |
| "learning_rate": 5.6759653359218e-06, |
| "loss": 0.611720085144043, |
| "step": 1016 |
| }, |
| { |
| "epoch": 3.658273381294964, |
| "grad_norm": 0.5368099011844354, |
| "learning_rate": 5.668966573564942e-06, |
| "loss": 0.6366114616394043, |
| "step": 1017 |
| }, |
| { |
| "epoch": 3.661870503597122, |
| "grad_norm": 0.5636975451024273, |
| "learning_rate": 5.661966476351756e-06, |
| "loss": 0.6357487440109253, |
| "step": 1018 |
| }, |
| { |
| "epoch": 3.6654676258992804, |
| "grad_norm": 0.5381342570956121, |
| "learning_rate": 5.6549650582502366e-06, |
| "loss": 0.621475875377655, |
| "step": 1019 |
| }, |
| { |
| "epoch": 3.6690647482014387, |
| "grad_norm": 0.5680718171486167, |
| "learning_rate": 5.647962333231021e-06, |
| "loss": 0.620261549949646, |
| "step": 1020 |
| }, |
| { |
| "epoch": 3.672661870503597, |
| "grad_norm": 0.5382743912552496, |
| "learning_rate": 5.640958315267351e-06, |
| "loss": 0.650659441947937, |
| "step": 1021 |
| }, |
| { |
| "epoch": 3.6762589928057556, |
| "grad_norm": 0.5569716148721183, |
| "learning_rate": 5.633953018335047e-06, |
| "loss": 0.6229879856109619, |
| "step": 1022 |
| }, |
| { |
| "epoch": 3.6798561151079134, |
| "grad_norm": 0.5749593410999209, |
| "learning_rate": 5.626946456412484e-06, |
| "loss": 0.6209127902984619, |
| "step": 1023 |
| }, |
| { |
| "epoch": 3.683453237410072, |
| "grad_norm": 0.5456231050875293, |
| "learning_rate": 5.6199386434805615e-06, |
| "loss": 0.6160396337509155, |
| "step": 1024 |
| }, |
| { |
| "epoch": 3.68705035971223, |
| "grad_norm": 0.5289471769079246, |
| "learning_rate": 5.612929593522671e-06, |
| "loss": 0.6104173064231873, |
| "step": 1025 |
| }, |
| { |
| "epoch": 3.6906474820143886, |
| "grad_norm": 0.5843671396747508, |
| "learning_rate": 5.605919320524677e-06, |
| "loss": 0.6159001588821411, |
| "step": 1026 |
| }, |
| { |
| "epoch": 3.694244604316547, |
| "grad_norm": 0.5651476004357376, |
| "learning_rate": 5.598907838474883e-06, |
| "loss": 0.6387501955032349, |
| "step": 1027 |
| }, |
| { |
| "epoch": 3.697841726618705, |
| "grad_norm": 0.5706908322156868, |
| "learning_rate": 5.591895161364006e-06, |
| "loss": 0.6199760437011719, |
| "step": 1028 |
| }, |
| { |
| "epoch": 3.7014388489208634, |
| "grad_norm": 0.5322410609869666, |
| "learning_rate": 5.584881303185142e-06, |
| "loss": 0.6229681372642517, |
| "step": 1029 |
| }, |
| { |
| "epoch": 3.7050359712230216, |
| "grad_norm": 0.5721084026652471, |
| "learning_rate": 5.577866277933752e-06, |
| "loss": 0.619432270526886, |
| "step": 1030 |
| }, |
| { |
| "epoch": 3.70863309352518, |
| "grad_norm": 0.5264586525019622, |
| "learning_rate": 5.570850099607621e-06, |
| "loss": 0.6281554698944092, |
| "step": 1031 |
| }, |
| { |
| "epoch": 3.712230215827338, |
| "grad_norm": 0.5603922534581942, |
| "learning_rate": 5.563832782206835e-06, |
| "loss": 0.6256821155548096, |
| "step": 1032 |
| }, |
| { |
| "epoch": 3.7158273381294964, |
| "grad_norm": 0.561218503029973, |
| "learning_rate": 5.556814339733754e-06, |
| "loss": 0.6172206401824951, |
| "step": 1033 |
| }, |
| { |
| "epoch": 3.7194244604316546, |
| "grad_norm": 0.5800927690024219, |
| "learning_rate": 5.549794786192984e-06, |
| "loss": 0.6275442838668823, |
| "step": 1034 |
| }, |
| { |
| "epoch": 3.723021582733813, |
| "grad_norm": 0.5703911890599143, |
| "learning_rate": 5.542774135591344e-06, |
| "loss": 0.6432952880859375, |
| "step": 1035 |
| }, |
| { |
| "epoch": 3.726618705035971, |
| "grad_norm": 0.5288906768093431, |
| "learning_rate": 5.535752401937846e-06, |
| "loss": 0.6199349164962769, |
| "step": 1036 |
| }, |
| { |
| "epoch": 3.7302158273381294, |
| "grad_norm": 0.5526986942953863, |
| "learning_rate": 5.528729599243662e-06, |
| "loss": 0.634512722492218, |
| "step": 1037 |
| }, |
| { |
| "epoch": 3.7338129496402876, |
| "grad_norm": 0.5338421783071607, |
| "learning_rate": 5.521705741522096e-06, |
| "loss": 0.6207709312438965, |
| "step": 1038 |
| }, |
| { |
| "epoch": 3.737410071942446, |
| "grad_norm": 0.5441886221006373, |
| "learning_rate": 5.51468084278856e-06, |
| "loss": 0.6194617748260498, |
| "step": 1039 |
| }, |
| { |
| "epoch": 3.741007194244604, |
| "grad_norm": 0.57751833674519, |
| "learning_rate": 5.507654917060541e-06, |
| "loss": 0.6412806510925293, |
| "step": 1040 |
| }, |
| { |
| "epoch": 3.744604316546763, |
| "grad_norm": 0.5480165246411622, |
| "learning_rate": 5.500627978357576e-06, |
| "loss": 0.6190841197967529, |
| "step": 1041 |
| }, |
| { |
| "epoch": 3.7482014388489207, |
| "grad_norm": 0.5486370930882859, |
| "learning_rate": 5.49360004070122e-06, |
| "loss": 0.6254152059555054, |
| "step": 1042 |
| }, |
| { |
| "epoch": 3.7517985611510793, |
| "grad_norm": 0.5879050372467775, |
| "learning_rate": 5.486571118115026e-06, |
| "loss": 0.6343721151351929, |
| "step": 1043 |
| }, |
| { |
| "epoch": 3.755395683453237, |
| "grad_norm": 0.5370445690132445, |
| "learning_rate": 5.4795412246245126e-06, |
| "loss": 0.6343209743499756, |
| "step": 1044 |
| }, |
| { |
| "epoch": 3.758992805755396, |
| "grad_norm": 0.5450154530597571, |
| "learning_rate": 5.472510374257133e-06, |
| "loss": 0.6196166276931763, |
| "step": 1045 |
| }, |
| { |
| "epoch": 3.762589928057554, |
| "grad_norm": 0.5343709806843335, |
| "learning_rate": 5.465478581042248e-06, |
| "loss": 0.6157248616218567, |
| "step": 1046 |
| }, |
| { |
| "epoch": 3.7661870503597124, |
| "grad_norm": 0.5628459523993931, |
| "learning_rate": 5.458445859011105e-06, |
| "loss": 0.6136109828948975, |
| "step": 1047 |
| }, |
| { |
| "epoch": 3.7697841726618706, |
| "grad_norm": 0.5565016825347403, |
| "learning_rate": 5.451412222196801e-06, |
| "loss": 0.6306936144828796, |
| "step": 1048 |
| }, |
| { |
| "epoch": 3.773381294964029, |
| "grad_norm": 0.5580290826020277, |
| "learning_rate": 5.44437768463426e-06, |
| "loss": 0.6189572215080261, |
| "step": 1049 |
| }, |
| { |
| "epoch": 3.776978417266187, |
| "grad_norm": 0.5431193983755793, |
| "learning_rate": 5.4373422603602025e-06, |
| "loss": 0.6176700592041016, |
| "step": 1050 |
| }, |
| { |
| "epoch": 3.7805755395683454, |
| "grad_norm": 0.5397695802779046, |
| "learning_rate": 5.4303059634131175e-06, |
| "loss": 0.6354320049285889, |
| "step": 1051 |
| }, |
| { |
| "epoch": 3.7841726618705036, |
| "grad_norm": 0.5417952730271903, |
| "learning_rate": 5.4232688078332384e-06, |
| "loss": 0.6268483400344849, |
| "step": 1052 |
| }, |
| { |
| "epoch": 3.787769784172662, |
| "grad_norm": 0.577453991046705, |
| "learning_rate": 5.416230807662509e-06, |
| "loss": 0.6346728205680847, |
| "step": 1053 |
| }, |
| { |
| "epoch": 3.79136690647482, |
| "grad_norm": 0.5315960494904669, |
| "learning_rate": 5.409191976944559e-06, |
| "loss": 0.6320599317550659, |
| "step": 1054 |
| }, |
| { |
| "epoch": 3.7949640287769784, |
| "grad_norm": 0.5438905695079264, |
| "learning_rate": 5.402152329724675e-06, |
| "loss": 0.6306254863739014, |
| "step": 1055 |
| }, |
| { |
| "epoch": 3.7985611510791366, |
| "grad_norm": 0.5530900019557347, |
| "learning_rate": 5.395111880049775e-06, |
| "loss": 0.6188409924507141, |
| "step": 1056 |
| }, |
| { |
| "epoch": 3.802158273381295, |
| "grad_norm": 0.6056617368798535, |
| "learning_rate": 5.388070641968376e-06, |
| "loss": 0.6206061840057373, |
| "step": 1057 |
| }, |
| { |
| "epoch": 3.805755395683453, |
| "grad_norm": 0.5472356989518676, |
| "learning_rate": 5.3810286295305715e-06, |
| "loss": 0.6444727182388306, |
| "step": 1058 |
| }, |
| { |
| "epoch": 3.8093525179856114, |
| "grad_norm": 0.5910357756320749, |
| "learning_rate": 5.373985856787993e-06, |
| "loss": 0.6342467069625854, |
| "step": 1059 |
| }, |
| { |
| "epoch": 3.81294964028777, |
| "grad_norm": 0.6026061719511084, |
| "learning_rate": 5.366942337793798e-06, |
| "loss": 0.6200609803199768, |
| "step": 1060 |
| }, |
| { |
| "epoch": 3.816546762589928, |
| "grad_norm": 0.5898403550967662, |
| "learning_rate": 5.359898086602625e-06, |
| "loss": 0.6159372329711914, |
| "step": 1061 |
| }, |
| { |
| "epoch": 3.8201438848920866, |
| "grad_norm": 0.5481084757987922, |
| "learning_rate": 5.352853117270581e-06, |
| "loss": 0.6328880190849304, |
| "step": 1062 |
| }, |
| { |
| "epoch": 3.8237410071942444, |
| "grad_norm": 0.5511071790400082, |
| "learning_rate": 5.3458074438552e-06, |
| "loss": 0.6281937956809998, |
| "step": 1063 |
| }, |
| { |
| "epoch": 3.827338129496403, |
| "grad_norm": 0.5714388926162269, |
| "learning_rate": 5.338761080415425e-06, |
| "loss": 0.6304974555969238, |
| "step": 1064 |
| }, |
| { |
| "epoch": 3.8309352517985613, |
| "grad_norm": 0.5751960650810334, |
| "learning_rate": 5.331714041011571e-06, |
| "loss": 0.6289598941802979, |
| "step": 1065 |
| }, |
| { |
| "epoch": 3.8345323741007196, |
| "grad_norm": 0.5602235046145158, |
| "learning_rate": 5.324666339705307e-06, |
| "loss": 0.6236809492111206, |
| "step": 1066 |
| }, |
| { |
| "epoch": 3.838129496402878, |
| "grad_norm": 0.600074702160604, |
| "learning_rate": 5.317617990559623e-06, |
| "loss": 0.6230597496032715, |
| "step": 1067 |
| }, |
| { |
| "epoch": 3.841726618705036, |
| "grad_norm": 0.5296383380122605, |
| "learning_rate": 5.310569007638795e-06, |
| "loss": 0.6161667704582214, |
| "step": 1068 |
| }, |
| { |
| "epoch": 3.8453237410071943, |
| "grad_norm": 0.5612028308860264, |
| "learning_rate": 5.303519405008373e-06, |
| "loss": 0.6226469874382019, |
| "step": 1069 |
| }, |
| { |
| "epoch": 3.8489208633093526, |
| "grad_norm": 0.5521493512307732, |
| "learning_rate": 5.296469196735134e-06, |
| "loss": 0.622602105140686, |
| "step": 1070 |
| }, |
| { |
| "epoch": 3.852517985611511, |
| "grad_norm": 0.5462130844151715, |
| "learning_rate": 5.28941839688707e-06, |
| "loss": 0.6175041198730469, |
| "step": 1071 |
| }, |
| { |
| "epoch": 3.856115107913669, |
| "grad_norm": 0.5856748118827945, |
| "learning_rate": 5.28236701953335e-06, |
| "loss": 0.6236345171928406, |
| "step": 1072 |
| }, |
| { |
| "epoch": 3.8597122302158273, |
| "grad_norm": 0.5596463937851168, |
| "learning_rate": 5.2753150787443e-06, |
| "loss": 0.631768524646759, |
| "step": 1073 |
| }, |
| { |
| "epoch": 3.8633093525179856, |
| "grad_norm": 0.5934140299026798, |
| "learning_rate": 5.268262588591364e-06, |
| "loss": 0.6321205496788025, |
| "step": 1074 |
| }, |
| { |
| "epoch": 3.866906474820144, |
| "grad_norm": 0.5178313892634216, |
| "learning_rate": 5.261209563147088e-06, |
| "loss": 0.6305399537086487, |
| "step": 1075 |
| }, |
| { |
| "epoch": 3.870503597122302, |
| "grad_norm": 0.5380795134593158, |
| "learning_rate": 5.254156016485082e-06, |
| "loss": 0.6334234476089478, |
| "step": 1076 |
| }, |
| { |
| "epoch": 3.8741007194244603, |
| "grad_norm": 0.5555020176810418, |
| "learning_rate": 5.247101962679998e-06, |
| "loss": 0.6309899091720581, |
| "step": 1077 |
| }, |
| { |
| "epoch": 3.8776978417266186, |
| "grad_norm": 0.5979700486904005, |
| "learning_rate": 5.240047415807499e-06, |
| "loss": 0.6348890066146851, |
| "step": 1078 |
| }, |
| { |
| "epoch": 3.881294964028777, |
| "grad_norm": 0.5496978553199497, |
| "learning_rate": 5.2329923899442325e-06, |
| "loss": 0.6270749568939209, |
| "step": 1079 |
| }, |
| { |
| "epoch": 3.884892086330935, |
| "grad_norm": 0.5400189981264519, |
| "learning_rate": 5.225936899167803e-06, |
| "loss": 0.6539863348007202, |
| "step": 1080 |
| }, |
| { |
| "epoch": 3.8884892086330938, |
| "grad_norm": 0.553113550788483, |
| "learning_rate": 5.21888095755674e-06, |
| "loss": 0.6327900886535645, |
| "step": 1081 |
| }, |
| { |
| "epoch": 3.8920863309352516, |
| "grad_norm": 0.5478545536226945, |
| "learning_rate": 5.211824579190473e-06, |
| "loss": 0.6178438663482666, |
| "step": 1082 |
| }, |
| { |
| "epoch": 3.8956834532374103, |
| "grad_norm": 0.547517202573347, |
| "learning_rate": 5.2047677781493035e-06, |
| "loss": 0.6254318952560425, |
| "step": 1083 |
| }, |
| { |
| "epoch": 3.899280575539568, |
| "grad_norm": 0.5502866887115144, |
| "learning_rate": 5.197710568514381e-06, |
| "loss": 0.6129550337791443, |
| "step": 1084 |
| }, |
| { |
| "epoch": 3.902877697841727, |
| "grad_norm": 0.5289111949450003, |
| "learning_rate": 5.190652964367662e-06, |
| "loss": 0.6353284120559692, |
| "step": 1085 |
| }, |
| { |
| "epoch": 3.906474820143885, |
| "grad_norm": 0.5697558816611187, |
| "learning_rate": 5.183594979791896e-06, |
| "loss": 0.6242085695266724, |
| "step": 1086 |
| }, |
| { |
| "epoch": 3.9100719424460433, |
| "grad_norm": 0.5276051579756038, |
| "learning_rate": 5.1765366288705896e-06, |
| "loss": 0.6136658191680908, |
| "step": 1087 |
| }, |
| { |
| "epoch": 3.9136690647482015, |
| "grad_norm": 0.5392013067435808, |
| "learning_rate": 5.169477925687981e-06, |
| "loss": 0.6261371374130249, |
| "step": 1088 |
| }, |
| { |
| "epoch": 3.91726618705036, |
| "grad_norm": 0.5252931018787899, |
| "learning_rate": 5.1624188843290115e-06, |
| "loss": 0.6383857727050781, |
| "step": 1089 |
| }, |
| { |
| "epoch": 3.920863309352518, |
| "grad_norm": 0.5447213918855934, |
| "learning_rate": 5.155359518879295e-06, |
| "loss": 0.6328091621398926, |
| "step": 1090 |
| }, |
| { |
| "epoch": 3.9244604316546763, |
| "grad_norm": 0.5302972618495836, |
| "learning_rate": 5.148299843425095e-06, |
| "loss": 0.6408435702323914, |
| "step": 1091 |
| }, |
| { |
| "epoch": 3.9280575539568345, |
| "grad_norm": 0.5190325944565576, |
| "learning_rate": 5.141239872053294e-06, |
| "loss": 0.6295177936553955, |
| "step": 1092 |
| }, |
| { |
| "epoch": 3.931654676258993, |
| "grad_norm": 0.5383200426474236, |
| "learning_rate": 5.134179618851361e-06, |
| "loss": 0.6353203058242798, |
| "step": 1093 |
| }, |
| { |
| "epoch": 3.935251798561151, |
| "grad_norm": 0.5315118968640762, |
| "learning_rate": 5.1271190979073296e-06, |
| "loss": 0.6318327188491821, |
| "step": 1094 |
| }, |
| { |
| "epoch": 3.9388489208633093, |
| "grad_norm": 0.517717860063696, |
| "learning_rate": 5.120058323309769e-06, |
| "loss": 0.6413143873214722, |
| "step": 1095 |
| }, |
| { |
| "epoch": 3.9424460431654675, |
| "grad_norm": 0.5448478306910934, |
| "learning_rate": 5.112997309147753e-06, |
| "loss": 0.6358555555343628, |
| "step": 1096 |
| }, |
| { |
| "epoch": 3.946043165467626, |
| "grad_norm": 0.5503703443156099, |
| "learning_rate": 5.105936069510835e-06, |
| "loss": 0.6255465745925903, |
| "step": 1097 |
| }, |
| { |
| "epoch": 3.949640287769784, |
| "grad_norm": 0.5332494995645366, |
| "learning_rate": 5.098874618489017e-06, |
| "loss": 0.6314849257469177, |
| "step": 1098 |
| }, |
| { |
| "epoch": 3.9532374100719423, |
| "grad_norm": 0.5462920590900435, |
| "learning_rate": 5.0918129701727205e-06, |
| "loss": 0.6063763499259949, |
| "step": 1099 |
| }, |
| { |
| "epoch": 3.956834532374101, |
| "grad_norm": 0.595376308125965, |
| "learning_rate": 5.084751138652764e-06, |
| "loss": 0.6303770542144775, |
| "step": 1100 |
| }, |
| { |
| "epoch": 3.956834532374101, |
| "eval_loss": 0.8109321594238281, |
| "eval_runtime": 7.8833, |
| "eval_samples_per_second": 11.163, |
| "eval_steps_per_second": 0.381, |
| "eval_token_acc": 0.751970745420572, |
| "step": 1100 |
| }, |
| { |
| "epoch": 3.960431654676259, |
| "grad_norm": 0.5545223372673511, |
| "learning_rate": 5.077689138020331e-06, |
| "loss": 0.636076807975769, |
| "step": 1101 |
| }, |
| { |
| "epoch": 3.9640287769784175, |
| "grad_norm": 0.5415888886715428, |
| "learning_rate": 5.070626982366943e-06, |
| "loss": 0.6309289932250977, |
| "step": 1102 |
| }, |
| { |
| "epoch": 3.9676258992805753, |
| "grad_norm": 0.530195768532205, |
| "learning_rate": 5.063564685784428e-06, |
| "loss": 0.6334230303764343, |
| "step": 1103 |
| }, |
| { |
| "epoch": 3.971223021582734, |
| "grad_norm": 0.5264957915760434, |
| "learning_rate": 5.0565022623649e-06, |
| "loss": 0.6209136843681335, |
| "step": 1104 |
| }, |
| { |
| "epoch": 3.9748201438848922, |
| "grad_norm": 0.5675717547938626, |
| "learning_rate": 5.04943972620072e-06, |
| "loss": 0.6466224193572998, |
| "step": 1105 |
| }, |
| { |
| "epoch": 3.9784172661870505, |
| "grad_norm": 0.546893892458039, |
| "learning_rate": 5.0423770913844795e-06, |
| "loss": 0.6209077835083008, |
| "step": 1106 |
| }, |
| { |
| "epoch": 3.9820143884892087, |
| "grad_norm": 0.566904391407611, |
| "learning_rate": 5.035314372008962e-06, |
| "loss": 0.6295535564422607, |
| "step": 1107 |
| }, |
| { |
| "epoch": 3.985611510791367, |
| "grad_norm": 0.5112849932312409, |
| "learning_rate": 5.028251582167126e-06, |
| "loss": 0.6494243144989014, |
| "step": 1108 |
| }, |
| { |
| "epoch": 3.9892086330935252, |
| "grad_norm": 0.5445209737712887, |
| "learning_rate": 5.021188735952065e-06, |
| "loss": 0.6363496780395508, |
| "step": 1109 |
| }, |
| { |
| "epoch": 3.9928057553956835, |
| "grad_norm": 0.5196093708153622, |
| "learning_rate": 5.0141258474569855e-06, |
| "loss": 0.6188162565231323, |
| "step": 1110 |
| }, |
| { |
| "epoch": 3.9964028776978417, |
| "grad_norm": 0.5349441127307514, |
| "learning_rate": 5.0070629307751816e-06, |
| "loss": 0.6086728572845459, |
| "step": 1111 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.7632014833912022, |
| "learning_rate": 5e-06, |
| "loss": 0.6367975473403931, |
| "step": 1112 |
| }, |
| { |
| "epoch": 4.003597122302159, |
| "grad_norm": 0.6671812832280104, |
| "learning_rate": 4.992937069224818e-06, |
| "loss": 0.5751312971115112, |
| "step": 1113 |
| }, |
| { |
| "epoch": 4.0071942446043165, |
| "grad_norm": 0.6179882227804654, |
| "learning_rate": 4.985874152543015e-06, |
| "loss": 0.6045902967453003, |
| "step": 1114 |
| }, |
| { |
| "epoch": 4.010791366906475, |
| "grad_norm": 0.5961266178839438, |
| "learning_rate": 4.978811264047936e-06, |
| "loss": 0.5806431174278259, |
| "step": 1115 |
| }, |
| { |
| "epoch": 4.014388489208633, |
| "grad_norm": 0.5691235961486315, |
| "learning_rate": 4.971748417832876e-06, |
| "loss": 0.566252589225769, |
| "step": 1116 |
| }, |
| { |
| "epoch": 4.017985611510792, |
| "grad_norm": 0.5791093646172678, |
| "learning_rate": 4.964685627991038e-06, |
| "loss": 0.5949286818504333, |
| "step": 1117 |
| }, |
| { |
| "epoch": 4.0215827338129495, |
| "grad_norm": 0.5771818604656702, |
| "learning_rate": 4.957622908615522e-06, |
| "loss": 0.5870711803436279, |
| "step": 1118 |
| }, |
| { |
| "epoch": 4.025179856115108, |
| "grad_norm": 0.6035369572670033, |
| "learning_rate": 4.9505602737992805e-06, |
| "loss": 0.5918598175048828, |
| "step": 1119 |
| }, |
| { |
| "epoch": 4.028776978417266, |
| "grad_norm": 0.6065311525715938, |
| "learning_rate": 4.943497737635103e-06, |
| "loss": 0.5854548215866089, |
| "step": 1120 |
| }, |
| { |
| "epoch": 4.032374100719425, |
| "grad_norm": 0.5894501705087571, |
| "learning_rate": 4.936435314215573e-06, |
| "loss": 0.5900008082389832, |
| "step": 1121 |
| }, |
| { |
| "epoch": 4.0359712230215825, |
| "grad_norm": 0.5343328098237431, |
| "learning_rate": 4.92937301763306e-06, |
| "loss": 0.5490322113037109, |
| "step": 1122 |
| }, |
| { |
| "epoch": 4.039568345323741, |
| "grad_norm": 0.5705647249862055, |
| "learning_rate": 4.9223108619796705e-06, |
| "loss": 0.5875539779663086, |
| "step": 1123 |
| }, |
| { |
| "epoch": 4.043165467625899, |
| "grad_norm": 0.5943474698787913, |
| "learning_rate": 4.915248861347239e-06, |
| "loss": 0.5715224742889404, |
| "step": 1124 |
| }, |
| { |
| "epoch": 4.046762589928058, |
| "grad_norm": 0.5608640103822211, |
| "learning_rate": 4.908187029827282e-06, |
| "loss": 0.59614098072052, |
| "step": 1125 |
| }, |
| { |
| "epoch": 4.0503597122302155, |
| "grad_norm": 0.5736354228437628, |
| "learning_rate": 4.901125381510986e-06, |
| "loss": 0.5507819652557373, |
| "step": 1126 |
| }, |
| { |
| "epoch": 4.053956834532374, |
| "grad_norm": 0.5809586226091275, |
| "learning_rate": 4.894063930489166e-06, |
| "loss": 0.5653989315032959, |
| "step": 1127 |
| }, |
| { |
| "epoch": 4.057553956834532, |
| "grad_norm": 0.6151642417224333, |
| "learning_rate": 4.887002690852249e-06, |
| "loss": 0.5762919187545776, |
| "step": 1128 |
| }, |
| { |
| "epoch": 4.061151079136691, |
| "grad_norm": 0.5502446261477949, |
| "learning_rate": 4.8799416766902315e-06, |
| "loss": 0.5796780586242676, |
| "step": 1129 |
| }, |
| { |
| "epoch": 4.0647482014388485, |
| "grad_norm": 0.5658259411352802, |
| "learning_rate": 4.872880902092673e-06, |
| "loss": 0.5663573145866394, |
| "step": 1130 |
| }, |
| { |
| "epoch": 4.068345323741007, |
| "grad_norm": 0.5470757908122678, |
| "learning_rate": 4.865820381148641e-06, |
| "loss": 0.5743259191513062, |
| "step": 1131 |
| }, |
| { |
| "epoch": 4.071942446043165, |
| "grad_norm": 0.5785124023334323, |
| "learning_rate": 4.858760127946707e-06, |
| "loss": 0.5840204954147339, |
| "step": 1132 |
| }, |
| { |
| "epoch": 4.075539568345324, |
| "grad_norm": 0.59689209556221, |
| "learning_rate": 4.8517001565749065e-06, |
| "loss": 0.5999338626861572, |
| "step": 1133 |
| }, |
| { |
| "epoch": 4.079136690647482, |
| "grad_norm": 0.5647273424274459, |
| "learning_rate": 4.8446404811207055e-06, |
| "loss": 0.565536379814148, |
| "step": 1134 |
| }, |
| { |
| "epoch": 4.08273381294964, |
| "grad_norm": 0.561464772321604, |
| "learning_rate": 4.837581115670991e-06, |
| "loss": 0.588786244392395, |
| "step": 1135 |
| }, |
| { |
| "epoch": 4.086330935251799, |
| "grad_norm": 0.5804342083767994, |
| "learning_rate": 4.830522074312019e-06, |
| "loss": 0.5741912126541138, |
| "step": 1136 |
| }, |
| { |
| "epoch": 4.089928057553957, |
| "grad_norm": 0.5752775990581241, |
| "learning_rate": 4.823463371129411e-06, |
| "loss": 0.5821037292480469, |
| "step": 1137 |
| }, |
| { |
| "epoch": 4.093525179856115, |
| "grad_norm": 0.5494215057833093, |
| "learning_rate": 4.816405020208105e-06, |
| "loss": 0.5683557391166687, |
| "step": 1138 |
| }, |
| { |
| "epoch": 4.097122302158273, |
| "grad_norm": 0.5819120226688422, |
| "learning_rate": 4.80934703563234e-06, |
| "loss": 0.5606533885002136, |
| "step": 1139 |
| }, |
| { |
| "epoch": 4.100719424460432, |
| "grad_norm": 0.5665547666948522, |
| "learning_rate": 4.80228943148562e-06, |
| "loss": 0.559918999671936, |
| "step": 1140 |
| }, |
| { |
| "epoch": 4.10431654676259, |
| "grad_norm": 0.5671885384831782, |
| "learning_rate": 4.795232221850697e-06, |
| "loss": 0.574749767780304, |
| "step": 1141 |
| }, |
| { |
| "epoch": 4.107913669064748, |
| "grad_norm": 0.5802314387975677, |
| "learning_rate": 4.788175420809528e-06, |
| "loss": 0.5675938725471497, |
| "step": 1142 |
| }, |
| { |
| "epoch": 4.111510791366906, |
| "grad_norm": 0.5569385712485541, |
| "learning_rate": 4.781119042443263e-06, |
| "loss": 0.5684279203414917, |
| "step": 1143 |
| }, |
| { |
| "epoch": 4.115107913669065, |
| "grad_norm": 0.5543611755367137, |
| "learning_rate": 4.774063100832199e-06, |
| "loss": 0.5701501369476318, |
| "step": 1144 |
| }, |
| { |
| "epoch": 4.118705035971223, |
| "grad_norm": 0.545108517220662, |
| "learning_rate": 4.76700761005577e-06, |
| "loss": 0.5616727471351624, |
| "step": 1145 |
| }, |
| { |
| "epoch": 4.122302158273381, |
| "grad_norm": 0.5583632998054106, |
| "learning_rate": 4.759952584192502e-06, |
| "loss": 0.585227370262146, |
| "step": 1146 |
| }, |
| { |
| "epoch": 4.125899280575539, |
| "grad_norm": 0.5385461358182791, |
| "learning_rate": 4.752898037320004e-06, |
| "loss": 0.5916910767555237, |
| "step": 1147 |
| }, |
| { |
| "epoch": 4.129496402877698, |
| "grad_norm": 0.5411752970794544, |
| "learning_rate": 4.745843983514919e-06, |
| "loss": 0.5454212427139282, |
| "step": 1148 |
| }, |
| { |
| "epoch": 4.133093525179856, |
| "grad_norm": 0.5378753096176189, |
| "learning_rate": 4.738790436852914e-06, |
| "loss": 0.5978329181671143, |
| "step": 1149 |
| }, |
| { |
| "epoch": 4.136690647482014, |
| "grad_norm": 0.5559630783400712, |
| "learning_rate": 4.731737411408637e-06, |
| "loss": 0.555715799331665, |
| "step": 1150 |
| }, |
| { |
| "epoch": 4.140287769784172, |
| "grad_norm": 0.5920639013002785, |
| "learning_rate": 4.724684921255704e-06, |
| "loss": 0.5829776525497437, |
| "step": 1151 |
| }, |
| { |
| "epoch": 4.143884892086331, |
| "grad_norm": 0.5487788143272614, |
| "learning_rate": 4.717632980466652e-06, |
| "loss": 0.5738891363143921, |
| "step": 1152 |
| }, |
| { |
| "epoch": 4.14748201438849, |
| "grad_norm": 0.5979520376739319, |
| "learning_rate": 4.710581603112931e-06, |
| "loss": 0.5732789039611816, |
| "step": 1153 |
| }, |
| { |
| "epoch": 4.151079136690647, |
| "grad_norm": 0.5763606911373871, |
| "learning_rate": 4.703530803264868e-06, |
| "loss": 0.5686631798744202, |
| "step": 1154 |
| }, |
| { |
| "epoch": 4.154676258992806, |
| "grad_norm": 0.5661816906841558, |
| "learning_rate": 4.696480594991629e-06, |
| "loss": 0.5841742753982544, |
| "step": 1155 |
| }, |
| { |
| "epoch": 4.158273381294964, |
| "grad_norm": 0.5950331674109112, |
| "learning_rate": 4.6894309923612055e-06, |
| "loss": 0.5730827450752258, |
| "step": 1156 |
| }, |
| { |
| "epoch": 4.161870503597123, |
| "grad_norm": 0.5881173493949299, |
| "learning_rate": 4.682382009440378e-06, |
| "loss": 0.5937288999557495, |
| "step": 1157 |
| }, |
| { |
| "epoch": 4.16546762589928, |
| "grad_norm": 0.55290224725042, |
| "learning_rate": 4.675333660294693e-06, |
| "loss": 0.5837142467498779, |
| "step": 1158 |
| }, |
| { |
| "epoch": 4.169064748201439, |
| "grad_norm": 0.6889012765482136, |
| "learning_rate": 4.66828595898843e-06, |
| "loss": 0.5661232471466064, |
| "step": 1159 |
| }, |
| { |
| "epoch": 4.172661870503597, |
| "grad_norm": 0.5466375770495574, |
| "learning_rate": 4.661238919584578e-06, |
| "loss": 0.5689799785614014, |
| "step": 1160 |
| }, |
| { |
| "epoch": 4.176258992805756, |
| "grad_norm": 0.5568083020527735, |
| "learning_rate": 4.654192556144801e-06, |
| "loss": 0.5801382064819336, |
| "step": 1161 |
| }, |
| { |
| "epoch": 4.179856115107913, |
| "grad_norm": 0.5758747775029548, |
| "learning_rate": 4.647146882729422e-06, |
| "loss": 0.5707106590270996, |
| "step": 1162 |
| }, |
| { |
| "epoch": 4.183453237410072, |
| "grad_norm": 0.5763824084621219, |
| "learning_rate": 4.640101913397376e-06, |
| "loss": 0.5798112154006958, |
| "step": 1163 |
| }, |
| { |
| "epoch": 4.18705035971223, |
| "grad_norm": 0.5941164621193183, |
| "learning_rate": 4.633057662206205e-06, |
| "loss": 0.5752370357513428, |
| "step": 1164 |
| }, |
| { |
| "epoch": 4.190647482014389, |
| "grad_norm": 0.5625375092244381, |
| "learning_rate": 4.626014143212008e-06, |
| "loss": 0.5721004009246826, |
| "step": 1165 |
| }, |
| { |
| "epoch": 4.194244604316546, |
| "grad_norm": 0.5665818979516964, |
| "learning_rate": 4.618971370469431e-06, |
| "loss": 0.5877313613891602, |
| "step": 1166 |
| }, |
| { |
| "epoch": 4.197841726618705, |
| "grad_norm": 0.6795586900116738, |
| "learning_rate": 4.611929358031625e-06, |
| "loss": 0.5642364025115967, |
| "step": 1167 |
| }, |
| { |
| "epoch": 4.201438848920863, |
| "grad_norm": 0.5415548661428123, |
| "learning_rate": 4.6048881199502265e-06, |
| "loss": 0.5845080614089966, |
| "step": 1168 |
| }, |
| { |
| "epoch": 4.205035971223022, |
| "grad_norm": 0.5869059273185752, |
| "learning_rate": 4.597847670275327e-06, |
| "loss": 0.5940054655075073, |
| "step": 1169 |
| }, |
| { |
| "epoch": 4.2086330935251794, |
| "grad_norm": 0.5607873720318685, |
| "learning_rate": 4.590808023055444e-06, |
| "loss": 0.573835551738739, |
| "step": 1170 |
| }, |
| { |
| "epoch": 4.212230215827338, |
| "grad_norm": 0.6003731381877622, |
| "learning_rate": 4.583769192337493e-06, |
| "loss": 0.5652705430984497, |
| "step": 1171 |
| }, |
| { |
| "epoch": 4.215827338129497, |
| "grad_norm": 0.5739016573249696, |
| "learning_rate": 4.576731192166762e-06, |
| "loss": 0.5749368667602539, |
| "step": 1172 |
| }, |
| { |
| "epoch": 4.219424460431655, |
| "grad_norm": 0.5581888001105628, |
| "learning_rate": 4.569694036586883e-06, |
| "loss": 0.5914247035980225, |
| "step": 1173 |
| }, |
| { |
| "epoch": 4.223021582733813, |
| "grad_norm": 0.5559528710698097, |
| "learning_rate": 4.562657739639798e-06, |
| "loss": 0.5822369456291199, |
| "step": 1174 |
| }, |
| { |
| "epoch": 4.226618705035971, |
| "grad_norm": 0.5533112566404729, |
| "learning_rate": 4.555622315365741e-06, |
| "loss": 0.6120118498802185, |
| "step": 1175 |
| }, |
| { |
| "epoch": 4.23021582733813, |
| "grad_norm": 0.6197934095165797, |
| "learning_rate": 4.548587777803198e-06, |
| "loss": 0.5657411217689514, |
| "step": 1176 |
| }, |
| { |
| "epoch": 4.233812949640288, |
| "grad_norm": 0.5538805662869524, |
| "learning_rate": 4.541554140988896e-06, |
| "loss": 0.6044785976409912, |
| "step": 1177 |
| }, |
| { |
| "epoch": 4.237410071942446, |
| "grad_norm": 0.599310631889973, |
| "learning_rate": 4.534521418957753e-06, |
| "loss": 0.575440526008606, |
| "step": 1178 |
| }, |
| { |
| "epoch": 4.241007194244604, |
| "grad_norm": 0.6071682175544293, |
| "learning_rate": 4.5274896257428684e-06, |
| "loss": 0.5827499032020569, |
| "step": 1179 |
| }, |
| { |
| "epoch": 4.244604316546763, |
| "grad_norm": 0.5585723963020499, |
| "learning_rate": 4.520458775375488e-06, |
| "loss": 0.5806722640991211, |
| "step": 1180 |
| }, |
| { |
| "epoch": 4.248201438848921, |
| "grad_norm": 0.6680955934459385, |
| "learning_rate": 4.513428881884976e-06, |
| "loss": 0.5744950771331787, |
| "step": 1181 |
| }, |
| { |
| "epoch": 4.251798561151079, |
| "grad_norm": 0.5498839260681047, |
| "learning_rate": 4.506399959298781e-06, |
| "loss": 0.5698388814926147, |
| "step": 1182 |
| }, |
| { |
| "epoch": 4.255395683453237, |
| "grad_norm": 0.6069444284018976, |
| "learning_rate": 4.499372021642427e-06, |
| "loss": 0.5755789875984192, |
| "step": 1183 |
| }, |
| { |
| "epoch": 4.258992805755396, |
| "grad_norm": 0.5599727416246281, |
| "learning_rate": 4.49234508293946e-06, |
| "loss": 0.5962889194488525, |
| "step": 1184 |
| }, |
| { |
| "epoch": 4.262589928057554, |
| "grad_norm": 0.6342165286791643, |
| "learning_rate": 4.485319157211441e-06, |
| "loss": 0.5775662660598755, |
| "step": 1185 |
| }, |
| { |
| "epoch": 4.266187050359712, |
| "grad_norm": 0.5526730574177247, |
| "learning_rate": 4.4782942584779046e-06, |
| "loss": 0.5755195617675781, |
| "step": 1186 |
| }, |
| { |
| "epoch": 4.26978417266187, |
| "grad_norm": 0.6227375274845137, |
| "learning_rate": 4.47127040075634e-06, |
| "loss": 0.5777647495269775, |
| "step": 1187 |
| }, |
| { |
| "epoch": 4.273381294964029, |
| "grad_norm": 1.0110886173810598, |
| "learning_rate": 4.464247598062156e-06, |
| "loss": 0.5726004838943481, |
| "step": 1188 |
| }, |
| { |
| "epoch": 4.276978417266187, |
| "grad_norm": 0.5669341213587702, |
| "learning_rate": 4.457225864408659e-06, |
| "loss": 0.5730968117713928, |
| "step": 1189 |
| }, |
| { |
| "epoch": 4.280575539568345, |
| "grad_norm": 0.5763712469502784, |
| "learning_rate": 4.450205213807018e-06, |
| "loss": 0.595857560634613, |
| "step": 1190 |
| }, |
| { |
| "epoch": 4.284172661870503, |
| "grad_norm": 0.5946725282808688, |
| "learning_rate": 4.443185660266248e-06, |
| "loss": 0.5865065455436707, |
| "step": 1191 |
| }, |
| { |
| "epoch": 4.287769784172662, |
| "grad_norm": 0.5740713350476653, |
| "learning_rate": 4.436167217793167e-06, |
| "loss": 0.5740945935249329, |
| "step": 1192 |
| }, |
| { |
| "epoch": 4.2913669064748206, |
| "grad_norm": 0.5612912595120162, |
| "learning_rate": 4.429149900392379e-06, |
| "loss": 0.5785430669784546, |
| "step": 1193 |
| }, |
| { |
| "epoch": 4.294964028776978, |
| "grad_norm": 0.5740412978546652, |
| "learning_rate": 4.4221337220662495e-06, |
| "loss": 0.5770116448402405, |
| "step": 1194 |
| }, |
| { |
| "epoch": 4.298561151079137, |
| "grad_norm": 0.5795305629307095, |
| "learning_rate": 4.415118696814859e-06, |
| "loss": 0.5700653791427612, |
| "step": 1195 |
| }, |
| { |
| "epoch": 4.302158273381295, |
| "grad_norm": 0.5718157967814855, |
| "learning_rate": 4.408104838635996e-06, |
| "loss": 0.6017115116119385, |
| "step": 1196 |
| }, |
| { |
| "epoch": 4.305755395683454, |
| "grad_norm": 0.5892253053245672, |
| "learning_rate": 4.401092161525117e-06, |
| "loss": 0.5771710872650146, |
| "step": 1197 |
| }, |
| { |
| "epoch": 4.309352517985611, |
| "grad_norm": 0.6233608453273818, |
| "learning_rate": 4.394080679475324e-06, |
| "loss": 0.599836528301239, |
| "step": 1198 |
| }, |
| { |
| "epoch": 4.31294964028777, |
| "grad_norm": 0.5719492505819993, |
| "learning_rate": 4.38707040647733e-06, |
| "loss": 0.5776287913322449, |
| "step": 1199 |
| }, |
| { |
| "epoch": 4.316546762589928, |
| "grad_norm": 0.551585937328142, |
| "learning_rate": 4.38006135651944e-06, |
| "loss": 0.5568167567253113, |
| "step": 1200 |
| }, |
| { |
| "epoch": 4.316546762589928, |
| "eval_loss": 0.8378241658210754, |
| "eval_runtime": 7.7777, |
| "eval_samples_per_second": 11.314, |
| "eval_steps_per_second": 0.386, |
| "eval_token_acc": 0.7497680652441335, |
| "step": 1200 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2224, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 8, |
| "save_steps": 200, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 899169880113152.0, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |