| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.6916825177243645, |
| "eval_steps": 500, |
| "global_step": 1000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0006916825177243646, |
| "grad_norm": 0.17447508871555328, |
| "learning_rate": 0.0, |
| "loss": 2.4721, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0013833650354487291, |
| "grad_norm": 0.24757890403270721, |
| "learning_rate": 1.3698630136986302e-06, |
| "loss": 2.7788, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0020750475531730937, |
| "grad_norm": 0.20169955492019653, |
| "learning_rate": 2.7397260273972604e-06, |
| "loss": 2.817, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0027667300708974583, |
| "grad_norm": 0.2597520053386688, |
| "learning_rate": 4.10958904109589e-06, |
| "loss": 2.8883, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.0034584125886218224, |
| "grad_norm": 0.2476697564125061, |
| "learning_rate": 5.479452054794521e-06, |
| "loss": 2.5194, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.004150095106346187, |
| "grad_norm": 0.19448921084403992, |
| "learning_rate": 6.849315068493151e-06, |
| "loss": 2.0166, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0048417776240705515, |
| "grad_norm": 0.30129846930503845, |
| "learning_rate": 8.21917808219178e-06, |
| "loss": 2.0576, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.0055334601417949165, |
| "grad_norm": 0.29668128490448, |
| "learning_rate": 9.589041095890411e-06, |
| "loss": 2.0307, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.006225142659519281, |
| "grad_norm": 0.20030352473258972, |
| "learning_rate": 1.0958904109589042e-05, |
| "loss": 2.6198, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.006916825177243645, |
| "grad_norm": 0.2806167006492615, |
| "learning_rate": 1.2328767123287671e-05, |
| "loss": 2.7184, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.00760850769496801, |
| "grad_norm": 0.3155820965766907, |
| "learning_rate": 1.3698630136986302e-05, |
| "loss": 2.5599, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.008300190212692375, |
| "grad_norm": 0.37342140078544617, |
| "learning_rate": 1.5068493150684931e-05, |
| "loss": 1.6116, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.008991872730416739, |
| "grad_norm": 0.3110700845718384, |
| "learning_rate": 1.643835616438356e-05, |
| "loss": 2.03, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.009683555248141103, |
| "grad_norm": 0.22983220219612122, |
| "learning_rate": 1.780821917808219e-05, |
| "loss": 3.0022, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.010375237765865467, |
| "grad_norm": 0.20027688145637512, |
| "learning_rate": 1.9178082191780822e-05, |
| "loss": 2.3026, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.011066920283589833, |
| "grad_norm": 0.2387463003396988, |
| "learning_rate": 2.0547945205479453e-05, |
| "loss": 2.9633, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.011758602801314197, |
| "grad_norm": 0.21235717833042145, |
| "learning_rate": 2.1917808219178083e-05, |
| "loss": 2.8526, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.012450285319038561, |
| "grad_norm": 0.29500314593315125, |
| "learning_rate": 2.328767123287671e-05, |
| "loss": 2.5277, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.013141967836762926, |
| "grad_norm": 0.20916298031806946, |
| "learning_rate": 2.4657534246575342e-05, |
| "loss": 2.3433, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.01383365035448729, |
| "grad_norm": 0.29092636704444885, |
| "learning_rate": 2.6027397260273973e-05, |
| "loss": 2.6725, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.014525332872211656, |
| "grad_norm": 0.2366064488887787, |
| "learning_rate": 2.7397260273972603e-05, |
| "loss": 2.7656, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.01521701538993602, |
| "grad_norm": 0.23627543449401855, |
| "learning_rate": 2.8767123287671234e-05, |
| "loss": 2.5008, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.015908697907660384, |
| "grad_norm": 0.30645817518234253, |
| "learning_rate": 3.0136986301369862e-05, |
| "loss": 1.8266, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.01660038042538475, |
| "grad_norm": 0.5485689043998718, |
| "learning_rate": 3.1506849315068496e-05, |
| "loss": 2.7961, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.017292062943109112, |
| "grad_norm": 0.2701663076877594, |
| "learning_rate": 3.287671232876712e-05, |
| "loss": 2.7765, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.017983745460833478, |
| "grad_norm": 0.28117069602012634, |
| "learning_rate": 3.424657534246575e-05, |
| "loss": 2.7523, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.01867542797855784, |
| "grad_norm": 0.38187164068222046, |
| "learning_rate": 3.561643835616438e-05, |
| "loss": 2.2493, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.019367110496282206, |
| "grad_norm": 0.23057831823825836, |
| "learning_rate": 3.698630136986301e-05, |
| "loss": 2.8409, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.020058793014006572, |
| "grad_norm": 0.314023494720459, |
| "learning_rate": 3.8356164383561644e-05, |
| "loss": 2.7994, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.020750475531730934, |
| "grad_norm": 0.5843497514724731, |
| "learning_rate": 3.9726027397260274e-05, |
| "loss": 2.8692, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.0214421580494553, |
| "grad_norm": 0.3582310974597931, |
| "learning_rate": 4.1095890410958905e-05, |
| "loss": 2.5466, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.022133840567179666, |
| "grad_norm": 0.2534823715686798, |
| "learning_rate": 4.2465753424657536e-05, |
| "loss": 2.5193, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.02282552308490403, |
| "grad_norm": 0.34849318861961365, |
| "learning_rate": 4.383561643835617e-05, |
| "loss": 1.7904, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.023517205602628394, |
| "grad_norm": 0.35676291584968567, |
| "learning_rate": 4.520547945205479e-05, |
| "loss": 1.6514, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.024208888120352757, |
| "grad_norm": 0.24594038724899292, |
| "learning_rate": 4.657534246575342e-05, |
| "loss": 2.3192, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.024900570638077123, |
| "grad_norm": 0.3562544882297516, |
| "learning_rate": 4.794520547945205e-05, |
| "loss": 2.8913, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.02559225315580149, |
| "grad_norm": 0.4177575409412384, |
| "learning_rate": 4.9315068493150684e-05, |
| "loss": 2.4487, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.02628393567352585, |
| "grad_norm": 0.3126346170902252, |
| "learning_rate": 5.068493150684932e-05, |
| "loss": 1.4528, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.026975618191250217, |
| "grad_norm": 0.30482178926467896, |
| "learning_rate": 5.2054794520547945e-05, |
| "loss": 2.6085, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.02766730070897458, |
| "grad_norm": 0.31493324041366577, |
| "learning_rate": 5.342465753424658e-05, |
| "loss": 1.8078, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.028358983226698945, |
| "grad_norm": 0.4693777859210968, |
| "learning_rate": 5.479452054794521e-05, |
| "loss": 1.363, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.02905066574442331, |
| "grad_norm": 0.6538149118423462, |
| "learning_rate": 5.616438356164384e-05, |
| "loss": 1.2077, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.029742348262147673, |
| "grad_norm": 0.32594457268714905, |
| "learning_rate": 5.753424657534247e-05, |
| "loss": 2.2301, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.03043403077987204, |
| "grad_norm": 0.35883522033691406, |
| "learning_rate": 5.89041095890411e-05, |
| "loss": 2.5199, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.0311257132975964, |
| "grad_norm": 0.3142642080783844, |
| "learning_rate": 6.0273972602739724e-05, |
| "loss": 2.0425, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.03181739581532077, |
| "grad_norm": 0.31059974431991577, |
| "learning_rate": 6.164383561643835e-05, |
| "loss": 2.6123, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.03250907833304513, |
| "grad_norm": 0.24734480679035187, |
| "learning_rate": 6.301369863013699e-05, |
| "loss": 2.5415, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.0332007608507695, |
| "grad_norm": 0.3828027546405792, |
| "learning_rate": 6.438356164383562e-05, |
| "loss": 2.3858, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.03389244336849386, |
| "grad_norm": 0.32175979018211365, |
| "learning_rate": 6.575342465753424e-05, |
| "loss": 2.135, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.034584125886218224, |
| "grad_norm": 0.5965486764907837, |
| "learning_rate": 6.712328767123288e-05, |
| "loss": 2.2111, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.03527580840394259, |
| "grad_norm": 0.5561261177062988, |
| "learning_rate": 6.84931506849315e-05, |
| "loss": 1.725, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.035967490921666956, |
| "grad_norm": 0.46931296586990356, |
| "learning_rate": 6.986301369863014e-05, |
| "loss": 2.5344, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.03665917343939132, |
| "grad_norm": 0.4662770926952362, |
| "learning_rate": 7.123287671232876e-05, |
| "loss": 2.4031, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.03735085595711568, |
| "grad_norm": 0.3315083980560303, |
| "learning_rate": 7.26027397260274e-05, |
| "loss": 2.0916, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.03804253847484005, |
| "grad_norm": 0.36718690395355225, |
| "learning_rate": 7.397260273972603e-05, |
| "loss": 2.6542, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.03873422099256441, |
| "grad_norm": 0.43732333183288574, |
| "learning_rate": 7.534246575342466e-05, |
| "loss": 2.3609, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.039425903510288775, |
| "grad_norm": 0.5333709716796875, |
| "learning_rate": 7.671232876712329e-05, |
| "loss": 2.0736, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.040117586028013144, |
| "grad_norm": 0.609667181968689, |
| "learning_rate": 7.808219178082192e-05, |
| "loss": 1.9863, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.040809268545737507, |
| "grad_norm": 0.38461530208587646, |
| "learning_rate": 7.945205479452055e-05, |
| "loss": 2.0244, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.04150095106346187, |
| "grad_norm": 0.37384161353111267, |
| "learning_rate": 8.082191780821919e-05, |
| "loss": 2.5084, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.04219263358118624, |
| "grad_norm": 0.575130820274353, |
| "learning_rate": 8.219178082191781e-05, |
| "loss": 1.5943, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.0428843160989106, |
| "grad_norm": 0.47650450468063354, |
| "learning_rate": 8.356164383561645e-05, |
| "loss": 2.4752, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.04357599861663496, |
| "grad_norm": 0.36863836646080017, |
| "learning_rate": 8.493150684931507e-05, |
| "loss": 2.5023, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.04426768113435933, |
| "grad_norm": 0.3590114116668701, |
| "learning_rate": 8.630136986301371e-05, |
| "loss": 2.1982, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.044959363652083695, |
| "grad_norm": 0.4362975060939789, |
| "learning_rate": 8.767123287671233e-05, |
| "loss": 2.0463, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.04565104616980806, |
| "grad_norm": 0.39568379521369934, |
| "learning_rate": 8.904109589041096e-05, |
| "loss": 2.5595, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.04634272868753242, |
| "grad_norm": 0.41195258498191833, |
| "learning_rate": 9.041095890410958e-05, |
| "loss": 2.348, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.04703441120525679, |
| "grad_norm": 0.5394885540008545, |
| "learning_rate": 9.178082191780822e-05, |
| "loss": 1.2385, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.04772609372298115, |
| "grad_norm": 0.41482311487197876, |
| "learning_rate": 9.315068493150684e-05, |
| "loss": 2.7036, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.048417776240705514, |
| "grad_norm": 0.4505786597728729, |
| "learning_rate": 9.452054794520548e-05, |
| "loss": 1.7636, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.04910945875842988, |
| "grad_norm": 0.26118066906929016, |
| "learning_rate": 9.58904109589041e-05, |
| "loss": 1.7056, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.049801141276154245, |
| "grad_norm": 0.7240022420883179, |
| "learning_rate": 9.726027397260274e-05, |
| "loss": 2.3171, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.05049282379387861, |
| "grad_norm": 0.5030315518379211, |
| "learning_rate": 9.863013698630137e-05, |
| "loss": 2.1896, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.05118450631160298, |
| "grad_norm": 0.7940630316734314, |
| "learning_rate": 0.0001, |
| "loss": 1.4996, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.05187618882932734, |
| "grad_norm": 0.45342186093330383, |
| "learning_rate": 9.992716678805537e-05, |
| "loss": 2.1487, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.0525678713470517, |
| "grad_norm": 0.4451451301574707, |
| "learning_rate": 9.985433357611071e-05, |
| "loss": 2.1561, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.053259553864776064, |
| "grad_norm": 0.39434245228767395, |
| "learning_rate": 9.978150036416607e-05, |
| "loss": 1.8114, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.053951236382500434, |
| "grad_norm": 0.5808930397033691, |
| "learning_rate": 9.970866715222141e-05, |
| "loss": 2.0528, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.054642918900224796, |
| "grad_norm": 0.4741731882095337, |
| "learning_rate": 9.963583394027677e-05, |
| "loss": 1.988, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.05533460141794916, |
| "grad_norm": 0.38925132155418396, |
| "learning_rate": 9.956300072833212e-05, |
| "loss": 1.1629, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.05602628393567353, |
| "grad_norm": 0.5677796602249146, |
| "learning_rate": 9.949016751638748e-05, |
| "loss": 1.7325, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.05671796645339789, |
| "grad_norm": 0.5454609990119934, |
| "learning_rate": 9.941733430444284e-05, |
| "loss": 2.0386, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.05740964897112225, |
| "grad_norm": 0.42700880765914917, |
| "learning_rate": 9.934450109249819e-05, |
| "loss": 2.1646, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.05810133148884662, |
| "grad_norm": 0.4880882203578949, |
| "learning_rate": 9.927166788055353e-05, |
| "loss": 1.8193, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.058793014006570984, |
| "grad_norm": 0.4928286373615265, |
| "learning_rate": 9.919883466860888e-05, |
| "loss": 1.6243, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.05948469652429535, |
| "grad_norm": 0.5179658532142639, |
| "learning_rate": 9.912600145666424e-05, |
| "loss": 1.6846, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.060176379042019716, |
| "grad_norm": 0.6409149765968323, |
| "learning_rate": 9.905316824471959e-05, |
| "loss": 0.8419, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.06086806155974408, |
| "grad_norm": 0.7547211050987244, |
| "learning_rate": 9.898033503277495e-05, |
| "loss": 2.399, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.06155974407746844, |
| "grad_norm": 0.412672758102417, |
| "learning_rate": 9.890750182083031e-05, |
| "loss": 2.2212, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.0622514265951928, |
| "grad_norm": 0.5434851050376892, |
| "learning_rate": 9.883466860888566e-05, |
| "loss": 2.3083, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.06294310911291717, |
| "grad_norm": 0.4428962767124176, |
| "learning_rate": 9.876183539694101e-05, |
| "loss": 2.3586, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.06363479163064154, |
| "grad_norm": 0.643883466720581, |
| "learning_rate": 9.868900218499635e-05, |
| "loss": 2.4555, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.0643264741483659, |
| "grad_norm": 1.8360555171966553, |
| "learning_rate": 9.861616897305172e-05, |
| "loss": 2.3463, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.06501815666609026, |
| "grad_norm": 0.5649489164352417, |
| "learning_rate": 9.854333576110706e-05, |
| "loss": 1.9054, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.06570983918381462, |
| "grad_norm": 0.5043310523033142, |
| "learning_rate": 9.847050254916242e-05, |
| "loss": 2.0604, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.066401521701539, |
| "grad_norm": 0.49205994606018066, |
| "learning_rate": 9.839766933721779e-05, |
| "loss": 2.5182, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.06709320421926336, |
| "grad_norm": 0.9012492299079895, |
| "learning_rate": 9.832483612527313e-05, |
| "loss": 1.9895, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.06778488673698772, |
| "grad_norm": 0.576653778553009, |
| "learning_rate": 9.825200291332848e-05, |
| "loss": 2.5201, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.06847656925471209, |
| "grad_norm": 0.4745285212993622, |
| "learning_rate": 9.817916970138383e-05, |
| "loss": 1.9348, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.06916825177243645, |
| "grad_norm": 0.5546420216560364, |
| "learning_rate": 9.810633648943919e-05, |
| "loss": 1.6732, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.06985993429016081, |
| "grad_norm": 0.8806080222129822, |
| "learning_rate": 9.803350327749454e-05, |
| "loss": 1.713, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.07055161680788519, |
| "grad_norm": 0.6029040217399597, |
| "learning_rate": 9.79606700655499e-05, |
| "loss": 1.5178, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.07124329932560955, |
| "grad_norm": 0.56731778383255, |
| "learning_rate": 9.788783685360526e-05, |
| "loss": 1.6848, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.07193498184333391, |
| "grad_norm": 0.5833808183670044, |
| "learning_rate": 9.781500364166059e-05, |
| "loss": 1.703, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.07262666436105827, |
| "grad_norm": 0.724471390247345, |
| "learning_rate": 9.774217042971595e-05, |
| "loss": 2.227, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.07331834687878264, |
| "grad_norm": 0.722097635269165, |
| "learning_rate": 9.76693372177713e-05, |
| "loss": 1.2173, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.074010029396507, |
| "grad_norm": 0.7538579702377319, |
| "learning_rate": 9.759650400582666e-05, |
| "loss": 1.8546, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.07470171191423136, |
| "grad_norm": 0.7746326327323914, |
| "learning_rate": 9.752367079388201e-05, |
| "loss": 2.4067, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.07539339443195574, |
| "grad_norm": 0.5197188258171082, |
| "learning_rate": 9.745083758193737e-05, |
| "loss": 1.9193, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.0760850769496801, |
| "grad_norm": 0.4536001682281494, |
| "learning_rate": 9.737800436999273e-05, |
| "loss": 2.2905, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.07677675946740446, |
| "grad_norm": 0.6520904302597046, |
| "learning_rate": 9.730517115804807e-05, |
| "loss": 2.012, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.07746844198512882, |
| "grad_norm": 0.5670090913772583, |
| "learning_rate": 9.723233794610343e-05, |
| "loss": 2.0289, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.07816012450285319, |
| "grad_norm": 0.8609398603439331, |
| "learning_rate": 9.715950473415877e-05, |
| "loss": 1.332, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.07885180702057755, |
| "grad_norm": 0.5880535244941711, |
| "learning_rate": 9.708667152221414e-05, |
| "loss": 1.7267, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.07954348953830193, |
| "grad_norm": 0.6090431213378906, |
| "learning_rate": 9.701383831026948e-05, |
| "loss": 1.3968, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.08023517205602629, |
| "grad_norm": 0.5701532363891602, |
| "learning_rate": 9.694100509832484e-05, |
| "loss": 2.1829, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.08092685457375065, |
| "grad_norm": 0.6294519305229187, |
| "learning_rate": 9.68681718863802e-05, |
| "loss": 1.7385, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.08161853709147501, |
| "grad_norm": 0.5141008496284485, |
| "learning_rate": 9.679533867443554e-05, |
| "loss": 1.8417, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.08231021960919938, |
| "grad_norm": 1.0640853643417358, |
| "learning_rate": 9.67225054624909e-05, |
| "loss": 2.1031, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.08300190212692374, |
| "grad_norm": 0.6235936284065247, |
| "learning_rate": 9.664967225054625e-05, |
| "loss": 1.9911, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.0836935846446481, |
| "grad_norm": 0.8041340112686157, |
| "learning_rate": 9.657683903860161e-05, |
| "loss": 1.9172, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.08438526716237248, |
| "grad_norm": 0.6447221040725708, |
| "learning_rate": 9.650400582665696e-05, |
| "loss": 2.0157, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.08507694968009684, |
| "grad_norm": 0.7299740314483643, |
| "learning_rate": 9.643117261471232e-05, |
| "loss": 1.9157, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.0857686321978212, |
| "grad_norm": 0.8950564861297607, |
| "learning_rate": 9.635833940276767e-05, |
| "loss": 1.4313, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.08646031471554556, |
| "grad_norm": 0.5062771439552307, |
| "learning_rate": 9.628550619082301e-05, |
| "loss": 1.7742, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.08715199723326993, |
| "grad_norm": 0.7482561469078064, |
| "learning_rate": 9.621267297887837e-05, |
| "loss": 1.6122, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.08784367975099429, |
| "grad_norm": 0.4244556725025177, |
| "learning_rate": 9.613983976693372e-05, |
| "loss": 1.3825, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.08853536226871866, |
| "grad_norm": 0.5349308252334595, |
| "learning_rate": 9.606700655498908e-05, |
| "loss": 1.9771, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.08922704478644303, |
| "grad_norm": 0.5429610013961792, |
| "learning_rate": 9.599417334304443e-05, |
| "loss": 2.5704, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.08991872730416739, |
| "grad_norm": 0.4875735938549042, |
| "learning_rate": 9.592134013109979e-05, |
| "loss": 2.0568, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.09061040982189175, |
| "grad_norm": 0.5956497192382812, |
| "learning_rate": 9.584850691915514e-05, |
| "loss": 2.3604, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.09130209233961611, |
| "grad_norm": 1.0286632776260376, |
| "learning_rate": 9.577567370721049e-05, |
| "loss": 1.278, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.09199377485734048, |
| "grad_norm": 0.5342454314231873, |
| "learning_rate": 9.570284049526585e-05, |
| "loss": 1.7806, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.09268545737506484, |
| "grad_norm": 0.5517177581787109, |
| "learning_rate": 9.56300072833212e-05, |
| "loss": 2.0241, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.09337713989278922, |
| "grad_norm": 0.6514694094657898, |
| "learning_rate": 9.555717407137656e-05, |
| "loss": 2.002, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.09406882241051358, |
| "grad_norm": 0.6356580257415771, |
| "learning_rate": 9.54843408594319e-05, |
| "loss": 2.2756, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.09476050492823794, |
| "grad_norm": 0.6672503352165222, |
| "learning_rate": 9.541150764748726e-05, |
| "loss": 1.4608, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.0954521874459623, |
| "grad_norm": 0.8465030193328857, |
| "learning_rate": 9.533867443554261e-05, |
| "loss": 2.0107, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.09614386996368667, |
| "grad_norm": 0.6064625978469849, |
| "learning_rate": 9.526584122359796e-05, |
| "loss": 2.1404, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.09683555248141103, |
| "grad_norm": 0.6728728413581848, |
| "learning_rate": 9.519300801165332e-05, |
| "loss": 1.7548, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.0975272349991354, |
| "grad_norm": 0.7992367744445801, |
| "learning_rate": 9.512017479970867e-05, |
| "loss": 1.5071, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.09821891751685977, |
| "grad_norm": 0.466144859790802, |
| "learning_rate": 9.504734158776403e-05, |
| "loss": 1.5908, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.09891060003458413, |
| "grad_norm": 0.694807767868042, |
| "learning_rate": 9.497450837581938e-05, |
| "loss": 1.6261, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.09960228255230849, |
| "grad_norm": 0.5561632513999939, |
| "learning_rate": 9.490167516387472e-05, |
| "loss": 1.2933, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.10029396507003285, |
| "grad_norm": 0.6609554886817932, |
| "learning_rate": 9.482884195193008e-05, |
| "loss": 1.7889, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.10098564758775722, |
| "grad_norm": 0.9272559285163879, |
| "learning_rate": 9.475600873998543e-05, |
| "loss": 1.2951, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.10167733010548158, |
| "grad_norm": 1.5346007347106934, |
| "learning_rate": 9.46831755280408e-05, |
| "loss": 1.5395, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.10236901262320595, |
| "grad_norm": 0.8031420111656189, |
| "learning_rate": 9.461034231609614e-05, |
| "loss": 2.2035, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.10306069514093032, |
| "grad_norm": 0.5305848121643066, |
| "learning_rate": 9.45375091041515e-05, |
| "loss": 1.784, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.10375237765865468, |
| "grad_norm": 0.6742457151412964, |
| "learning_rate": 9.446467589220685e-05, |
| "loss": 2.0042, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.10444406017637904, |
| "grad_norm": 0.7282371520996094, |
| "learning_rate": 9.43918426802622e-05, |
| "loss": 2.2441, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.1051357426941034, |
| "grad_norm": 0.5512478351593018, |
| "learning_rate": 9.431900946831756e-05, |
| "loss": 2.0307, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.10582742521182777, |
| "grad_norm": 0.7224892377853394, |
| "learning_rate": 9.42461762563729e-05, |
| "loss": 2.4692, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.10651910772955213, |
| "grad_norm": 0.7150198817253113, |
| "learning_rate": 9.417334304442827e-05, |
| "loss": 2.0194, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.1072107902472765, |
| "grad_norm": 0.5024279356002808, |
| "learning_rate": 9.410050983248361e-05, |
| "loss": 2.0461, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.10790247276500087, |
| "grad_norm": 1.0661766529083252, |
| "learning_rate": 9.402767662053898e-05, |
| "loss": 1.3458, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.10859415528272523, |
| "grad_norm": 0.577153205871582, |
| "learning_rate": 9.395484340859432e-05, |
| "loss": 1.517, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.10928583780044959, |
| "grad_norm": 0.6876221895217896, |
| "learning_rate": 9.388201019664967e-05, |
| "loss": 1.9622, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.10997752031817395, |
| "grad_norm": 0.5576820373535156, |
| "learning_rate": 9.380917698470503e-05, |
| "loss": 1.6063, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.11066920283589832, |
| "grad_norm": 0.8604760766029358, |
| "learning_rate": 9.373634377276038e-05, |
| "loss": 1.8693, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.1113608853536227, |
| "grad_norm": 0.6998944282531738, |
| "learning_rate": 9.366351056081574e-05, |
| "loss": 1.8942, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.11205256787134706, |
| "grad_norm": 0.7106878757476807, |
| "learning_rate": 9.359067734887109e-05, |
| "loss": 1.9999, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.11274425038907142, |
| "grad_norm": 1.167450189590454, |
| "learning_rate": 9.351784413692645e-05, |
| "loss": 1.26, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.11343593290679578, |
| "grad_norm": 0.6208682060241699, |
| "learning_rate": 9.34450109249818e-05, |
| "loss": 1.7881, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.11412761542452014, |
| "grad_norm": 0.7046381235122681, |
| "learning_rate": 9.337217771303714e-05, |
| "loss": 1.7403, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.1148192979422445, |
| "grad_norm": 0.6957246661186218, |
| "learning_rate": 9.32993445010925e-05, |
| "loss": 2.1148, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.11551098045996887, |
| "grad_norm": 0.6169411540031433, |
| "learning_rate": 9.322651128914785e-05, |
| "loss": 1.9182, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.11620266297769324, |
| "grad_norm": 0.8539403080940247, |
| "learning_rate": 9.315367807720321e-05, |
| "loss": 1.7015, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.1168943454954176, |
| "grad_norm": 1.037312626838684, |
| "learning_rate": 9.308084486525856e-05, |
| "loss": 1.3205, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.11758602801314197, |
| "grad_norm": 0.5971755981445312, |
| "learning_rate": 9.300801165331392e-05, |
| "loss": 1.1845, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.11827771053086633, |
| "grad_norm": 0.5809158086776733, |
| "learning_rate": 9.293517844136927e-05, |
| "loss": 1.5852, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.1189693930485907, |
| "grad_norm": 0.6503626704216003, |
| "learning_rate": 9.286234522942462e-05, |
| "loss": 1.9045, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.11966107556631506, |
| "grad_norm": 0.5208730101585388, |
| "learning_rate": 9.278951201747998e-05, |
| "loss": 1.7599, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.12035275808403943, |
| "grad_norm": 0.76451176404953, |
| "learning_rate": 9.271667880553533e-05, |
| "loss": 2.2624, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.1210444406017638, |
| "grad_norm": 0.5927959084510803, |
| "learning_rate": 9.264384559359069e-05, |
| "loss": 1.5911, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.12173612311948816, |
| "grad_norm": 0.6512097716331482, |
| "learning_rate": 9.257101238164603e-05, |
| "loss": 1.3409, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.12242780563721252, |
| "grad_norm": 0.4837232530117035, |
| "learning_rate": 9.24981791697014e-05, |
| "loss": 1.2417, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.12311948815493688, |
| "grad_norm": 0.6251150369644165, |
| "learning_rate": 9.242534595775674e-05, |
| "loss": 2.1377, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.12381117067266124, |
| "grad_norm": 0.5964468717575073, |
| "learning_rate": 9.235251274581209e-05, |
| "loss": 1.616, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.1245028531903856, |
| "grad_norm": 0.8185293674468994, |
| "learning_rate": 9.227967953386745e-05, |
| "loss": 2.372, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.12519453570810998, |
| "grad_norm": 0.7391765117645264, |
| "learning_rate": 9.22068463219228e-05, |
| "loss": 2.0473, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.12588621822583435, |
| "grad_norm": 0.6867517828941345, |
| "learning_rate": 9.213401310997816e-05, |
| "loss": 2.2592, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.1265779007435587, |
| "grad_norm": 0.6889393329620361, |
| "learning_rate": 9.206117989803351e-05, |
| "loss": 1.8863, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.12726958326128307, |
| "grad_norm": 0.6404229402542114, |
| "learning_rate": 9.198834668608885e-05, |
| "loss": 1.5859, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.12796126577900743, |
| "grad_norm": 2.46905255317688, |
| "learning_rate": 9.191551347414422e-05, |
| "loss": 2.4798, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.1286529482967318, |
| "grad_norm": 0.7490037083625793, |
| "learning_rate": 9.184268026219956e-05, |
| "loss": 2.1623, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.12934463081445616, |
| "grad_norm": 0.5947558283805847, |
| "learning_rate": 9.176984705025492e-05, |
| "loss": 2.2375, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.13003631333218052, |
| "grad_norm": 0.4829116463661194, |
| "learning_rate": 9.169701383831027e-05, |
| "loss": 1.66, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.13072799584990488, |
| "grad_norm": 0.5807337164878845, |
| "learning_rate": 9.162418062636563e-05, |
| "loss": 1.3098, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.13141967836762924, |
| "grad_norm": 0.8253926038742065, |
| "learning_rate": 9.155134741442098e-05, |
| "loss": 1.135, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.13211136088535363, |
| "grad_norm": 0.7517785429954529, |
| "learning_rate": 9.147851420247633e-05, |
| "loss": 1.6342, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.132803043403078, |
| "grad_norm": 0.791852593421936, |
| "learning_rate": 9.140568099053169e-05, |
| "loss": 1.3013, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.13349472592080236, |
| "grad_norm": 0.6530910730361938, |
| "learning_rate": 9.133284777858704e-05, |
| "loss": 2.4537, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.13418640843852672, |
| "grad_norm": 0.8071674704551697, |
| "learning_rate": 9.12600145666424e-05, |
| "loss": 1.1286, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.13487809095625108, |
| "grad_norm": 0.5800924301147461, |
| "learning_rate": 9.118718135469774e-05, |
| "loss": 1.913, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.13556977347397545, |
| "grad_norm": 0.6227284073829651, |
| "learning_rate": 9.11143481427531e-05, |
| "loss": 2.1569, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.1362614559916998, |
| "grad_norm": 0.7267847657203674, |
| "learning_rate": 9.104151493080845e-05, |
| "loss": 2.4129, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.13695313850942417, |
| "grad_norm": 0.6935005784034729, |
| "learning_rate": 9.09686817188638e-05, |
| "loss": 1.2993, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.13764482102714853, |
| "grad_norm": 0.5625126957893372, |
| "learning_rate": 9.089584850691916e-05, |
| "loss": 1.6137, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.1383365035448729, |
| "grad_norm": 0.6450534462928772, |
| "learning_rate": 9.082301529497451e-05, |
| "loss": 2.0705, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.13902818606259726, |
| "grad_norm": 0.7700170874595642, |
| "learning_rate": 9.075018208302987e-05, |
| "loss": 1.2858, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.13971986858032162, |
| "grad_norm": 0.4889370799064636, |
| "learning_rate": 9.067734887108522e-05, |
| "loss": 1.1423, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.14041155109804598, |
| "grad_norm": 0.5464507341384888, |
| "learning_rate": 9.060451565914058e-05, |
| "loss": 1.0928, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.14110323361577037, |
| "grad_norm": 0.7002052664756775, |
| "learning_rate": 9.053168244719593e-05, |
| "loss": 2.154, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.14179491613349474, |
| "grad_norm": 0.73245769739151, |
| "learning_rate": 9.045884923525127e-05, |
| "loss": 2.3437, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.1424865986512191, |
| "grad_norm": 0.6388504505157471, |
| "learning_rate": 9.038601602330664e-05, |
| "loss": 1.6209, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.14317828116894346, |
| "grad_norm": 0.5278307199478149, |
| "learning_rate": 9.031318281136198e-05, |
| "loss": 1.1904, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.14386996368666782, |
| "grad_norm": 0.6711840033531189, |
| "learning_rate": 9.024034959941734e-05, |
| "loss": 1.7129, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.14456164620439219, |
| "grad_norm": 0.6358078122138977, |
| "learning_rate": 9.016751638747269e-05, |
| "loss": 1.6655, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.14525332872211655, |
| "grad_norm": 0.6745946407318115, |
| "learning_rate": 9.009468317552805e-05, |
| "loss": 1.8217, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.1459450112398409, |
| "grad_norm": 2.227377414703369, |
| "learning_rate": 9.00218499635834e-05, |
| "loss": 3.1095, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.14663669375756527, |
| "grad_norm": 0.5836493968963623, |
| "learning_rate": 8.994901675163875e-05, |
| "loss": 1.9259, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.14732837627528964, |
| "grad_norm": 0.6573939323425293, |
| "learning_rate": 8.987618353969411e-05, |
| "loss": 1.9532, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.148020058793014, |
| "grad_norm": 0.8865386843681335, |
| "learning_rate": 8.980335032774946e-05, |
| "loss": 1.7339, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.14871174131073836, |
| "grad_norm": 0.7364823818206787, |
| "learning_rate": 8.973051711580482e-05, |
| "loss": 1.2418, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.14940342382846272, |
| "grad_norm": 0.7573681473731995, |
| "learning_rate": 8.965768390386016e-05, |
| "loss": 1.5159, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.1500951063461871, |
| "grad_norm": 0.6876881718635559, |
| "learning_rate": 8.958485069191553e-05, |
| "loss": 2.2091, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.15078678886391148, |
| "grad_norm": 0.6091864705085754, |
| "learning_rate": 8.951201747997087e-05, |
| "loss": 1.2208, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.15147847138163584, |
| "grad_norm": 0.7913519740104675, |
| "learning_rate": 8.943918426802622e-05, |
| "loss": 2.3047, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.1521701538993602, |
| "grad_norm": 1.3490211963653564, |
| "learning_rate": 8.936635105608158e-05, |
| "loss": 1.7839, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.15286183641708456, |
| "grad_norm": 0.8339107036590576, |
| "learning_rate": 8.929351784413693e-05, |
| "loss": 2.1117, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.15355351893480892, |
| "grad_norm": 1.5490026473999023, |
| "learning_rate": 8.922068463219229e-05, |
| "loss": 1.8909, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.1542452014525333, |
| "grad_norm": 0.7403117418289185, |
| "learning_rate": 8.914785142024764e-05, |
| "loss": 1.838, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.15493688397025765, |
| "grad_norm": 0.9296958446502686, |
| "learning_rate": 8.907501820830299e-05, |
| "loss": 1.8148, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.155628566487982, |
| "grad_norm": 0.6639947891235352, |
| "learning_rate": 8.900218499635835e-05, |
| "loss": 1.8576, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.15632024900570637, |
| "grad_norm": 0.7341317534446716, |
| "learning_rate": 8.89293517844137e-05, |
| "loss": 1.9323, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.15701193152343074, |
| "grad_norm": 0.5162379145622253, |
| "learning_rate": 8.885651857246906e-05, |
| "loss": 2.1562, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.1577036140411551, |
| "grad_norm": 0.6806768774986267, |
| "learning_rate": 8.87836853605244e-05, |
| "loss": 1.8393, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.15839529655887946, |
| "grad_norm": 0.5701708197593689, |
| "learning_rate": 8.871085214857976e-05, |
| "loss": 1.4365, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.15908697907660385, |
| "grad_norm": 0.7863550186157227, |
| "learning_rate": 8.863801893663511e-05, |
| "loss": 2.071, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.15977866159432821, |
| "grad_norm": 0.6164423823356628, |
| "learning_rate": 8.856518572469046e-05, |
| "loss": 2.0573, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.16047034411205258, |
| "grad_norm": 0.8759453892707825, |
| "learning_rate": 8.849235251274582e-05, |
| "loss": 1.8708, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.16116202662977694, |
| "grad_norm": 1.3226007223129272, |
| "learning_rate": 8.841951930080117e-05, |
| "loss": 1.4042, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.1618537091475013, |
| "grad_norm": 0.5054848790168762, |
| "learning_rate": 8.834668608885653e-05, |
| "loss": 1.2018, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.16254539166522566, |
| "grad_norm": 0.6680052876472473, |
| "learning_rate": 8.827385287691188e-05, |
| "loss": 2.1112, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.16323707418295003, |
| "grad_norm": 0.838471531867981, |
| "learning_rate": 8.820101966496724e-05, |
| "loss": 2.3499, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.1639287567006744, |
| "grad_norm": 0.6450280547142029, |
| "learning_rate": 8.812818645302258e-05, |
| "loss": 1.5009, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.16462043921839875, |
| "grad_norm": 0.5540733933448792, |
| "learning_rate": 8.805535324107793e-05, |
| "loss": 1.7475, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.1653121217361231, |
| "grad_norm": 0.6765146255493164, |
| "learning_rate": 8.798252002913329e-05, |
| "loss": 2.0798, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.16600380425384748, |
| "grad_norm": 0.6925728917121887, |
| "learning_rate": 8.790968681718864e-05, |
| "loss": 1.9599, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.16669548677157184, |
| "grad_norm": 0.787634015083313, |
| "learning_rate": 8.7836853605244e-05, |
| "loss": 1.5637, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.1673871692892962, |
| "grad_norm": 0.7191415429115295, |
| "learning_rate": 8.776402039329935e-05, |
| "loss": 2.1707, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.1680788518070206, |
| "grad_norm": 0.7958317995071411, |
| "learning_rate": 8.769118718135471e-05, |
| "loss": 2.147, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.16877053432474495, |
| "grad_norm": 0.6804454922676086, |
| "learning_rate": 8.761835396941004e-05, |
| "loss": 1.9832, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.16946221684246932, |
| "grad_norm": 0.8284922242164612, |
| "learning_rate": 8.75455207574654e-05, |
| "loss": 1.2555, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.17015389936019368, |
| "grad_norm": 0.7180774211883545, |
| "learning_rate": 8.747268754552075e-05, |
| "loss": 1.8228, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.17084558187791804, |
| "grad_norm": 0.8632107377052307, |
| "learning_rate": 8.739985433357611e-05, |
| "loss": 1.1625, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.1715372643956424, |
| "grad_norm": 0.5004351139068604, |
| "learning_rate": 8.732702112163147e-05, |
| "loss": 1.6948, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.17222894691336676, |
| "grad_norm": 0.6219859719276428, |
| "learning_rate": 8.725418790968682e-05, |
| "loss": 1.6748, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.17292062943109113, |
| "grad_norm": 0.8690136075019836, |
| "learning_rate": 8.718135469774218e-05, |
| "loss": 1.7746, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.1736123119488155, |
| "grad_norm": 0.6689561605453491, |
| "learning_rate": 8.710852148579752e-05, |
| "loss": 1.1883, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.17430399446653985, |
| "grad_norm": 1.0592402219772339, |
| "learning_rate": 8.703568827385288e-05, |
| "loss": 1.6322, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.17499567698426421, |
| "grad_norm": 0.721960723400116, |
| "learning_rate": 8.696285506190823e-05, |
| "loss": 1.8509, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.17568735950198858, |
| "grad_norm": 0.7862136960029602, |
| "learning_rate": 8.689002184996359e-05, |
| "loss": 1.5465, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.17637904201971294, |
| "grad_norm": 0.62255859375, |
| "learning_rate": 8.681718863801895e-05, |
| "loss": 1.7961, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.17707072453743733, |
| "grad_norm": 0.8009974956512451, |
| "learning_rate": 8.67443554260743e-05, |
| "loss": 1.7475, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.1777624070551617, |
| "grad_norm": 0.6300957202911377, |
| "learning_rate": 8.667152221412966e-05, |
| "loss": 1.8628, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.17845408957288605, |
| "grad_norm": 0.5678866505622864, |
| "learning_rate": 8.659868900218499e-05, |
| "loss": 1.2443, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.17914577209061042, |
| "grad_norm": 0.6319582462310791, |
| "learning_rate": 8.652585579024035e-05, |
| "loss": 1.3356, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.17983745460833478, |
| "grad_norm": 0.5843179821968079, |
| "learning_rate": 8.64530225782957e-05, |
| "loss": 1.9961, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.18052913712605914, |
| "grad_norm": 0.6211555600166321, |
| "learning_rate": 8.638018936635106e-05, |
| "loss": 1.1243, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.1812208196437835, |
| "grad_norm": 0.7266420722007751, |
| "learning_rate": 8.630735615440642e-05, |
| "loss": 2.2338, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.18191250216150787, |
| "grad_norm": 0.8187587857246399, |
| "learning_rate": 8.623452294246177e-05, |
| "loss": 2.0237, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.18260418467923223, |
| "grad_norm": 0.9444418549537659, |
| "learning_rate": 8.616168973051712e-05, |
| "loss": 1.6278, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.1832958671969566, |
| "grad_norm": 0.5889193415641785, |
| "learning_rate": 8.608885651857246e-05, |
| "loss": 2.1453, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.18398754971468095, |
| "grad_norm": 0.6819151639938354, |
| "learning_rate": 8.601602330662782e-05, |
| "loss": 1.2219, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.18467923223240532, |
| "grad_norm": 0.8448613286018372, |
| "learning_rate": 8.594319009468317e-05, |
| "loss": 2.1193, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.18537091475012968, |
| "grad_norm": 0.7247136235237122, |
| "learning_rate": 8.587035688273853e-05, |
| "loss": 1.6155, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.18606259726785407, |
| "grad_norm": 0.7371733784675598, |
| "learning_rate": 8.57975236707939e-05, |
| "loss": 1.5466, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.18675427978557843, |
| "grad_norm": 0.6676841974258423, |
| "learning_rate": 8.572469045884924e-05, |
| "loss": 2.1909, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.1874459623033028, |
| "grad_norm": 0.7091754078865051, |
| "learning_rate": 8.565185724690459e-05, |
| "loss": 2.3795, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.18813764482102716, |
| "grad_norm": 0.7999275922775269, |
| "learning_rate": 8.557902403495994e-05, |
| "loss": 2.0659, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.18882932733875152, |
| "grad_norm": 0.5176097750663757, |
| "learning_rate": 8.55061908230153e-05, |
| "loss": 1.4036, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.18952100985647588, |
| "grad_norm": 0.6579016447067261, |
| "learning_rate": 8.543335761107065e-05, |
| "loss": 1.9072, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.19021269237420024, |
| "grad_norm": 1.0604159832000732, |
| "learning_rate": 8.536052439912601e-05, |
| "loss": 2.1682, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.1909043748919246, |
| "grad_norm": 0.7157823443412781, |
| "learning_rate": 8.528769118718137e-05, |
| "loss": 1.9188, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.19159605740964897, |
| "grad_norm": 0.691941499710083, |
| "learning_rate": 8.521485797523672e-05, |
| "loss": 1.8626, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.19228773992737333, |
| "grad_norm": 0.5831330418586731, |
| "learning_rate": 8.514202476329206e-05, |
| "loss": 1.9975, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.1929794224450977, |
| "grad_norm": 0.6630810499191284, |
| "learning_rate": 8.506919155134741e-05, |
| "loss": 2.0809, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.19367110496282205, |
| "grad_norm": 0.601836621761322, |
| "learning_rate": 8.499635833940277e-05, |
| "loss": 1.2491, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.19436278748054642, |
| "grad_norm": 0.7532071471214294, |
| "learning_rate": 8.492352512745812e-05, |
| "loss": 2.0672, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.1950544699982708, |
| "grad_norm": 0.70450758934021, |
| "learning_rate": 8.485069191551348e-05, |
| "loss": 2.1466, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.19574615251599517, |
| "grad_norm": 0.6915740966796875, |
| "learning_rate": 8.477785870356884e-05, |
| "loss": 1.4086, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.19643783503371953, |
| "grad_norm": 0.949400007724762, |
| "learning_rate": 8.470502549162418e-05, |
| "loss": 1.6344, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.1971295175514439, |
| "grad_norm": 0.7051184177398682, |
| "learning_rate": 8.463219227967954e-05, |
| "loss": 1.4375, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.19782120006916826, |
| "grad_norm": 0.5827087163925171, |
| "learning_rate": 8.455935906773488e-05, |
| "loss": 1.4801, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.19851288258689262, |
| "grad_norm": 0.6058622002601624, |
| "learning_rate": 8.448652585579024e-05, |
| "loss": 1.4378, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.19920456510461698, |
| "grad_norm": 0.7779908180236816, |
| "learning_rate": 8.441369264384559e-05, |
| "loss": 1.6377, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.19989624762234134, |
| "grad_norm": 0.6972940564155579, |
| "learning_rate": 8.434085943190095e-05, |
| "loss": 1.9437, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.2005879301400657, |
| "grad_norm": 0.7289730906486511, |
| "learning_rate": 8.426802621995631e-05, |
| "loss": 1.5402, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.20127961265779007, |
| "grad_norm": 0.6566206216812134, |
| "learning_rate": 8.419519300801165e-05, |
| "loss": 1.4304, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.20197129517551443, |
| "grad_norm": 0.97121262550354, |
| "learning_rate": 8.412235979606701e-05, |
| "loss": 1.7801, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.2026629776932388, |
| "grad_norm": 0.7539506554603577, |
| "learning_rate": 8.404952658412236e-05, |
| "loss": 1.5326, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.20335466021096316, |
| "grad_norm": 0.8037034869194031, |
| "learning_rate": 8.397669337217772e-05, |
| "loss": 1.5559, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.20404634272868752, |
| "grad_norm": 1.2110254764556885, |
| "learning_rate": 8.390386016023307e-05, |
| "loss": 1.7529, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.2047380252464119, |
| "grad_norm": 0.9396728873252869, |
| "learning_rate": 8.383102694828843e-05, |
| "loss": 2.0101, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.20542970776413627, |
| "grad_norm": 0.7678546905517578, |
| "learning_rate": 8.375819373634379e-05, |
| "loss": 2.0231, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.20612139028186063, |
| "grad_norm": 1.1070085763931274, |
| "learning_rate": 8.368536052439912e-05, |
| "loss": 1.7999, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.206813072799585, |
| "grad_norm": 0.7589883804321289, |
| "learning_rate": 8.361252731245448e-05, |
| "loss": 1.6723, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.20750475531730936, |
| "grad_norm": 0.6468162536621094, |
| "learning_rate": 8.353969410050983e-05, |
| "loss": 1.3266, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.20819643783503372, |
| "grad_norm": 0.7720943689346313, |
| "learning_rate": 8.346686088856519e-05, |
| "loss": 1.5897, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.20888812035275808, |
| "grad_norm": 0.7349941730499268, |
| "learning_rate": 8.339402767662054e-05, |
| "loss": 1.9242, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.20957980287048245, |
| "grad_norm": 0.7902266979217529, |
| "learning_rate": 8.33211944646759e-05, |
| "loss": 2.2367, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.2102714853882068, |
| "grad_norm": 0.822661280632019, |
| "learning_rate": 8.324836125273125e-05, |
| "loss": 2.02, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.21096316790593117, |
| "grad_norm": 0.6001129746437073, |
| "learning_rate": 8.31755280407866e-05, |
| "loss": 1.4506, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.21165485042365553, |
| "grad_norm": 0.7674906253814697, |
| "learning_rate": 8.310269482884196e-05, |
| "loss": 1.9657, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.2123465329413799, |
| "grad_norm": 0.5808192491531372, |
| "learning_rate": 8.30298616168973e-05, |
| "loss": 1.8088, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.21303821545910426, |
| "grad_norm": 0.7100119590759277, |
| "learning_rate": 8.295702840495266e-05, |
| "loss": 1.9353, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.21372989797682865, |
| "grad_norm": 0.7359102964401245, |
| "learning_rate": 8.288419519300801e-05, |
| "loss": 1.7844, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.214421580494553, |
| "grad_norm": 0.7489526867866516, |
| "learning_rate": 8.281136198106337e-05, |
| "loss": 1.6238, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.21511326301227737, |
| "grad_norm": 0.8063543438911438, |
| "learning_rate": 8.273852876911872e-05, |
| "loss": 1.6634, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.21580494553000173, |
| "grad_norm": 0.7559211850166321, |
| "learning_rate": 8.266569555717407e-05, |
| "loss": 1.9663, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.2164966280477261, |
| "grad_norm": 0.7710449695587158, |
| "learning_rate": 8.259286234522943e-05, |
| "loss": 2.0552, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.21718831056545046, |
| "grad_norm": 1.0111525058746338, |
| "learning_rate": 8.252002913328478e-05, |
| "loss": 1.668, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.21787999308317482, |
| "grad_norm": 0.7205057144165039, |
| "learning_rate": 8.244719592134014e-05, |
| "loss": 2.1546, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.21857167560089918, |
| "grad_norm": 0.9559133052825928, |
| "learning_rate": 8.237436270939549e-05, |
| "loss": 1.6799, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.21926335811862355, |
| "grad_norm": 0.7418698668479919, |
| "learning_rate": 8.230152949745085e-05, |
| "loss": 1.2123, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.2199550406363479, |
| "grad_norm": 0.7784197330474854, |
| "learning_rate": 8.22286962855062e-05, |
| "loss": 1.1066, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.22064672315407227, |
| "grad_norm": 0.754654586315155, |
| "learning_rate": 8.215586307356154e-05, |
| "loss": 2.0185, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.22133840567179663, |
| "grad_norm": 0.6510013341903687, |
| "learning_rate": 8.20830298616169e-05, |
| "loss": 1.8316, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.222030088189521, |
| "grad_norm": 1.541455626487732, |
| "learning_rate": 8.201019664967225e-05, |
| "loss": 1.3254, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.2227217707072454, |
| "grad_norm": 0.8958970308303833, |
| "learning_rate": 8.193736343772761e-05, |
| "loss": 1.9616, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.22341345322496975, |
| "grad_norm": 0.6273507475852966, |
| "learning_rate": 8.186453022578296e-05, |
| "loss": 1.13, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.2241051357426941, |
| "grad_norm": 0.7866650223731995, |
| "learning_rate": 8.17916970138383e-05, |
| "loss": 1.784, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.22479681826041847, |
| "grad_norm": 0.9650758504867554, |
| "learning_rate": 8.171886380189367e-05, |
| "loss": 0.9438, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.22548850077814284, |
| "grad_norm": 0.7491368055343628, |
| "learning_rate": 8.164603058994901e-05, |
| "loss": 1.1472, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.2261801832958672, |
| "grad_norm": 0.7516188025474548, |
| "learning_rate": 8.157319737800438e-05, |
| "loss": 1.841, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.22687186581359156, |
| "grad_norm": 0.8555276989936829, |
| "learning_rate": 8.150036416605972e-05, |
| "loss": 2.0819, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.22756354833131592, |
| "grad_norm": 0.8025880455970764, |
| "learning_rate": 8.142753095411508e-05, |
| "loss": 1.871, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.22825523084904029, |
| "grad_norm": 0.8006131052970886, |
| "learning_rate": 8.135469774217043e-05, |
| "loss": 1.8293, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.22894691336676465, |
| "grad_norm": 0.6898099184036255, |
| "learning_rate": 8.128186453022578e-05, |
| "loss": 1.9701, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.229638595884489, |
| "grad_norm": 0.7827674150466919, |
| "learning_rate": 8.120903131828114e-05, |
| "loss": 1.9634, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.23033027840221337, |
| "grad_norm": 0.7763857245445251, |
| "learning_rate": 8.113619810633649e-05, |
| "loss": 1.4908, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.23102196091993774, |
| "grad_norm": 0.7040197253227234, |
| "learning_rate": 8.106336489439185e-05, |
| "loss": 1.9854, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.23171364343766213, |
| "grad_norm": 1.0337374210357666, |
| "learning_rate": 8.09905316824472e-05, |
| "loss": 2.1542, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.2324053259553865, |
| "grad_norm": 0.7228108048439026, |
| "learning_rate": 8.091769847050256e-05, |
| "loss": 1.9267, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.23309700847311085, |
| "grad_norm": 0.6841875910758972, |
| "learning_rate": 8.08448652585579e-05, |
| "loss": 1.769, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.2337886909908352, |
| "grad_norm": 0.6749361753463745, |
| "learning_rate": 8.077203204661325e-05, |
| "loss": 1.7758, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.23448037350855958, |
| "grad_norm": 0.6411470174789429, |
| "learning_rate": 8.069919883466861e-05, |
| "loss": 1.2938, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.23517205602628394, |
| "grad_norm": 0.6457031965255737, |
| "learning_rate": 8.062636562272396e-05, |
| "loss": 1.3416, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.2358637385440083, |
| "grad_norm": 0.7273378372192383, |
| "learning_rate": 8.055353241077932e-05, |
| "loss": 1.5468, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.23655542106173266, |
| "grad_norm": 0.7910048365592957, |
| "learning_rate": 8.048069919883467e-05, |
| "loss": 1.8577, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.23724710357945702, |
| "grad_norm": 0.7172124981880188, |
| "learning_rate": 8.040786598689003e-05, |
| "loss": 2.2877, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.2379387860971814, |
| "grad_norm": 0.6511913537979126, |
| "learning_rate": 8.033503277494538e-05, |
| "loss": 1.0914, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.23863046861490575, |
| "grad_norm": 0.6148791909217834, |
| "learning_rate": 8.026219956300073e-05, |
| "loss": 1.3388, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.2393221511326301, |
| "grad_norm": 0.7384858131408691, |
| "learning_rate": 8.018936635105609e-05, |
| "loss": 1.8186, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.24001383365035447, |
| "grad_norm": 0.793538510799408, |
| "learning_rate": 8.011653313911143e-05, |
| "loss": 1.5962, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.24070551616807886, |
| "grad_norm": 0.6316525936126709, |
| "learning_rate": 8.00436999271668e-05, |
| "loss": 1.4712, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.24139719868580323, |
| "grad_norm": 0.7671139240264893, |
| "learning_rate": 7.997086671522214e-05, |
| "loss": 2.2086, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.2420888812035276, |
| "grad_norm": 0.7423043251037598, |
| "learning_rate": 7.98980335032775e-05, |
| "loss": 2.1409, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.24278056372125195, |
| "grad_norm": 0.834575891494751, |
| "learning_rate": 7.982520029133285e-05, |
| "loss": 1.5166, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.24347224623897631, |
| "grad_norm": 1.0157177448272705, |
| "learning_rate": 7.97523670793882e-05, |
| "loss": 1.7054, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.24416392875670068, |
| "grad_norm": 0.8765892386436462, |
| "learning_rate": 7.967953386744356e-05, |
| "loss": 2.4397, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.24485561127442504, |
| "grad_norm": 0.6456695795059204, |
| "learning_rate": 7.960670065549891e-05, |
| "loss": 1.2428, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.2455472937921494, |
| "grad_norm": 0.813552737236023, |
| "learning_rate": 7.953386744355427e-05, |
| "loss": 1.7926, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.24623897630987376, |
| "grad_norm": 0.6702203750610352, |
| "learning_rate": 7.946103423160962e-05, |
| "loss": 1.6654, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.24693065882759813, |
| "grad_norm": 0.9113941788673401, |
| "learning_rate": 7.938820101966498e-05, |
| "loss": 2.1639, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.2476223413453225, |
| "grad_norm": 0.6006965041160583, |
| "learning_rate": 7.931536780772032e-05, |
| "loss": 1.815, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.24831402386304685, |
| "grad_norm": 0.9116283059120178, |
| "learning_rate": 7.924253459577567e-05, |
| "loss": 1.5244, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.2490057063807712, |
| "grad_norm": 0.8563610315322876, |
| "learning_rate": 7.916970138383103e-05, |
| "loss": 2.1287, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.2496973888984956, |
| "grad_norm": 0.7254488468170166, |
| "learning_rate": 7.909686817188638e-05, |
| "loss": 1.1997, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.25038907141621997, |
| "grad_norm": 0.8156582117080688, |
| "learning_rate": 7.902403495994174e-05, |
| "loss": 2.1275, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.2510807539339443, |
| "grad_norm": 0.7504958510398865, |
| "learning_rate": 7.895120174799709e-05, |
| "loss": 2.1381, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.2517724364516687, |
| "grad_norm": 0.6650155782699585, |
| "learning_rate": 7.887836853605244e-05, |
| "loss": 0.9823, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.252464118969393, |
| "grad_norm": 0.6706410646438599, |
| "learning_rate": 7.88055353241078e-05, |
| "loss": 1.1854, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.2531558014871174, |
| "grad_norm": 1.0841103792190552, |
| "learning_rate": 7.873270211216315e-05, |
| "loss": 1.4933, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.25384748400484175, |
| "grad_norm": 0.7275784611701965, |
| "learning_rate": 7.86598689002185e-05, |
| "loss": 2.162, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.25453916652256614, |
| "grad_norm": 0.994987964630127, |
| "learning_rate": 7.858703568827385e-05, |
| "loss": 1.6992, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.25523084904029053, |
| "grad_norm": 1.160749912261963, |
| "learning_rate": 7.851420247632922e-05, |
| "loss": 1.2521, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.25592253155801487, |
| "grad_norm": 0.8106790781021118, |
| "learning_rate": 7.844136926438456e-05, |
| "loss": 1.5949, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.25661421407573926, |
| "grad_norm": 0.5805374383926392, |
| "learning_rate": 7.836853605243991e-05, |
| "loss": 1.713, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.2573058965934636, |
| "grad_norm": 0.6805906295776367, |
| "learning_rate": 7.829570284049527e-05, |
| "loss": 1.7713, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.257997579111188, |
| "grad_norm": 0.6964860558509827, |
| "learning_rate": 7.822286962855062e-05, |
| "loss": 1.8013, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.2586892616289123, |
| "grad_norm": 1.2079120874404907, |
| "learning_rate": 7.815003641660598e-05, |
| "loss": 2.0171, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.2593809441466367, |
| "grad_norm": 0.689630925655365, |
| "learning_rate": 7.807720320466133e-05, |
| "loss": 1.8816, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.26007262666436104, |
| "grad_norm": 0.9079475402832031, |
| "learning_rate": 7.800436999271669e-05, |
| "loss": 1.5992, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.26076430918208543, |
| "grad_norm": 0.4846475422382355, |
| "learning_rate": 7.793153678077204e-05, |
| "loss": 0.9038, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.26145599169980976, |
| "grad_norm": 0.6311324238777161, |
| "learning_rate": 7.785870356882738e-05, |
| "loss": 1.757, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.26214767421753415, |
| "grad_norm": 0.6497640609741211, |
| "learning_rate": 7.778587035688274e-05, |
| "loss": 1.799, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.2628393567352585, |
| "grad_norm": 0.6967574954032898, |
| "learning_rate": 7.771303714493809e-05, |
| "loss": 2.0541, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.2635310392529829, |
| "grad_norm": 0.8538269400596619, |
| "learning_rate": 7.764020393299345e-05, |
| "loss": 1.4781, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.26422272177070727, |
| "grad_norm": 0.747353196144104, |
| "learning_rate": 7.75673707210488e-05, |
| "loss": 1.063, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.2649144042884316, |
| "grad_norm": 0.7110087871551514, |
| "learning_rate": 7.749453750910416e-05, |
| "loss": 1.6301, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.265606086806156, |
| "grad_norm": 0.8608129620552063, |
| "learning_rate": 7.742170429715951e-05, |
| "loss": 1.7459, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.26629776932388033, |
| "grad_norm": 0.903325617313385, |
| "learning_rate": 7.734887108521486e-05, |
| "loss": 1.0401, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.2669894518416047, |
| "grad_norm": 0.6887355446815491, |
| "learning_rate": 7.727603787327022e-05, |
| "loss": 1.6558, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.26768113435932905, |
| "grad_norm": 0.8748055100440979, |
| "learning_rate": 7.720320466132557e-05, |
| "loss": 2.2414, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.26837281687705344, |
| "grad_norm": 0.7315294146537781, |
| "learning_rate": 7.713037144938093e-05, |
| "loss": 1.7651, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.2690644993947778, |
| "grad_norm": 0.7685129642486572, |
| "learning_rate": 7.705753823743627e-05, |
| "loss": 1.64, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.26975618191250217, |
| "grad_norm": 0.7387799620628357, |
| "learning_rate": 7.698470502549163e-05, |
| "loss": 1.655, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.2704478644302265, |
| "grad_norm": 0.6962683200836182, |
| "learning_rate": 7.691187181354698e-05, |
| "loss": 2.0759, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.2711395469479509, |
| "grad_norm": 0.6928436160087585, |
| "learning_rate": 7.683903860160233e-05, |
| "loss": 1.902, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.27183122946567523, |
| "grad_norm": 0.6286187767982483, |
| "learning_rate": 7.676620538965769e-05, |
| "loss": 1.1205, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.2725229119833996, |
| "grad_norm": 0.8087384104728699, |
| "learning_rate": 7.669337217771304e-05, |
| "loss": 1.5928, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.273214594501124, |
| "grad_norm": 0.6950211524963379, |
| "learning_rate": 7.66205389657684e-05, |
| "loss": 1.9022, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.27390627701884834, |
| "grad_norm": 0.716234028339386, |
| "learning_rate": 7.654770575382375e-05, |
| "loss": 1.9103, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.27459795953657273, |
| "grad_norm": 1.0911515951156616, |
| "learning_rate": 7.647487254187911e-05, |
| "loss": 2.1727, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.27528964205429707, |
| "grad_norm": 1.0012110471725464, |
| "learning_rate": 7.640203932993446e-05, |
| "loss": 2.0671, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.27598132457202146, |
| "grad_norm": 0.7271398305892944, |
| "learning_rate": 7.63292061179898e-05, |
| "loss": 1.8487, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.2766730070897458, |
| "grad_norm": 0.9112443923950195, |
| "learning_rate": 7.625637290604516e-05, |
| "loss": 1.6583, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.2773646896074702, |
| "grad_norm": 0.8109308481216431, |
| "learning_rate": 7.618353969410051e-05, |
| "loss": 1.6202, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.2780563721251945, |
| "grad_norm": 0.7934839129447937, |
| "learning_rate": 7.611070648215587e-05, |
| "loss": 2.2605, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.2787480546429189, |
| "grad_norm": 0.7166281342506409, |
| "learning_rate": 7.603787327021122e-05, |
| "loss": 1.5321, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.27943973716064324, |
| "grad_norm": 0.6386390924453735, |
| "learning_rate": 7.596504005826657e-05, |
| "loss": 0.9431, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.28013141967836763, |
| "grad_norm": 1.2669974565505981, |
| "learning_rate": 7.589220684632193e-05, |
| "loss": 0.7634, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.28082310219609197, |
| "grad_norm": 0.7032380700111389, |
| "learning_rate": 7.581937363437728e-05, |
| "loss": 1.9926, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.28151478471381636, |
| "grad_norm": 0.648324191570282, |
| "learning_rate": 7.574654042243264e-05, |
| "loss": 1.2027, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.28220646723154075, |
| "grad_norm": 0.7746976613998413, |
| "learning_rate": 7.567370721048798e-05, |
| "loss": 1.3615, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.2828981497492651, |
| "grad_norm": 0.8174494504928589, |
| "learning_rate": 7.560087399854335e-05, |
| "loss": 1.9945, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.28358983226698947, |
| "grad_norm": 0.9701315760612488, |
| "learning_rate": 7.55280407865987e-05, |
| "loss": 1.4894, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.2842815147847138, |
| "grad_norm": 0.9906868934631348, |
| "learning_rate": 7.545520757465404e-05, |
| "loss": 1.7937, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.2849731973024382, |
| "grad_norm": 0.6380108594894409, |
| "learning_rate": 7.53823743627094e-05, |
| "loss": 1.6794, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.28566487982016253, |
| "grad_norm": 0.6367055177688599, |
| "learning_rate": 7.530954115076475e-05, |
| "loss": 1.4019, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.2863565623378869, |
| "grad_norm": 0.9779865741729736, |
| "learning_rate": 7.523670793882011e-05, |
| "loss": 1.8048, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.28704824485561126, |
| "grad_norm": 0.6246545910835266, |
| "learning_rate": 7.516387472687546e-05, |
| "loss": 1.7138, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.28773992737333565, |
| "grad_norm": 0.7418220639228821, |
| "learning_rate": 7.509104151493082e-05, |
| "loss": 1.9917, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.28843160989106, |
| "grad_norm": 0.7883859276771545, |
| "learning_rate": 7.501820830298617e-05, |
| "loss": 1.2134, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.28912329240878437, |
| "grad_norm": 0.8116041421890259, |
| "learning_rate": 7.494537509104151e-05, |
| "loss": 1.1634, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.2898149749265087, |
| "grad_norm": 0.7708969712257385, |
| "learning_rate": 7.487254187909688e-05, |
| "loss": 1.8895, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.2905066574442331, |
| "grad_norm": 1.3700909614562988, |
| "learning_rate": 7.479970866715222e-05, |
| "loss": 1.1294, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.2911983399619575, |
| "grad_norm": 0.8107597827911377, |
| "learning_rate": 7.472687545520758e-05, |
| "loss": 1.0097, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.2918900224796818, |
| "grad_norm": 0.8277159929275513, |
| "learning_rate": 7.465404224326293e-05, |
| "loss": 1.0633, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.2925817049974062, |
| "grad_norm": 0.75505131483078, |
| "learning_rate": 7.458120903131829e-05, |
| "loss": 1.4971, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.29327338751513055, |
| "grad_norm": 0.7761143445968628, |
| "learning_rate": 7.450837581937363e-05, |
| "loss": 1.569, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.29396507003285494, |
| "grad_norm": 0.8429370522499084, |
| "learning_rate": 7.443554260742899e-05, |
| "loss": 2.147, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.29465675255057927, |
| "grad_norm": 0.7506592273712158, |
| "learning_rate": 7.436270939548435e-05, |
| "loss": 2.1514, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.29534843506830366, |
| "grad_norm": 0.6654348969459534, |
| "learning_rate": 7.42898761835397e-05, |
| "loss": 1.7697, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.296040117586028, |
| "grad_norm": 1.179040551185608, |
| "learning_rate": 7.421704297159506e-05, |
| "loss": 1.6906, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.2967318001037524, |
| "grad_norm": 0.6145846247673035, |
| "learning_rate": 7.41442097596504e-05, |
| "loss": 1.8308, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.2974234826214767, |
| "grad_norm": 0.7995002269744873, |
| "learning_rate": 7.407137654770577e-05, |
| "loss": 1.7942, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.2981151651392011, |
| "grad_norm": 0.707095742225647, |
| "learning_rate": 7.39985433357611e-05, |
| "loss": 1.6325, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.29880684765692545, |
| "grad_norm": 0.5955487489700317, |
| "learning_rate": 7.392571012381646e-05, |
| "loss": 1.3466, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.29949853017464984, |
| "grad_norm": 0.9258183240890503, |
| "learning_rate": 7.385287691187182e-05, |
| "loss": 1.8544, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.3001902126923742, |
| "grad_norm": 0.8435434103012085, |
| "learning_rate": 7.378004369992717e-05, |
| "loss": 1.8884, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.30088189521009856, |
| "grad_norm": 0.7008845210075378, |
| "learning_rate": 7.370721048798253e-05, |
| "loss": 1.8409, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.30157357772782295, |
| "grad_norm": 0.7846685647964478, |
| "learning_rate": 7.363437727603788e-05, |
| "loss": 1.8821, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.3022652602455473, |
| "grad_norm": 0.7767228484153748, |
| "learning_rate": 7.356154406409324e-05, |
| "loss": 2.0034, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.3029569427632717, |
| "grad_norm": 0.787028431892395, |
| "learning_rate": 7.348871085214857e-05, |
| "loss": 1.8664, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.303648625280996, |
| "grad_norm": 0.6583303809165955, |
| "learning_rate": 7.341587764020393e-05, |
| "loss": 1.1991, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.3043403077987204, |
| "grad_norm": 0.8681322932243347, |
| "learning_rate": 7.33430444282593e-05, |
| "loss": 1.7124, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.30503199031644473, |
| "grad_norm": 0.9781901240348816, |
| "learning_rate": 7.327021121631464e-05, |
| "loss": 2.0128, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.3057236728341691, |
| "grad_norm": 0.6712149381637573, |
| "learning_rate": 7.319737800437e-05, |
| "loss": 1.4835, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.30641535535189346, |
| "grad_norm": 0.7322747111320496, |
| "learning_rate": 7.312454479242535e-05, |
| "loss": 1.1381, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.30710703786961785, |
| "grad_norm": 0.8576450347900391, |
| "learning_rate": 7.30517115804807e-05, |
| "loss": 1.8142, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.3077987203873422, |
| "grad_norm": 0.6087522506713867, |
| "learning_rate": 7.297887836853605e-05, |
| "loss": 1.2711, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.3084904029050666, |
| "grad_norm": 0.8685612082481384, |
| "learning_rate": 7.290604515659141e-05, |
| "loss": 1.2365, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.30918208542279096, |
| "grad_norm": 0.7040896415710449, |
| "learning_rate": 7.283321194464677e-05, |
| "loss": 1.6385, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.3098737679405153, |
| "grad_norm": 0.943147599697113, |
| "learning_rate": 7.276037873270212e-05, |
| "loss": 1.4699, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.3105654504582397, |
| "grad_norm": 0.6641246676445007, |
| "learning_rate": 7.268754552075748e-05, |
| "loss": 1.6489, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.311257132975964, |
| "grad_norm": 0.7420269250869751, |
| "learning_rate": 7.261471230881282e-05, |
| "loss": 2.1525, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.3119488154936884, |
| "grad_norm": 0.7198522090911865, |
| "learning_rate": 7.254187909686817e-05, |
| "loss": 2.01, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.31264049801141275, |
| "grad_norm": 0.8209108114242554, |
| "learning_rate": 7.246904588492352e-05, |
| "loss": 1.5205, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.31333218052913714, |
| "grad_norm": 1.0207217931747437, |
| "learning_rate": 7.239621267297888e-05, |
| "loss": 1.589, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.3140238630468615, |
| "grad_norm": 0.7939510345458984, |
| "learning_rate": 7.232337946103424e-05, |
| "loss": 1.9908, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.31471554556458586, |
| "grad_norm": 0.6668927073478699, |
| "learning_rate": 7.225054624908959e-05, |
| "loss": 1.7476, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.3154072280823102, |
| "grad_norm": 0.8068589568138123, |
| "learning_rate": 7.217771303714495e-05, |
| "loss": 1.7593, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.3160989106000346, |
| "grad_norm": 0.6828214526176453, |
| "learning_rate": 7.21048798252003e-05, |
| "loss": 1.5636, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.3167905931177589, |
| "grad_norm": 0.7853332757949829, |
| "learning_rate": 7.203204661325565e-05, |
| "loss": 1.1448, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.3174822756354833, |
| "grad_norm": 1.0938345193862915, |
| "learning_rate": 7.195921340131099e-05, |
| "loss": 1.7145, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.3181739581532077, |
| "grad_norm": 0.7735335826873779, |
| "learning_rate": 7.188638018936635e-05, |
| "loss": 1.2118, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.31886564067093204, |
| "grad_norm": 0.6759296655654907, |
| "learning_rate": 7.181354697742171e-05, |
| "loss": 1.1214, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.31955732318865643, |
| "grad_norm": 0.8163008689880371, |
| "learning_rate": 7.174071376547706e-05, |
| "loss": 1.9071, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.32024900570638076, |
| "grad_norm": 0.8205277919769287, |
| "learning_rate": 7.166788055353242e-05, |
| "loss": 1.0775, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.32094068822410515, |
| "grad_norm": 0.712774395942688, |
| "learning_rate": 7.159504734158776e-05, |
| "loss": 1.8912, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.3216323707418295, |
| "grad_norm": 0.7423759698867798, |
| "learning_rate": 7.152221412964312e-05, |
| "loss": 2.0655, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.3223240532595539, |
| "grad_norm": 0.6948613524436951, |
| "learning_rate": 7.144938091769847e-05, |
| "loss": 2.1957, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.3230157357772782, |
| "grad_norm": 0.6164250373840332, |
| "learning_rate": 7.137654770575383e-05, |
| "loss": 0.9997, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.3237074182950026, |
| "grad_norm": 0.7659111022949219, |
| "learning_rate": 7.130371449380919e-05, |
| "loss": 1.4006, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.32439910081272694, |
| "grad_norm": 0.830442488193512, |
| "learning_rate": 7.123088128186454e-05, |
| "loss": 1.6562, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.3250907833304513, |
| "grad_norm": 0.649169921875, |
| "learning_rate": 7.11580480699199e-05, |
| "loss": 1.8275, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.32578246584817566, |
| "grad_norm": 0.7854671478271484, |
| "learning_rate": 7.108521485797523e-05, |
| "loss": 0.3147, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.32647414836590005, |
| "grad_norm": 1.0381858348846436, |
| "learning_rate": 7.101238164603059e-05, |
| "loss": 2.0842, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.32716583088362444, |
| "grad_norm": 0.7317079305648804, |
| "learning_rate": 7.093954843408594e-05, |
| "loss": 2.1326, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.3278575134013488, |
| "grad_norm": 0.73940110206604, |
| "learning_rate": 7.08667152221413e-05, |
| "loss": 1.9647, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.32854919591907317, |
| "grad_norm": 0.8855507969856262, |
| "learning_rate": 7.079388201019666e-05, |
| "loss": 1.9308, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.3292408784367975, |
| "grad_norm": 0.7795459628105164, |
| "learning_rate": 7.072104879825201e-05, |
| "loss": 1.9297, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.3299325609545219, |
| "grad_norm": 0.765084981918335, |
| "learning_rate": 7.064821558630737e-05, |
| "loss": 2.3063, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.3306242434722462, |
| "grad_norm": 0.7164610624313354, |
| "learning_rate": 7.05753823743627e-05, |
| "loss": 1.8509, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.3313159259899706, |
| "grad_norm": 0.6976935863494873, |
| "learning_rate": 7.050254916241806e-05, |
| "loss": 1.8351, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.33200760850769495, |
| "grad_norm": 1.1228684186935425, |
| "learning_rate": 7.042971595047341e-05, |
| "loss": 1.4274, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.33269929102541934, |
| "grad_norm": 0.7229692935943604, |
| "learning_rate": 7.035688273852877e-05, |
| "loss": 1.5104, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.3333909735431437, |
| "grad_norm": 0.7485585808753967, |
| "learning_rate": 7.028404952658413e-05, |
| "loss": 1.1417, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.33408265606086807, |
| "grad_norm": 1.7345014810562134, |
| "learning_rate": 7.021121631463948e-05, |
| "loss": 1.6614, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.3347743385785924, |
| "grad_norm": 0.7559076547622681, |
| "learning_rate": 7.013838310269483e-05, |
| "loss": 1.8508, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.3354660210963168, |
| "grad_norm": 1.2972337007522583, |
| "learning_rate": 7.006554989075018e-05, |
| "loss": 1.9322, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.3361577036140412, |
| "grad_norm": 0.9719124436378479, |
| "learning_rate": 6.999271667880554e-05, |
| "loss": 1.6138, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.3368493861317655, |
| "grad_norm": 0.7772285342216492, |
| "learning_rate": 6.991988346686089e-05, |
| "loss": 2.1904, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.3375410686494899, |
| "grad_norm": 0.8187295198440552, |
| "learning_rate": 6.984705025491625e-05, |
| "loss": 1.4893, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.33823275116721424, |
| "grad_norm": 0.8635819554328918, |
| "learning_rate": 6.977421704297161e-05, |
| "loss": 2.0885, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.33892443368493863, |
| "grad_norm": 1.2180774211883545, |
| "learning_rate": 6.970138383102696e-05, |
| "loss": 1.1084, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.33961611620266297, |
| "grad_norm": 0.8329484462738037, |
| "learning_rate": 6.96285506190823e-05, |
| "loss": 1.3746, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.34030779872038736, |
| "grad_norm": 0.7301535606384277, |
| "learning_rate": 6.955571740713765e-05, |
| "loss": 1.2109, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.3409994812381117, |
| "grad_norm": 0.740479052066803, |
| "learning_rate": 6.948288419519301e-05, |
| "loss": 1.9807, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.3416911637558361, |
| "grad_norm": 0.9610648155212402, |
| "learning_rate": 6.941005098324836e-05, |
| "loss": 1.5924, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.3423828462735604, |
| "grad_norm": 0.7890329360961914, |
| "learning_rate": 6.933721777130372e-05, |
| "loss": 1.7208, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.3430745287912848, |
| "grad_norm": 0.7466354370117188, |
| "learning_rate": 6.926438455935908e-05, |
| "loss": 1.8483, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.34376621130900914, |
| "grad_norm": 0.8114244341850281, |
| "learning_rate": 6.919155134741443e-05, |
| "loss": 1.9642, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.34445789382673353, |
| "grad_norm": 0.8441615104675293, |
| "learning_rate": 6.911871813546978e-05, |
| "loss": 1.6589, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.3451495763444579, |
| "grad_norm": 0.771629273891449, |
| "learning_rate": 6.904588492352512e-05, |
| "loss": 2.1515, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.34584125886218225, |
| "grad_norm": 0.7773415446281433, |
| "learning_rate": 6.897305171158048e-05, |
| "loss": 1.1782, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.34584125886218225, |
| "eval_loss": 1.5188031196594238, |
| "eval_runtime": 636.0478, |
| "eval_samples_per_second": 2.02, |
| "eval_steps_per_second": 1.011, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.34653294137990664, |
| "grad_norm": 0.8539692759513855, |
| "learning_rate": 6.890021849963583e-05, |
| "loss": 2.2602, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.347224623897631, |
| "grad_norm": 0.7637029886245728, |
| "learning_rate": 6.882738528769119e-05, |
| "loss": 1.8441, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.34791630641535537, |
| "grad_norm": 0.7741591334342957, |
| "learning_rate": 6.875455207574655e-05, |
| "loss": 1.876, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.3486079889330797, |
| "grad_norm": 0.7942149639129639, |
| "learning_rate": 6.868171886380189e-05, |
| "loss": 1.2218, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.3492996714508041, |
| "grad_norm": 0.7626279592514038, |
| "learning_rate": 6.860888565185725e-05, |
| "loss": 1.563, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.34999135396852843, |
| "grad_norm": 0.6218715310096741, |
| "learning_rate": 6.85360524399126e-05, |
| "loss": 1.7303, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.3506830364862528, |
| "grad_norm": 0.8599730134010315, |
| "learning_rate": 6.846321922796796e-05, |
| "loss": 1.3347, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.35137471900397715, |
| "grad_norm": 0.5834329128265381, |
| "learning_rate": 6.83903860160233e-05, |
| "loss": 1.1654, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.35206640152170154, |
| "grad_norm": 0.8399999737739563, |
| "learning_rate": 6.831755280407867e-05, |
| "loss": 1.4099, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.3527580840394259, |
| "grad_norm": 0.7133538126945496, |
| "learning_rate": 6.824471959213403e-05, |
| "loss": 0.9115, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.35344976655715027, |
| "grad_norm": 0.7058927416801453, |
| "learning_rate": 6.817188638018936e-05, |
| "loss": 1.4206, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.35414144907487466, |
| "grad_norm": 1.1560832262039185, |
| "learning_rate": 6.809905316824472e-05, |
| "loss": 1.5635, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.354833131592599, |
| "grad_norm": 0.7324162721633911, |
| "learning_rate": 6.802621995630007e-05, |
| "loss": 2.045, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.3555248141103234, |
| "grad_norm": 0.6711490750312805, |
| "learning_rate": 6.795338674435543e-05, |
| "loss": 2.0891, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.3562164966280477, |
| "grad_norm": 0.795870304107666, |
| "learning_rate": 6.788055353241078e-05, |
| "loss": 2.1285, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.3569081791457721, |
| "grad_norm": 1.913321852684021, |
| "learning_rate": 6.780772032046614e-05, |
| "loss": 2.1861, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.35759986166349644, |
| "grad_norm": 0.7828819751739502, |
| "learning_rate": 6.77348871085215e-05, |
| "loss": 1.4283, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.35829154418122083, |
| "grad_norm": 0.7590184211730957, |
| "learning_rate": 6.766205389657683e-05, |
| "loss": 1.6581, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.35898322669894517, |
| "grad_norm": 0.8273248672485352, |
| "learning_rate": 6.75892206846322e-05, |
| "loss": 2.0783, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.35967490921666956, |
| "grad_norm": 0.791447639465332, |
| "learning_rate": 6.751638747268754e-05, |
| "loss": 1.2507, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.3603665917343939, |
| "grad_norm": 0.7864679098129272, |
| "learning_rate": 6.74435542607429e-05, |
| "loss": 1.4686, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.3610582742521183, |
| "grad_norm": 0.7522057294845581, |
| "learning_rate": 6.737072104879825e-05, |
| "loss": 1.4506, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.3617499567698426, |
| "grad_norm": 0.6190770864486694, |
| "learning_rate": 6.729788783685361e-05, |
| "loss": 2.0413, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.362441639287567, |
| "grad_norm": 0.9738603830337524, |
| "learning_rate": 6.722505462490896e-05, |
| "loss": 1.315, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.3631333218052914, |
| "grad_norm": 0.7005712389945984, |
| "learning_rate": 6.715222141296431e-05, |
| "loss": 1.528, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.36382500432301573, |
| "grad_norm": 1.2126790285110474, |
| "learning_rate": 6.707938820101967e-05, |
| "loss": 2.0808, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.3645166868407401, |
| "grad_norm": 0.9661760330200195, |
| "learning_rate": 6.700655498907502e-05, |
| "loss": 2.0187, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.36520836935846446, |
| "grad_norm": 0.7546457648277283, |
| "learning_rate": 6.693372177713038e-05, |
| "loss": 1.4964, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.36590005187618885, |
| "grad_norm": 0.8270094394683838, |
| "learning_rate": 6.686088856518573e-05, |
| "loss": 1.7801, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.3665917343939132, |
| "grad_norm": 0.7802672386169434, |
| "learning_rate": 6.678805535324109e-05, |
| "loss": 1.4457, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.3672834169116376, |
| "grad_norm": 0.7970958948135376, |
| "learning_rate": 6.671522214129643e-05, |
| "loss": 1.5796, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.3679750994293619, |
| "grad_norm": 0.5904183983802795, |
| "learning_rate": 6.664238892935178e-05, |
| "loss": 1.013, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.3686667819470863, |
| "grad_norm": 0.8535469174385071, |
| "learning_rate": 6.656955571740714e-05, |
| "loss": 1.3146, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.36935846446481063, |
| "grad_norm": 0.722558319568634, |
| "learning_rate": 6.649672250546249e-05, |
| "loss": 1.2695, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.370050146982535, |
| "grad_norm": 0.7470422983169556, |
| "learning_rate": 6.642388929351785e-05, |
| "loss": 2.1384, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.37074182950025936, |
| "grad_norm": 1.035486102104187, |
| "learning_rate": 6.63510560815732e-05, |
| "loss": 1.6666, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.37143351201798375, |
| "grad_norm": 0.8261759281158447, |
| "learning_rate": 6.627822286962856e-05, |
| "loss": 1.9752, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.37212519453570814, |
| "grad_norm": 0.8254296183586121, |
| "learning_rate": 6.620538965768391e-05, |
| "loss": 1.9392, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.37281687705343247, |
| "grad_norm": 0.9176802635192871, |
| "learning_rate": 6.613255644573925e-05, |
| "loss": 2.1566, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.37350855957115686, |
| "grad_norm": 0.6909226179122925, |
| "learning_rate": 6.605972323379462e-05, |
| "loss": 1.4706, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.3742002420888812, |
| "grad_norm": 1.0335818529129028, |
| "learning_rate": 6.598689002184996e-05, |
| "loss": 2.4501, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.3748919246066056, |
| "grad_norm": 0.9764876365661621, |
| "learning_rate": 6.591405680990532e-05, |
| "loss": 2.2261, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.3755836071243299, |
| "grad_norm": 0.8037734627723694, |
| "learning_rate": 6.584122359796067e-05, |
| "loss": 1.6487, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.3762752896420543, |
| "grad_norm": 0.9658376574516296, |
| "learning_rate": 6.576839038601602e-05, |
| "loss": 2.0596, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.37696697215977865, |
| "grad_norm": 0.7336639165878296, |
| "learning_rate": 6.569555717407138e-05, |
| "loss": 1.9689, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.37765865467750304, |
| "grad_norm": 0.6894165873527527, |
| "learning_rate": 6.562272396212673e-05, |
| "loss": 1.8499, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.37835033719522737, |
| "grad_norm": 0.7265276908874512, |
| "learning_rate": 6.554989075018209e-05, |
| "loss": 2.111, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.37904201971295176, |
| "grad_norm": 0.8299263715744019, |
| "learning_rate": 6.547705753823744e-05, |
| "loss": 2.0565, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.3797337022306761, |
| "grad_norm": 0.8597300052642822, |
| "learning_rate": 6.54042243262928e-05, |
| "loss": 1.7233, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.3804253847484005, |
| "grad_norm": 0.7230535745620728, |
| "learning_rate": 6.533139111434814e-05, |
| "loss": 1.2764, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.3811170672661249, |
| "grad_norm": 0.8922619819641113, |
| "learning_rate": 6.525855790240349e-05, |
| "loss": 2.1088, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.3818087497838492, |
| "grad_norm": 0.7452985048294067, |
| "learning_rate": 6.518572469045885e-05, |
| "loss": 1.5725, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.3825004323015736, |
| "grad_norm": 0.8344093561172485, |
| "learning_rate": 6.51128914785142e-05, |
| "loss": 1.8517, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.38319211481929794, |
| "grad_norm": 0.6585731506347656, |
| "learning_rate": 6.504005826656956e-05, |
| "loss": 1.4584, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.3838837973370223, |
| "grad_norm": 0.8440024256706238, |
| "learning_rate": 6.496722505462491e-05, |
| "loss": 1.547, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.38457547985474666, |
| "grad_norm": 0.8570727705955505, |
| "learning_rate": 6.489439184268027e-05, |
| "loss": 1.7777, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.38526716237247105, |
| "grad_norm": 0.8458102345466614, |
| "learning_rate": 6.482155863073562e-05, |
| "loss": 2.1761, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.3859588448901954, |
| "grad_norm": 0.9602516293525696, |
| "learning_rate": 6.474872541879097e-05, |
| "loss": 1.224, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.3866505274079198, |
| "grad_norm": 0.7018793821334839, |
| "learning_rate": 6.467589220684633e-05, |
| "loss": 1.8217, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.3873422099256441, |
| "grad_norm": 0.796327531337738, |
| "learning_rate": 6.460305899490167e-05, |
| "loss": 1.9399, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.3880338924433685, |
| "grad_norm": 0.6899397373199463, |
| "learning_rate": 6.453022578295704e-05, |
| "loss": 0.8488, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.38872557496109283, |
| "grad_norm": 0.6222304701805115, |
| "learning_rate": 6.445739257101238e-05, |
| "loss": 0.7811, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.3894172574788172, |
| "grad_norm": 0.7914639115333557, |
| "learning_rate": 6.438455935906774e-05, |
| "loss": 2.0951, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.3901089399965416, |
| "grad_norm": 0.8506953120231628, |
| "learning_rate": 6.431172614712309e-05, |
| "loss": 1.3678, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.39080062251426595, |
| "grad_norm": 0.6404629349708557, |
| "learning_rate": 6.423889293517844e-05, |
| "loss": 1.0092, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.39149230503199034, |
| "grad_norm": 0.7604504227638245, |
| "learning_rate": 6.41660597232338e-05, |
| "loss": 1.931, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.3921839875497147, |
| "grad_norm": 0.7102979421615601, |
| "learning_rate": 6.409322651128915e-05, |
| "loss": 1.3211, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.39287567006743906, |
| "grad_norm": 0.7402350902557373, |
| "learning_rate": 6.402039329934451e-05, |
| "loss": 1.5225, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.3935673525851634, |
| "grad_norm": 0.9871742129325867, |
| "learning_rate": 6.394756008739986e-05, |
| "loss": 0.7057, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.3942590351028878, |
| "grad_norm": 0.7585322856903076, |
| "learning_rate": 6.387472687545522e-05, |
| "loss": 1.5484, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.3949507176206121, |
| "grad_norm": 0.746465265750885, |
| "learning_rate": 6.380189366351056e-05, |
| "loss": 1.4404, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.3956424001383365, |
| "grad_norm": 0.7660443186759949, |
| "learning_rate": 6.372906045156591e-05, |
| "loss": 1.8797, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.39633408265606085, |
| "grad_norm": 0.7403152585029602, |
| "learning_rate": 6.365622723962127e-05, |
| "loss": 1.8291, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.39702576517378524, |
| "grad_norm": 1.007710576057434, |
| "learning_rate": 6.358339402767662e-05, |
| "loss": 1.3267, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.3977174476915096, |
| "grad_norm": 0.9685802459716797, |
| "learning_rate": 6.351056081573198e-05, |
| "loss": 2.0822, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.39840913020923396, |
| "grad_norm": 0.8278066515922546, |
| "learning_rate": 6.343772760378733e-05, |
| "loss": 1.3005, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.39910081272695835, |
| "grad_norm": 0.7012304067611694, |
| "learning_rate": 6.336489439184269e-05, |
| "loss": 0.9913, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.3997924952446827, |
| "grad_norm": 1.449615716934204, |
| "learning_rate": 6.329206117989804e-05, |
| "loss": 1.3947, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.4004841777624071, |
| "grad_norm": 1.1024595499038696, |
| "learning_rate": 6.321922796795339e-05, |
| "loss": 1.683, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.4011758602801314, |
| "grad_norm": 1.3790334463119507, |
| "learning_rate": 6.314639475600875e-05, |
| "loss": 1.3824, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.4018675427978558, |
| "grad_norm": 0.754559338092804, |
| "learning_rate": 6.30735615440641e-05, |
| "loss": 1.9189, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.40255922531558014, |
| "grad_norm": 0.9193564653396606, |
| "learning_rate": 6.300072833211945e-05, |
| "loss": 1.7524, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.40325090783330453, |
| "grad_norm": 0.6305272579193115, |
| "learning_rate": 6.29278951201748e-05, |
| "loss": 1.0415, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.40394259035102886, |
| "grad_norm": 0.7553091645240784, |
| "learning_rate": 6.285506190823015e-05, |
| "loss": 1.4771, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.40463427286875325, |
| "grad_norm": 0.9118616580963135, |
| "learning_rate": 6.278222869628551e-05, |
| "loss": 0.9804, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.4053259553864776, |
| "grad_norm": 0.9057390093803406, |
| "learning_rate": 6.270939548434086e-05, |
| "loss": 1.8537, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.406017637904202, |
| "grad_norm": 0.808927595615387, |
| "learning_rate": 6.263656227239622e-05, |
| "loss": 1.3161, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.4067093204219263, |
| "grad_norm": 0.7657842040061951, |
| "learning_rate": 6.256372906045157e-05, |
| "loss": 1.3956, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.4074010029396507, |
| "grad_norm": 0.7779245376586914, |
| "learning_rate": 6.249089584850693e-05, |
| "loss": 1.5218, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.40809268545737504, |
| "grad_norm": 0.813758373260498, |
| "learning_rate": 6.241806263656228e-05, |
| "loss": 1.6086, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.4087843679750994, |
| "grad_norm": 0.9651972055435181, |
| "learning_rate": 6.234522942461762e-05, |
| "loss": 2.0356, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.4094760504928238, |
| "grad_norm": 0.9631683826446533, |
| "learning_rate": 6.227239621267298e-05, |
| "loss": 2.1457, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.41016773301054815, |
| "grad_norm": 0.6648775339126587, |
| "learning_rate": 6.219956300072833e-05, |
| "loss": 1.3306, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.41085941552827254, |
| "grad_norm": 1.3052250146865845, |
| "learning_rate": 6.212672978878369e-05, |
| "loss": 1.8813, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.4115510980459969, |
| "grad_norm": 0.8607529401779175, |
| "learning_rate": 6.205389657683904e-05, |
| "loss": 1.9435, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.41224278056372127, |
| "grad_norm": 0.7908481359481812, |
| "learning_rate": 6.19810633648944e-05, |
| "loss": 1.7544, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.4129344630814456, |
| "grad_norm": 0.8440048694610596, |
| "learning_rate": 6.190823015294975e-05, |
| "loss": 1.8535, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.41362614559917, |
| "grad_norm": 1.0176901817321777, |
| "learning_rate": 6.18353969410051e-05, |
| "loss": 1.5932, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.4143178281168943, |
| "grad_norm": 0.8359641432762146, |
| "learning_rate": 6.176256372906046e-05, |
| "loss": 1.6704, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.4150095106346187, |
| "grad_norm": 0.8534523248672485, |
| "learning_rate": 6.16897305171158e-05, |
| "loss": 1.7888, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.41570119315234305, |
| "grad_norm": 0.8510312438011169, |
| "learning_rate": 6.161689730517117e-05, |
| "loss": 1.492, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.41639287567006744, |
| "grad_norm": 0.9183124303817749, |
| "learning_rate": 6.154406409322651e-05, |
| "loss": 1.1547, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.4170845581877918, |
| "grad_norm": 0.7638151049613953, |
| "learning_rate": 6.147123088128187e-05, |
| "loss": 1.198, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.41777624070551617, |
| "grad_norm": 0.835200846195221, |
| "learning_rate": 6.139839766933721e-05, |
| "loss": 1.8262, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.41846792322324056, |
| "grad_norm": 0.961685836315155, |
| "learning_rate": 6.132556445739257e-05, |
| "loss": 2.0815, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.4191596057409649, |
| "grad_norm": 1.1949551105499268, |
| "learning_rate": 6.125273124544793e-05, |
| "loss": 2.0426, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.4198512882586893, |
| "grad_norm": 0.6756623983383179, |
| "learning_rate": 6.117989803350328e-05, |
| "loss": 1.1775, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.4205429707764136, |
| "grad_norm": 0.8606211543083191, |
| "learning_rate": 6.110706482155864e-05, |
| "loss": 1.1293, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.421234653294138, |
| "grad_norm": 0.823352575302124, |
| "learning_rate": 6.103423160961399e-05, |
| "loss": 1.7001, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.42192633581186234, |
| "grad_norm": 1.0490659475326538, |
| "learning_rate": 6.096139839766935e-05, |
| "loss": 1.6179, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.42261801832958673, |
| "grad_norm": 0.972795307636261, |
| "learning_rate": 6.088856518572469e-05, |
| "loss": 2.1116, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.42330970084731107, |
| "grad_norm": 0.795642077922821, |
| "learning_rate": 6.081573197378004e-05, |
| "loss": 1.7389, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.42400138336503546, |
| "grad_norm": 0.796417772769928, |
| "learning_rate": 6.07428987618354e-05, |
| "loss": 1.2999, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.4246930658827598, |
| "grad_norm": 0.8704690933227539, |
| "learning_rate": 6.067006554989075e-05, |
| "loss": 1.9508, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.4253847484004842, |
| "grad_norm": 0.899750292301178, |
| "learning_rate": 6.0597232337946106e-05, |
| "loss": 1.9528, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.4260764309182085, |
| "grad_norm": 0.7315880656242371, |
| "learning_rate": 6.052439912600146e-05, |
| "loss": 1.6963, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.4267681134359329, |
| "grad_norm": 0.5647531151771545, |
| "learning_rate": 6.045156591405682e-05, |
| "loss": 1.1703, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.4274597959536573, |
| "grad_norm": 0.9686012268066406, |
| "learning_rate": 6.037873270211216e-05, |
| "loss": 1.9199, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.42815147847138163, |
| "grad_norm": 0.692129909992218, |
| "learning_rate": 6.0305899490167516e-05, |
| "loss": 0.9099, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.428843160989106, |
| "grad_norm": 0.823519229888916, |
| "learning_rate": 6.023306627822287e-05, |
| "loss": 1.8828, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.42953484350683035, |
| "grad_norm": 0.8990175127983093, |
| "learning_rate": 6.0160233066278225e-05, |
| "loss": 1.4318, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.43022652602455475, |
| "grad_norm": 0.8949781656265259, |
| "learning_rate": 6.008739985433358e-05, |
| "loss": 2.0491, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.4309182085422791, |
| "grad_norm": 1.0537045001983643, |
| "learning_rate": 6.001456664238893e-05, |
| "loss": 1.8443, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.43160989106000347, |
| "grad_norm": 0.647614598274231, |
| "learning_rate": 5.994173343044428e-05, |
| "loss": 1.7638, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.4323015735777278, |
| "grad_norm": 0.9621549844741821, |
| "learning_rate": 5.9868900218499635e-05, |
| "loss": 1.6114, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.4329932560954522, |
| "grad_norm": 1.032599925994873, |
| "learning_rate": 5.979606700655499e-05, |
| "loss": 1.9123, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.43368493861317653, |
| "grad_norm": 0.7238906621932983, |
| "learning_rate": 5.9723233794610344e-05, |
| "loss": 1.4896, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.4343766211309009, |
| "grad_norm": 0.7447595596313477, |
| "learning_rate": 5.96504005826657e-05, |
| "loss": 1.5476, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.43506830364862525, |
| "grad_norm": 0.6987977027893066, |
| "learning_rate": 5.957756737072105e-05, |
| "loss": 1.4778, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.43575998616634964, |
| "grad_norm": 0.8449330925941467, |
| "learning_rate": 5.950473415877641e-05, |
| "loss": 1.8849, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.43645166868407403, |
| "grad_norm": 1.0556013584136963, |
| "learning_rate": 5.9431900946831754e-05, |
| "loss": 2.2383, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.43714335120179837, |
| "grad_norm": 0.6370812654495239, |
| "learning_rate": 5.935906773488711e-05, |
| "loss": 1.5145, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.43783503371952276, |
| "grad_norm": 0.7922269701957703, |
| "learning_rate": 5.928623452294246e-05, |
| "loss": 1.5945, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.4385267162372471, |
| "grad_norm": 0.9030670523643494, |
| "learning_rate": 5.921340131099782e-05, |
| "loss": 1.9727, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.4392183987549715, |
| "grad_norm": 0.5330315828323364, |
| "learning_rate": 5.914056809905317e-05, |
| "loss": 1.1115, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.4399100812726958, |
| "grad_norm": 0.7586291432380676, |
| "learning_rate": 5.9067734887108526e-05, |
| "loss": 1.3721, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.4406017637904202, |
| "grad_norm": 0.8726906776428223, |
| "learning_rate": 5.899490167516388e-05, |
| "loss": 1.6429, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.44129344630814454, |
| "grad_norm": 0.8924012184143066, |
| "learning_rate": 5.892206846321923e-05, |
| "loss": 1.7596, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.44198512882586893, |
| "grad_norm": 0.9185534715652466, |
| "learning_rate": 5.884923525127458e-05, |
| "loss": 1.8224, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.44267681134359327, |
| "grad_norm": 0.8137707114219666, |
| "learning_rate": 5.8776402039329936e-05, |
| "loss": 1.7631, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.44336849386131766, |
| "grad_norm": 0.7134371399879456, |
| "learning_rate": 5.870356882738529e-05, |
| "loss": 1.8246, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.444060176379042, |
| "grad_norm": 0.7280315160751343, |
| "learning_rate": 5.8630735615440645e-05, |
| "loss": 1.5007, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.4447518588967664, |
| "grad_norm": 0.6889392733573914, |
| "learning_rate": 5.8557902403496e-05, |
| "loss": 1.2957, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.4454435414144908, |
| "grad_norm": 0.6772429943084717, |
| "learning_rate": 5.8485069191551346e-05, |
| "loss": 1.2119, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.4461352239322151, |
| "grad_norm": 0.7719807028770447, |
| "learning_rate": 5.84122359796067e-05, |
| "loss": 1.0095, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.4468269064499395, |
| "grad_norm": 0.9226011037826538, |
| "learning_rate": 5.8339402767662055e-05, |
| "loss": 1.7821, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.44751858896766383, |
| "grad_norm": 0.6220186948776245, |
| "learning_rate": 5.826656955571741e-05, |
| "loss": 1.1275, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.4482102714853882, |
| "grad_norm": 0.6376112699508667, |
| "learning_rate": 5.8193736343772763e-05, |
| "loss": 1.4188, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.44890195400311256, |
| "grad_norm": 0.7491716146469116, |
| "learning_rate": 5.812090313182812e-05, |
| "loss": 1.5822, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.44959363652083695, |
| "grad_norm": 0.6484271287918091, |
| "learning_rate": 5.804806991988347e-05, |
| "loss": 1.6037, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.4502853190385613, |
| "grad_norm": 1.5858505964279175, |
| "learning_rate": 5.797523670793882e-05, |
| "loss": 1.4725, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.4509770015562857, |
| "grad_norm": 1.0042520761489868, |
| "learning_rate": 5.7902403495994174e-05, |
| "loss": 1.6445, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.45166868407401, |
| "grad_norm": 1.0305542945861816, |
| "learning_rate": 5.782957028404953e-05, |
| "loss": 1.4438, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.4523603665917344, |
| "grad_norm": 0.7213695049285889, |
| "learning_rate": 5.775673707210488e-05, |
| "loss": 1.6495, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.45305204910945873, |
| "grad_norm": 0.8074948191642761, |
| "learning_rate": 5.768390386016024e-05, |
| "loss": 1.1349, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.4537437316271831, |
| "grad_norm": 0.9579366445541382, |
| "learning_rate": 5.761107064821559e-05, |
| "loss": 1.421, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.4544354141449075, |
| "grad_norm": 0.7831091284751892, |
| "learning_rate": 5.7538237436270945e-05, |
| "loss": 1.8771, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.45512709666263185, |
| "grad_norm": 0.7758937478065491, |
| "learning_rate": 5.746540422432629e-05, |
| "loss": 1.7355, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.45581877918035624, |
| "grad_norm": 0.8883340358734131, |
| "learning_rate": 5.739257101238165e-05, |
| "loss": 1.7985, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.45651046169808057, |
| "grad_norm": 0.7148769497871399, |
| "learning_rate": 5.7319737800437e-05, |
| "loss": 2.0838, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.45720214421580496, |
| "grad_norm": 0.7475634217262268, |
| "learning_rate": 5.7246904588492356e-05, |
| "loss": 1.1599, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.4578938267335293, |
| "grad_norm": 0.9443761706352234, |
| "learning_rate": 5.717407137654771e-05, |
| "loss": 1.9477, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.4585855092512537, |
| "grad_norm": 0.8133770823478699, |
| "learning_rate": 5.7101238164603064e-05, |
| "loss": 1.8349, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.459277191768978, |
| "grad_norm": 0.7030238509178162, |
| "learning_rate": 5.702840495265841e-05, |
| "loss": 0.7075, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.4599688742867024, |
| "grad_norm": 0.6615720987319946, |
| "learning_rate": 5.6955571740713766e-05, |
| "loss": 1.4996, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.46066055680442675, |
| "grad_norm": 0.9358487725257874, |
| "learning_rate": 5.688273852876912e-05, |
| "loss": 1.4268, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.46135223932215114, |
| "grad_norm": 0.6841732859611511, |
| "learning_rate": 5.6809905316824475e-05, |
| "loss": 1.7628, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.46204392183987547, |
| "grad_norm": 0.7229800820350647, |
| "learning_rate": 5.673707210487983e-05, |
| "loss": 1.3031, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.46273560435759986, |
| "grad_norm": 0.655465304851532, |
| "learning_rate": 5.666423889293518e-05, |
| "loss": 1.9593, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.46342728687532425, |
| "grad_norm": 0.6893927454948425, |
| "learning_rate": 5.659140568099054e-05, |
| "loss": 0.9872, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.4641189693930486, |
| "grad_norm": 1.0941834449768066, |
| "learning_rate": 5.6518572469045885e-05, |
| "loss": 1.4924, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.464810651910773, |
| "grad_norm": 0.893545925617218, |
| "learning_rate": 5.644573925710124e-05, |
| "loss": 1.531, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.4655023344284973, |
| "grad_norm": 0.7984673976898193, |
| "learning_rate": 5.6372906045156594e-05, |
| "loss": 1.79, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.4661940169462217, |
| "grad_norm": 0.5964581370353699, |
| "learning_rate": 5.630007283321195e-05, |
| "loss": 1.1119, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.46688569946394604, |
| "grad_norm": 0.7665301561355591, |
| "learning_rate": 5.62272396212673e-05, |
| "loss": 1.3566, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.4675773819816704, |
| "grad_norm": 0.8093408346176147, |
| "learning_rate": 5.6154406409322656e-05, |
| "loss": 1.5565, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.46826906449939476, |
| "grad_norm": 0.8169633150100708, |
| "learning_rate": 5.608157319737801e-05, |
| "loss": 2.2779, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.46896074701711915, |
| "grad_norm": 0.8401588797569275, |
| "learning_rate": 5.600873998543336e-05, |
| "loss": 1.9076, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.4696524295348435, |
| "grad_norm": 1.4105268716812134, |
| "learning_rate": 5.593590677348871e-05, |
| "loss": 1.4305, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.4703441120525679, |
| "grad_norm": 1.0228116512298584, |
| "learning_rate": 5.586307356154407e-05, |
| "loss": 2.0295, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.4710357945702922, |
| "grad_norm": 0.8564938306808472, |
| "learning_rate": 5.579024034959942e-05, |
| "loss": 1.6077, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.4717274770880166, |
| "grad_norm": 0.8669772148132324, |
| "learning_rate": 5.5717407137654775e-05, |
| "loss": 1.584, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.472419159605741, |
| "grad_norm": 0.5886315703392029, |
| "learning_rate": 5.564457392571013e-05, |
| "loss": 1.4101, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.4731108421234653, |
| "grad_norm": 0.8294202089309692, |
| "learning_rate": 5.557174071376548e-05, |
| "loss": 1.8993, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.4738025246411897, |
| "grad_norm": 1.0754139423370361, |
| "learning_rate": 5.549890750182083e-05, |
| "loss": 1.654, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.47449420715891405, |
| "grad_norm": 0.7597669363021851, |
| "learning_rate": 5.5426074289876186e-05, |
| "loss": 2.0938, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.47518588967663844, |
| "grad_norm": 0.7472283244132996, |
| "learning_rate": 5.535324107793154e-05, |
| "loss": 1.6874, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.4758775721943628, |
| "grad_norm": 1.1373130083084106, |
| "learning_rate": 5.5280407865986894e-05, |
| "loss": 1.5681, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.47656925471208716, |
| "grad_norm": 0.6962531805038452, |
| "learning_rate": 5.520757465404225e-05, |
| "loss": 1.2423, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.4772609372298115, |
| "grad_norm": 0.8981777429580688, |
| "learning_rate": 5.51347414420976e-05, |
| "loss": 1.4854, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.4779526197475359, |
| "grad_norm": 0.7226567268371582, |
| "learning_rate": 5.506190823015295e-05, |
| "loss": 2.2022, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.4786443022652602, |
| "grad_norm": 0.9245135188102722, |
| "learning_rate": 5.4989075018208305e-05, |
| "loss": 1.3642, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.4793359847829846, |
| "grad_norm": 1.0049363374710083, |
| "learning_rate": 5.491624180626366e-05, |
| "loss": 1.9998, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.48002766730070895, |
| "grad_norm": 0.6878874897956848, |
| "learning_rate": 5.484340859431901e-05, |
| "loss": 1.9002, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.48071934981843334, |
| "grad_norm": 1.1163276433944702, |
| "learning_rate": 5.477057538237437e-05, |
| "loss": 1.1385, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.48141103233615773, |
| "grad_norm": 0.7806943655014038, |
| "learning_rate": 5.469774217042972e-05, |
| "loss": 1.0122, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.48210271485388206, |
| "grad_norm": 0.9799820184707642, |
| "learning_rate": 5.4624908958485076e-05, |
| "loss": 2.0761, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.48279439737160645, |
| "grad_norm": 0.5801637768745422, |
| "learning_rate": 5.4552075746540424e-05, |
| "loss": 0.8659, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.4834860798893308, |
| "grad_norm": 0.7793147563934326, |
| "learning_rate": 5.447924253459578e-05, |
| "loss": 1.4955, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.4841777624070552, |
| "grad_norm": 0.7253298759460449, |
| "learning_rate": 5.440640932265113e-05, |
| "loss": 0.8938, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.4848694449247795, |
| "grad_norm": 0.8305007219314575, |
| "learning_rate": 5.4333576110706487e-05, |
| "loss": 1.3896, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.4855611274425039, |
| "grad_norm": 0.8099757432937622, |
| "learning_rate": 5.426074289876184e-05, |
| "loss": 1.7134, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.48625280996022824, |
| "grad_norm": 0.7706416845321655, |
| "learning_rate": 5.4187909686817195e-05, |
| "loss": 0.8483, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.48694449247795263, |
| "grad_norm": 2.7785534858703613, |
| "learning_rate": 5.4115076474872536e-05, |
| "loss": 1.5148, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.48763617499567696, |
| "grad_norm": 0.7698436975479126, |
| "learning_rate": 5.40422432629279e-05, |
| "loss": 2.031, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.48832785751340135, |
| "grad_norm": 0.8159623742103577, |
| "learning_rate": 5.396941005098325e-05, |
| "loss": 2.0041, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.4890195400311257, |
| "grad_norm": 0.6865976452827454, |
| "learning_rate": 5.3896576839038606e-05, |
| "loss": 1.2877, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.4897112225488501, |
| "grad_norm": 0.9447488784790039, |
| "learning_rate": 5.382374362709396e-05, |
| "loss": 2.1087, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.49040290506657447, |
| "grad_norm": 0.9315760731697083, |
| "learning_rate": 5.3750910415149314e-05, |
| "loss": 1.5459, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.4910945875842988, |
| "grad_norm": 0.7628729939460754, |
| "learning_rate": 5.367807720320467e-05, |
| "loss": 1.6846, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.4917862701020232, |
| "grad_norm": 0.7759138345718384, |
| "learning_rate": 5.360524399126001e-05, |
| "loss": 1.6179, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.4924779526197475, |
| "grad_norm": 0.7866867184638977, |
| "learning_rate": 5.353241077931537e-05, |
| "loss": 2.2264, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.4931696351374719, |
| "grad_norm": 0.7560875415802002, |
| "learning_rate": 5.3459577567370724e-05, |
| "loss": 1.5638, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.49386131765519625, |
| "grad_norm": 0.6578519940376282, |
| "learning_rate": 5.338674435542608e-05, |
| "loss": 1.0194, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.49455300017292064, |
| "grad_norm": 0.8581376671791077, |
| "learning_rate": 5.331391114348143e-05, |
| "loss": 1.7412, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.495244682690645, |
| "grad_norm": 0.8838049173355103, |
| "learning_rate": 5.324107793153679e-05, |
| "loss": 2.0492, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.49593636520836937, |
| "grad_norm": 0.7301944494247437, |
| "learning_rate": 5.316824471959214e-05, |
| "loss": 1.8718, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.4966280477260937, |
| "grad_norm": 0.8501876592636108, |
| "learning_rate": 5.309541150764748e-05, |
| "loss": 1.9824, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.4973197302438181, |
| "grad_norm": 0.8642913699150085, |
| "learning_rate": 5.3022578295702843e-05, |
| "loss": 2.0727, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.4980114127615424, |
| "grad_norm": 0.7409586310386658, |
| "learning_rate": 5.29497450837582e-05, |
| "loss": 2.0216, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.4987030952792668, |
| "grad_norm": 1.2024013996124268, |
| "learning_rate": 5.287691187181355e-05, |
| "loss": 1.534, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.4993947777969912, |
| "grad_norm": 0.7344438433647156, |
| "learning_rate": 5.2804078659868906e-05, |
| "loss": 1.7692, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.5000864603147156, |
| "grad_norm": 0.8432995080947876, |
| "learning_rate": 5.273124544792426e-05, |
| "loss": 1.1382, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.5007781428324399, |
| "grad_norm": 0.8637827038764954, |
| "learning_rate": 5.26584122359796e-05, |
| "loss": 2.0577, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.5014698253501643, |
| "grad_norm": 0.7211031913757324, |
| "learning_rate": 5.2585579024034956e-05, |
| "loss": 1.0286, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.5021615078678886, |
| "grad_norm": 0.8980405330657959, |
| "learning_rate": 5.251274581209032e-05, |
| "loss": 1.6481, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.502853190385613, |
| "grad_norm": 1.0838844776153564, |
| "learning_rate": 5.243991260014567e-05, |
| "loss": 1.8133, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.5035448729033374, |
| "grad_norm": 0.7398082613945007, |
| "learning_rate": 5.2367079388201025e-05, |
| "loss": 1.9674, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.5042365554210617, |
| "grad_norm": 0.9297565817832947, |
| "learning_rate": 5.229424617625638e-05, |
| "loss": 1.5748, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.504928237938786, |
| "grad_norm": 1.0690850019454956, |
| "learning_rate": 5.2221412964311734e-05, |
| "loss": 1.5343, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.5056199204565105, |
| "grad_norm": 0.8005755543708801, |
| "learning_rate": 5.2148579752367075e-05, |
| "loss": 1.7446, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.5063116029742348, |
| "grad_norm": 0.9279308915138245, |
| "learning_rate": 5.207574654042243e-05, |
| "loss": 2.0124, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.5070032854919592, |
| "grad_norm": 0.8205484747886658, |
| "learning_rate": 5.200291332847779e-05, |
| "loss": 1.9278, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.5076949680096835, |
| "grad_norm": 1.0283253192901611, |
| "learning_rate": 5.1930080116533144e-05, |
| "loss": 1.6584, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.508386650527408, |
| "grad_norm": 0.9164149165153503, |
| "learning_rate": 5.18572469045885e-05, |
| "loss": 1.5658, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.5090783330451323, |
| "grad_norm": 0.8904675841331482, |
| "learning_rate": 5.178441369264385e-05, |
| "loss": 2.054, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.5097700155628566, |
| "grad_norm": 0.731334924697876, |
| "learning_rate": 5.171158048069921e-05, |
| "loss": 1.8062, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.5104616980805811, |
| "grad_norm": 1.436216115951538, |
| "learning_rate": 5.163874726875455e-05, |
| "loss": 2.4866, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.5111533805983054, |
| "grad_norm": 0.7042586803436279, |
| "learning_rate": 5.15659140568099e-05, |
| "loss": 1.6228, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.5118450631160297, |
| "grad_norm": 0.63124018907547, |
| "learning_rate": 5.149308084486526e-05, |
| "loss": 0.9537, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.5125367456337541, |
| "grad_norm": 0.8653015494346619, |
| "learning_rate": 5.142024763292062e-05, |
| "loss": 2.0306, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.5132284281514785, |
| "grad_norm": 0.8902332782745361, |
| "learning_rate": 5.134741442097597e-05, |
| "loss": 2.0709, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.5139201106692028, |
| "grad_norm": 0.8607166409492493, |
| "learning_rate": 5.1274581209031326e-05, |
| "loss": 1.635, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.5146117931869272, |
| "grad_norm": 0.8155879974365234, |
| "learning_rate": 5.120174799708667e-05, |
| "loss": 1.0902, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.5153034757046515, |
| "grad_norm": 0.8006990551948547, |
| "learning_rate": 5.112891478514202e-05, |
| "loss": 1.2486, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.515995158222376, |
| "grad_norm": 0.7772015929222107, |
| "learning_rate": 5.1056081573197375e-05, |
| "loss": 2.1048, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.5166868407401003, |
| "grad_norm": 0.7376934289932251, |
| "learning_rate": 5.0983248361252736e-05, |
| "loss": 1.9748, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.5173785232578246, |
| "grad_norm": 0.9872801899909973, |
| "learning_rate": 5.091041514930809e-05, |
| "loss": 1.3626, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.5180702057755491, |
| "grad_norm": 0.6437541246414185, |
| "learning_rate": 5.0837581937363445e-05, |
| "loss": 1.8523, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.5187618882932734, |
| "grad_norm": 0.8643835186958313, |
| "learning_rate": 5.07647487254188e-05, |
| "loss": 1.5464, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.5194535708109977, |
| "grad_norm": 0.9346618056297302, |
| "learning_rate": 5.069191551347414e-05, |
| "loss": 1.7027, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.5201452533287221, |
| "grad_norm": 0.8096154928207397, |
| "learning_rate": 5.0619082301529494e-05, |
| "loss": 1.4638, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.5208369358464465, |
| "grad_norm": 0.8895930051803589, |
| "learning_rate": 5.054624908958485e-05, |
| "loss": 1.3399, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.5215286183641709, |
| "grad_norm": 0.8496235013008118, |
| "learning_rate": 5.047341587764021e-05, |
| "loss": 0.9693, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.5222203008818952, |
| "grad_norm": 0.7915313243865967, |
| "learning_rate": 5.0400582665695564e-05, |
| "loss": 1.4108, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.5229119833996195, |
| "grad_norm": 0.8515884876251221, |
| "learning_rate": 5.032774945375092e-05, |
| "loss": 1.7595, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.523603665917344, |
| "grad_norm": 0.7750046849250793, |
| "learning_rate": 5.025491624180627e-05, |
| "loss": 1.73, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.5242953484350683, |
| "grad_norm": 0.8660963177680969, |
| "learning_rate": 5.018208302986161e-05, |
| "loss": 1.8401, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.5249870309527926, |
| "grad_norm": 1.214325189590454, |
| "learning_rate": 5.010924981791697e-05, |
| "loss": 1.5471, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.525678713470517, |
| "grad_norm": 1.0521632432937622, |
| "learning_rate": 5.003641660597232e-05, |
| "loss": 2.0549, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.5263703959882414, |
| "grad_norm": 0.8383963108062744, |
| "learning_rate": 4.996358339402768e-05, |
| "loss": 1.9966, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.5270620785059658, |
| "grad_norm": 0.9236989617347717, |
| "learning_rate": 4.989075018208304e-05, |
| "loss": 1.3734, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.5277537610236901, |
| "grad_norm": 0.7525227665901184, |
| "learning_rate": 4.9817916970138385e-05, |
| "loss": 1.2582, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.5284454435414145, |
| "grad_norm": 0.8061596751213074, |
| "learning_rate": 4.974508375819374e-05, |
| "loss": 2.0085, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.5291371260591389, |
| "grad_norm": 1.0033149719238281, |
| "learning_rate": 4.967225054624909e-05, |
| "loss": 1.5858, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.5298288085768632, |
| "grad_norm": 1.3156208992004395, |
| "learning_rate": 4.959941733430444e-05, |
| "loss": 1.8546, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.5305204910945875, |
| "grad_norm": 0.7604823112487793, |
| "learning_rate": 4.9526584122359795e-05, |
| "loss": 1.7595, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.531212173612312, |
| "grad_norm": 0.9594501852989197, |
| "learning_rate": 4.9453750910415156e-05, |
| "loss": 1.507, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.5319038561300363, |
| "grad_norm": 1.176438808441162, |
| "learning_rate": 4.9380917698470504e-05, |
| "loss": 1.6748, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.5325955386477607, |
| "grad_norm": 0.6893585324287415, |
| "learning_rate": 4.930808448652586e-05, |
| "loss": 1.326, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.533287221165485, |
| "grad_norm": 0.8642687797546387, |
| "learning_rate": 4.923525127458121e-05, |
| "loss": 1.1461, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.5339789036832094, |
| "grad_norm": 0.6313591003417969, |
| "learning_rate": 4.9162418062636567e-05, |
| "loss": 1.5494, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.5346705862009338, |
| "grad_norm": 0.7172280550003052, |
| "learning_rate": 4.9089584850691914e-05, |
| "loss": 1.4831, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.5353622687186581, |
| "grad_norm": 0.8146588206291199, |
| "learning_rate": 4.901675163874727e-05, |
| "loss": 1.913, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.5360539512363826, |
| "grad_norm": 0.9037981033325195, |
| "learning_rate": 4.894391842680263e-05, |
| "loss": 1.7836, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.5367456337541069, |
| "grad_norm": 0.7782710194587708, |
| "learning_rate": 4.887108521485798e-05, |
| "loss": 1.4689, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.5374373162718312, |
| "grad_norm": 1.111527681350708, |
| "learning_rate": 4.879825200291333e-05, |
| "loss": 1.6938, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.5381289987895556, |
| "grad_norm": 1.2994364500045776, |
| "learning_rate": 4.8725418790968686e-05, |
| "loss": 1.529, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.53882068130728, |
| "grad_norm": 0.7461956143379211, |
| "learning_rate": 4.865258557902403e-05, |
| "loss": 1.1475, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.5395123638250043, |
| "grad_norm": 0.6900501847267151, |
| "learning_rate": 4.857975236707939e-05, |
| "loss": 1.8377, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.5402040463427287, |
| "grad_norm": 0.7007532715797424, |
| "learning_rate": 4.850691915513474e-05, |
| "loss": 1.4117, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.540895728860453, |
| "grad_norm": 0.8662421703338623, |
| "learning_rate": 4.84340859431901e-05, |
| "loss": 1.7831, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.5415874113781775, |
| "grad_norm": 1.2154408693313599, |
| "learning_rate": 4.836125273124545e-05, |
| "loss": 1.9868, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.5422790938959018, |
| "grad_norm": 0.822338342666626, |
| "learning_rate": 4.8288419519300804e-05, |
| "loss": 1.8789, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.5429707764136261, |
| "grad_norm": 0.9804028868675232, |
| "learning_rate": 4.821558630735616e-05, |
| "loss": 2.2036, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.5436624589313505, |
| "grad_norm": 0.9605854749679565, |
| "learning_rate": 4.8142753095411506e-05, |
| "loss": 2.0455, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.5443541414490749, |
| "grad_norm": 0.7948291301727295, |
| "learning_rate": 4.806991988346686e-05, |
| "loss": 1.529, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.5450458239667992, |
| "grad_norm": 0.7471349239349365, |
| "learning_rate": 4.7997086671522215e-05, |
| "loss": 1.5366, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.5457375064845236, |
| "grad_norm": 1.0526931285858154, |
| "learning_rate": 4.792425345957757e-05, |
| "loss": 1.2396, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.546429189002248, |
| "grad_norm": 1.0676295757293701, |
| "learning_rate": 4.7851420247632923e-05, |
| "loss": 1.6639, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.5471208715199724, |
| "grad_norm": 0.9911693334579468, |
| "learning_rate": 4.777858703568828e-05, |
| "loss": 1.8713, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.5478125540376967, |
| "grad_norm": 0.5897760391235352, |
| "learning_rate": 4.770575382374363e-05, |
| "loss": 1.4593, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.548504236555421, |
| "grad_norm": 0.836234986782074, |
| "learning_rate": 4.763292061179898e-05, |
| "loss": 1.3373, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.5491959190731455, |
| "grad_norm": 1.0234719514846802, |
| "learning_rate": 4.7560087399854334e-05, |
| "loss": 1.3758, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.5498876015908698, |
| "grad_norm": 0.6555935144424438, |
| "learning_rate": 4.748725418790969e-05, |
| "loss": 1.0075, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.5505792841085941, |
| "grad_norm": 0.7756921052932739, |
| "learning_rate": 4.741442097596504e-05, |
| "loss": 1.5594, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.5512709666263185, |
| "grad_norm": 0.8114323616027832, |
| "learning_rate": 4.73415877640204e-05, |
| "loss": 1.9788, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.5519626491440429, |
| "grad_norm": 1.0409117937088013, |
| "learning_rate": 4.726875455207575e-05, |
| "loss": 1.4971, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.5526543316617673, |
| "grad_norm": 0.963651716709137, |
| "learning_rate": 4.71959213401311e-05, |
| "loss": 1.8012, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.5533460141794916, |
| "grad_norm": 0.7546253204345703, |
| "learning_rate": 4.712308812818645e-05, |
| "loss": 2.0704, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.554037696697216, |
| "grad_norm": 0.7837393283843994, |
| "learning_rate": 4.705025491624181e-05, |
| "loss": 1.9567, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.5547293792149404, |
| "grad_norm": 0.9750471711158752, |
| "learning_rate": 4.697742170429716e-05, |
| "loss": 1.8202, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.5554210617326647, |
| "grad_norm": 0.656570315361023, |
| "learning_rate": 4.6904588492352516e-05, |
| "loss": 1.9521, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.556112744250389, |
| "grad_norm": 0.7636544108390808, |
| "learning_rate": 4.683175528040787e-05, |
| "loss": 1.6885, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.5568044267681135, |
| "grad_norm": 0.6734775900840759, |
| "learning_rate": 4.6758922068463224e-05, |
| "loss": 1.4624, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.5574961092858378, |
| "grad_norm": 0.7191459536552429, |
| "learning_rate": 4.668608885651857e-05, |
| "loss": 1.9275, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.5581877918035621, |
| "grad_norm": 0.7927097082138062, |
| "learning_rate": 4.6613255644573926e-05, |
| "loss": 1.3038, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.5588794743212865, |
| "grad_norm": 0.7056376934051514, |
| "learning_rate": 4.654042243262928e-05, |
| "loss": 1.4776, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.5595711568390109, |
| "grad_norm": 0.6944011449813843, |
| "learning_rate": 4.6467589220684635e-05, |
| "loss": 1.6238, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.5602628393567353, |
| "grad_norm": 0.6770850419998169, |
| "learning_rate": 4.639475600873999e-05, |
| "loss": 2.0007, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.5609545218744596, |
| "grad_norm": 0.6012453436851501, |
| "learning_rate": 4.632192279679534e-05, |
| "loss": 1.3431, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.5616462043921839, |
| "grad_norm": 0.8556742072105408, |
| "learning_rate": 4.62490895848507e-05, |
| "loss": 1.9744, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.5623378869099084, |
| "grad_norm": 0.9271799921989441, |
| "learning_rate": 4.6176256372906045e-05, |
| "loss": 2.0651, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.5630295694276327, |
| "grad_norm": 0.7125703692436218, |
| "learning_rate": 4.61034231609614e-05, |
| "loss": 1.519, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.563721251945357, |
| "grad_norm": 0.721099853515625, |
| "learning_rate": 4.6030589949016754e-05, |
| "loss": 1.4397, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.5644129344630815, |
| "grad_norm": 0.8622443675994873, |
| "learning_rate": 4.595775673707211e-05, |
| "loss": 1.5684, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.5651046169808058, |
| "grad_norm": 0.8319584727287292, |
| "learning_rate": 4.588492352512746e-05, |
| "loss": 1.8106, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.5657962994985302, |
| "grad_norm": 0.7659180760383606, |
| "learning_rate": 4.5812090313182816e-05, |
| "loss": 1.976, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.5664879820162545, |
| "grad_norm": 0.658638060092926, |
| "learning_rate": 4.5739257101238164e-05, |
| "loss": 1.6156, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.5671796645339789, |
| "grad_norm": 0.7831644415855408, |
| "learning_rate": 4.566642388929352e-05, |
| "loss": 1.7169, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.5678713470517033, |
| "grad_norm": 0.8587795495986938, |
| "learning_rate": 4.559359067734887e-05, |
| "loss": 1.8784, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.5685630295694276, |
| "grad_norm": 0.6490662693977356, |
| "learning_rate": 4.552075746540423e-05, |
| "loss": 1.2044, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.569254712087152, |
| "grad_norm": 0.8098453879356384, |
| "learning_rate": 4.544792425345958e-05, |
| "loss": 2.0041, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.5699463946048764, |
| "grad_norm": 0.9287192225456238, |
| "learning_rate": 4.5375091041514935e-05, |
| "loss": 2.0794, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.5706380771226007, |
| "grad_norm": 0.6558881998062134, |
| "learning_rate": 4.530225782957029e-05, |
| "loss": 1.1153, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.5713297596403251, |
| "grad_norm": 0.9448340535163879, |
| "learning_rate": 4.522942461762564e-05, |
| "loss": 1.8924, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.5720214421580495, |
| "grad_norm": 0.9030715823173523, |
| "learning_rate": 4.515659140568099e-05, |
| "loss": 1.487, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.5727131246757738, |
| "grad_norm": 0.8691999316215515, |
| "learning_rate": 4.5083758193736346e-05, |
| "loss": 1.709, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.5734048071934982, |
| "grad_norm": 0.802944540977478, |
| "learning_rate": 4.50109249817917e-05, |
| "loss": 1.5163, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.5740964897112225, |
| "grad_norm": 0.7917433381080627, |
| "learning_rate": 4.4938091769847054e-05, |
| "loss": 1.5238, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.574788172228947, |
| "grad_norm": 0.8852903842926025, |
| "learning_rate": 4.486525855790241e-05, |
| "loss": 1.4459, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.5754798547466713, |
| "grad_norm": 0.8966250419616699, |
| "learning_rate": 4.479242534595776e-05, |
| "loss": 1.4654, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.5761715372643956, |
| "grad_norm": 1.3262107372283936, |
| "learning_rate": 4.471959213401311e-05, |
| "loss": 1.5453, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.57686321978212, |
| "grad_norm": 0.7559188604354858, |
| "learning_rate": 4.4646758922068465e-05, |
| "loss": 1.5227, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.5775549022998444, |
| "grad_norm": 0.9680230617523193, |
| "learning_rate": 4.457392571012382e-05, |
| "loss": 1.8038, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.5782465848175687, |
| "grad_norm": 0.8509007096290588, |
| "learning_rate": 4.450109249817917e-05, |
| "loss": 1.8162, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.5789382673352931, |
| "grad_norm": 1.4922651052474976, |
| "learning_rate": 4.442825928623453e-05, |
| "loss": 1.5922, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.5796299498530174, |
| "grad_norm": 1.0057311058044434, |
| "learning_rate": 4.435542607428988e-05, |
| "loss": 1.6555, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.5803216323707419, |
| "grad_norm": 0.7542306184768677, |
| "learning_rate": 4.428259286234523e-05, |
| "loss": 1.7829, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.5810133148884662, |
| "grad_norm": 0.7741349935531616, |
| "learning_rate": 4.4209759650400584e-05, |
| "loss": 1.6429, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.5817049974061905, |
| "grad_norm": 0.6510467529296875, |
| "learning_rate": 4.413692643845594e-05, |
| "loss": 1.6901, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.582396679923915, |
| "grad_norm": 0.5973620414733887, |
| "learning_rate": 4.406409322651129e-05, |
| "loss": 1.8884, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.5830883624416393, |
| "grad_norm": 0.7993059158325195, |
| "learning_rate": 4.3991260014566647e-05, |
| "loss": 1.9353, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.5837800449593636, |
| "grad_norm": 0.9664002656936646, |
| "learning_rate": 4.3918426802622e-05, |
| "loss": 1.5514, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.584471727477088, |
| "grad_norm": 1.7549793720245361, |
| "learning_rate": 4.3845593590677355e-05, |
| "loss": 1.3181, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.5851634099948124, |
| "grad_norm": 0.681312084197998, |
| "learning_rate": 4.37727603787327e-05, |
| "loss": 1.3649, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.5858550925125368, |
| "grad_norm": 1.23701012134552, |
| "learning_rate": 4.369992716678806e-05, |
| "loss": 1.7591, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.5865467750302611, |
| "grad_norm": 0.6490361094474792, |
| "learning_rate": 4.362709395484341e-05, |
| "loss": 0.9716, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.5872384575479854, |
| "grad_norm": 1.2864362001419067, |
| "learning_rate": 4.355426074289876e-05, |
| "loss": 1.2925, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.5879301400657099, |
| "grad_norm": 0.8159788250923157, |
| "learning_rate": 4.348142753095411e-05, |
| "loss": 1.6182, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.5886218225834342, |
| "grad_norm": 0.9866076111793518, |
| "learning_rate": 4.3408594319009474e-05, |
| "loss": 1.0921, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.5893135051011585, |
| "grad_norm": 0.7778481841087341, |
| "learning_rate": 4.333576110706483e-05, |
| "loss": 1.8508, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.590005187618883, |
| "grad_norm": 0.7202186584472656, |
| "learning_rate": 4.3262927895120176e-05, |
| "loss": 2.0207, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.5906968701366073, |
| "grad_norm": 0.764106035232544, |
| "learning_rate": 4.319009468317553e-05, |
| "loss": 1.6755, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.5913885526543317, |
| "grad_norm": 0.993752121925354, |
| "learning_rate": 4.3117261471230884e-05, |
| "loss": 2.061, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.592080235172056, |
| "grad_norm": 0.9889275431632996, |
| "learning_rate": 4.304442825928623e-05, |
| "loss": 2.0183, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.5927719176897804, |
| "grad_norm": 0.7371869683265686, |
| "learning_rate": 4.2971595047341586e-05, |
| "loss": 1.8013, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.5934636002075048, |
| "grad_norm": 0.8173306584358215, |
| "learning_rate": 4.289876183539695e-05, |
| "loss": 1.8409, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.5941552827252291, |
| "grad_norm": 0.8699966669082642, |
| "learning_rate": 4.2825928623452295e-05, |
| "loss": 1.1185, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.5948469652429534, |
| "grad_norm": 0.8679646253585815, |
| "learning_rate": 4.275309541150765e-05, |
| "loss": 1.7849, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.5955386477606779, |
| "grad_norm": 0.7400544881820679, |
| "learning_rate": 4.2680262199563003e-05, |
| "loss": 1.4845, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.5962303302784022, |
| "grad_norm": 0.9016357064247131, |
| "learning_rate": 4.260742898761836e-05, |
| "loss": 1.6556, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.5969220127961266, |
| "grad_norm": 1.0750559568405151, |
| "learning_rate": 4.2534595775673705e-05, |
| "loss": 1.2046, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.5976136953138509, |
| "grad_norm": 0.9797933101654053, |
| "learning_rate": 4.246176256372906e-05, |
| "loss": 1.9871, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.5983053778315753, |
| "grad_norm": 0.9287171959877014, |
| "learning_rate": 4.238892935178442e-05, |
| "loss": 2.0108, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.5989970603492997, |
| "grad_norm": 1.2748676538467407, |
| "learning_rate": 4.231609613983977e-05, |
| "loss": 1.8482, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.599688742867024, |
| "grad_norm": 0.7953082919120789, |
| "learning_rate": 4.224326292789512e-05, |
| "loss": 1.9459, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.6003804253847485, |
| "grad_norm": 0.6961773037910461, |
| "learning_rate": 4.217042971595048e-05, |
| "loss": 1.4124, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.6010721079024728, |
| "grad_norm": 0.7349817156791687, |
| "learning_rate": 4.2097596504005824e-05, |
| "loss": 1.3384, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.6017637904201971, |
| "grad_norm": 0.8988999724388123, |
| "learning_rate": 4.202476329206118e-05, |
| "loss": 2.1146, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.6024554729379215, |
| "grad_norm": 0.7482579946517944, |
| "learning_rate": 4.195193008011653e-05, |
| "loss": 1.9195, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.6031471554556459, |
| "grad_norm": 0.584476888179779, |
| "learning_rate": 4.1879096868171894e-05, |
| "loss": 1.1254, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.6038388379733702, |
| "grad_norm": 0.9063954949378967, |
| "learning_rate": 4.180626365622724e-05, |
| "loss": 1.655, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.6045305204910946, |
| "grad_norm": 1.784583568572998, |
| "learning_rate": 4.1733430444282596e-05, |
| "loss": 1.6756, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.6052222030088189, |
| "grad_norm": 1.1327074766159058, |
| "learning_rate": 4.166059723233795e-05, |
| "loss": 1.6496, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.6059138855265433, |
| "grad_norm": 0.6708528399467468, |
| "learning_rate": 4.15877640203933e-05, |
| "loss": 1.4513, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.6066055680442677, |
| "grad_norm": 0.8013389110565186, |
| "learning_rate": 4.151493080844865e-05, |
| "loss": 1.2203, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.607297250561992, |
| "grad_norm": 0.9567274451255798, |
| "learning_rate": 4.1442097596504006e-05, |
| "loss": 1.6625, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.6079889330797165, |
| "grad_norm": 0.7005029916763306, |
| "learning_rate": 4.136926438455936e-05, |
| "loss": 1.7659, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.6086806155974408, |
| "grad_norm": 0.7626811861991882, |
| "learning_rate": 4.1296431172614715e-05, |
| "loss": 1.9302, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.6093722981151651, |
| "grad_norm": 0.8964532017707825, |
| "learning_rate": 4.122359796067007e-05, |
| "loss": 1.5873, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.6100639806328895, |
| "grad_norm": 1.0764849185943604, |
| "learning_rate": 4.115076474872542e-05, |
| "loss": 1.7932, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.6107556631506139, |
| "grad_norm": 0.6796227097511292, |
| "learning_rate": 4.107793153678077e-05, |
| "loss": 1.4911, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.6114473456683382, |
| "grad_norm": 0.8213468790054321, |
| "learning_rate": 4.1005098324836125e-05, |
| "loss": 1.101, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.6121390281860626, |
| "grad_norm": 0.7105699777603149, |
| "learning_rate": 4.093226511289148e-05, |
| "loss": 1.583, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.6128307107037869, |
| "grad_norm": 0.8570541143417358, |
| "learning_rate": 4.0859431900946834e-05, |
| "loss": 1.4963, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.6135223932215114, |
| "grad_norm": 0.9847023487091064, |
| "learning_rate": 4.078659868900219e-05, |
| "loss": 1.7706, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.6142140757392357, |
| "grad_norm": 0.7415423393249512, |
| "learning_rate": 4.071376547705754e-05, |
| "loss": 1.7496, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.61490575825696, |
| "grad_norm": 0.8785969614982605, |
| "learning_rate": 4.064093226511289e-05, |
| "loss": 1.3073, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.6155974407746844, |
| "grad_norm": 0.8583815097808838, |
| "learning_rate": 4.0568099053168244e-05, |
| "loss": 1.5086, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.6162891232924088, |
| "grad_norm": 4.068897724151611, |
| "learning_rate": 4.04952658412236e-05, |
| "loss": 1.9179, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.6169808058101331, |
| "grad_norm": 0.781101644039154, |
| "learning_rate": 4.042243262927895e-05, |
| "loss": 1.7548, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.6176724883278575, |
| "grad_norm": 0.7089797854423523, |
| "learning_rate": 4.034959941733431e-05, |
| "loss": 0.8754, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.6183641708455819, |
| "grad_norm": 0.8193789124488831, |
| "learning_rate": 4.027676620538966e-05, |
| "loss": 1.8492, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.6190558533633063, |
| "grad_norm": 1.1896588802337646, |
| "learning_rate": 4.0203932993445015e-05, |
| "loss": 1.3831, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.6197475358810306, |
| "grad_norm": 0.7735778093338013, |
| "learning_rate": 4.013109978150036e-05, |
| "loss": 1.3693, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.6204392183987549, |
| "grad_norm": 0.8395145535469055, |
| "learning_rate": 4.005826656955572e-05, |
| "loss": 2.0921, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.6211309009164794, |
| "grad_norm": 0.8883162140846252, |
| "learning_rate": 3.998543335761107e-05, |
| "loss": 1.1215, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.6218225834342037, |
| "grad_norm": 0.7758648991584778, |
| "learning_rate": 3.9912600145666426e-05, |
| "loss": 2.1886, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.622514265951928, |
| "grad_norm": 1.0782335996627808, |
| "learning_rate": 3.983976693372178e-05, |
| "loss": 1.6127, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.6232059484696524, |
| "grad_norm": 0.7295915484428406, |
| "learning_rate": 3.9766933721777134e-05, |
| "loss": 1.4735, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.6238976309873768, |
| "grad_norm": 0.8186253309249878, |
| "learning_rate": 3.969410050983249e-05, |
| "loss": 1.9706, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.6245893135051012, |
| "grad_norm": 0.9425630569458008, |
| "learning_rate": 3.9621267297887836e-05, |
| "loss": 1.7289, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.6252809960228255, |
| "grad_norm": 1.7592432498931885, |
| "learning_rate": 3.954843408594319e-05, |
| "loss": 1.7906, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.6259726785405499, |
| "grad_norm": 0.8689557909965515, |
| "learning_rate": 3.9475600873998545e-05, |
| "loss": 1.7125, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.6266643610582743, |
| "grad_norm": 0.7336760759353638, |
| "learning_rate": 3.94027676620539e-05, |
| "loss": 1.2892, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.6273560435759986, |
| "grad_norm": 1.0234981775283813, |
| "learning_rate": 3.932993445010925e-05, |
| "loss": 1.4871, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.628047726093723, |
| "grad_norm": 0.62737637758255, |
| "learning_rate": 3.925710123816461e-05, |
| "loss": 1.4537, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.6287394086114474, |
| "grad_norm": 0.8133891224861145, |
| "learning_rate": 3.9184268026219955e-05, |
| "loss": 1.9147, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.6294310911291717, |
| "grad_norm": 0.8863133192062378, |
| "learning_rate": 3.911143481427531e-05, |
| "loss": 1.4371, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.6301227736468961, |
| "grad_norm": 0.8457992076873779, |
| "learning_rate": 3.9038601602330664e-05, |
| "loss": 1.4767, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.6308144561646204, |
| "grad_norm": 0.9177572727203369, |
| "learning_rate": 3.896576839038602e-05, |
| "loss": 0.9703, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.6315061386823448, |
| "grad_norm": 0.798324465751648, |
| "learning_rate": 3.889293517844137e-05, |
| "loss": 1.21, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.6321978212000692, |
| "grad_norm": 0.8874804973602295, |
| "learning_rate": 3.8820101966496726e-05, |
| "loss": 1.5766, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.6328895037177935, |
| "grad_norm": 0.7579106092453003, |
| "learning_rate": 3.874726875455208e-05, |
| "loss": 2.0235, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.6335811862355178, |
| "grad_norm": 0.8905123472213745, |
| "learning_rate": 3.867443554260743e-05, |
| "loss": 1.4235, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.6342728687532423, |
| "grad_norm": 0.7457150816917419, |
| "learning_rate": 3.860160233066278e-05, |
| "loss": 1.005, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.6349645512709666, |
| "grad_norm": 0.7743381261825562, |
| "learning_rate": 3.852876911871814e-05, |
| "loss": 1.5984, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.635656233788691, |
| "grad_norm": 0.9183463454246521, |
| "learning_rate": 3.845593590677349e-05, |
| "loss": 1.9001, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.6363479163064154, |
| "grad_norm": 0.9490715861320496, |
| "learning_rate": 3.8383102694828845e-05, |
| "loss": 1.6814, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.6370395988241397, |
| "grad_norm": 0.7596708536148071, |
| "learning_rate": 3.83102694828842e-05, |
| "loss": 1.3015, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.6377312813418641, |
| "grad_norm": 0.8084985017776489, |
| "learning_rate": 3.8237436270939554e-05, |
| "loss": 1.4776, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.6384229638595884, |
| "grad_norm": 1.5200563669204712, |
| "learning_rate": 3.81646030589949e-05, |
| "loss": 1.5271, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.6391146463773129, |
| "grad_norm": 0.9168257117271423, |
| "learning_rate": 3.8091769847050256e-05, |
| "loss": 0.865, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.6398063288950372, |
| "grad_norm": 0.8897766470909119, |
| "learning_rate": 3.801893663510561e-05, |
| "loss": 1.7975, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.6404980114127615, |
| "grad_norm": 0.8529428839683533, |
| "learning_rate": 3.7946103423160964e-05, |
| "loss": 1.5159, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.6411896939304859, |
| "grad_norm": 0.9676948189735413, |
| "learning_rate": 3.787327021121632e-05, |
| "loss": 0.9847, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.6418813764482103, |
| "grad_norm": 0.8767004013061523, |
| "learning_rate": 3.780043699927167e-05, |
| "loss": 2.0668, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.6425730589659346, |
| "grad_norm": 0.7345457673072815, |
| "learning_rate": 3.772760378732702e-05, |
| "loss": 1.8291, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.643264741483659, |
| "grad_norm": 1.1258862018585205, |
| "learning_rate": 3.7654770575382375e-05, |
| "loss": 1.9181, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.6439564240013833, |
| "grad_norm": 1.059696078300476, |
| "learning_rate": 3.758193736343773e-05, |
| "loss": 1.1648, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.6446481065191078, |
| "grad_norm": 0.875996470451355, |
| "learning_rate": 3.750910415149308e-05, |
| "loss": 1.8461, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.6453397890368321, |
| "grad_norm": 0.7454437017440796, |
| "learning_rate": 3.743627093954844e-05, |
| "loss": 1.8346, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.6460314715545564, |
| "grad_norm": 1.0619977712631226, |
| "learning_rate": 3.736343772760379e-05, |
| "loss": 1.1181, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.6467231540722809, |
| "grad_norm": 0.7725276350975037, |
| "learning_rate": 3.7290604515659146e-05, |
| "loss": 1.3964, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.6474148365900052, |
| "grad_norm": 0.9066617488861084, |
| "learning_rate": 3.7217771303714494e-05, |
| "loss": 1.8135, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.6481065191077295, |
| "grad_norm": 0.7097616195678711, |
| "learning_rate": 3.714493809176985e-05, |
| "loss": 2.001, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.6487982016254539, |
| "grad_norm": 0.9724728465080261, |
| "learning_rate": 3.70721048798252e-05, |
| "loss": 2.0217, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.6494898841431783, |
| "grad_norm": 0.7304125428199768, |
| "learning_rate": 3.699927166788055e-05, |
| "loss": 1.5426, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.6501815666609027, |
| "grad_norm": 0.886914074420929, |
| "learning_rate": 3.692643845593591e-05, |
| "loss": 1.6769, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.650873249178627, |
| "grad_norm": 1.057144284248352, |
| "learning_rate": 3.6853605243991265e-05, |
| "loss": 1.4546, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.6515649316963513, |
| "grad_norm": 0.9073127508163452, |
| "learning_rate": 3.678077203204662e-05, |
| "loss": 1.6194, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.6522566142140758, |
| "grad_norm": 0.9237063527107239, |
| "learning_rate": 3.670793882010197e-05, |
| "loss": 1.4516, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.6529482967318001, |
| "grad_norm": 0.7701244354248047, |
| "learning_rate": 3.663510560815732e-05, |
| "loss": 1.7599, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.6536399792495244, |
| "grad_norm": 0.7972707152366638, |
| "learning_rate": 3.6562272396212676e-05, |
| "loss": 1.9227, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.6543316617672489, |
| "grad_norm": 0.6878631711006165, |
| "learning_rate": 3.648943918426802e-05, |
| "loss": 1.2225, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.6550233442849732, |
| "grad_norm": 0.6954039335250854, |
| "learning_rate": 3.6416605972323384e-05, |
| "loss": 1.3706, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.6557150268026976, |
| "grad_norm": 0.8597190380096436, |
| "learning_rate": 3.634377276037874e-05, |
| "loss": 2.0226, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.6564067093204219, |
| "grad_norm": 0.8058347702026367, |
| "learning_rate": 3.6270939548434086e-05, |
| "loss": 1.8904, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.6570983918381463, |
| "grad_norm": 0.7838011384010315, |
| "learning_rate": 3.619810633648944e-05, |
| "loss": 1.1612, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.6577900743558707, |
| "grad_norm": 0.9218417406082153, |
| "learning_rate": 3.6125273124544795e-05, |
| "loss": 1.9737, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.658481756873595, |
| "grad_norm": 0.7816157937049866, |
| "learning_rate": 3.605243991260015e-05, |
| "loss": 0.86, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.6591734393913193, |
| "grad_norm": 0.8489840626716614, |
| "learning_rate": 3.5979606700655496e-05, |
| "loss": 2.0043, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.6598651219090438, |
| "grad_norm": 1.0207152366638184, |
| "learning_rate": 3.590677348871086e-05, |
| "loss": 1.4315, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.6605568044267681, |
| "grad_norm": 0.8000298738479614, |
| "learning_rate": 3.583394027676621e-05, |
| "loss": 1.9495, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.6612484869444925, |
| "grad_norm": 0.8676748871803284, |
| "learning_rate": 3.576110706482156e-05, |
| "loss": 1.7678, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.6619401694622168, |
| "grad_norm": 0.9304187297821045, |
| "learning_rate": 3.5688273852876913e-05, |
| "loss": 2.0562, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.6626318519799412, |
| "grad_norm": 1.0136486291885376, |
| "learning_rate": 3.561544064093227e-05, |
| "loss": 1.7028, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.6633235344976656, |
| "grad_norm": 0.735945463180542, |
| "learning_rate": 3.5542607428987615e-05, |
| "loss": 1.3553, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.6640152170153899, |
| "grad_norm": 0.7199145555496216, |
| "learning_rate": 3.546977421704297e-05, |
| "loss": 1.3213, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.6647068995331143, |
| "grad_norm": 0.8235880732536316, |
| "learning_rate": 3.539694100509833e-05, |
| "loss": 2.0212, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.6653985820508387, |
| "grad_norm": 1.330508828163147, |
| "learning_rate": 3.5324107793153685e-05, |
| "loss": 1.8431, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.666090264568563, |
| "grad_norm": 0.7981826663017273, |
| "learning_rate": 3.525127458120903e-05, |
| "loss": 1.1926, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.6667819470862874, |
| "grad_norm": 0.9556028842926025, |
| "learning_rate": 3.517844136926439e-05, |
| "loss": 1.1749, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.6674736296040118, |
| "grad_norm": 0.9828068614006042, |
| "learning_rate": 3.510560815731974e-05, |
| "loss": 1.4142, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.6681653121217361, |
| "grad_norm": 1.730177879333496, |
| "learning_rate": 3.503277494537509e-05, |
| "loss": 1.8473, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.6688569946394605, |
| "grad_norm": 0.7303544878959656, |
| "learning_rate": 3.495994173343044e-05, |
| "loss": 1.3033, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.6695486771571848, |
| "grad_norm": 0.7854034900665283, |
| "learning_rate": 3.4887108521485804e-05, |
| "loss": 1.2283, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.6702403596749092, |
| "grad_norm": 0.8185420036315918, |
| "learning_rate": 3.481427530954115e-05, |
| "loss": 2.0494, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.6709320421926336, |
| "grad_norm": 0.7962689995765686, |
| "learning_rate": 3.4741442097596506e-05, |
| "loss": 1.9523, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.6716237247103579, |
| "grad_norm": 0.9332857131958008, |
| "learning_rate": 3.466860888565186e-05, |
| "loss": 1.6991, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.6723154072280824, |
| "grad_norm": 0.7964072823524475, |
| "learning_rate": 3.4595775673707214e-05, |
| "loss": 1.6715, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.6730070897458067, |
| "grad_norm": 0.8391403555870056, |
| "learning_rate": 3.452294246176256e-05, |
| "loss": 1.8878, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.673698772263531, |
| "grad_norm": 0.8566287159919739, |
| "learning_rate": 3.4450109249817916e-05, |
| "loss": 1.7805, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.6743904547812554, |
| "grad_norm": 0.9097205400466919, |
| "learning_rate": 3.437727603787328e-05, |
| "loss": 1.4648, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.6750821372989798, |
| "grad_norm": 0.9566125273704529, |
| "learning_rate": 3.4304442825928625e-05, |
| "loss": 1.4794, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.6757738198167041, |
| "grad_norm": 0.89398193359375, |
| "learning_rate": 3.423160961398398e-05, |
| "loss": 2.0932, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.6764655023344285, |
| "grad_norm": 0.8663723468780518, |
| "learning_rate": 3.415877640203933e-05, |
| "loss": 1.8625, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.6771571848521528, |
| "grad_norm": 0.751266598701477, |
| "learning_rate": 3.408594319009468e-05, |
| "loss": 1.0625, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.6778488673698773, |
| "grad_norm": 0.8049336671829224, |
| "learning_rate": 3.4013109978150035e-05, |
| "loss": 1.3388, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.6785405498876016, |
| "grad_norm": 0.8025321364402771, |
| "learning_rate": 3.394027676620539e-05, |
| "loss": 1.4493, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.6792322324053259, |
| "grad_norm": 0.8283219933509827, |
| "learning_rate": 3.386744355426075e-05, |
| "loss": 1.1765, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.6799239149230503, |
| "grad_norm": 1.0506592988967896, |
| "learning_rate": 3.37946103423161e-05, |
| "loss": 1.2619, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.6806155974407747, |
| "grad_norm": 0.5937513709068298, |
| "learning_rate": 3.372177713037145e-05, |
| "loss": 0.9384, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.681307279958499, |
| "grad_norm": 0.6834938526153564, |
| "learning_rate": 3.3648943918426806e-05, |
| "loss": 1.0192, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.6819989624762234, |
| "grad_norm": 1.1329364776611328, |
| "learning_rate": 3.3576110706482154e-05, |
| "loss": 2.2567, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.6826906449939478, |
| "grad_norm": 0.7666782736778259, |
| "learning_rate": 3.350327749453751e-05, |
| "loss": 1.8783, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.6833823275116722, |
| "grad_norm": 1.2282352447509766, |
| "learning_rate": 3.343044428259286e-05, |
| "loss": 1.5103, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.6840740100293965, |
| "grad_norm": 0.7626004219055176, |
| "learning_rate": 3.335761107064822e-05, |
| "loss": 1.3147, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.6847656925471208, |
| "grad_norm": 1.0253794193267822, |
| "learning_rate": 3.328477785870357e-05, |
| "loss": 1.7115, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.6854573750648453, |
| "grad_norm": 0.890225887298584, |
| "learning_rate": 3.3211944646758925e-05, |
| "loss": 1.6278, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.6861490575825696, |
| "grad_norm": 1.0109024047851562, |
| "learning_rate": 3.313911143481428e-05, |
| "loss": 1.5538, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.686840740100294, |
| "grad_norm": 0.8567763566970825, |
| "learning_rate": 3.306627822286963e-05, |
| "loss": 1.7241, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.6875324226180183, |
| "grad_norm": 0.7532871961593628, |
| "learning_rate": 3.299344501092498e-05, |
| "loss": 1.3609, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.6882241051357427, |
| "grad_norm": 0.9524408578872681, |
| "learning_rate": 3.2920611798980336e-05, |
| "loss": 1.9429, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.6889157876534671, |
| "grad_norm": 0.6177567839622498, |
| "learning_rate": 3.284777858703569e-05, |
| "loss": 0.9766, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.6896074701711914, |
| "grad_norm": 1.1738656759262085, |
| "learning_rate": 3.2774945375091044e-05, |
| "loss": 1.6276, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.6902991526889158, |
| "grad_norm": 1.018707036972046, |
| "learning_rate": 3.27021121631464e-05, |
| "loss": 0.8201, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.6909908352066402, |
| "grad_norm": 0.8498096466064453, |
| "learning_rate": 3.2629278951201746e-05, |
| "loss": 0.9932, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.6916825177243645, |
| "grad_norm": 0.8145414590835571, |
| "learning_rate": 3.25564457392571e-05, |
| "loss": 1.9895, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.6916825177243645, |
| "eval_loss": 1.4734511375427246, |
| "eval_runtime": 636.6322, |
| "eval_samples_per_second": 2.018, |
| "eval_steps_per_second": 1.01, |
| "step": 1000 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 1446, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 3.178081149272064e+16, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |