| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.9878213802435725, | |
| "eval_steps": 500, | |
| "global_step": 276, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.010825439783491205, | |
| "grad_norm": 5.819816606329325, | |
| "learning_rate": 2.8571428571428573e-06, | |
| "loss": 0.8569, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.02165087956698241, | |
| "grad_norm": 5.824103393653863, | |
| "learning_rate": 5.7142857142857145e-06, | |
| "loss": 0.8605, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.03247631935047361, | |
| "grad_norm": 5.449628641963284, | |
| "learning_rate": 8.571428571428571e-06, | |
| "loss": 0.8526, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.04330175913396482, | |
| "grad_norm": 2.304622496597546, | |
| "learning_rate": 1.1428571428571429e-05, | |
| "loss": 0.776, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.05412719891745602, | |
| "grad_norm": 3.9211129699075964, | |
| "learning_rate": 1.4285714285714287e-05, | |
| "loss": 0.7549, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.06495263870094722, | |
| "grad_norm": 4.191352696153633, | |
| "learning_rate": 1.7142857142857142e-05, | |
| "loss": 0.7511, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.07577807848443843, | |
| "grad_norm": 4.286357012899222, | |
| "learning_rate": 2e-05, | |
| "loss": 0.7344, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.08660351826792964, | |
| "grad_norm": 2.827488821367777, | |
| "learning_rate": 2.2857142857142858e-05, | |
| "loss": 0.7287, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.09742895805142084, | |
| "grad_norm": 2.7210962800813157, | |
| "learning_rate": 2.5714285714285718e-05, | |
| "loss": 0.6716, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.10825439783491204, | |
| "grad_norm": 2.0910388281962677, | |
| "learning_rate": 2.8571428571428574e-05, | |
| "loss": 0.6654, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.11907983761840325, | |
| "grad_norm": 1.5036337205773866, | |
| "learning_rate": 3.142857142857143e-05, | |
| "loss": 0.6374, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.12990527740189445, | |
| "grad_norm": 1.2903721050783283, | |
| "learning_rate": 3.4285714285714284e-05, | |
| "loss": 0.6297, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.14073071718538566, | |
| "grad_norm": 1.4942476484292093, | |
| "learning_rate": 3.714285714285715e-05, | |
| "loss": 0.614, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.15155615696887687, | |
| "grad_norm": 1.1573056639932546, | |
| "learning_rate": 4e-05, | |
| "loss": 0.6242, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.16238159675236807, | |
| "grad_norm": 1.3567163955990846, | |
| "learning_rate": 4.2857142857142856e-05, | |
| "loss": 0.6083, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.17320703653585928, | |
| "grad_norm": 1.0833410878759577, | |
| "learning_rate": 4.5714285714285716e-05, | |
| "loss": 0.598, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.18403247631935046, | |
| "grad_norm": 1.5165046199330672, | |
| "learning_rate": 4.857142857142857e-05, | |
| "loss": 0.5939, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.19485791610284167, | |
| "grad_norm": 0.9386763484950851, | |
| "learning_rate": 5.1428571428571436e-05, | |
| "loss": 0.5752, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.20568335588633288, | |
| "grad_norm": 1.5556049091111643, | |
| "learning_rate": 5.4285714285714295e-05, | |
| "loss": 0.5709, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.2165087956698241, | |
| "grad_norm": 1.2090709113436298, | |
| "learning_rate": 5.714285714285715e-05, | |
| "loss": 0.5734, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.2273342354533153, | |
| "grad_norm": 1.6184191302395516, | |
| "learning_rate": 6.000000000000001e-05, | |
| "loss": 0.5716, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.2381596752368065, | |
| "grad_norm": 1.196774148850343, | |
| "learning_rate": 6.285714285714286e-05, | |
| "loss": 0.5697, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.2489851150202977, | |
| "grad_norm": 1.3957440961304397, | |
| "learning_rate": 6.571428571428571e-05, | |
| "loss": 0.5618, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.2598105548037889, | |
| "grad_norm": 1.1530340597242459, | |
| "learning_rate": 6.857142857142857e-05, | |
| "loss": 0.5676, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.2706359945872801, | |
| "grad_norm": 1.4067612714316569, | |
| "learning_rate": 7.142857142857143e-05, | |
| "loss": 0.5574, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.2814614343707713, | |
| "grad_norm": 1.2887562790881204, | |
| "learning_rate": 7.42857142857143e-05, | |
| "loss": 0.5382, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.2922868741542625, | |
| "grad_norm": 1.3060124429504023, | |
| "learning_rate": 7.714285714285715e-05, | |
| "loss": 0.5428, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.30311231393775373, | |
| "grad_norm": 0.9883707020889994, | |
| "learning_rate": 8e-05, | |
| "loss": 0.5457, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.31393775372124494, | |
| "grad_norm": 1.0876934359026393, | |
| "learning_rate": 7.999679062421315e-05, | |
| "loss": 0.5457, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.32476319350473615, | |
| "grad_norm": 1.411392955639978, | |
| "learning_rate": 7.998716301185722e-05, | |
| "loss": 0.5412, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.33558863328822736, | |
| "grad_norm": 1.0738613910818682, | |
| "learning_rate": 7.997111870786354e-05, | |
| "loss": 0.5391, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.34641407307171856, | |
| "grad_norm": 1.273871982692977, | |
| "learning_rate": 7.994866028684212e-05, | |
| "loss": 0.5422, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.3572395128552097, | |
| "grad_norm": 0.9898777883054408, | |
| "learning_rate": 7.991979135266861e-05, | |
| "loss": 0.5386, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.3680649526387009, | |
| "grad_norm": 1.2065676888460215, | |
| "learning_rate": 7.98845165379059e-05, | |
| "loss": 0.5421, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.37889039242219213, | |
| "grad_norm": 0.9168228655036887, | |
| "learning_rate": 7.984284150306085e-05, | |
| "loss": 0.5216, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.38971583220568334, | |
| "grad_norm": 1.3633210658375563, | |
| "learning_rate": 7.97947729356758e-05, | |
| "loss": 0.5222, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.40054127198917455, | |
| "grad_norm": 1.0022641361037112, | |
| "learning_rate": 7.974031854925562e-05, | |
| "loss": 0.5244, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.41136671177266576, | |
| "grad_norm": 1.4290805331505272, | |
| "learning_rate": 7.967948708202972e-05, | |
| "loss": 0.515, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.42219215155615697, | |
| "grad_norm": 0.7497159385074146, | |
| "learning_rate": 7.961228829555003e-05, | |
| "loss": 0.5186, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.4330175913396482, | |
| "grad_norm": 1.7521562586528496, | |
| "learning_rate": 7.953873297312447e-05, | |
| "loss": 0.5134, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.4438430311231394, | |
| "grad_norm": 0.9470335891121822, | |
| "learning_rate": 7.945883291808655e-05, | |
| "loss": 0.5135, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.4546684709066306, | |
| "grad_norm": 1.5820458180443764, | |
| "learning_rate": 7.937260095190137e-05, | |
| "loss": 0.5098, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.4654939106901218, | |
| "grad_norm": 1.1413271018542415, | |
| "learning_rate": 7.928005091210817e-05, | |
| "loss": 0.5084, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.476319350473613, | |
| "grad_norm": 1.235088329539578, | |
| "learning_rate": 7.918119765009979e-05, | |
| "loss": 0.5076, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.4871447902571042, | |
| "grad_norm": 0.9400198098528566, | |
| "learning_rate": 7.907605702873948e-05, | |
| "loss": 0.5116, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.4979702300405954, | |
| "grad_norm": 0.9375690956173464, | |
| "learning_rate": 7.896464591981549e-05, | |
| "loss": 0.5054, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.5087956698240866, | |
| "grad_norm": 0.7329487861115176, | |
| "learning_rate": 7.884698220133357e-05, | |
| "loss": 0.5058, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.5196211096075778, | |
| "grad_norm": 0.7698588584111778, | |
| "learning_rate": 7.872308475464818e-05, | |
| "loss": 0.5067, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.530446549391069, | |
| "grad_norm": 1.1885268830362357, | |
| "learning_rate": 7.859297346143258e-05, | |
| "loss": 0.512, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.5412719891745602, | |
| "grad_norm": 0.992648856368562, | |
| "learning_rate": 7.84566692004885e-05, | |
| "loss": 0.5069, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.5520974289580515, | |
| "grad_norm": 0.6542981592253104, | |
| "learning_rate": 7.831419384439565e-05, | |
| "loss": 0.4961, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.5629228687415426, | |
| "grad_norm": 0.6705665645989084, | |
| "learning_rate": 7.816557025600196e-05, | |
| "loss": 0.4932, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.5737483085250338, | |
| "grad_norm": 0.7155749845228387, | |
| "learning_rate": 7.80108222847547e-05, | |
| "loss": 0.4914, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.584573748308525, | |
| "grad_norm": 0.6715018406328525, | |
| "learning_rate": 7.784997476287349e-05, | |
| "loss": 0.5019, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.5953991880920162, | |
| "grad_norm": 0.6472911168886215, | |
| "learning_rate": 7.76830535013654e-05, | |
| "loss": 0.4906, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.6062246278755075, | |
| "grad_norm": 0.5246297983132437, | |
| "learning_rate": 7.751008528588322e-05, | |
| "loss": 0.4921, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.6170500676589986, | |
| "grad_norm": 0.7340091731691611, | |
| "learning_rate": 7.733109787242708e-05, | |
| "loss": 0.4908, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.6278755074424899, | |
| "grad_norm": 0.5863315204444673, | |
| "learning_rate": 7.71461199828905e-05, | |
| "loss": 0.4882, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.638700947225981, | |
| "grad_norm": 0.4917106703308541, | |
| "learning_rate": 7.695518130045147e-05, | |
| "loss": 0.4759, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.6495263870094723, | |
| "grad_norm": 0.5085996804192014, | |
| "learning_rate": 7.675831246480923e-05, | |
| "loss": 0.4828, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.6603518267929634, | |
| "grad_norm": 0.3791829332742154, | |
| "learning_rate": 7.655554506726747e-05, | |
| "loss": 0.4929, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.6711772665764547, | |
| "grad_norm": 0.45113244917619744, | |
| "learning_rate": 7.6346911645665e-05, | |
| "loss": 0.4819, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.6820027063599459, | |
| "grad_norm": 0.5424688733563033, | |
| "learning_rate": 7.61324456791544e-05, | |
| "loss": 0.4918, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.6928281461434371, | |
| "grad_norm": 0.6423919157046518, | |
| "learning_rate": 7.591218158282968e-05, | |
| "loss": 0.4859, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.7036535859269283, | |
| "grad_norm": 0.4769550019591345, | |
| "learning_rate": 7.568615470220369e-05, | |
| "loss": 0.4788, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.7144790257104194, | |
| "grad_norm": 0.4916800880863017, | |
| "learning_rate": 7.545440130753634e-05, | |
| "loss": 0.4878, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.7253044654939107, | |
| "grad_norm": 0.5132399155606483, | |
| "learning_rate": 7.52169585880143e-05, | |
| "loss": 0.485, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.7361299052774019, | |
| "grad_norm": 0.5456840828131174, | |
| "learning_rate": 7.497386464578329e-05, | |
| "loss": 0.493, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.7469553450608931, | |
| "grad_norm": 0.42739423010421, | |
| "learning_rate": 7.472515848983394e-05, | |
| "loss": 0.4832, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.7577807848443843, | |
| "grad_norm": 0.39278695193041646, | |
| "learning_rate": 7.447088002974199e-05, | |
| "loss": 0.4667, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.7686062246278755, | |
| "grad_norm": 0.42770508464730317, | |
| "learning_rate": 7.421107006926408e-05, | |
| "loss": 0.4704, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.7794316644113667, | |
| "grad_norm": 0.38470840620649965, | |
| "learning_rate": 7.394577029979004e-05, | |
| "loss": 0.481, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.790257104194858, | |
| "grad_norm": 0.4193281189626006, | |
| "learning_rate": 7.367502329365268e-05, | |
| "loss": 0.4861, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.8010825439783491, | |
| "grad_norm": 0.3827059322776369, | |
| "learning_rate": 7.33988724972963e-05, | |
| "loss": 0.4816, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.8119079837618404, | |
| "grad_norm": 0.42566487076408904, | |
| "learning_rate": 7.311736222430487e-05, | |
| "loss": 0.4777, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.8227334235453315, | |
| "grad_norm": 0.5025211405144976, | |
| "learning_rate": 7.283053764829106e-05, | |
| "loss": 0.4714, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.8335588633288228, | |
| "grad_norm": 0.5741883625162677, | |
| "learning_rate": 7.253844479564737e-05, | |
| "loss": 0.4702, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.8443843031123139, | |
| "grad_norm": 0.6696935120948854, | |
| "learning_rate": 7.224113053816021e-05, | |
| "loss": 0.4753, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.8552097428958051, | |
| "grad_norm": 0.9542436165626025, | |
| "learning_rate": 7.193864258548855e-05, | |
| "loss": 0.481, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.8660351826792964, | |
| "grad_norm": 1.3281901358631187, | |
| "learning_rate": 7.163102947750794e-05, | |
| "loss": 0.4778, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.8768606224627875, | |
| "grad_norm": 0.5708463379757338, | |
| "learning_rate": 7.131834057652142e-05, | |
| "loss": 0.4821, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.8876860622462788, | |
| "grad_norm": 0.7494967553337677, | |
| "learning_rate": 7.100062605933835e-05, | |
| "loss": 0.4713, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.8985115020297699, | |
| "grad_norm": 0.9260092880954705, | |
| "learning_rate": 7.067793690922268e-05, | |
| "loss": 0.4781, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.9093369418132612, | |
| "grad_norm": 0.8465930202833822, | |
| "learning_rate": 7.035032490771165e-05, | |
| "loss": 0.4767, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.9201623815967523, | |
| "grad_norm": 0.6266134622658083, | |
| "learning_rate": 7.001784262630652e-05, | |
| "loss": 0.4723, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.9309878213802436, | |
| "grad_norm": 0.517620839883759, | |
| "learning_rate": 6.968054341803644e-05, | |
| "loss": 0.4764, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.9418132611637348, | |
| "grad_norm": 0.47952633754576535, | |
| "learning_rate": 6.933848140889705e-05, | |
| "loss": 0.4707, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.952638700947226, | |
| "grad_norm": 0.5180177997945453, | |
| "learning_rate": 6.89917114891648e-05, | |
| "loss": 0.4582, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.9634641407307172, | |
| "grad_norm": 0.4941659598515683, | |
| "learning_rate": 6.864028930458892e-05, | |
| "loss": 0.4725, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.9742895805142084, | |
| "grad_norm": 0.45376418056714024, | |
| "learning_rate": 6.828427124746191e-05, | |
| "loss": 0.4714, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.9851150202976996, | |
| "grad_norm": 0.48154401549761827, | |
| "learning_rate": 6.792371444757037e-05, | |
| "loss": 0.4717, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.9959404600811907, | |
| "grad_norm": 0.42010718019610865, | |
| "learning_rate": 6.755867676302747e-05, | |
| "loss": 0.4739, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 1.006765899864682, | |
| "grad_norm": 0.6423260667772067, | |
| "learning_rate": 6.718921677098853e-05, | |
| "loss": 0.7337, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 1.0175913396481733, | |
| "grad_norm": 1.1368044021426411, | |
| "learning_rate": 6.681539375825115e-05, | |
| "loss": 0.4587, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 1.0284167794316643, | |
| "grad_norm": 1.0167270144863594, | |
| "learning_rate": 6.643726771174164e-05, | |
| "loss": 0.4565, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 1.0392422192151556, | |
| "grad_norm": 1.2040116357489603, | |
| "learning_rate": 6.60548993088889e-05, | |
| "loss": 0.4528, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 1.0500676589986468, | |
| "grad_norm": 0.5280008439304368, | |
| "learning_rate": 6.56683499078876e-05, | |
| "loss": 0.4391, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 1.060893098782138, | |
| "grad_norm": 0.9513992072340473, | |
| "learning_rate": 6.527768153785216e-05, | |
| "loss": 0.4546, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 1.0717185385656292, | |
| "grad_norm": 0.8945534088892716, | |
| "learning_rate": 6.488295688886295e-05, | |
| "loss": 0.4464, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 1.0825439783491204, | |
| "grad_norm": 0.4803593551438963, | |
| "learning_rate": 6.448423930190653e-05, | |
| "loss": 0.4424, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 1.0933694181326117, | |
| "grad_norm": 0.7132337199031038, | |
| "learning_rate": 6.408159275871132e-05, | |
| "loss": 0.4438, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 1.104194857916103, | |
| "grad_norm": 0.501493078502592, | |
| "learning_rate": 6.36750818714807e-05, | |
| "loss": 0.4404, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 1.115020297699594, | |
| "grad_norm": 0.5704909263043828, | |
| "learning_rate": 6.326477187252455e-05, | |
| "loss": 0.4463, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 1.1258457374830853, | |
| "grad_norm": 0.39768047755318864, | |
| "learning_rate": 6.28507286037917e-05, | |
| "loss": 0.4361, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 1.1366711772665765, | |
| "grad_norm": 0.4237888580471298, | |
| "learning_rate": 6.243301850630419e-05, | |
| "loss": 0.4411, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 1.1474966170500678, | |
| "grad_norm": 0.3977516102857721, | |
| "learning_rate": 6.201170860949565e-05, | |
| "loss": 0.4449, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 1.1583220568335588, | |
| "grad_norm": 0.36439054553654765, | |
| "learning_rate": 6.15868665204552e-05, | |
| "loss": 0.4397, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 1.16914749661705, | |
| "grad_norm": 0.3172050356576391, | |
| "learning_rate": 6.11585604130785e-05, | |
| "loss": 0.4333, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 1.1799729364005414, | |
| "grad_norm": 0.3164444436480277, | |
| "learning_rate": 6.072685901712808e-05, | |
| "loss": 0.4399, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 1.1907983761840324, | |
| "grad_norm": 0.359823261717206, | |
| "learning_rate": 6.02918316072043e-05, | |
| "loss": 0.445, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 1.2016238159675237, | |
| "grad_norm": 0.3440856769960641, | |
| "learning_rate": 5.9853547991628967e-05, | |
| "loss": 0.4427, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 1.212449255751015, | |
| "grad_norm": 0.30097200903097815, | |
| "learning_rate": 5.941207850124325e-05, | |
| "loss": 0.4374, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 1.2232746955345062, | |
| "grad_norm": 0.34337859696161266, | |
| "learning_rate": 5.896749397812181e-05, | |
| "loss": 0.4331, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 1.2341001353179972, | |
| "grad_norm": 0.26682437143432414, | |
| "learning_rate": 5.8519865764204834e-05, | |
| "loss": 0.4402, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 1.2449255751014885, | |
| "grad_norm": 0.2925864820274926, | |
| "learning_rate": 5.8069265689849884e-05, | |
| "loss": 0.4463, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 1.2557510148849798, | |
| "grad_norm": 0.26873879358337216, | |
| "learning_rate": 5.761576606230538e-05, | |
| "loss": 0.4383, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 1.266576454668471, | |
| "grad_norm": 0.26750905627915955, | |
| "learning_rate": 5.7159439654107506e-05, | |
| "loss": 0.4354, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 1.277401894451962, | |
| "grad_norm": 0.27140890739231865, | |
| "learning_rate": 5.6700359691402533e-05, | |
| "loss": 0.4358, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 1.2882273342354533, | |
| "grad_norm": 0.2688052091840491, | |
| "learning_rate": 5.6238599842196285e-05, | |
| "loss": 0.4399, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 1.2990527740189446, | |
| "grad_norm": 0.23237176189036496, | |
| "learning_rate": 5.5774234204532746e-05, | |
| "loss": 0.4315, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 1.3098782138024356, | |
| "grad_norm": 0.2582793553376856, | |
| "learning_rate": 5.5307337294603595e-05, | |
| "loss": 0.4389, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 1.320703653585927, | |
| "grad_norm": 0.25998032303590113, | |
| "learning_rate": 5.483798403479072e-05, | |
| "loss": 0.4345, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 1.3315290933694182, | |
| "grad_norm": 0.22654274496707347, | |
| "learning_rate": 5.436624974164349e-05, | |
| "loss": 0.4375, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 1.3423545331529092, | |
| "grad_norm": 0.2132025776512317, | |
| "learning_rate": 5.389221011379281e-05, | |
| "loss": 0.4352, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 1.3531799729364005, | |
| "grad_norm": 0.22987678181579743, | |
| "learning_rate": 5.3415941219803895e-05, | |
| "loss": 0.4353, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 1.3640054127198917, | |
| "grad_norm": 0.23663441032885751, | |
| "learning_rate": 5.2937519485969525e-05, | |
| "loss": 0.4346, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 1.374830852503383, | |
| "grad_norm": 0.17389154001646567, | |
| "learning_rate": 5.245702168404616e-05, | |
| "loss": 0.4275, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 1.3856562922868743, | |
| "grad_norm": 0.23503149155366482, | |
| "learning_rate": 5.1974524918934336e-05, | |
| "loss": 0.43, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 1.3964817320703653, | |
| "grad_norm": 0.19074540514177896, | |
| "learning_rate": 5.14901066163058e-05, | |
| "loss": 0.4293, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 1.4073071718538566, | |
| "grad_norm": 0.20989639643707073, | |
| "learning_rate": 5.1003844510179126e-05, | |
| "loss": 0.4344, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 1.4181326116373478, | |
| "grad_norm": 0.21768312209974275, | |
| "learning_rate": 5.0515816630445795e-05, | |
| "loss": 0.4353, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 1.4289580514208389, | |
| "grad_norm": 0.22770642990656514, | |
| "learning_rate": 5.002610129034883e-05, | |
| "loss": 0.437, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 1.4397834912043301, | |
| "grad_norm": 0.15851558321482667, | |
| "learning_rate": 4.953477707391597e-05, | |
| "loss": 0.4312, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 1.4506089309878214, | |
| "grad_norm": 0.21065971212702458, | |
| "learning_rate": 4.90419228233494e-05, | |
| "loss": 0.4327, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 1.4614343707713127, | |
| "grad_norm": 0.1977186790743507, | |
| "learning_rate": 4.854761762637403e-05, | |
| "loss": 0.4331, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 1.472259810554804, | |
| "grad_norm": 0.15698947407325928, | |
| "learning_rate": 4.805194080354641e-05, | |
| "loss": 0.4319, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 1.483085250338295, | |
| "grad_norm": 0.19537159673686585, | |
| "learning_rate": 4.7554971895526175e-05, | |
| "loss": 0.4291, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 1.4939106901217862, | |
| "grad_norm": 0.20964272537167541, | |
| "learning_rate": 4.705679065031235e-05, | |
| "loss": 0.4355, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 1.5047361299052775, | |
| "grad_norm": 0.17865795982071064, | |
| "learning_rate": 4.6557477010446206e-05, | |
| "loss": 0.4343, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 1.5155615696887685, | |
| "grad_norm": 0.15541860075622516, | |
| "learning_rate": 4.605711110018307e-05, | |
| "loss": 0.434, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 1.5263870094722598, | |
| "grad_norm": 0.2043684142895673, | |
| "learning_rate": 4.555577321263477e-05, | |
| "loss": 0.4302, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 1.537212449255751, | |
| "grad_norm": 0.23426309604432982, | |
| "learning_rate": 4.505354379688518e-05, | |
| "loss": 0.4312, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 1.548037889039242, | |
| "grad_norm": 0.18216887416633568, | |
| "learning_rate": 4.4550503445080606e-05, | |
| "loss": 0.4374, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 1.5588633288227334, | |
| "grad_norm": 0.20488378818829997, | |
| "learning_rate": 4.4046732879497295e-05, | |
| "loss": 0.4294, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 1.5696887686062246, | |
| "grad_norm": 0.17988085916095425, | |
| "learning_rate": 4.354231293958801e-05, | |
| "loss": 0.4278, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 1.5805142083897157, | |
| "grad_norm": 0.21375464685800274, | |
| "learning_rate": 4.3037324569009854e-05, | |
| "loss": 0.428, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 1.5913396481732072, | |
| "grad_norm": 0.1595944513780177, | |
| "learning_rate": 4.2531848802635264e-05, | |
| "loss": 0.4239, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 1.6021650879566982, | |
| "grad_norm": 0.20710029491988388, | |
| "learning_rate": 4.202596675354851e-05, | |
| "loss": 0.4439, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 1.6129905277401895, | |
| "grad_norm": 0.18760267517688003, | |
| "learning_rate": 4.151975960002958e-05, | |
| "loss": 0.4257, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 1.6238159675236807, | |
| "grad_norm": 0.17989788441602386, | |
| "learning_rate": 4.101330857252752e-05, | |
| "loss": 0.4323, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 1.6346414073071718, | |
| "grad_norm": 0.23287464779978895, | |
| "learning_rate": 4.050669494062561e-05, | |
| "loss": 0.4332, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 1.645466847090663, | |
| "grad_norm": 0.189323052196139, | |
| "learning_rate": 4e-05, | |
| "loss": 0.427, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 1.6562922868741543, | |
| "grad_norm": 0.184349576978975, | |
| "learning_rate": 3.9493305059374405e-05, | |
| "loss": 0.4391, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 1.6671177266576453, | |
| "grad_norm": 0.17387257518482524, | |
| "learning_rate": 3.8986691427472496e-05, | |
| "loss": 0.4319, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 1.6779431664411368, | |
| "grad_norm": 0.15742800866720608, | |
| "learning_rate": 3.8480240399970436e-05, | |
| "loss": 0.4252, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 1.6887686062246279, | |
| "grad_norm": 0.16048755324367, | |
| "learning_rate": 3.7974033246451496e-05, | |
| "loss": 0.4331, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 1.699594046008119, | |
| "grad_norm": 0.14348416896463495, | |
| "learning_rate": 3.746815119736475e-05, | |
| "loss": 0.4195, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 1.7104194857916104, | |
| "grad_norm": 0.17718953008091573, | |
| "learning_rate": 3.696267543099016e-05, | |
| "loss": 0.4309, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 1.7212449255751014, | |
| "grad_norm": 0.16663132788494395, | |
| "learning_rate": 3.6457687060412e-05, | |
| "loss": 0.4373, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 1.7320703653585927, | |
| "grad_norm": 0.1554923374408675, | |
| "learning_rate": 3.595326712050272e-05, | |
| "loss": 0.4315, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 1.742895805142084, | |
| "grad_norm": 0.16686688749371534, | |
| "learning_rate": 3.5449496554919414e-05, | |
| "loss": 0.428, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 1.753721244925575, | |
| "grad_norm": 0.14235924539764902, | |
| "learning_rate": 3.494645620311484e-05, | |
| "loss": 0.4333, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 1.7645466847090663, | |
| "grad_norm": 0.15429586926594233, | |
| "learning_rate": 3.444422678736525e-05, | |
| "loss": 0.4274, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 1.7753721244925575, | |
| "grad_norm": 0.13427568170852947, | |
| "learning_rate": 3.394288889981695e-05, | |
| "loss": 0.4249, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 1.7861975642760486, | |
| "grad_norm": 0.15839460744629516, | |
| "learning_rate": 3.34425229895538e-05, | |
| "loss": 0.432, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 1.79702300405954, | |
| "grad_norm": 0.1758768222614395, | |
| "learning_rate": 3.294320934968768e-05, | |
| "loss": 0.425, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 1.8078484438430311, | |
| "grad_norm": 0.11883069616568503, | |
| "learning_rate": 3.2445028104473845e-05, | |
| "loss": 0.4308, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 1.8186738836265224, | |
| "grad_norm": 0.161190973835667, | |
| "learning_rate": 3.194805919645359e-05, | |
| "loss": 0.4271, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 1.8294993234100136, | |
| "grad_norm": 0.1313474169203658, | |
| "learning_rate": 3.145238237362596e-05, | |
| "loss": 0.4354, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 1.8403247631935047, | |
| "grad_norm": 0.12301362731071704, | |
| "learning_rate": 3.0958077176650606e-05, | |
| "loss": 0.4264, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 1.851150202976996, | |
| "grad_norm": 0.13438366925384954, | |
| "learning_rate": 3.0465222926084036e-05, | |
| "loss": 0.4227, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 1.8619756427604872, | |
| "grad_norm": 0.12345365081159011, | |
| "learning_rate": 2.997389870965118e-05, | |
| "loss": 0.4235, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 1.8728010825439783, | |
| "grad_norm": 0.1392819812761936, | |
| "learning_rate": 2.948418336955421e-05, | |
| "loss": 0.4177, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 1.8836265223274695, | |
| "grad_norm": 0.11380005409294593, | |
| "learning_rate": 2.899615548982088e-05, | |
| "loss": 0.4302, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 1.8944519621109608, | |
| "grad_norm": 0.13716568406501112, | |
| "learning_rate": 2.8509893383694213e-05, | |
| "loss": 0.4244, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 1.9052774018944518, | |
| "grad_norm": 0.11681622216346851, | |
| "learning_rate": 2.8025475081065684e-05, | |
| "loss": 0.4196, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 1.9161028416779433, | |
| "grad_norm": 0.13014991063416603, | |
| "learning_rate": 2.754297831595385e-05, | |
| "loss": 0.4209, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 1.9269282814614344, | |
| "grad_norm": 0.12953501131145828, | |
| "learning_rate": 2.7062480514030478e-05, | |
| "loss": 0.4312, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 1.9377537212449256, | |
| "grad_norm": 0.12609752034740299, | |
| "learning_rate": 2.658405878019612e-05, | |
| "loss": 0.4249, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 1.9485791610284169, | |
| "grad_norm": 0.11439337767790579, | |
| "learning_rate": 2.6107789886207195e-05, | |
| "loss": 0.4276, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 1.959404600811908, | |
| "grad_norm": 0.11452476603160673, | |
| "learning_rate": 2.563375025835652e-05, | |
| "loss": 0.4259, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 1.9702300405953992, | |
| "grad_norm": 0.11836884964888351, | |
| "learning_rate": 2.5162015965209295e-05, | |
| "loss": 0.4225, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 1.9810554803788905, | |
| "grad_norm": 0.12165013663722433, | |
| "learning_rate": 2.4692662705396412e-05, | |
| "loss": 0.4301, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 1.9918809201623815, | |
| "grad_norm": 0.11089256984034979, | |
| "learning_rate": 2.4225765795467267e-05, | |
| "loss": 0.4307, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 2.002706359945873, | |
| "grad_norm": 0.22370472517367998, | |
| "learning_rate": 2.376140015780372e-05, | |
| "loss": 0.6768, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 2.013531799729364, | |
| "grad_norm": 0.16796651530032833, | |
| "learning_rate": 2.3299640308597487e-05, | |
| "loss": 0.4004, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 2.024357239512855, | |
| "grad_norm": 0.15763976975134172, | |
| "learning_rate": 2.2840560345892518e-05, | |
| "loss": 0.3995, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 2.0351826792963466, | |
| "grad_norm": 0.1631884946178966, | |
| "learning_rate": 2.2384233937694626e-05, | |
| "loss": 0.4004, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 2.0460081190798376, | |
| "grad_norm": 0.1491153083339053, | |
| "learning_rate": 2.1930734310150116e-05, | |
| "loss": 0.3943, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 2.0568335588633286, | |
| "grad_norm": 0.152568681392906, | |
| "learning_rate": 2.1480134235795173e-05, | |
| "loss": 0.399, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 2.06765899864682, | |
| "grad_norm": 0.14747005749198763, | |
| "learning_rate": 2.10325060218782e-05, | |
| "loss": 0.404, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 2.078484438430311, | |
| "grad_norm": 0.16539294650384473, | |
| "learning_rate": 2.0587921498756768e-05, | |
| "loss": 0.4077, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 2.089309878213802, | |
| "grad_norm": 0.12972950402899025, | |
| "learning_rate": 2.014645200837105e-05, | |
| "loss": 0.4003, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 2.1001353179972937, | |
| "grad_norm": 0.1543237826355855, | |
| "learning_rate": 1.9708168392795718e-05, | |
| "loss": 0.3922, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 2.1109607577807847, | |
| "grad_norm": 0.14698806599717074, | |
| "learning_rate": 1.9273140982871936e-05, | |
| "loss": 0.4018, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 2.121786197564276, | |
| "grad_norm": 0.1391992210631873, | |
| "learning_rate": 1.8841439586921515e-05, | |
| "loss": 0.4006, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 2.1326116373477673, | |
| "grad_norm": 0.14394534424723107, | |
| "learning_rate": 1.841313347954482e-05, | |
| "loss": 0.3957, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 2.1434370771312583, | |
| "grad_norm": 0.11934910379991102, | |
| "learning_rate": 1.7988291390504348e-05, | |
| "loss": 0.3946, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 2.15426251691475, | |
| "grad_norm": 0.14350614860081573, | |
| "learning_rate": 1.7566981493695828e-05, | |
| "loss": 0.4013, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 2.165087956698241, | |
| "grad_norm": 0.11899602863308896, | |
| "learning_rate": 1.71492713962083e-05, | |
| "loss": 0.4036, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 2.175913396481732, | |
| "grad_norm": 0.12062781775498721, | |
| "learning_rate": 1.673522812747544e-05, | |
| "loss": 0.3993, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 2.1867388362652234, | |
| "grad_norm": 0.11257342728764269, | |
| "learning_rate": 1.6324918128519306e-05, | |
| "loss": 0.3965, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 2.1975642760487144, | |
| "grad_norm": 0.11149512463483689, | |
| "learning_rate": 1.5918407241288678e-05, | |
| "loss": 0.403, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 2.208389715832206, | |
| "grad_norm": 0.10884317622434123, | |
| "learning_rate": 1.5515760698093485e-05, | |
| "loss": 0.4012, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 2.219215155615697, | |
| "grad_norm": 0.11653784084735175, | |
| "learning_rate": 1.511704311113705e-05, | |
| "loss": 0.3989, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 2.230040595399188, | |
| "grad_norm": 0.10418561090507017, | |
| "learning_rate": 1.4722318462147844e-05, | |
| "loss": 0.3998, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 2.2408660351826795, | |
| "grad_norm": 0.11104706876794065, | |
| "learning_rate": 1.4331650092112406e-05, | |
| "loss": 0.4037, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 2.2516914749661705, | |
| "grad_norm": 0.10501349011970908, | |
| "learning_rate": 1.394510069111112e-05, | |
| "loss": 0.3944, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 2.2625169147496615, | |
| "grad_norm": 0.10150324538523472, | |
| "learning_rate": 1.3562732288258377e-05, | |
| "loss": 0.3998, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 2.273342354533153, | |
| "grad_norm": 0.09891734543071261, | |
| "learning_rate": 1.3184606241748857e-05, | |
| "loss": 0.3952, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 2.284167794316644, | |
| "grad_norm": 0.1028338968410784, | |
| "learning_rate": 1.2810783229011486e-05, | |
| "loss": 0.3965, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 2.2949932341001356, | |
| "grad_norm": 0.10517361059986559, | |
| "learning_rate": 1.2441323236972536e-05, | |
| "loss": 0.3965, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 2.3058186738836266, | |
| "grad_norm": 0.1012317459777251, | |
| "learning_rate": 1.2076285552429642e-05, | |
| "loss": 0.4045, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 2.3166441136671176, | |
| "grad_norm": 0.10129231317849015, | |
| "learning_rate": 1.1715728752538103e-05, | |
| "loss": 0.399, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 2.3274695534506087, | |
| "grad_norm": 0.09889286761459576, | |
| "learning_rate": 1.1359710695411086e-05, | |
| "loss": 0.3997, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 2.3382949932341, | |
| "grad_norm": 0.10712510906320237, | |
| "learning_rate": 1.100828851083521e-05, | |
| "loss": 0.4056, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 2.349120433017591, | |
| "grad_norm": 0.11267265505681792, | |
| "learning_rate": 1.0661518591102973e-05, | |
| "loss": 0.3991, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 2.3599458728010827, | |
| "grad_norm": 0.1059839439844302, | |
| "learning_rate": 1.0319456581963578e-05, | |
| "loss": 0.4066, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 2.3707713125845737, | |
| "grad_norm": 0.10234179201730713, | |
| "learning_rate": 9.982157373693502e-06, | |
| "loss": 0.4038, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 2.381596752368065, | |
| "grad_norm": 0.11592356283054443, | |
| "learning_rate": 9.649675092288366e-06, | |
| "loss": 0.3914, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 2.3924221921515563, | |
| "grad_norm": 0.10506304371683683, | |
| "learning_rate": 9.322063090777331e-06, | |
| "loss": 0.3992, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 2.4032476319350473, | |
| "grad_norm": 0.1036436583931566, | |
| "learning_rate": 8.99937394066165e-06, | |
| "loss": 0.4049, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 2.414073071718539, | |
| "grad_norm": 0.11032708227586546, | |
| "learning_rate": 8.681659423478587e-06, | |
| "loss": 0.4044, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 2.42489851150203, | |
| "grad_norm": 0.10054731972112925, | |
| "learning_rate": 8.368970522492064e-06, | |
| "loss": 0.401, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 2.435723951285521, | |
| "grad_norm": 0.09032101894350991, | |
| "learning_rate": 8.06135741451146e-06, | |
| "loss": 0.3994, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 2.4465493910690124, | |
| "grad_norm": 0.10684435926371892, | |
| "learning_rate": 7.758869461839808e-06, | |
| "loss": 0.4024, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 2.4573748308525034, | |
| "grad_norm": 0.08847978409027292, | |
| "learning_rate": 7.461555204352655e-06, | |
| "loss": 0.3958, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 2.4682002706359945, | |
| "grad_norm": 0.09347883820205226, | |
| "learning_rate": 7.169462351708958e-06, | |
| "loss": 0.3979, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 2.479025710419486, | |
| "grad_norm": 0.08617414663781357, | |
| "learning_rate": 6.882637775695147e-06, | |
| "loss": 0.3946, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 2.489851150202977, | |
| "grad_norm": 0.08841620926123635, | |
| "learning_rate": 6.60112750270371e-06, | |
| "loss": 0.3956, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 2.500676589986468, | |
| "grad_norm": 0.09378540237055451, | |
| "learning_rate": 6.324976706347317e-06, | |
| "loss": 0.405, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 2.5115020297699595, | |
| "grad_norm": 0.09914335692411994, | |
| "learning_rate": 6.054229700209959e-06, | |
| "loss": 0.402, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 2.5223274695534506, | |
| "grad_norm": 0.08234472595355508, | |
| "learning_rate": 5.788929930735916e-06, | |
| "loss": 0.3973, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 2.533152909336942, | |
| "grad_norm": 0.08513825880471276, | |
| "learning_rate": 5.529119970258014e-06, | |
| "loss": 0.4004, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 2.543978349120433, | |
| "grad_norm": 0.08462445088863371, | |
| "learning_rate": 5.274841510166062e-06, | |
| "loss": 0.393, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 2.554803788903924, | |
| "grad_norm": 0.08398690168459941, | |
| "learning_rate": 5.026135354216717e-06, | |
| "loss": 0.3924, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 2.565629228687415, | |
| "grad_norm": 0.08301006412922159, | |
| "learning_rate": 4.783041411985716e-06, | |
| "loss": 0.3996, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 2.5764546684709067, | |
| "grad_norm": 0.08453775182796612, | |
| "learning_rate": 4.545598692463675e-06, | |
| "loss": 0.3982, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 2.5872801082543977, | |
| "grad_norm": 0.08332326065487239, | |
| "learning_rate": 4.3138452977963266e-06, | |
| "loss": 0.3955, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 2.598105548037889, | |
| "grad_norm": 0.0880569429861736, | |
| "learning_rate": 4.087818417170337e-06, | |
| "loss": 0.4023, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 2.60893098782138, | |
| "grad_norm": 0.08523725208230046, | |
| "learning_rate": 3.867554320845601e-06, | |
| "loss": 0.4054, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 2.6197564276048713, | |
| "grad_norm": 0.0841139024919145, | |
| "learning_rate": 3.6530883543350038e-06, | |
| "loss": 0.4049, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 2.6305818673883627, | |
| "grad_norm": 0.08497377694909106, | |
| "learning_rate": 3.4444549327325325e-06, | |
| "loss": 0.3957, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 2.641407307171854, | |
| "grad_norm": 0.09193406360384537, | |
| "learning_rate": 3.241687535190776e-06, | |
| "loss": 0.394, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 2.6522327469553453, | |
| "grad_norm": 0.08952710895691597, | |
| "learning_rate": 3.0448186995485307e-06, | |
| "loss": 0.395, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 2.6630581867388363, | |
| "grad_norm": 0.08488862663071034, | |
| "learning_rate": 2.853880017109516e-06, | |
| "loss": 0.3955, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 2.6738836265223274, | |
| "grad_norm": 0.08269355473254857, | |
| "learning_rate": 2.6689021275729366e-06, | |
| "loss": 0.4018, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 2.6847090663058184, | |
| "grad_norm": 0.08423416357018129, | |
| "learning_rate": 2.489914714116788e-06, | |
| "loss": 0.3967, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 2.69553450608931, | |
| "grad_norm": 0.08472851596664387, | |
| "learning_rate": 2.316946498634605e-06, | |
| "loss": 0.3932, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 2.706359945872801, | |
| "grad_norm": 0.0830080866090561, | |
| "learning_rate": 2.1500252371265253e-06, | |
| "loss": 0.404, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 2.7171853856562924, | |
| "grad_norm": 0.08175536738617865, | |
| "learning_rate": 1.989177715245307e-06, | |
| "loss": 0.4003, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 2.7280108254397835, | |
| "grad_norm": 0.08008628691021832, | |
| "learning_rate": 1.8344297439980475e-06, | |
| "loss": 0.4036, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 2.7388362652232745, | |
| "grad_norm": 0.08044205434546692, | |
| "learning_rate": 1.685806155604346e-06, | |
| "loss": 0.4023, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 2.749661705006766, | |
| "grad_norm": 0.07803360132695795, | |
| "learning_rate": 1.5433307995115043e-06, | |
| "loss": 0.3965, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 2.760487144790257, | |
| "grad_norm": 0.07685585628831833, | |
| "learning_rate": 1.4070265385674176e-06, | |
| "loss": 0.3922, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 2.7713125845737485, | |
| "grad_norm": 0.07992843960914281, | |
| "learning_rate": 1.276915245351833e-06, | |
| "loss": 0.3907, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 2.7821380243572396, | |
| "grad_norm": 0.07626058515188562, | |
| "learning_rate": 1.1530177986664425e-06, | |
| "loss": 0.4046, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 2.7929634641407306, | |
| "grad_norm": 0.07527973794844814, | |
| "learning_rate": 1.0353540801845229e-06, | |
| "loss": 0.3975, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 2.803788903924222, | |
| "grad_norm": 0.07488363117295638, | |
| "learning_rate": 9.239429712605274e-07, | |
| "loss": 0.396, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 2.814614343707713, | |
| "grad_norm": 0.07225352469040812, | |
| "learning_rate": 8.188023499002206e-07, | |
| "loss": 0.4016, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 2.825439783491204, | |
| "grad_norm": 0.0738013010435412, | |
| "learning_rate": 7.199490878918314e-07, | |
| "loss": 0.3912, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 2.8362652232746957, | |
| "grad_norm": 0.07384650417814832, | |
| "learning_rate": 6.273990480986314e-07, | |
| "loss": 0.3964, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 2.8470906630581867, | |
| "grad_norm": 0.07477788701246599, | |
| "learning_rate": 5.411670819134651e-07, | |
| "loss": 0.4007, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 2.8579161028416777, | |
| "grad_norm": 0.07909269529410881, | |
| "learning_rate": 4.6126702687554483e-07, | |
| "loss": 0.3963, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 2.8687415426251692, | |
| "grad_norm": 0.07592128485064209, | |
| "learning_rate": 3.8771170444996895e-07, | |
| "loss": 0.4006, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 2.8795669824086603, | |
| "grad_norm": 0.07366933798288977, | |
| "learning_rate": 3.2051291797027925e-07, | |
| "loss": 0.3991, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 2.8903924221921518, | |
| "grad_norm": 0.0750804647694085, | |
| "learning_rate": 2.59681450744389e-07, | |
| "loss": 0.3948, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 2.901217861975643, | |
| "grad_norm": 0.0727883460460439, | |
| "learning_rate": 2.0522706432419382e-07, | |
| "loss": 0.396, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 2.912043301759134, | |
| "grad_norm": 0.07264744604462643, | |
| "learning_rate": 1.5715849693916264e-07, | |
| "loss": 0.3979, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 2.9228687415426253, | |
| "grad_norm": 0.07246555666741276, | |
| "learning_rate": 1.1548346209410366e-07, | |
| "loss": 0.3997, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 2.9336941813261164, | |
| "grad_norm": 0.07381457346874619, | |
| "learning_rate": 8.020864733140343e-08, | |
| "loss": 0.3966, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 2.944519621109608, | |
| "grad_norm": 0.07336188684383566, | |
| "learning_rate": 5.133971315788966e-08, | |
| "loss": 0.4021, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 2.955345060893099, | |
| "grad_norm": 0.07480237876528813, | |
| "learning_rate": 2.8881292136468952e-08, | |
| "loss": 0.3994, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 2.96617050067659, | |
| "grad_norm": 0.07567566789699567, | |
| "learning_rate": 1.2836988142779228e-08, | |
| "loss": 0.3982, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 2.976995940460081, | |
| "grad_norm": 0.07340342180814576, | |
| "learning_rate": 3.209375786856761e-09, | |
| "loss": 0.4065, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 2.9878213802435725, | |
| "grad_norm": 0.07089869505627004, | |
| "learning_rate": 0.0, | |
| "loss": 0.3935, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 2.9878213802435725, | |
| "step": 276, | |
| "total_flos": 4629166754365440.0, | |
| "train_loss": 0.46006364839664404, | |
| "train_runtime": 30135.3512, | |
| "train_samples_per_second": 4.707, | |
| "train_steps_per_second": 0.009 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 276, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 4629166754365440.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
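
The dump above appears to be the `trainer_state.json` that the Hugging Face `transformers` `Trainer` writes alongside a checkpoint: 276 optimizer steps over roughly 3 epochs, one per-step record in `log_history`, and a final summary record carrying `train_loss`, `train_runtime`, and `total_flos` instead of per-step fields. As a minimal sketch of how those records can be inspected, the snippet below loads the state and plots loss and learning rate against `global_step`; the file path, the choice of `matplotlib`, and the filtering of the summary entry are assumptions made for illustration, not part of the state file itself.

```python
import json

import matplotlib.pyplot as plt  # assumed plotting dependency, not part of the state file

# Load the trainer state shown above (the path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step records; the final summary entry reports
# "train_loss"/"train_runtime" rather than "loss"/"learning_rate".
logs = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]

steps = [e["step"] for e in logs]
loss = [e["loss"] for e in logs]
lr = [e["learning_rate"] for e in logs]

fig, ax_loss = plt.subplots(figsize=(8, 4))
ax_loss.plot(steps, loss, color="tab:blue")
ax_loss.set_xlabel("global step")
ax_loss.set_ylabel("training loss")

# Learning rate on a secondary axis to show the warmup and the decay to 0.
ax_lr = ax_loss.twinx()
ax_lr.plot(steps, lr, color="tab:orange")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
plt.show()
```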