{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 0,
  "global_step": 388311,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0038628831014315846,
      "grad_norm": 0.028026340529322624,
      "learning_rate": 2.5701335714040445e-06,
      "loss": 0.0035,
      "step": 500
    },
    {
      "epoch": 0.007725766202863169,
      "grad_norm": 0.028945209458470345,
      "learning_rate": 5.145417711087456e-06,
      "loss": 0.0024,
      "step": 1000
    },
    {
      "epoch": 0.011588649304294754,
      "grad_norm": 0.023790603503584862,
      "learning_rate": 7.720701850770868e-06,
      "loss": 0.0022,
      "step": 1500
    },
    {
      "epoch": 0.015451532405726338,
      "grad_norm": 0.012105235829949379,
      "learning_rate": 1.0295985990454279e-05,
      "loss": 0.002,
      "step": 2000
    },
    {
      "epoch": 0.01931441550715792,
      "grad_norm": 0.007807546760886908,
      "learning_rate": 1.2871270130137691e-05,
      "loss": 0.0019,
      "step": 2500
    },
    {
      "epoch": 0.023177298608589508,
      "grad_norm": 0.004866514354944229,
      "learning_rate": 1.54465542698211e-05,
      "loss": 0.0019,
      "step": 3000
    },
    {
      "epoch": 0.02704018171002109,
      "grad_norm": 0.004499210510402918,
      "learning_rate": 1.8021838409504514e-05,
      "loss": 0.0018,
      "step": 3500
    },
    {
      "epoch": 0.030903064811452677,
      "grad_norm": 0.0029713744297623634,
      "learning_rate": 2.0597122549187926e-05,
      "loss": 0.0018,
      "step": 4000
    },
    {
      "epoch": 0.03476594791288426,
      "grad_norm": 0.002635620068758726,
      "learning_rate": 2.3172406688871335e-05,
      "loss": 0.0017,
      "step": 4500
    },
    {
      "epoch": 0.03862883101431584,
      "grad_norm": 0.0026769228279590607,
      "learning_rate": 2.5747690828554747e-05,
      "loss": 0.0017,
      "step": 5000
    },
    {
      "epoch": 0.04249171411574743,
      "grad_norm": 0.004493319429457188,
      "learning_rate": 2.8322974968238163e-05,
      "loss": 0.0016,
      "step": 5500
    },
    {
      "epoch": 0.046354597217179015,
      "grad_norm": 0.002984994789585471,
      "learning_rate": 3.089825910792157e-05,
      "loss": 0.0016,
      "step": 6000
    },
    {
      "epoch": 0.0502174803186106,
      "grad_norm": 0.002657888922840357,
      "learning_rate": 3.3473543247604984e-05,
      "loss": 0.0016,
      "step": 6500
    },
    {
      "epoch": 0.05408036342004218,
      "grad_norm": 0.0023709412198513746,
      "learning_rate": 3.604882738728839e-05,
      "loss": 0.0016,
      "step": 7000
    },
    {
      "epoch": 0.057943246521473764,
      "grad_norm": 0.00325986510142684,
      "learning_rate": 3.862411152697181e-05,
      "loss": 0.0015,
      "step": 7500
    },
    {
      "epoch": 0.061806129622905354,
      "grad_norm": 0.0019731384236365557,
      "learning_rate": 4.119939566665521e-05,
      "loss": 0.0015,
      "step": 8000
    },
    {
      "epoch": 0.06566901272433694,
      "grad_norm": 0.002514706226065755,
      "learning_rate": 4.377467980633863e-05,
      "loss": 0.0015,
      "step": 8500
    },
    {
      "epoch": 0.06953189582576852,
      "grad_norm": 0.005514668766409159,
      "learning_rate": 4.634996394602204e-05,
      "loss": 0.0014,
      "step": 9000
    },
    {
      "epoch": 0.0733947789272001,
      "grad_norm": 0.002325449837371707,
      "learning_rate": 4.892524808570545e-05,
      "loss": 0.0014,
      "step": 9500
    },
    {
      "epoch": 0.07725766202863169,
      "grad_norm": 0.00256099714897573,
      "learning_rate": 5.150053222538886e-05,
      "loss": 0.0014,
      "step": 10000
    },
    {
      "epoch": 0.08112054513006327,
      "grad_norm": 0.002316329861059785,
      "learning_rate": 5.4075816365072276e-05,
      "loss": 0.0013,
      "step": 10500
    },
    {
      "epoch": 0.08498342823149486,
      "grad_norm": 0.002595042111352086,
      "learning_rate": 5.6651100504755685e-05,
      "loss": 0.0013,
      "step": 11000
    },
    {
      "epoch": 0.08884631133292645,
      "grad_norm": 0.002004101872444153,
      "learning_rate": 5.92263846444391e-05,
      "loss": 0.0013,
      "step": 11500
    },
    {
      "epoch": 0.09270919443435803,
      "grad_norm": 0.001948538818396628,
      "learning_rate": 6.180166878412251e-05,
      "loss": 0.0012,
      "step": 12000
    },
    {
      "epoch": 0.09657207753578961,
      "grad_norm": 0.002029005205258727,
      "learning_rate": 6.437695292380593e-05,
      "loss": 0.0012,
      "step": 12500
    },
    {
      "epoch": 0.1004349606372212,
      "grad_norm": 0.001954406499862671,
      "learning_rate": 6.695223706348933e-05,
      "loss": 0.0012,
      "step": 13000
    },
    {
      "epoch": 0.10429784373865278,
      "grad_norm": 0.001649603364057839,
      "learning_rate": 6.952752120317274e-05,
      "loss": 0.0012,
      "step": 13500
    },
    {
      "epoch": 0.10816072684008436,
      "grad_norm": 0.002146226353943348,
      "learning_rate": 7.210280534285616e-05,
      "loss": 0.0011,
      "step": 14000
    },
    {
      "epoch": 0.11202360994151594,
      "grad_norm": 0.002416988369077444,
      "learning_rate": 7.467808948253957e-05,
      "loss": 0.0011,
      "step": 14500
    },
    {
      "epoch": 0.11588649304294753,
      "grad_norm": 0.0018118915613740683,
      "learning_rate": 7.725337362222298e-05,
      "loss": 0.0011,
      "step": 15000
    },
    {
      "epoch": 0.11974937614437912,
      "grad_norm": 0.0018452458316460252,
      "learning_rate": 7.982865776190639e-05,
      "loss": 0.0011,
      "step": 15500
    },
    {
      "epoch": 0.12361225924581071,
      "grad_norm": 0.0015237265033647418,
      "learning_rate": 8.240394190158981e-05,
      "loss": 0.0011,
      "step": 16000
    },
    {
      "epoch": 0.12747514234724228,
      "grad_norm": 0.0019381915917620063,
      "learning_rate": 8.497922604127322e-05,
      "loss": 0.001,
      "step": 16500
    },
    {
      "epoch": 0.13133802544867387,
      "grad_norm": 0.0018706010887399316,
      "learning_rate": 8.755451018095661e-05,
      "loss": 0.001,
      "step": 17000
    },
    {
      "epoch": 0.13520090855010547,
      "grad_norm": 0.0016168917063623667,
      "learning_rate": 9.012979432064003e-05,
      "loss": 0.001,
      "step": 17500
    },
    {
      "epoch": 0.13906379165153704,
      "grad_norm": 0.0015931539237499237,
      "learning_rate": 9.270507846032344e-05,
      "loss": 0.001,
      "step": 18000
    },
    {
      "epoch": 0.14292667475296864,
      "grad_norm": 0.0014986397000029683,
      "learning_rate": 9.528036260000686e-05,
      "loss": 0.001,
      "step": 18500
    },
    {
      "epoch": 0.1467895578544002,
      "grad_norm": 0.0014668807853013277,
      "learning_rate": 9.785564673969028e-05,
      "loss": 0.0009,
      "step": 19000
    },
    {
      "epoch": 0.1506524409558318,
      "grad_norm": 0.0013408615486696362,
      "learning_rate": 0.00010043093087937369,
      "loss": 0.0009,
      "step": 19500
    },
    {
      "epoch": 0.15451532405726337,
      "grad_norm": 0.0015196386957541108,
      "learning_rate": 0.00010300621501905708,
      "loss": 0.0009,
      "step": 20000
    },
    {
      "epoch": 0.15837820715869497,
      "grad_norm": 0.0013054045848548412,
      "learning_rate": 0.0001055814991587405,
      "loss": 0.0009,
      "step": 20500
    },
    {
      "epoch": 0.16224109026012654,
      "grad_norm": 0.001247019157744944,
      "learning_rate": 0.00010815678329842391,
      "loss": 0.0009,
      "step": 21000
    },
    {
      "epoch": 0.16610397336155813,
      "grad_norm": 0.001298928284086287,
      "learning_rate": 0.00011073206743810733,
      "loss": 0.0008,
      "step": 21500
    },
    {
      "epoch": 0.16996685646298973,
      "grad_norm": 0.0011672589462250471,
      "learning_rate": 0.00011330735157779074,
      "loss": 0.0008,
      "step": 22000
    },
    {
      "epoch": 0.1738297395644213,
      "grad_norm": 0.0012577781453728676,
      "learning_rate": 0.00011588263571747416,
      "loss": 0.0008,
      "step": 22500
    },
    {
      "epoch": 0.1776926226658529,
      "grad_norm": 0.0011562498984858394,
      "learning_rate": 0.00011845791985715756,
      "loss": 0.0008,
      "step": 23000
    },
    {
      "epoch": 0.18155550576728446,
      "grad_norm": 0.0011603948660194874,
      "learning_rate": 0.00012103320399684098,
      "loss": 0.0008,
      "step": 23500
    },
    {
      "epoch": 0.18541838886871606,
      "grad_norm": 0.0011685957433655858,
      "learning_rate": 0.00012360848813652438,
      "loss": 0.0008,
      "step": 24000
    },
    {
      "epoch": 0.18928127197014763,
      "grad_norm": 0.0012253538006916642,
      "learning_rate": 0.0001261837722762078,
      "loss": 0.0008,
      "step": 24500
    },
    {
      "epoch": 0.19314415507157923,
      "grad_norm": 0.0009547853842377663,
      "learning_rate": 0.0001287590564158912,
      "loss": 0.0008,
      "step": 25000
    },
    {
      "epoch": 0.1970070381730108,
      "grad_norm": 0.0009738618973642588,
      "learning_rate": 0.0001313343405555746,
      "loss": 0.0007,
      "step": 25500
    },
    {
      "epoch": 0.2008699212744424,
      "grad_norm": 0.0009221744840033352,
      "learning_rate": 0.00013390962469525801,
      "loss": 0.0007,
      "step": 26000
    },
    {
      "epoch": 0.204732804375874,
      "grad_norm": 0.0010526985861361027,
      "learning_rate": 0.00013648490883494144,
      "loss": 0.0007,
      "step": 26500
    },
    {
      "epoch": 0.20859568747730556,
      "grad_norm": 0.0010718920966610312,
      "learning_rate": 0.00013906019297462485,
      "loss": 0.0007,
      "step": 27000
    },
    {
      "epoch": 0.21245857057873715,
      "grad_norm": 0.0010513877496123314,
      "learning_rate": 0.00014163547711430828,
      "loss": 0.0007,
      "step": 27500
    },
    {
      "epoch": 0.21632145368016872,
      "grad_norm": 0.0010410523973405361,
      "learning_rate": 0.00014421076125399168,
      "loss": 0.0007,
      "step": 28000
    },
    {
      "epoch": 0.22018433678160032,
      "grad_norm": 0.0010957131162285805,
      "learning_rate": 0.00014678604539367508,
      "loss": 0.0007,
      "step": 28500
    },
    {
      "epoch": 0.2240472198830319,
      "grad_norm": 0.0011075297370553017,
      "learning_rate": 0.0001493613295333585,
      "loss": 0.0007,
      "step": 29000
    },
    {
      "epoch": 0.2279101029844635,
      "grad_norm": 0.0009366936865262687,
      "learning_rate": 0.00015193661367304188,
      "loss": 0.0007,
      "step": 29500
    },
    {
      "epoch": 0.23177298608589506,
      "grad_norm": 0.0009237458580173552,
      "learning_rate": 0.00015451189781272534,
      "loss": 0.0007,
      "step": 30000
    },
    {
      "epoch": 0.23563586918732665,
      "grad_norm": 0.0009658120106905699,
      "learning_rate": 0.00015708718195240872,
      "loss": 0.0007,
      "step": 30500
    },
    {
      "epoch": 0.23949875228875825,
      "grad_norm": 0.0009536503930576146,
      "learning_rate": 0.00015966246609209214,
      "loss": 0.0007,
      "step": 31000
    },
    {
      "epoch": 0.24336163539018982,
      "grad_norm": 0.0009778173407539725,
      "learning_rate": 0.00016223775023177555,
      "loss": 0.0006,
      "step": 31500
    },
    {
      "epoch": 0.24722451849162141,
      "grad_norm": 0.0010041093919426203,
      "learning_rate": 0.00016481303437145898,
      "loss": 0.0006,
      "step": 32000
    },
    {
      "epoch": 0.251087401593053,
      "grad_norm": 0.0008569998317398131,
      "learning_rate": 0.00016738831851114238,
      "loss": 0.0006,
      "step": 32500
    },
    {
      "epoch": 0.25495028469448455,
      "grad_norm": 0.0009386276942677796,
      "learning_rate": 0.00016996360265082578,
      "loss": 0.0006,
      "step": 33000
    },
    {
      "epoch": 0.2588131677959162,
      "grad_norm": 0.0008849843870848417,
      "learning_rate": 0.0001725388867905092,
      "loss": 0.0006,
      "step": 33500
    },
    {
      "epoch": 0.26267605089734775,
      "grad_norm": 0.0008240257739089429,
      "learning_rate": 0.0001751141709301926,
      "loss": 0.0006,
      "step": 34000
    },
    {
      "epoch": 0.2665389339987793,
      "grad_norm": 0.0009251784067600965,
      "learning_rate": 0.00017768945506987604,
      "loss": 0.0006,
      "step": 34500
    },
    {
      "epoch": 0.27040181710021094,
      "grad_norm": 0.0009474306716583669,
      "learning_rate": 0.00018026473920955944,
      "loss": 0.0006,
      "step": 35000
    },
    {
      "epoch": 0.2742647002016425,
      "grad_norm": 0.0008043038542382419,
      "learning_rate": 0.00018284002334924285,
      "loss": 0.0006,
      "step": 35500
    },
    {
      "epoch": 0.2781275833030741,
      "grad_norm": 0.0007399991736747324,
      "learning_rate": 0.00018541530748892627,
      "loss": 0.0006,
      "step": 36000
    },
    {
      "epoch": 0.28199046640450565,
      "grad_norm": 0.0008425785345025361,
      "learning_rate": 0.00018799059162860968,
      "loss": 0.0006,
      "step": 36500
    },
    {
      "epoch": 0.28585334950593727,
      "grad_norm": 0.0008299360633827746,
      "learning_rate": 0.0001905658757682931,
      "loss": 0.0006,
      "step": 37000
    },
    {
      "epoch": 0.28971623260736884,
      "grad_norm": 0.0007926743710413575,
      "learning_rate": 0.0001931411599079765,
      "loss": 0.0006,
      "step": 37500
    },
    {
      "epoch": 0.2935791157088004,
      "grad_norm": 0.0008646908099763095,
      "learning_rate": 0.00019571644404765988,
      "loss": 0.0006,
      "step": 38000
    },
    {
      "epoch": 0.297441998810232,
      "grad_norm": 0.000800468900706619,
      "learning_rate": 0.0001982917281873433,
      "loss": 0.0006,
      "step": 38500
    },
    {
      "epoch": 0.3013048819116636,
      "grad_norm": 0.0007611193577758968,
      "learning_rate": 0.00020086701232702671,
      "loss": 0.0006,
      "step": 39000
    },
    {
      "epoch": 0.30516776501309517,
      "grad_norm": 0.0007989577716216445,
      "learning_rate": 0.00020344229646671014,
      "loss": 0.0006,
      "step": 39500
    },
    {
      "epoch": 0.30903064811452674,
      "grad_norm": 0.0007914176676422358,
      "learning_rate": 0.00020601758060639355,
      "loss": 0.0006,
      "step": 40000
    },
    {
      "epoch": 0.31289353121595836,
      "grad_norm": 0.0007808151422068477,
      "learning_rate": 0.00020859286474607698,
      "loss": 0.0006,
      "step": 40500
    },
    {
      "epoch": 0.31675641431738993,
      "grad_norm": 0.0007411599508486688,
      "learning_rate": 0.00021116814888576038,
      "loss": 0.0006,
      "step": 41000
    },
    {
      "epoch": 0.3206192974188215,
      "grad_norm": 0.0007389839738607407,
      "learning_rate": 0.00021374343302544378,
      "loss": 0.0005,
      "step": 41500
    },
    {
      "epoch": 0.32448218052025307,
      "grad_norm": 0.0008718458120711148,
      "learning_rate": 0.0002163187171651272,
      "loss": 0.0005,
      "step": 42000
    },
    {
      "epoch": 0.3283450636216847,
      "grad_norm": 0.0007400435279123485,
      "learning_rate": 0.0002188940013048106,
      "loss": 0.0005,
      "step": 42500
    },
    {
      "epoch": 0.33220794672311627,
      "grad_norm": 0.0006650601280853152,
      "learning_rate": 0.00022146928544449404,
      "loss": 0.0005,
      "step": 43000
    },
    {
      "epoch": 0.33607082982454783,
      "grad_norm": 0.0006877753767184913,
      "learning_rate": 0.00022404456958417744,
      "loss": 0.0005,
      "step": 43500
    },
    {
      "epoch": 0.33993371292597946,
      "grad_norm": 0.000792111677583307,
      "learning_rate": 0.00022661985372386084,
      "loss": 0.0005,
      "step": 44000
    },
    {
      "epoch": 0.343796596027411,
      "grad_norm": 0.0006559982430189848,
      "learning_rate": 0.00022919513786354427,
      "loss": 0.0005,
      "step": 44500
    },
    {
      "epoch": 0.3476594791288426,
      "grad_norm": 0.000791021331679076,
      "learning_rate": 0.00023177042200322768,
      "loss": 0.0005,
      "step": 45000
    },
    {
      "epoch": 0.35152236223027417,
      "grad_norm": 0.000694429618306458,
      "learning_rate": 0.0002343457061429111,
      "loss": 0.0005,
      "step": 45500
    },
    {
      "epoch": 0.3553852453317058,
      "grad_norm": 0.0006511561223305762,
      "learning_rate": 0.00023692099028259448,
      "loss": 0.0005,
      "step": 46000
    },
    {
      "epoch": 0.35924812843313736,
      "grad_norm": 0.0007060636417008936,
      "learning_rate": 0.00023949627442227788,
      "loss": 0.0005,
      "step": 46500
    },
    {
      "epoch": 0.36311101153456893,
      "grad_norm": 0.0006298599764704704,
      "learning_rate": 0.0002420715585619613,
      "loss": 0.0005,
      "step": 47000
    },
    {
      "epoch": 0.36697389463600055,
      "grad_norm": 0.0006566342781297863,
      "learning_rate": 0.00024464684270164474,
      "loss": 0.0005,
      "step": 47500
    },
    {
      "epoch": 0.3708367777374321,
      "grad_norm": 0.0006428054184652865,
      "learning_rate": 0.00024722212684132814,
      "loss": 0.0005,
      "step": 48000
    },
    {
      "epoch": 0.3746996608388637,
      "grad_norm": 0.0006790259503759444,
      "learning_rate": 0.00024979741098101155,
      "loss": 0.0005,
      "step": 48500
    },
    {
      "epoch": 0.37856254394029526,
      "grad_norm": 0.0006684795371256769,
      "learning_rate": 0.00025237269512069495,
      "loss": 0.0005,
      "step": 49000
    },
    {
      "epoch": 0.3824254270417269,
      "grad_norm": 0.0006840928690508008,
      "learning_rate": 0.00025494797926037835,
      "loss": 0.0005,
      "step": 49500
    },
    {
      "epoch": 0.38628831014315845,
      "grad_norm": 0.0005870233871974051,
      "learning_rate": 0.00025752326340006175,
      "loss": 0.0005,
      "step": 50000
    },
    {
      "epoch": 0.39015119324459,
      "grad_norm": 0.0006383236614055932,
      "learning_rate": 0.0002600985475397452,
      "loss": 0.0005,
      "step": 50500
    },
    {
      "epoch": 0.3940140763460216,
      "grad_norm": 0.0006128019886091352,
      "learning_rate": 0.0002626738316794286,
      "loss": 0.0005,
      "step": 51000
    },
    {
      "epoch": 0.3978769594474532,
      "grad_norm": 0.0006441852310672402,
      "learning_rate": 0.000265249115819112,
      "loss": 0.0005,
      "step": 51500
    },
    {
      "epoch": 0.4017398425488848,
      "grad_norm": 0.0005380984512157738,
      "learning_rate": 0.0002678243999587954,
      "loss": 0.0005,
      "step": 52000
    },
    {
      "epoch": 0.40560272565031635,
      "grad_norm": 0.0006370097398757935,
      "learning_rate": 0.0002703996840984788,
      "loss": 0.0005,
      "step": 52500
    },
    {
      "epoch": 0.409465608751748,
      "grad_norm": 0.0006471597589552402,
      "learning_rate": 0.0002729749682381623,
      "loss": 0.0005,
      "step": 53000
    },
    {
      "epoch": 0.41332849185317955,
      "grad_norm": 0.0005368549609556794,
      "learning_rate": 0.0002755502523778457,
      "loss": 0.0005,
      "step": 53500
    },
    {
      "epoch": 0.4171913749546111,
      "grad_norm": 0.0005874933558516204,
      "learning_rate": 0.0002781255365175291,
      "loss": 0.0005,
      "step": 54000
    },
    {
      "epoch": 0.4210542580560427,
      "grad_norm": 0.0005409115110523999,
      "learning_rate": 0.0002807008206572125,
      "loss": 0.0005,
      "step": 54500
    },
    {
      "epoch": 0.4249171411574743,
      "grad_norm": 0.0005910882027819753,
      "learning_rate": 0.0002832761047968959,
      "loss": 0.0005,
      "step": 55000
    },
    {
      "epoch": 0.4287800242589059,
      "grad_norm": 0.0006001652218401432,
      "learning_rate": 0.00028585138893657934,
      "loss": 0.0005,
      "step": 55500
    },
    {
      "epoch": 0.43264290736033745,
      "grad_norm": 0.000514556304551661,
      "learning_rate": 0.00028842667307626274,
      "loss": 0.0005,
      "step": 56000
    },
    {
      "epoch": 0.43650579046176907,
      "grad_norm": 0.0005608577048406005,
      "learning_rate": 0.00029100195721594614,
      "loss": 0.0005,
      "step": 56500
    },
    {
      "epoch": 0.44036867356320064,
      "grad_norm": 0.0007016558083705604,
      "learning_rate": 0.00029357724135562954,
      "loss": 0.0005,
      "step": 57000
    },
    {
      "epoch": 0.4442315566646322,
      "grad_norm": 0.0005601047305390239,
      "learning_rate": 0.00029615252549531295,
      "loss": 0.0005,
      "step": 57500
    },
    {
      "epoch": 0.4480944397660638,
      "grad_norm": 0.0004940334474667907,
      "learning_rate": 0.00029872780963499635,
      "loss": 0.0005,
      "step": 58000
    },
    {
      "epoch": 0.4519573228674954,
      "grad_norm": 0.000568122137337923,
      "learning_rate": 0.00029977004320400405,
      "loss": 0.0005,
      "step": 58500
    },
    {
      "epoch": 0.455820205968927,
      "grad_norm": 0.0005688059609383345,
      "learning_rate": 0.0002993155831328659,
      "loss": 0.0005,
      "step": 59000
    },
    {
      "epoch": 0.45968308907035854,
      "grad_norm": 0.0005606883205473423,
      "learning_rate": 0.0002988611230617278,
      "loss": 0.0005,
      "step": 59500
    },
    {
      "epoch": 0.4635459721717901,
      "grad_norm": 0.0005766854155808687,
      "learning_rate": 0.0002984066629905896,
      "loss": 0.0005,
      "step": 60000
    },
    {
      "epoch": 0.46740885527322173,
      "grad_norm": 0.000506491691339761,
      "learning_rate": 0.00029795220291945146,
      "loss": 0.0005,
      "step": 60500
    },
    {
      "epoch": 0.4712717383746533,
      "grad_norm": 0.0005349202547222376,
      "learning_rate": 0.0002974977428483133,
      "loss": 0.0005,
      "step": 61000
    },
    {
      "epoch": 0.4751346214760849,
      "grad_norm": 0.0005307575920596719,
      "learning_rate": 0.00029704328277717514,
      "loss": 0.0005,
      "step": 61500
    },
    {
      "epoch": 0.4789975045775165,
      "grad_norm": 0.0005095390370115638,
      "learning_rate": 0.00029658882270603706,
      "loss": 0.0005,
      "step": 62000
    },
    {
      "epoch": 0.48286038767894807,
      "grad_norm": 0.0005189366056583822,
      "learning_rate": 0.00029613436263489887,
      "loss": 0.0005,
      "step": 62500
    },
    {
      "epoch": 0.48672327078037964,
      "grad_norm": 0.0005579465650953352,
      "learning_rate": 0.0002956799025637607,
      "loss": 0.0005,
      "step": 63000
    },
    {
      "epoch": 0.4905861538818112,
      "grad_norm": 0.000490622769575566,
      "learning_rate": 0.0002952254424926226,
      "loss": 0.0005,
      "step": 63500
    },
    {
      "epoch": 0.49444903698324283,
      "grad_norm": 0.00047076333430595696,
      "learning_rate": 0.0002947709824214844,
      "loss": 0.0005,
      "step": 64000
    },
    {
      "epoch": 0.4983119200846744,
      "grad_norm": 0.0005202812608331442,
      "learning_rate": 0.0002943165223503463,
      "loss": 0.0005,
      "step": 64500
    },
    {
      "epoch": 0.502174803186106,
      "grad_norm": 0.0005745698581449687,
      "learning_rate": 0.00029386206227920814,
      "loss": 0.0005,
      "step": 65000
    },
    {
      "epoch": 0.5060376862875375,
      "grad_norm": 0.0005540290731005371,
      "learning_rate": 0.00029340760220806996,
      "loss": 0.0004,
      "step": 65500
    },
    {
      "epoch": 0.5099005693889691,
      "grad_norm": 0.0005399156943894923,
      "learning_rate": 0.0002929531421369318,
      "loss": 0.0004,
      "step": 66000
    },
    {
      "epoch": 0.5137634524904008,
      "grad_norm": 0.0005016261129640043,
      "learning_rate": 0.0002924986820657937,
      "loss": 0.0004,
      "step": 66500
    },
    {
      "epoch": 0.5176263355918324,
      "grad_norm": 0.0004883984220214188,
      "learning_rate": 0.00029204422199465555,
      "loss": 0.0004,
      "step": 67000
    },
    {
      "epoch": 0.5214892186932639,
      "grad_norm": 0.0004978585639037192,
      "learning_rate": 0.00029158976192351736,
      "loss": 0.0004,
      "step": 67500
    },
    {
      "epoch": 0.5253521017946955,
      "grad_norm": 0.00048788345884531736,
      "learning_rate": 0.00029113530185237923,
      "loss": 0.0004,
      "step": 68000
    },
    {
      "epoch": 0.5292149848961271,
      "grad_norm": 0.0005122956354171038,
      "learning_rate": 0.0002906808417812411,
      "loss": 0.0004,
      "step": 68500
    },
    {
      "epoch": 0.5330778679975586,
      "grad_norm": 0.0005075953667983413,
      "learning_rate": 0.0002902263817101029,
      "loss": 0.0004,
      "step": 69000
    },
    {
      "epoch": 0.5369407510989902,
      "grad_norm": 0.010060743428766727,
      "learning_rate": 0.00028977192163896477,
      "loss": 0.0004,
      "step": 69500
    },
    {
      "epoch": 0.5408036342004219,
      "grad_norm": 0.00045171918463893235,
      "learning_rate": 0.00028931746156782664,
      "loss": 0.0004,
      "step": 70000
    },
    {
      "epoch": 0.5446665173018534,
      "grad_norm": 0.00047910166904330254,
      "learning_rate": 0.00028886300149668845,
      "loss": 0.0004,
      "step": 70500
    },
    {
      "epoch": 0.548529400403285,
      "grad_norm": 0.0004093217430636287,
      "learning_rate": 0.0002884085414255503,
      "loss": 0.0004,
      "step": 71000
    },
    {
      "epoch": 0.5523922835047166,
      "grad_norm": 0.0005109374760650098,
      "learning_rate": 0.0002879540813544122,
      "loss": 0.0004,
      "step": 71500
    },
    {
      "epoch": 0.5562551666061482,
      "grad_norm": 0.00047159241512417793,
      "learning_rate": 0.00028749962128327405,
      "loss": 0.0004,
      "step": 72000
    },
    {
      "epoch": 0.5601180497075797,
      "grad_norm": 0.00048621243331581354,
      "learning_rate": 0.00028704516121213586,
      "loss": 0.0004,
      "step": 72500
    },
    {
      "epoch": 0.5639809328090113,
      "grad_norm": 0.0005125854513607919,
      "learning_rate": 0.0002865907011409977,
      "loss": 0.0004,
      "step": 73000
    },
    {
      "epoch": 0.567843815910443,
      "grad_norm": 0.00048265952500514686,
      "learning_rate": 0.0002861362410698596,
      "loss": 0.0004,
      "step": 73500
    },
    {
      "epoch": 0.5717066990118745,
      "grad_norm": 0.0004471069551073015,
      "learning_rate": 0.0002856817809987214,
      "loss": 0.0004,
      "step": 74000
    },
    {
      "epoch": 0.5755695821133061,
      "grad_norm": 0.0005123110022395849,
      "learning_rate": 0.00028522732092758327,
      "loss": 0.0004,
      "step": 74500
    },
    {
      "epoch": 0.5794324652147377,
      "grad_norm": 0.00046480647870339453,
      "learning_rate": 0.00028477286085644513,
      "loss": 0.0004,
      "step": 75000
    },
    {
      "epoch": 0.5832953483161692,
      "grad_norm": 0.000416387541918084,
      "learning_rate": 0.00028431840078530694,
      "loss": 0.0004,
      "step": 75500
    },
    {
      "epoch": 0.5871582314176008,
      "grad_norm": 0.00046883479808457196,
      "learning_rate": 0.0002838639407141688,
      "loss": 0.0004,
      "step": 76000
    },
    {
      "epoch": 0.5910211145190324,
      "grad_norm": 0.00046609266428276896,
      "learning_rate": 0.0002834094806430307,
      "loss": 0.0004,
      "step": 76500
    },
    {
      "epoch": 0.594883997620464,
      "grad_norm": 0.0004421696939971298,
      "learning_rate": 0.00028295502057189254,
      "loss": 0.0004,
      "step": 77000
    },
    {
      "epoch": 0.5987468807218956,
      "grad_norm": 0.0005302332574501634,
      "learning_rate": 0.0002825005605007544,
      "loss": 0.0004,
      "step": 77500
    },
    {
      "epoch": 0.6026097638233272,
      "grad_norm": 0.0004425778752192855,
      "learning_rate": 0.0002820461004296162,
      "loss": 0.0004,
      "step": 78000
    },
    {
      "epoch": 0.6064726469247588,
      "grad_norm": 0.00043949965038336813,
      "learning_rate": 0.0002815916403584781,
      "loss": 0.0004,
      "step": 78500
    },
    {
      "epoch": 0.6103355300261903,
      "grad_norm": 0.00048047167365439236,
      "learning_rate": 0.00028113718028733995,
      "loss": 0.0004,
      "step": 79000
    },
    {
      "epoch": 0.6141984131276219,
      "grad_norm": 0.00046403607120737433,
      "learning_rate": 0.0002806827202162018,
      "loss": 0.0004,
      "step": 79500
    },
    {
      "epoch": 0.6180612962290535,
      "grad_norm": 0.0004179133102297783,
      "learning_rate": 0.00028022826014506363,
      "loss": 0.0004,
      "step": 80000
    },
    {
      "epoch": 0.621924179330485,
      "grad_norm": 0.0005025283899158239,
      "learning_rate": 0.0002797738000739255,
      "loss": 0.0004,
      "step": 80500
    },
    {
      "epoch": 0.6257870624319167,
      "grad_norm": 0.00045130823855288327,
      "learning_rate": 0.00027931934000278736,
      "loss": 0.0004,
      "step": 81000
    },
    {
      "epoch": 0.6296499455333483,
      "grad_norm": 0.00047835326404310763,
      "learning_rate": 0.00027886487993164917,
      "loss": 0.0004,
      "step": 81500
    },
    {
      "epoch": 0.6335128286347799,
      "grad_norm": 0.0004951538285240531,
      "learning_rate": 0.00027841041986051104,
      "loss": 0.0004,
      "step": 82000
    },
    {
      "epoch": 0.6373757117362114,
      "grad_norm": 0.0004999985103495419,
      "learning_rate": 0.0002779559597893729,
      "loss": 0.0004,
      "step": 82500
    },
    {
      "epoch": 0.641238594837643,
      "grad_norm": 0.00045166266500018537,
      "learning_rate": 0.0002775014997182347,
      "loss": 0.0004,
      "step": 83000
    },
    {
      "epoch": 0.6451014779390746,
      "grad_norm": 0.0004108847351744771,
      "learning_rate": 0.0002770470396470966,
      "loss": 0.0004,
      "step": 83500
    },
    {
      "epoch": 0.6489643610405061,
      "grad_norm": 0.0004798057198058814,
      "learning_rate": 0.00027659257957595844,
      "loss": 0.0004,
      "step": 84000
    },
    {
      "epoch": 0.6528272441419378,
      "grad_norm": 0.0004300622676964849,
      "learning_rate": 0.0002761381195048203,
      "loss": 0.0004,
      "step": 84500
    },
    {
      "epoch": 0.6566901272433694,
      "grad_norm": 0.00042947594192810357,
      "learning_rate": 0.0002756836594336821,
      "loss": 0.0004,
      "step": 85000
    },
    {
      "epoch": 0.660553010344801,
      "grad_norm": 0.0004585100687108934,
      "learning_rate": 0.000275229199362544,
      "loss": 0.0004,
      "step": 85500
    },
    {
      "epoch": 0.6644158934462325,
      "grad_norm": 0.00048575850087217987,
      "learning_rate": 0.00027477473929140585,
      "loss": 0.0004,
      "step": 86000
    },
    {
      "epoch": 0.6682787765476641,
      "grad_norm": 0.0004099896177649498,
      "learning_rate": 0.00027432027922026766,
      "loss": 0.0004,
      "step": 86500
    },
    {
      "epoch": 0.6721416596490957,
      "grad_norm": 0.000426844839239493,
      "learning_rate": 0.00027386581914912953,
      "loss": 0.0004,
      "step": 87000
    },
    {
      "epoch": 0.6760045427505272,
      "grad_norm": 0.000434650486567989,
      "learning_rate": 0.0002734113590779914,
      "loss": 0.0004,
      "step": 87500
    },
    {
      "epoch": 0.6798674258519589,
      "grad_norm": 0.0004488716658670455,
      "learning_rate": 0.0002729568990068532,
      "loss": 0.0004,
      "step": 88000
    },
    {
      "epoch": 0.6837303089533905,
      "grad_norm": 0.0004696206597145647,
      "learning_rate": 0.0002725024389357151,
      "loss": 0.0004,
      "step": 88500
    },
    {
      "epoch": 0.687593192054822,
      "grad_norm": 0.00042553144157864153,
      "learning_rate": 0.00027204797886457694,
      "loss": 0.0004,
      "step": 89000
    },
    {
      "epoch": 0.6914560751562536,
      "grad_norm": 0.0004377859877422452,
      "learning_rate": 0.00027159351879343875,
      "loss": 0.0004,
      "step": 89500
    },
    {
      "epoch": 0.6953189582576852,
      "grad_norm": 0.00040723910205997527,
      "learning_rate": 0.0002711390587223006,
      "loss": 0.0004,
      "step": 90000
    },
    {
      "epoch": 0.6991818413591168,
      "grad_norm": 0.0004500757495407015,
      "learning_rate": 0.0002706845986511625,
      "loss": 0.0004,
      "step": 90500
    },
    {
      "epoch": 0.7030447244605483,
      "grad_norm": 0.00040941766928881407,
      "learning_rate": 0.00027023013858002435,
      "loss": 0.0004,
      "step": 91000
    },
    {
      "epoch": 0.70690760756198,
      "grad_norm": 0.00045324547681957483,
      "learning_rate": 0.00026977567850888616,
      "loss": 0.0004,
      "step": 91500
    },
    {
      "epoch": 0.7107704906634116,
      "grad_norm": 0.00047752921818755567,
      "learning_rate": 0.000269321218437748,
      "loss": 0.0004,
      "step": 92000
    },
    {
      "epoch": 0.7146333737648431,
      "grad_norm": 0.0004383829946164042,
      "learning_rate": 0.0002688667583666099,
      "loss": 0.0004,
      "step": 92500
    },
    {
      "epoch": 0.7184962568662747,
      "grad_norm": 0.00045177084393799305,
      "learning_rate": 0.0002684122982954717,
      "loss": 0.0004,
      "step": 93000
    },
    {
      "epoch": 0.7223591399677063,
      "grad_norm": 0.0004552272439468652,
      "learning_rate": 0.0002679578382243336,
      "loss": 0.0004,
      "step": 93500
    },
    {
      "epoch": 0.7262220230691379,
      "grad_norm": 0.00043098500464111567,
      "learning_rate": 0.00026750337815319543,
      "loss": 0.0004,
      "step": 94000
    },
    {
      "epoch": 0.7300849061705694,
      "grad_norm": 0.00043315321090631187,
      "learning_rate": 0.0002670489180820573,
      "loss": 0.0004,
      "step": 94500
    },
    {
      "epoch": 0.7339477892720011,
      "grad_norm": 0.00043436733540147543,
      "learning_rate": 0.00026659445801091916,
      "loss": 0.0004,
      "step": 95000
    },
    {
      "epoch": 0.7378106723734327,
      "grad_norm": 0.00040910765528678894,
      "learning_rate": 0.000266139997939781,
      "loss": 0.0004,
      "step": 95500
    },
    {
      "epoch": 0.7416735554748642,
      "grad_norm": 0.0004274219973012805,
      "learning_rate": 0.00026568553786864284,
      "loss": 0.0004,
      "step": 96000
    },
    {
      "epoch": 0.7455364385762958,
      "grad_norm": 0.0004410296387504786,
      "learning_rate": 0.0002652310777975047,
      "loss": 0.0004,
      "step": 96500
    },
    {
      "epoch": 0.7493993216777274,
      "grad_norm": 0.0004376559518277645,
      "learning_rate": 0.0002647766177263665,
      "loss": 0.0004,
      "step": 97000
    },
    {
      "epoch": 0.753262204779159,
      "grad_norm": 0.00047215758240781724,
      "learning_rate": 0.0002643221576552284,
      "loss": 0.0004,
      "step": 97500
    },
    {
      "epoch": 0.7571250878805905,
      "grad_norm": 0.00042207614751532674,
      "learning_rate": 0.00026386769758409025,
      "loss": 0.0004,
      "step": 98000
    },
    {
      "epoch": 0.7609879709820221,
      "grad_norm": 0.00042739673517644405,
      "learning_rate": 0.0002634132375129521,
      "loss": 0.0004,
      "step": 98500
    },
    {
      "epoch": 0.7648508540834538,
      "grad_norm": 0.00041113453335128725,
      "learning_rate": 0.00026295877744181393,
      "loss": 0.0004,
      "step": 99000
    },
    {
      "epoch": 0.7687137371848853,
      "grad_norm": 0.00043188530253246427,
      "learning_rate": 0.0002625043173706758,
      "loss": 0.0004,
      "step": 99500
    },
    {
      "epoch": 0.7725766202863169,
      "grad_norm": 0.00045034836512058973,
      "learning_rate": 0.00026204985729953766,
      "loss": 0.0004,
      "step": 100000
    },
    {
      "epoch": 0.7764395033877485,
      "grad_norm": 0.0004060341161675751,
      "learning_rate": 0.00026159539722839947,
      "loss": 0.0004,
      "step": 100500
    },
    {
      "epoch": 0.78030238648918,
      "grad_norm": 0.00040883294423110783,
      "learning_rate": 0.00026114093715726134,
      "loss": 0.0004,
      "step": 101000
    },
    {
      "epoch": 0.7841652695906116,
      "grad_norm": 0.00044574079220183194,
      "learning_rate": 0.0002606864770861232,
      "loss": 0.0004,
      "step": 101500
    },
    {
      "epoch": 0.7880281526920432,
      "grad_norm": 0.0004099115321878344,
      "learning_rate": 0.000260232017014985,
      "loss": 0.0004,
      "step": 102000
    },
    {
      "epoch": 0.7918910357934749,
      "grad_norm": 0.0004170772444922477,
      "learning_rate": 0.0002597775569438469,
      "loss": 0.0004,
      "step": 102500
    },
    {
      "epoch": 0.7957539188949064,
      "grad_norm": 0.0004352552932687104,
      "learning_rate": 0.00025932309687270875,
      "loss": 0.0004,
      "step": 103000
    },
    {
      "epoch": 0.799616801996338,
      "grad_norm": 0.00040374931995756924,
      "learning_rate": 0.0002588686368015706,
      "loss": 0.0004,
      "step": 103500
    },
    {
      "epoch": 0.8034796850977696,
      "grad_norm": 0.00044298075954429805,
      "learning_rate": 0.0002584141767304324,
      "loss": 0.0004,
      "step": 104000
    },
    {
      "epoch": 0.8073425681992011,
      "grad_norm": 0.00042256712913513184,
      "learning_rate": 0.0002579597166592943,
      "loss": 0.0004,
      "step": 104500
    },
    {
      "epoch": 0.8112054513006327,
      "grad_norm": 0.00036991469096392393,
      "learning_rate": 0.00025750525658815615,
      "loss": 0.0004,
      "step": 105000
    },
    {
      "epoch": 0.8150683344020643,
      "grad_norm": 0.0004396368167363107,
      "learning_rate": 0.00025705079651701797,
      "loss": 0.0004,
      "step": 105500
    },
    {
      "epoch": 0.818931217503496,
      "grad_norm": 0.0004093723255209625,
      "learning_rate": 0.00025659633644587983,
      "loss": 0.0004,
      "step": 106000
    },
    {
      "epoch": 0.8227941006049275,
      "grad_norm": 0.000397693132981658,
      "learning_rate": 0.0002561418763747417,
      "loss": 0.0004,
      "step": 106500
    },
    {
      "epoch": 0.8266569837063591,
      "grad_norm": 0.0004059737839270383,
      "learning_rate": 0.0002556874163036035,
      "loss": 0.0004,
      "step": 107000
    },
    {
      "epoch": 0.8305198668077907,
      "grad_norm": 0.0004193147469777614,
      "learning_rate": 0.00025523295623246543,
      "loss": 0.0004,
      "step": 107500
    },
    {
      "epoch": 0.8343827499092222,
      "grad_norm": 0.00043243260006420314,
      "learning_rate": 0.00025477849616132724,
      "loss": 0.0004,
      "step": 108000
    },
    {
      "epoch": 0.8382456330106538,
      "grad_norm": 0.0004471674910746515,
      "learning_rate": 0.0002543240360901891,
      "loss": 0.0004,
      "step": 108500
    },
    {
      "epoch": 0.8421085161120854,
      "grad_norm": 0.00039739784551784396,
      "learning_rate": 0.00025386957601905097,
      "loss": 0.0004,
      "step": 109000
    },
    {
      "epoch": 0.845971399213517,
      "grad_norm": 0.00039233858115039766,
      "learning_rate": 0.0002534151159479128,
      "loss": 0.0004,
      "step": 109500
    },
    {
      "epoch": 0.8498342823149486,
      "grad_norm": 0.00042606148053891957,
      "learning_rate": 0.00025296065587677465,
      "loss": 0.0004,
      "step": 110000
    },
    {
      "epoch": 0.8536971654163802,
      "grad_norm": 0.0004140585951972753,
      "learning_rate": 0.0002525061958056365,
      "loss": 0.0004,
      "step": 110500
    },
    {
      "epoch": 0.8575600485178118,
      "grad_norm": 0.0004395502619445324,
      "learning_rate": 0.0002520517357344984,
      "loss": 0.0004,
      "step": 111000
    },
    {
      "epoch": 0.8614229316192433,
      "grad_norm": 0.0004029794654343277,
      "learning_rate": 0.0002515972756633602,
      "loss": 0.0004,
      "step": 111500
    },
    {
      "epoch": 0.8652858147206749,
      "grad_norm": 0.00038435307214967906,
      "learning_rate": 0.00025114281559222206,
      "loss": 0.0004,
      "step": 112000
    },
    {
      "epoch": 0.8691486978221065,
      "grad_norm": 0.00039446449954994023,
      "learning_rate": 0.0002506883555210839,
      "loss": 0.0004,
      "step": 112500
    },
    {
      "epoch": 0.8730115809235381,
      "grad_norm": 0.0004027863615192473,
      "learning_rate": 0.00025023389544994573,
      "loss": 0.0004,
      "step": 113000
    },
    {
      "epoch": 0.8768744640249697,
      "grad_norm": 0.00038630064227618277,
      "learning_rate": 0.0002497794353788076,
      "loss": 0.0004,
      "step": 113500
    },
    {
      "epoch": 0.8807373471264013,
      "grad_norm": 0.0003898652794305235,
      "learning_rate": 0.00024932497530766947,
      "loss": 0.0004,
      "step": 114000
    },
    {
      "epoch": 0.8846002302278329,
      "grad_norm": 0.0003884608449880034,
      "learning_rate": 0.0002488705152365313,
      "loss": 0.0004,
      "step": 114500
    },
    {
      "epoch": 0.8884631133292644,
      "grad_norm": 0.0004448189283721149,
      "learning_rate": 0.00024841605516539314,
      "loss": 0.0004,
      "step": 115000
    },
    {
      "epoch": 0.892325996430696,
      "grad_norm": 0.0004219826078042388,
      "learning_rate": 0.000247961595094255,
      "loss": 0.0004,
      "step": 115500
    },
    {
      "epoch": 0.8961888795321276,
      "grad_norm": 0.0004262237052898854,
      "learning_rate": 0.0002475071350231168,
      "loss": 0.0004,
      "step": 116000
    },
    {
      "epoch": 0.9000517626335592,
      "grad_norm": 0.0004264891322236508,
      "learning_rate": 0.0002470526749519787,
      "loss": 0.0004,
      "step": 116500
    },
    {
      "epoch": 0.9039146457349908,
      "grad_norm": 0.0004334848199505359,
      "learning_rate": 0.00024659821488084055,
      "loss": 0.0004,
      "step": 117000
    },
    {
      "epoch": 0.9077775288364224,
      "grad_norm": 0.0003926197241526097,
      "learning_rate": 0.0002461437548097024,
      "loss": 0.0004,
      "step": 117500
    },
    {
      "epoch": 0.911640411937854,
      "grad_norm": 0.0004120821540709585,
      "learning_rate": 0.00024568929473856423,
      "loss": 0.0004,
      "step": 118000
    },
    {
      "epoch": 0.9155032950392855,
      "grad_norm": 0.00037737927050329745,
      "learning_rate": 0.0002452348346674261,
      "loss": 0.0004,
      "step": 118500
    },
    {
      "epoch": 0.9193661781407171,
      "grad_norm": 0.0003999023465439677,
      "learning_rate": 0.00024478037459628796,
      "loss": 0.0004,
      "step": 119000
    },
    {
      "epoch": 0.9232290612421487,
      "grad_norm": 0.00039843874401412904,
      "learning_rate": 0.00024432591452514977,
      "loss": 0.0004,
      "step": 119500
    },
    {
      "epoch": 0.9270919443435802,
      "grad_norm": 0.0004592763143591583,
      "learning_rate": 0.00024387145445401166,
      "loss": 0.0004,
      "step": 120000
    },
    {
      "epoch": 0.9309548274450119,
      "grad_norm": 0.00041658771806396544,
      "learning_rate": 0.0002434169943828735,
      "loss": 0.0004,
      "step": 120500
    },
    {
      "epoch": 0.9348177105464435,
      "grad_norm": 0.000446771620772779,
      "learning_rate": 0.00024296253431173534,
      "loss": 0.0004,
      "step": 121000
    },
    {
      "epoch": 0.938680593647875,
      "grad_norm": 0.00039099738933146,
      "learning_rate": 0.0002425080742405972,
      "loss": 0.0004,
      "step": 121500
    },
    {
      "epoch": 0.9425434767493066,
      "grad_norm": 0.0004380798782221973,
      "learning_rate": 0.00024205361416945905,
      "loss": 0.0004,
      "step": 122000
    },
    {
      "epoch": 0.9464063598507382,
      "grad_norm": 0.0004325969784986228,
      "learning_rate": 0.0002415991540983209,
      "loss": 0.0004,
      "step": 122500
    },
    {
      "epoch": 0.9502692429521697,
      "grad_norm": 0.00042953985393978655,
      "learning_rate": 0.00024114469402718275,
      "loss": 0.0004,
      "step": 123000
    },
    {
      "epoch": 0.9541321260536013,
      "grad_norm": 0.00042357659549452364,
      "learning_rate": 0.0002406902339560446,
      "loss": 0.0004,
      "step": 123500
    },
    {
      "epoch": 0.957995009155033,
      "grad_norm": 0.00041984530980698764,
      "learning_rate": 0.00024023577388490645,
      "loss": 0.0004,
      "step": 124000
    },
    {
      "epoch": 0.9618578922564646,
      "grad_norm": 0.0003806228924077004,
      "learning_rate": 0.0002397813138137683,
      "loss": 0.0004,
      "step": 124500
    },
    {
      "epoch": 0.9657207753578961,
      "grad_norm": 0.00043450563680380583,
      "learning_rate": 0.00023932685374263016,
      "loss": 0.0004,
      "step": 125000
    },
    {
      "epoch": 0.9695836584593277,
      "grad_norm": 0.00040280865505337715,
      "learning_rate": 0.000238872393671492,
      "loss": 0.0004,
      "step": 125500
    },
    {
      "epoch": 0.9734465415607593,
      "grad_norm": 0.00039368733996525407,
      "learning_rate": 0.00023841793360035384,
      "loss": 0.0004,
      "step": 126000
    },
    {
      "epoch": 0.9773094246621908,
      "grad_norm": 0.0004507504927460104,
      "learning_rate": 0.0002379634735292157,
      "loss": 0.0004,
      "step": 126500
    },
    {
      "epoch": 0.9811723077636224,
      "grad_norm": 0.00042234413558617234,
      "learning_rate": 0.00023750901345807754,
      "loss": 0.0004,
      "step": 127000
    },
    {
      "epoch": 0.9850351908650541,
      "grad_norm": 0.00039364254917018116,
      "learning_rate": 0.0002370545533869394,
      "loss": 0.0004,
      "step": 127500
    },
    {
      "epoch": 0.9888980739664857,
      "grad_norm": 0.00035987445153295994,
      "learning_rate": 0.00023660009331580124,
      "loss": 0.0004,
      "step": 128000
    },
    {
      "epoch": 0.9927609570679172,
      "grad_norm": 0.00039482262218371034,
      "learning_rate": 0.00023614563324466308,
      "loss": 0.0004,
      "step": 128500
    },
    {
      "epoch": 0.9966238401693488,
      "grad_norm": 0.0004762003954965621,
      "learning_rate": 0.00023569117317352495,
      "loss": 0.0004,
      "step": 129000
    },
    {
      "epoch": 1.0004867232707804,
      "grad_norm": 0.0004270481294952333,
      "learning_rate": 0.0002352367131023868,
      "loss": 0.0004,
      "step": 129500
    },
    {
      "epoch": 1.004349606372212,
      "grad_norm": 0.00039340462535619736,
      "learning_rate": 0.00023478225303124868,
      "loss": 0.0004,
      "step": 130000
    },
    {
      "epoch": 1.0082124894736435,
      "grad_norm": 0.0004116026684641838,
      "learning_rate": 0.00023432779296011052,
      "loss": 0.0004,
      "step": 130500
    },
    {
      "epoch": 1.012075372575075,
      "grad_norm": 0.0003860292781610042,
      "learning_rate": 0.00023387333288897233,
      "loss": 0.0004,
      "step": 131000
    },
    {
      "epoch": 1.0159382556765066,
      "grad_norm": 0.00040404967148788273,
      "learning_rate": 0.00023341887281783422,
      "loss": 0.0004,
      "step": 131500
    },
    {
      "epoch": 1.0198011387779382,
      "grad_norm": 0.0004312015080358833,
      "learning_rate": 0.00023296441274669606,
      "loss": 0.0004,
      "step": 132000
    },
    {
      "epoch": 1.0236640218793698,
      "grad_norm": 0.0004256974207237363,
      "learning_rate": 0.00023250995267555793,
      "loss": 0.0004,
      "step": 132500
    },
    {
      "epoch": 1.0275269049808016,
      "grad_norm": 0.0003821359423454851,
      "learning_rate": 0.00023205549260441977,
      "loss": 0.0004,
      "step": 133000
    },
    {
      "epoch": 1.0313897880822331,
      "grad_norm": 0.000378616590751335,
      "learning_rate": 0.0002316010325332816,
      "loss": 0.0004,
      "step": 133500
    },
    {
      "epoch": 1.0352526711836647,
      "grad_norm": 0.0003908917715307325,
      "learning_rate": 0.00023114657246214347,
      "loss": 0.0004,
      "step": 134000
    },
    {
      "epoch": 1.0391155542850963,
      "grad_norm": 0.00039952111546881497,
      "learning_rate": 0.0002306921123910053,
      "loss": 0.0004,
      "step": 134500
    },
    {
      "epoch": 1.0429784373865278,
      "grad_norm": 0.0004296097031328827,
      "learning_rate": 0.00023023765231986717,
      "loss": 0.0004,
      "step": 135000
    },
    {
      "epoch": 1.0468413204879594,
      "grad_norm": 0.00045530457282438874,
      "learning_rate": 0.000229783192248729,
      "loss": 0.0004,
      "step": 135500
    },
    {
      "epoch": 1.050704203589391,
      "grad_norm": 0.0004504481330513954,
      "learning_rate": 0.00022932873217759085,
      "loss": 0.0004,
      "step": 136000
    },
    {
      "epoch": 1.0545670866908226,
      "grad_norm": 0.0003698394284583628,
      "learning_rate": 0.00022887427210645272,
      "loss": 0.0004,
      "step": 136500
    },
    {
      "epoch": 1.0584299697922541,
      "grad_norm": 0.00040104315849021077,
      "learning_rate": 0.00022841981203531456,
      "loss": 0.0004,
      "step": 137000
    },
    {
      "epoch": 1.0622928528936857,
      "grad_norm": 0.00039140283479355276,
      "learning_rate": 0.00022796535196417642,
      "loss": 0.0004,
      "step": 137500
    },
    {
      "epoch": 1.0661557359951173,
      "grad_norm": 0.0004166768048889935,
      "learning_rate": 0.00022751089189303826,
      "loss": 0.0004,
      "step": 138000
    },
    {
      "epoch": 1.0700186190965488,
      "grad_norm": 0.00044842908391728997,
      "learning_rate": 0.0002270564318219001,
      "loss": 0.0004,
      "step": 138500
    },
    {
      "epoch": 1.0738815021979804,
      "grad_norm": 0.00040768241160549223,
      "learning_rate": 0.00022660197175076196,
      "loss": 0.0004,
      "step": 139000
    },
    {
      "epoch": 1.077744385299412,
      "grad_norm": 0.0004266213800292462,
      "learning_rate": 0.0002261475116796238,
      "loss": 0.0004,
      "step": 139500
    },
    {
      "epoch": 1.0816072684008438,
      "grad_norm": 0.0003989775141235441,
      "learning_rate": 0.00022569305160848564,
      "loss": 0.0004,
      "step": 140000
    },
    {
      "epoch": 1.0854701515022753,
      "grad_norm": 0.0003877159033436328,
      "learning_rate": 0.0002252385915373475,
      "loss": 0.0004,
      "step": 140500
    },
    {
      "epoch": 1.089333034603707,
      "grad_norm": 0.000394685281207785,
      "learning_rate": 0.00022478413146620935,
      "loss": 0.0004,
      "step": 141000
    },
    {
      "epoch": 1.0931959177051385,
      "grad_norm": 0.00042225164361298084,
      "learning_rate": 0.0002243296713950712,
      "loss": 0.0004,
      "step": 141500
    },
    {
      "epoch": 1.09705880080657,
      "grad_norm": 0.00043836236000061035,
      "learning_rate": 0.00022387521132393305,
      "loss": 0.0004,
      "step": 142000
    },
    {
      "epoch": 1.1009216839080016,
      "grad_norm": 0.00039904468576423824,
      "learning_rate": 0.0002234207512527949,
      "loss": 0.0004,
      "step": 142500
    },
    {
      "epoch": 1.1047845670094332,
      "grad_norm": 0.0003950648242607713,
      "learning_rate": 0.00022296629118165675,
      "loss": 0.0004,
      "step": 143000
    },
    {
      "epoch": 1.1086474501108647,
      "grad_norm": 0.0003653796447906643,
      "learning_rate": 0.0002225118311105186,
      "loss": 0.0004,
      "step": 143500
    },
    {
      "epoch": 1.1125103332122963,
      "grad_norm": 0.0004174882487859577,
      "learning_rate": 0.00022205737103938046,
      "loss": 0.0004,
      "step": 144000
    },
    {
      "epoch": 1.1163732163137279,
      "grad_norm": 0.00038426730316132307,
      "learning_rate": 0.0002216029109682423,
      "loss": 0.0004,
      "step": 144500
    },
    {
      "epoch": 1.1202360994151594,
      "grad_norm": 0.0004381178005132824,
      "learning_rate": 0.00022114845089710414,
      "loss": 0.0004,
      "step": 145000
    },
    {
      "epoch": 1.124098982516591,
      "grad_norm": 0.00036438272218219936,
      "learning_rate": 0.00022069399082596603,
      "loss": 0.0004,
      "step": 145500
    },
    {
      "epoch": 1.1279618656180226,
      "grad_norm": 0.00037406469346024096,
      "learning_rate": 0.00022023953075482784,
      "loss": 0.0004,
      "step": 146000
    },
    {
      "epoch": 1.1318247487194544,
      "grad_norm": 0.0004003301728516817,
      "learning_rate": 0.00021978507068368973,
      "loss": 0.0004,
      "step": 146500
    },
    {
      "epoch": 1.1356876318208857,
      "grad_norm": 0.00044306329800747335,
      "learning_rate": 0.00021933061061255157,
      "loss": 0.0004,
      "step": 147000
    },
    {
      "epoch": 1.1395505149223175,
      "grad_norm": 0.00036800315137952566,
      "learning_rate": 0.0002188761505414134,
      "loss": 0.0004,
      "step": 147500
    },
    {
      "epoch": 1.143413398023749,
      "grad_norm": 0.0003712923207785934,
      "learning_rate": 0.00021842169047027528,
      "loss": 0.0004,
      "step": 148000
    },
    {
      "epoch": 1.1472762811251807,
      "grad_norm": 0.000403615616960451,
      "learning_rate": 0.00021796723039913711,
      "loss": 0.0004,
      "step": 148500
    },
    {
      "epoch": 1.1511391642266122,
      "grad_norm": 0.00039503254811279476,
      "learning_rate": 0.00021751277032799898,
      "loss": 0.0004,
      "step": 149000
    },
    {
      "epoch": 1.1550020473280438,
      "grad_norm": 0.00040640676161274314,
      "learning_rate": 0.00021705831025686082,
      "loss": 0.0004,
      "step": 149500
    },
    {
      "epoch": 1.1588649304294754,
      "grad_norm": 0.00039924593875184655,
      "learning_rate": 0.00021660385018572266,
      "loss": 0.0004,
      "step": 150000
    },
    {
      "epoch": 1.162727813530907,
      "grad_norm": 0.00041021950892172754,
      "learning_rate": 0.00021614939011458452,
      "loss": 0.0004,
      "step": 150500
    },
    {
      "epoch": 1.1665906966323385,
      "grad_norm": 0.0003925120981875807,
      "learning_rate": 0.00021569493004344636,
      "loss": 0.0004,
      "step": 151000
    },
    {
      "epoch": 1.17045357973377,
      "grad_norm": 0.0003923355252481997,
      "learning_rate": 0.00021524046997230823,
      "loss": 0.0004,
      "step": 151500
    },
    {
      "epoch": 1.1743164628352016,
      "grad_norm": 0.00036843877751380205,
      "learning_rate": 0.00021478600990117007,
      "loss": 0.0004,
      "step": 152000
    },
    {
      "epoch": 1.1781793459366332,
      "grad_norm": 0.0003888097999151796,
      "learning_rate": 0.0002143315498300319,
      "loss": 0.0004,
      "step": 152500
    },
    {
      "epoch": 1.1820422290380648,
      "grad_norm": 0.00041298934957012534,
      "learning_rate": 0.00021387708975889377,
      "loss": 0.0004,
      "step": 153000
    },
    {
      "epoch": 1.1859051121394963,
      "grad_norm": 0.0004346439673099667,
      "learning_rate": 0.0002134226296877556,
      "loss": 0.0004,
      "step": 153500
    },
    {
      "epoch": 1.1897679952409281,
      "grad_norm": 0.0004072737356182188,
      "learning_rate": 0.00021296816961661748,
      "loss": 0.0004,
      "step": 154000
    },
    {
      "epoch": 1.1936308783423595,
      "grad_norm": 0.00042254169238731265,
      "learning_rate": 0.00021251370954547931,
      "loss": 0.0004,
      "step": 154500
    },
    {
      "epoch": 1.1974937614437913,
      "grad_norm": 0.00044913325109519064,
      "learning_rate": 0.00021205924947434115,
      "loss": 0.0004,
      "step": 155000
    },
    {
      "epoch": 1.2013566445452228,
      "grad_norm": 0.0004437014285940677,
      "learning_rate": 0.00021160478940320302,
      "loss": 0.0003,
      "step": 155500
    },
    {
      "epoch": 1.2052195276466544,
      "grad_norm": 0.00040918594459071755,
      "learning_rate": 0.00021115032933206486,
      "loss": 0.0003,
      "step": 156000
    },
    {
      "epoch": 1.209082410748086,
      "grad_norm": 0.0004345249617472291,
      "learning_rate": 0.00021069586926092672,
      "loss": 0.0004,
      "step": 156500
    },
    {
      "epoch": 1.2129452938495175,
      "grad_norm": 0.00039289359119720757,
      "learning_rate": 0.00021024140918978856,
      "loss": 0.0003,
      "step": 157000
    },
    {
      "epoch": 1.2168081769509491,
      "grad_norm": 0.0003551707195583731,
      "learning_rate": 0.0002097869491186504,
      "loss": 0.0004,
      "step": 157500
    },
    {
      "epoch": 1.2206710600523807,
      "grad_norm": 0.0004176785587333143,
      "learning_rate": 0.00020933248904751227,
      "loss": 0.0003,
      "step": 158000
    },
    {
      "epoch": 1.2245339431538123,
      "grad_norm": 0.00039580900920554996,
      "learning_rate": 0.0002088780289763741,
      "loss": 0.0003,
      "step": 158500
    },
    {
      "epoch": 1.2283968262552438,
      "grad_norm": 0.0004138547810725868,
      "learning_rate": 0.00020842356890523597,
      "loss": 0.0003,
      "step": 159000
    },
    {
      "epoch": 1.2322597093566754,
      "grad_norm": 0.0004359263402875513,
      "learning_rate": 0.0002079691088340978,
      "loss": 0.0003,
      "step": 159500
    },
    {
      "epoch": 1.236122592458107,
      "grad_norm": 0.00038180273259058595,
      "learning_rate": 0.00020751464876295965,
      "loss": 0.0003,
      "step": 160000
    },
    {
      "epoch": 1.2399854755595385,
      "grad_norm": 0.0003900369047187269,
      "learning_rate": 0.00020706018869182154,
      "loss": 0.0003,
      "step": 160500
    },
    {
      "epoch": 1.24384835866097,
      "grad_norm": 0.0004283432208467275,
      "learning_rate": 0.00020660572862068335,
      "loss": 0.0003,
      "step": 161000
    },
    {
      "epoch": 1.247711241762402,
      "grad_norm": 0.0003838810371235013,
      "learning_rate": 0.00020615126854954524,
      "loss": 0.0003,
      "step": 161500
    },
    {
      "epoch": 1.2515741248638332,
      "grad_norm": 0.00044562091352418065,
      "learning_rate": 0.00020569680847840708,
      "loss": 0.0003,
      "step": 162000
    },
    {
      "epoch": 1.255437007965265,
      "grad_norm": 0.00038624508306384087,
      "learning_rate": 0.00020524234840726892,
      "loss": 0.0003,
      "step": 162500
    },
    {
      "epoch": 1.2592998910666966,
      "grad_norm": 0.00038577744271606207,
      "learning_rate": 0.0002047878883361308,
      "loss": 0.0003,
      "step": 163000
    },
    {
      "epoch": 1.2631627741681282,
      "grad_norm": 0.0003903675533365458,
      "learning_rate": 0.00020433342826499263,
      "loss": 0.0003,
      "step": 163500
    },
    {
      "epoch": 1.2670256572695597,
      "grad_norm": 0.0003998366591986269,
      "learning_rate": 0.0002038789681938545,
      "loss": 0.0003,
      "step": 164000
    },
    {
      "epoch": 1.2708885403709913,
      "grad_norm": 0.0003925804339814931,
      "learning_rate": 0.00020342450812271633,
| "loss": 0.0003, | |
| "step": 164500 | |
| }, | |
| { | |
| "epoch": 1.2747514234724229, | |
| "grad_norm": 0.00036960531724616885, | |
| "learning_rate": 0.00020297004805157817, | |
| "loss": 0.0003, | |
| "step": 165000 | |
| }, | |
| { | |
| "epoch": 1.2786143065738544, | |
| "grad_norm": 0.0004027937538921833, | |
| "learning_rate": 0.00020251558798044003, | |
| "loss": 0.0003, | |
| "step": 165500 | |
| }, | |
| { | |
| "epoch": 1.282477189675286, | |
| "grad_norm": 0.0004089568683411926, | |
| "learning_rate": 0.00020206112790930187, | |
| "loss": 0.0003, | |
| "step": 166000 | |
| }, | |
| { | |
| "epoch": 1.2863400727767176, | |
| "grad_norm": 0.0003960439353249967, | |
| "learning_rate": 0.00020160666783816374, | |
| "loss": 0.0003, | |
| "step": 166500 | |
| }, | |
| { | |
| "epoch": 1.2902029558781491, | |
| "grad_norm": 0.00039318378549069166, | |
| "learning_rate": 0.00020115220776702558, | |
| "loss": 0.0003, | |
| "step": 167000 | |
| }, | |
| { | |
| "epoch": 1.2940658389795807, | |
| "grad_norm": 0.0003687525459099561, | |
| "learning_rate": 0.00020069774769588742, | |
| "loss": 0.0003, | |
| "step": 167500 | |
| }, | |
| { | |
| "epoch": 1.2979287220810125, | |
| "grad_norm": 0.0003783398715313524, | |
| "learning_rate": 0.00020024328762474928, | |
| "loss": 0.0003, | |
| "step": 168000 | |
| }, | |
| { | |
| "epoch": 1.3017916051824439, | |
| "grad_norm": 0.0004212854546494782, | |
| "learning_rate": 0.00019978882755361112, | |
| "loss": 0.0003, | |
| "step": 168500 | |
| }, | |
| { | |
| "epoch": 1.3056544882838756, | |
| "grad_norm": 0.0004270244389772415, | |
| "learning_rate": 0.00019933436748247296, | |
| "loss": 0.0003, | |
| "step": 169000 | |
| }, | |
| { | |
| "epoch": 1.3095173713853072, | |
| "grad_norm": 0.00039981535519473255, | |
| "learning_rate": 0.00019887990741133482, | |
| "loss": 0.0003, | |
| "step": 169500 | |
| }, | |
| { | |
| "epoch": 1.3133802544867388, | |
| "grad_norm": 0.00041499731014482677, | |
| "learning_rate": 0.00019842544734019666, | |
| "loss": 0.0003, | |
| "step": 170000 | |
| }, | |
| { | |
| "epoch": 1.3172431375881704, | |
| "grad_norm": 0.0004115419287700206, | |
| "learning_rate": 0.00019797098726905853, | |
| "loss": 0.0003, | |
| "step": 170500 | |
| }, | |
| { | |
| "epoch": 1.321106020689602, | |
| "grad_norm": 0.00044279222493059933, | |
| "learning_rate": 0.00019751652719792037, | |
| "loss": 0.0003, | |
| "step": 171000 | |
| }, | |
| { | |
| "epoch": 1.3249689037910335, | |
| "grad_norm": 0.0004417746386025101, | |
| "learning_rate": 0.0001970620671267822, | |
| "loss": 0.0003, | |
| "step": 171500 | |
| }, | |
| { | |
| "epoch": 1.328831786892465, | |
| "grad_norm": 0.0003883994068019092, | |
| "learning_rate": 0.00019660760705564407, | |
| "loss": 0.0003, | |
| "step": 172000 | |
| }, | |
| { | |
| "epoch": 1.3326946699938966, | |
| "grad_norm": 0.0003858699055854231, | |
| "learning_rate": 0.0001961531469845059, | |
| "loss": 0.0003, | |
| "step": 172500 | |
| }, | |
| { | |
| "epoch": 1.3365575530953282, | |
| "grad_norm": 0.0004506374825723469, | |
| "learning_rate": 0.00019569868691336778, | |
| "loss": 0.0003, | |
| "step": 173000 | |
| }, | |
| { | |
| "epoch": 1.3404204361967598, | |
| "grad_norm": 0.0003691207093652338, | |
| "learning_rate": 0.00019524422684222961, | |
| "loss": 0.0003, | |
| "step": 173500 | |
| }, | |
| { | |
| "epoch": 1.3442833192981913, | |
| "grad_norm": 0.00040023517794907093, | |
| "learning_rate": 0.00019478976677109145, | |
| "loss": 0.0003, | |
| "step": 174000 | |
| }, | |
| { | |
| "epoch": 1.348146202399623, | |
| "grad_norm": 0.00040831902879290283, | |
| "learning_rate": 0.00019433530669995332, | |
| "loss": 0.0003, | |
| "step": 174500 | |
| }, | |
| { | |
| "epoch": 1.3520090855010545, | |
| "grad_norm": 0.00039210630347952247, | |
| "learning_rate": 0.00019388084662881516, | |
| "loss": 0.0003, | |
| "step": 175000 | |
| }, | |
| { | |
| "epoch": 1.3558719686024863, | |
| "grad_norm": 0.0004167640581727028, | |
| "learning_rate": 0.00019342638655767705, | |
| "loss": 0.0003, | |
| "step": 175500 | |
| }, | |
| { | |
| "epoch": 1.3597348517039176, | |
| "grad_norm": 0.0004036327882204205, | |
| "learning_rate": 0.00019297192648653886, | |
| "loss": 0.0003, | |
| "step": 176000 | |
| }, | |
| { | |
| "epoch": 1.3635977348053494, | |
| "grad_norm": 0.0004186241130810231, | |
| "learning_rate": 0.0001925174664154007, | |
| "loss": 0.0003, | |
| "step": 176500 | |
| }, | |
| { | |
| "epoch": 1.367460617906781, | |
| "grad_norm": 0.00042677807505242527, | |
| "learning_rate": 0.0001920630063442626, | |
| "loss": 0.0003, | |
| "step": 177000 | |
| }, | |
| { | |
| "epoch": 1.3713235010082125, | |
| "grad_norm": 0.0003888640203513205, | |
| "learning_rate": 0.00019160854627312443, | |
| "loss": 0.0003, | |
| "step": 177500 | |
| }, | |
| { | |
| "epoch": 1.375186384109644, | |
| "grad_norm": 0.00038852522266097367, | |
| "learning_rate": 0.0001911540862019863, | |
| "loss": 0.0003, | |
| "step": 178000 | |
| }, | |
| { | |
| "epoch": 1.3790492672110757, | |
| "grad_norm": 0.00048570215585641563, | |
| "learning_rate": 0.00019069962613084814, | |
| "loss": 0.0003, | |
| "step": 178500 | |
| }, | |
| { | |
| "epoch": 1.3829121503125072, | |
| "grad_norm": 0.0004395216528791934, | |
| "learning_rate": 0.00019024516605970997, | |
| "loss": 0.0003, | |
| "step": 179000 | |
| }, | |
| { | |
| "epoch": 1.3867750334139388, | |
| "grad_norm": 0.00039375273627229035, | |
| "learning_rate": 0.00018979070598857184, | |
| "loss": 0.0003, | |
| "step": 179500 | |
| }, | |
| { | |
| "epoch": 1.3906379165153704, | |
| "grad_norm": 0.00038475391920655966, | |
| "learning_rate": 0.00018933624591743368, | |
| "loss": 0.0003, | |
| "step": 180000 | |
| }, | |
| { | |
| "epoch": 1.394500799616802, | |
| "grad_norm": 0.0003974135033786297, | |
| "learning_rate": 0.00018888178584629554, | |
| "loss": 0.0003, | |
| "step": 180500 | |
| }, | |
| { | |
| "epoch": 1.3983636827182335, | |
| "grad_norm": 0.00041810667607933283, | |
| "learning_rate": 0.00018842732577515738, | |
| "loss": 0.0003, | |
| "step": 181000 | |
| }, | |
| { | |
| "epoch": 1.402226565819665, | |
| "grad_norm": 0.00039118548738770187, | |
| "learning_rate": 0.00018797286570401922, | |
| "loss": 0.0003, | |
| "step": 181500 | |
| }, | |
| { | |
| "epoch": 1.4060894489210969, | |
| "grad_norm": 0.0004240362031850964, | |
| "learning_rate": 0.0001875184056328811, | |
| "loss": 0.0003, | |
| "step": 182000 | |
| }, | |
| { | |
| "epoch": 1.4099523320225282, | |
| "grad_norm": 0.00044009560951963067, | |
| "learning_rate": 0.00018706394556174293, | |
| "loss": 0.0003, | |
| "step": 182500 | |
| }, | |
| { | |
| "epoch": 1.41381521512396, | |
| "grad_norm": 0.00038786401273682714, | |
| "learning_rate": 0.0001866094854906048, | |
| "loss": 0.0003, | |
| "step": 183000 | |
| }, | |
| { | |
| "epoch": 1.4176780982253914, | |
| "grad_norm": 0.0003855082031805068, | |
| "learning_rate": 0.00018615502541946663, | |
| "loss": 0.0003, | |
| "step": 183500 | |
| }, | |
| { | |
| "epoch": 1.4215409813268232, | |
| "grad_norm": 0.00035129828029312193, | |
| "learning_rate": 0.00018570056534832847, | |
| "loss": 0.0003, | |
| "step": 184000 | |
| }, | |
| { | |
| "epoch": 1.4254038644282547, | |
| "grad_norm": 0.0004144856648053974, | |
| "learning_rate": 0.00018524610527719033, | |
| "loss": 0.0003, | |
| "step": 184500 | |
| }, | |
| { | |
| "epoch": 1.4292667475296863, | |
| "grad_norm": 0.00038916736957617104, | |
| "learning_rate": 0.00018479164520605217, | |
| "loss": 0.0003, | |
| "step": 185000 | |
| }, | |
| { | |
| "epoch": 1.4331296306311179, | |
| "grad_norm": 0.00037746617454104125, | |
| "learning_rate": 0.00018433718513491404, | |
| "loss": 0.0003, | |
| "step": 185500 | |
| }, | |
| { | |
| "epoch": 1.4369925137325494, | |
| "grad_norm": 0.0004193016211502254, | |
| "learning_rate": 0.00018388272506377588, | |
| "loss": 0.0003, | |
| "step": 186000 | |
| }, | |
| { | |
| "epoch": 1.440855396833981, | |
| "grad_norm": 0.0003769499890040606, | |
| "learning_rate": 0.00018342826499263772, | |
| "loss": 0.0003, | |
| "step": 186500 | |
| }, | |
| { | |
| "epoch": 1.4447182799354126, | |
| "grad_norm": 0.00040736031951382756, | |
| "learning_rate": 0.00018297380492149958, | |
| "loss": 0.0003, | |
| "step": 187000 | |
| }, | |
| { | |
| "epoch": 1.4485811630368441, | |
| "grad_norm": 0.00042587725329212844, | |
| "learning_rate": 0.00018251934485036142, | |
| "loss": 0.0003, | |
| "step": 187500 | |
| }, | |
| { | |
| "epoch": 1.4524440461382757, | |
| "grad_norm": 0.0004074377939105034, | |
| "learning_rate": 0.00018206488477922329, | |
| "loss": 0.0003, | |
| "step": 188000 | |
| }, | |
| { | |
| "epoch": 1.4563069292397073, | |
| "grad_norm": 0.000427786901127547, | |
| "learning_rate": 0.00018161042470808512, | |
| "loss": 0.0003, | |
| "step": 188500 | |
| }, | |
| { | |
| "epoch": 1.4601698123411389, | |
| "grad_norm": 0.0004227866302244365, | |
| "learning_rate": 0.00018115596463694696, | |
| "loss": 0.0003, | |
| "step": 189000 | |
| }, | |
| { | |
| "epoch": 1.4640326954425706, | |
| "grad_norm": 0.00043102685594931245, | |
| "learning_rate": 0.00018070150456580883, | |
| "loss": 0.0003, | |
| "step": 189500 | |
| }, | |
| { | |
| "epoch": 1.467895578544002, | |
| "grad_norm": 0.0003675154584925622, | |
| "learning_rate": 0.00018024704449467067, | |
| "loss": 0.0003, | |
| "step": 190000 | |
| }, | |
| { | |
| "epoch": 1.4717584616454338, | |
| "grad_norm": 0.0004437029710970819, | |
| "learning_rate": 0.00017979258442353256, | |
| "loss": 0.0003, | |
| "step": 190500 | |
| }, | |
| { | |
| "epoch": 1.4756213447468653, | |
| "grad_norm": 0.00044143290142528713, | |
| "learning_rate": 0.0001793381243523944, | |
| "loss": 0.0003, | |
| "step": 191000 | |
| }, | |
| { | |
| "epoch": 1.479484227848297, | |
| "grad_norm": 0.00038855167804285884, | |
| "learning_rate": 0.0001788836642812562, | |
| "loss": 0.0003, | |
| "step": 191500 | |
| }, | |
| { | |
| "epoch": 1.4833471109497285, | |
| "grad_norm": 0.00041832137503661215, | |
| "learning_rate": 0.0001784292042101181, | |
| "loss": 0.0003, | |
| "step": 192000 | |
| }, | |
| { | |
| "epoch": 1.48720999405116, | |
| "grad_norm": 0.00042216904694214463, | |
| "learning_rate": 0.00017797474413897994, | |
| "loss": 0.0003, | |
| "step": 192500 | |
| }, | |
| { | |
| "epoch": 1.4910728771525916, | |
| "grad_norm": 0.0003619820927269757, | |
| "learning_rate": 0.0001775202840678418, | |
| "loss": 0.0003, | |
| "step": 193000 | |
| }, | |
| { | |
| "epoch": 1.4949357602540232, | |
| "grad_norm": 0.0004148418374825269, | |
| "learning_rate": 0.00017706582399670365, | |
| "loss": 0.0003, | |
| "step": 193500 | |
| }, | |
| { | |
| "epoch": 1.4987986433554548, | |
| "grad_norm": 0.0003889719373546541, | |
| "learning_rate": 0.00017661136392556548, | |
| "loss": 0.0003, | |
| "step": 194000 | |
| }, | |
| { | |
| "epoch": 1.5026615264568863, | |
| "grad_norm": 0.00035413564182817936, | |
| "learning_rate": 0.00017615690385442735, | |
| "loss": 0.0003, | |
| "step": 194500 | |
| }, | |
| { | |
| "epoch": 1.506524409558318, | |
| "grad_norm": 0.00041614958900026977, | |
| "learning_rate": 0.0001757024437832892, | |
| "loss": 0.0003, | |
| "step": 195000 | |
| }, | |
| { | |
| "epoch": 1.5103872926597495, | |
| "grad_norm": 0.00038349401438608766, | |
| "learning_rate": 0.00017524798371215103, | |
| "loss": 0.0003, | |
| "step": 195500 | |
| }, | |
| { | |
| "epoch": 1.5142501757611813, | |
| "grad_norm": 0.0004736898990813643, | |
| "learning_rate": 0.0001747935236410129, | |
| "loss": 0.0003, | |
| "step": 196000 | |
| }, | |
| { | |
| "epoch": 1.5181130588626126, | |
| "grad_norm": 0.0003606528916861862, | |
| "learning_rate": 0.00017433906356987473, | |
| "loss": 0.0003, | |
| "step": 196500 | |
| }, | |
| { | |
| "epoch": 1.5219759419640444, | |
| "grad_norm": 0.00039227615343406796, | |
| "learning_rate": 0.0001738846034987366, | |
| "loss": 0.0003, | |
| "step": 197000 | |
| }, | |
| { | |
| "epoch": 1.5258388250654757, | |
| "grad_norm": 0.00038570541073568165, | |
| "learning_rate": 0.00017343014342759844, | |
| "loss": 0.0003, | |
| "step": 197500 | |
| }, | |
| { | |
| "epoch": 1.5297017081669075, | |
| "grad_norm": 0.0003896007256116718, | |
| "learning_rate": 0.00017297568335646028, | |
| "loss": 0.0003, | |
| "step": 198000 | |
| }, | |
| { | |
| "epoch": 1.5335645912683389, | |
| "grad_norm": 0.00037037860602140427, | |
| "learning_rate": 0.00017252122328532214, | |
| "loss": 0.0003, | |
| "step": 198500 | |
| }, | |
| { | |
| "epoch": 1.5374274743697707, | |
| "grad_norm": 0.0004178315575700253, | |
| "learning_rate": 0.00017206676321418398, | |
| "loss": 0.0003, | |
| "step": 199000 | |
| }, | |
| { | |
| "epoch": 1.5412903574712022, | |
| "grad_norm": 0.0004494404711294919, | |
| "learning_rate": 0.00017161230314304585, | |
| "loss": 0.0003, | |
| "step": 199500 | |
| }, | |
| { | |
| "epoch": 1.5451532405726338, | |
| "grad_norm": 0.00043085639481432736, | |
| "learning_rate": 0.00017115784307190768, | |
| "loss": 0.0003, | |
| "step": 200000 | |
| }, | |
| { | |
| "epoch": 1.5490161236740654, | |
| "grad_norm": 0.0003994444850832224, | |
| "learning_rate": 0.00017070338300076952, | |
| "loss": 0.0003, | |
| "step": 200500 | |
| }, | |
| { | |
| "epoch": 1.552879006775497, | |
| "grad_norm": 0.00038502513780258596, | |
| "learning_rate": 0.0001702489229296314, | |
| "loss": 0.0003, | |
| "step": 201000 | |
| }, | |
| { | |
| "epoch": 1.5567418898769285, | |
| "grad_norm": 0.00040339346742257476, | |
| "learning_rate": 0.00016979446285849323, | |
| "loss": 0.0003, | |
| "step": 201500 | |
| }, | |
| { | |
| "epoch": 1.56060477297836, | |
| "grad_norm": 0.0004164897254668176, | |
| "learning_rate": 0.0001693400027873551, | |
| "loss": 0.0003, | |
| "step": 202000 | |
| }, | |
| { | |
| "epoch": 1.5644676560797919, | |
| "grad_norm": 0.0004254815576132387, | |
| "learning_rate": 0.00016888554271621693, | |
| "loss": 0.0003, | |
| "step": 202500 | |
| }, | |
| { | |
| "epoch": 1.5683305391812232, | |
| "grad_norm": 0.00037603103555738926, | |
| "learning_rate": 0.00016843108264507877, | |
| "loss": 0.0003, | |
| "step": 203000 | |
| }, | |
| { | |
| "epoch": 1.572193422282655, | |
| "grad_norm": 0.0004439698241185397, | |
| "learning_rate": 0.00016797662257394064, | |
| "loss": 0.0003, | |
| "step": 203500 | |
| }, | |
| { | |
| "epoch": 1.5760563053840864, | |
| "grad_norm": 0.0004298971325624734, | |
| "learning_rate": 0.00016752216250280247, | |
| "loss": 0.0003, | |
| "step": 204000 | |
| }, | |
| { | |
| "epoch": 1.5799191884855182, | |
| "grad_norm": 0.00040610809810459614, | |
| "learning_rate": 0.00016706770243166434, | |
| "loss": 0.0003, | |
| "step": 204500 | |
| }, | |
| { | |
| "epoch": 1.5837820715869495, | |
| "grad_norm": 0.00037626715493388474, | |
| "learning_rate": 0.00016661324236052618, | |
| "loss": 0.0003, | |
| "step": 205000 | |
| }, | |
| { | |
| "epoch": 1.5876449546883813, | |
| "grad_norm": 0.0004322736931499094, | |
| "learning_rate": 0.00016615878228938802, | |
| "loss": 0.0003, | |
| "step": 205500 | |
| }, | |
| { | |
| "epoch": 1.5915078377898126, | |
| "grad_norm": 0.0003791552153415978, | |
| "learning_rate": 0.0001657043222182499, | |
| "loss": 0.0003, | |
| "step": 206000 | |
| }, | |
| { | |
| "epoch": 1.5953707208912444, | |
| "grad_norm": 0.0004476773028727621, | |
| "learning_rate": 0.00016524986214711172, | |
| "loss": 0.0003, | |
| "step": 206500 | |
| }, | |
| { | |
| "epoch": 1.599233603992676, | |
| "grad_norm": 0.0004227587196510285, | |
| "learning_rate": 0.00016479540207597361, | |
| "loss": 0.0003, | |
| "step": 207000 | |
| }, | |
| { | |
| "epoch": 1.6030964870941076, | |
| "grad_norm": 0.0004329047806095332, | |
| "learning_rate": 0.00016434094200483545, | |
| "loss": 0.0003, | |
| "step": 207500 | |
| }, | |
| { | |
| "epoch": 1.6069593701955391, | |
| "grad_norm": 0.0004244181327521801, | |
| "learning_rate": 0.0001638864819336973, | |
| "loss": 0.0003, | |
| "step": 208000 | |
| }, | |
| { | |
| "epoch": 1.6108222532969707, | |
| "grad_norm": 0.0004290763463359326, | |
| "learning_rate": 0.00016343202186255916, | |
| "loss": 0.0003, | |
| "step": 208500 | |
| }, | |
| { | |
| "epoch": 1.6146851363984023, | |
| "grad_norm": 0.00040158795309253037, | |
| "learning_rate": 0.000162977561791421, | |
| "loss": 0.0003, | |
| "step": 209000 | |
| }, | |
| { | |
| "epoch": 1.6185480194998338, | |
| "grad_norm": 0.00042520073475316167, | |
| "learning_rate": 0.00016252310172028286, | |
| "loss": 0.0003, | |
| "step": 209500 | |
| }, | |
| { | |
| "epoch": 1.6224109026012656, | |
| "grad_norm": 0.0004901690408587456, | |
| "learning_rate": 0.0001620686416491447, | |
| "loss": 0.0003, | |
| "step": 210000 | |
| }, | |
| { | |
| "epoch": 1.626273785702697, | |
| "grad_norm": 0.00038485441473312676, | |
| "learning_rate": 0.00016161418157800654, | |
| "loss": 0.0003, | |
| "step": 210500 | |
| }, | |
| { | |
| "epoch": 1.6301366688041288, | |
| "grad_norm": 0.00040810354403220117, | |
| "learning_rate": 0.0001611597215068684, | |
| "loss": 0.0003, | |
| "step": 211000 | |
| }, | |
| { | |
| "epoch": 1.6339995519055601, | |
| "grad_norm": 0.00039863213896751404, | |
| "learning_rate": 0.00016070526143573024, | |
| "loss": 0.0003, | |
| "step": 211500 | |
| }, | |
| { | |
| "epoch": 1.637862435006992, | |
| "grad_norm": 0.0004517484048847109, | |
| "learning_rate": 0.0001602508013645921, | |
| "loss": 0.0003, | |
| "step": 212000 | |
| }, | |
| { | |
| "epoch": 1.6417253181084233, | |
| "grad_norm": 0.0003811582282651216, | |
| "learning_rate": 0.00015979634129345395, | |
| "loss": 0.0003, | |
| "step": 212500 | |
| }, | |
| { | |
| "epoch": 1.645588201209855, | |
| "grad_norm": 0.000378860771888867, | |
| "learning_rate": 0.00015934188122231579, | |
| "loss": 0.0003, | |
| "step": 213000 | |
| }, | |
| { | |
| "epoch": 1.6494510843112866, | |
| "grad_norm": 0.00045831358875148, | |
| "learning_rate": 0.00015888742115117765, | |
| "loss": 0.0003, | |
| "step": 213500 | |
| }, | |
| { | |
| "epoch": 1.6533139674127182, | |
| "grad_norm": 0.0004245250893291086, | |
| "learning_rate": 0.0001584329610800395, | |
| "loss": 0.0003, | |
| "step": 214000 | |
| }, | |
| { | |
| "epoch": 1.6571768505141498, | |
| "grad_norm": 0.0004042689106427133, | |
| "learning_rate": 0.00015797850100890136, | |
| "loss": 0.0003, | |
| "step": 214500 | |
| }, | |
| { | |
| "epoch": 1.6610397336155813, | |
| "grad_norm": 0.0003839880519080907, | |
| "learning_rate": 0.0001575240409377632, | |
| "loss": 0.0003, | |
| "step": 215000 | |
| }, | |
| { | |
| "epoch": 1.664902616717013, | |
| "grad_norm": 0.0004094207542948425, | |
| "learning_rate": 0.00015706958086662503, | |
| "loss": 0.0003, | |
| "step": 215500 | |
| }, | |
| { | |
| "epoch": 1.6687654998184445, | |
| "grad_norm": 0.0004042787477374077, | |
| "learning_rate": 0.0001566151207954869, | |
| "loss": 0.0003, | |
| "step": 216000 | |
| }, | |
| { | |
| "epoch": 1.672628382919876, | |
| "grad_norm": 0.0003925784258171916, | |
| "learning_rate": 0.00015616066072434874, | |
| "loss": 0.0003, | |
| "step": 216500 | |
| }, | |
| { | |
| "epoch": 1.6764912660213076, | |
| "grad_norm": 0.00037218185025267303, | |
| "learning_rate": 0.0001557062006532106, | |
| "loss": 0.0003, | |
| "step": 217000 | |
| }, | |
| { | |
| "epoch": 1.6803541491227394, | |
| "grad_norm": 0.0004271686193533242, | |
| "learning_rate": 0.00015525174058207244, | |
| "loss": 0.0003, | |
| "step": 217500 | |
| }, | |
| { | |
| "epoch": 1.6842170322241707, | |
| "grad_norm": 0.0004018540494143963, | |
| "learning_rate": 0.00015479728051093428, | |
| "loss": 0.0003, | |
| "step": 218000 | |
| }, | |
| { | |
| "epoch": 1.6880799153256025, | |
| "grad_norm": 0.00041026700637303293, | |
| "learning_rate": 0.00015434282043979615, | |
| "loss": 0.0003, | |
| "step": 218500 | |
| }, | |
| { | |
| "epoch": 1.6919427984270339, | |
| "grad_norm": 0.00038516061613336205, | |
| "learning_rate": 0.00015388836036865798, | |
| "loss": 0.0003, | |
| "step": 219000 | |
| }, | |
| { | |
| "epoch": 1.6958056815284657, | |
| "grad_norm": 0.00036750882281921804, | |
| "learning_rate": 0.00015343390029751985, | |
| "loss": 0.0003, | |
| "step": 219500 | |
| }, | |
| { | |
| "epoch": 1.699668564629897, | |
| "grad_norm": 0.00046027119969949126, | |
| "learning_rate": 0.0001529794402263817, | |
| "loss": 0.0003, | |
| "step": 220000 | |
| }, | |
| { | |
| "epoch": 1.7035314477313288, | |
| "grad_norm": 0.00037043061456643045, | |
| "learning_rate": 0.00015252498015524353, | |
| "loss": 0.0003, | |
| "step": 220500 | |
| }, | |
| { | |
| "epoch": 1.7073943308327604, | |
| "grad_norm": 0.0004286552139092237, | |
| "learning_rate": 0.00015207052008410542, | |
| "loss": 0.0003, | |
| "step": 221000 | |
| }, | |
| { | |
| "epoch": 1.711257213934192, | |
| "grad_norm": 0.0004185969883110374, | |
| "learning_rate": 0.00015161606001296723, | |
| "loss": 0.0003, | |
| "step": 221500 | |
| }, | |
| { | |
| "epoch": 1.7151200970356235, | |
| "grad_norm": 0.0003913992550224066, | |
| "learning_rate": 0.00015116159994182907, | |
| "loss": 0.0003, | |
| "step": 222000 | |
| }, | |
| { | |
| "epoch": 1.718982980137055, | |
| "grad_norm": 0.00038768764352425933, | |
| "learning_rate": 0.00015070713987069096, | |
| "loss": 0.0003, | |
| "step": 222500 | |
| }, | |
| { | |
| "epoch": 1.7228458632384867, | |
| "grad_norm": 0.00038810214027762413, | |
| "learning_rate": 0.0001502526797995528, | |
| "loss": 0.0003, | |
| "step": 223000 | |
| }, | |
| { | |
| "epoch": 1.7267087463399182, | |
| "grad_norm": 0.00042798841604962945, | |
| "learning_rate": 0.00014979821972841464, | |
| "loss": 0.0003, | |
| "step": 223500 | |
| }, | |
| { | |
| "epoch": 1.73057162944135, | |
| "grad_norm": 0.00038101564859971404, | |
| "learning_rate": 0.0001493437596572765, | |
| "loss": 0.0003, | |
| "step": 224000 | |
| }, | |
| { | |
| "epoch": 1.7344345125427814, | |
| "grad_norm": 0.0003932290128432214, | |
| "learning_rate": 0.00014888929958613834, | |
| "loss": 0.0003, | |
| "step": 224500 | |
| }, | |
| { | |
| "epoch": 1.7382973956442132, | |
| "grad_norm": 0.00039348064456135035, | |
| "learning_rate": 0.0001484348395150002, | |
| "loss": 0.0003, | |
| "step": 225000 | |
| }, | |
| { | |
| "epoch": 1.7421602787456445, | |
| "grad_norm": 0.00038930537994019687, | |
| "learning_rate": 0.00014798037944386205, | |
| "loss": 0.0003, | |
| "step": 225500 | |
| }, | |
| { | |
| "epoch": 1.7460231618470763, | |
| "grad_norm": 0.00040366893517784774, | |
| "learning_rate": 0.0001475259193727239, | |
| "loss": 0.0003, | |
| "step": 226000 | |
| }, | |
| { | |
| "epoch": 1.7498860449485076, | |
| "grad_norm": 0.0004505430406425148, | |
| "learning_rate": 0.00014707145930158575, | |
| "loss": 0.0003, | |
| "step": 226500 | |
| }, | |
| { | |
| "epoch": 1.7537489280499394, | |
| "grad_norm": 0.0003945432254113257, | |
| "learning_rate": 0.0001466169992304476, | |
| "loss": 0.0003, | |
| "step": 227000 | |
| }, | |
| { | |
| "epoch": 1.7576118111513708, | |
| "grad_norm": 0.0004522661038208753, | |
| "learning_rate": 0.00014616253915930946, | |
| "loss": 0.0003, | |
| "step": 227500 | |
| }, | |
| { | |
| "epoch": 1.7614746942528026, | |
| "grad_norm": 0.0003905027697328478, | |
| "learning_rate": 0.0001457080790881713, | |
| "loss": 0.0003, | |
| "step": 228000 | |
| }, | |
| { | |
| "epoch": 1.7653375773542341, | |
| "grad_norm": 0.00039171066600829363, | |
| "learning_rate": 0.00014525361901703313, | |
| "loss": 0.0003, | |
| "step": 228500 | |
| }, | |
| { | |
| "epoch": 1.7692004604556657, | |
| "grad_norm": 0.00041425134986639023, | |
| "learning_rate": 0.000144799158945895, | |
| "loss": 0.0003, | |
| "step": 229000 | |
| }, | |
| { | |
| "epoch": 1.7730633435570973, | |
| "grad_norm": 0.00039298326009884477, | |
| "learning_rate": 0.00014434469887475687, | |
| "loss": 0.0003, | |
| "step": 229500 | |
| }, | |
| { | |
| "epoch": 1.7769262266585288, | |
| "grad_norm": 0.00038489836151711643, | |
| "learning_rate": 0.0001438902388036187, | |
| "loss": 0.0003, | |
| "step": 230000 | |
| }, | |
| { | |
| "epoch": 1.7807891097599604, | |
| "grad_norm": 0.0004210007027722895, | |
| "learning_rate": 0.00014343577873248054, | |
| "loss": 0.0003, | |
| "step": 230500 | |
| }, | |
| { | |
| "epoch": 1.784651992861392, | |
| "grad_norm": 0.00042244375799782574, | |
| "learning_rate": 0.0001429813186613424, | |
| "loss": 0.0003, | |
| "step": 231000 | |
| }, | |
| { | |
| "epoch": 1.7885148759628238, | |
| "grad_norm": 0.00044437378528527915, | |
| "learning_rate": 0.00014252685859020425, | |
| "loss": 0.0003, | |
| "step": 231500 | |
| }, | |
| { | |
| "epoch": 1.7923777590642551, | |
| "grad_norm": 0.0003892597451340407, | |
| "learning_rate": 0.0001420723985190661, | |
| "loss": 0.0003, | |
| "step": 232000 | |
| }, | |
| { | |
| "epoch": 1.796240642165687, | |
| "grad_norm": 0.00042284041410312057, | |
| "learning_rate": 0.00014161793844792795, | |
| "loss": 0.0003, | |
| "step": 232500 | |
| }, | |
| { | |
| "epoch": 1.8001035252671183, | |
| "grad_norm": 0.00037430404336191714, | |
| "learning_rate": 0.0001411634783767898, | |
| "loss": 0.0003, | |
| "step": 233000 | |
| }, | |
| { | |
| "epoch": 1.80396640836855, | |
| "grad_norm": 0.0004308871866669506, | |
| "learning_rate": 0.00014070901830565166, | |
| "loss": 0.0003, | |
| "step": 233500 | |
| }, | |
| { | |
| "epoch": 1.8078292914699814, | |
| "grad_norm": 0.0004083754902239889, | |
| "learning_rate": 0.0001402545582345135, | |
| "loss": 0.0003, | |
| "step": 234000 | |
| }, | |
| { | |
| "epoch": 1.8116921745714132, | |
| "grad_norm": 0.00041078985668718815, | |
| "learning_rate": 0.00013980009816337536, | |
| "loss": 0.0003, | |
| "step": 234500 | |
| }, | |
| { | |
| "epoch": 1.8155550576728448, | |
| "grad_norm": 0.0004413351707626134, | |
| "learning_rate": 0.0001393456380922372, | |
| "loss": 0.0003, | |
| "step": 235000 | |
| }, | |
| { | |
| "epoch": 1.8194179407742763, | |
| "grad_norm": 0.00039201107574626803, | |
| "learning_rate": 0.00013889117802109904, | |
| "loss": 0.0003, | |
| "step": 235500 | |
| }, | |
| { | |
| "epoch": 1.823280823875708, | |
| "grad_norm": 0.00039923322037793696, | |
| "learning_rate": 0.0001384367179499609, | |
| "loss": 0.0003, | |
| "step": 236000 | |
| }, | |
| { | |
| "epoch": 1.8271437069771395, | |
| "grad_norm": 0.0004075372125953436, | |
| "learning_rate": 0.00013798225787882274, | |
| "loss": 0.0003, | |
| "step": 236500 | |
| }, | |
| { | |
| "epoch": 1.831006590078571, | |
| "grad_norm": 0.00040116155287250876, | |
| "learning_rate": 0.0001375277978076846, | |
| "loss": 0.0003, | |
| "step": 237000 | |
| }, | |
| { | |
| "epoch": 1.8348694731800026, | |
| "grad_norm": 0.00041852614958770573, | |
| "learning_rate": 0.00013707333773654647, | |
| "loss": 0.0003, | |
| "step": 237500 | |
| }, | |
| { | |
| "epoch": 1.8387323562814342, | |
| "grad_norm": 0.0004257333348505199, | |
| "learning_rate": 0.0001366188776654083, | |
| "loss": 0.0003, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 1.8425952393828657, | |
| "grad_norm": 0.0003763121203519404, | |
| "learning_rate": 0.00013616441759427015, | |
| "loss": 0.0003, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 1.8464581224842975, | |
| "grad_norm": 0.00041360125760547817, | |
| "learning_rate": 0.00013570995752313202, | |
| "loss": 0.0003, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 1.8503210055857289, | |
| "grad_norm": 0.00042873475467786193, | |
| "learning_rate": 0.00013525549745199385, | |
| "loss": 0.0003, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 1.8541838886871607, | |
| "grad_norm": 0.00039483854197897017, | |
| "learning_rate": 0.00013480103738085572, | |
| "loss": 0.0003, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 1.858046771788592, | |
| "grad_norm": 0.00039821091922931373, | |
| "learning_rate": 0.00013434657730971756, | |
| "loss": 0.0003, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 1.8619096548900238, | |
| "grad_norm": 0.0003865666803903878, | |
| "learning_rate": 0.0001338921172385794, | |
| "loss": 0.0003, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 1.8657725379914551, | |
| "grad_norm": 0.00039214183925651014, | |
| "learning_rate": 0.00013343765716744126, | |
| "loss": 0.0003, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 1.869635421092887, | |
| "grad_norm": 0.0004279193817637861, | |
| "learning_rate": 0.0001329831970963031, | |
| "loss": 0.0003, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 1.8734983041943185, | |
| "grad_norm": 0.00042890734039247036, | |
| "learning_rate": 0.00013252873702516494, | |
| "loss": 0.0003, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 1.87736118729575, | |
| "grad_norm": 0.00040093736606650054, | |
| "learning_rate": 0.0001320742769540268, | |
| "loss": 0.0003, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 1.8812240703971816, | |
| "grad_norm": 0.00038263245369307697, | |
| "learning_rate": 0.00013161981688288864, | |
| "loss": 0.0003, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 1.8850869534986132, | |
| "grad_norm": 0.00043640268268063664, | |
| "learning_rate": 0.0001311653568117505, | |
| "loss": 0.0003, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 1.8889498366000448, | |
| "grad_norm": 0.0004298704443499446, | |
| "learning_rate": 0.00013071089674061238, | |
| "loss": 0.0003, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 1.8928127197014764, | |
| "grad_norm": 0.0004053697339259088, | |
| "learning_rate": 0.0001302564366694742, | |
| "loss": 0.0003, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 1.8966756028029081, | |
| "grad_norm": 0.0004045435052830726, | |
| "learning_rate": 0.00012980197659833605, | |
| "loss": 0.0003, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 1.9005384859043395, | |
| "grad_norm": 0.00037249817978590727, | |
| "learning_rate": 0.00012934751652719792, | |
| "loss": 0.0003, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 1.9044013690057713, | |
| "grad_norm": 0.0004331369127612561, | |
| "learning_rate": 0.00012889305645605976, | |
| "loss": 0.0003, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 1.9082642521072026, | |
| "grad_norm": 0.00039425501017831266, | |
| "learning_rate": 0.00012843859638492162, | |
| "loss": 0.0003, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 1.9121271352086344, | |
| "grad_norm": 0.00040850628283806145, | |
| "learning_rate": 0.00012798413631378346, | |
| "loss": 0.0003, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 1.9159900183100658, | |
| "grad_norm": 0.0004171692708041519, | |
| "learning_rate": 0.0001275296762426453, | |
| "loss": 0.0003, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 1.9198529014114976, | |
| "grad_norm": 0.00042486467282287776, | |
| "learning_rate": 0.00012707521617150717, | |
| "loss": 0.0003, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 1.923715784512929, | |
| "grad_norm": 0.0003857310221064836, | |
| "learning_rate": 0.000126620756100369, | |
| "loss": 0.0003, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 1.9275786676143607, | |
| "grad_norm": 0.00043836142867803574, | |
| "learning_rate": 0.00012616629602923087, | |
| "loss": 0.0003, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 1.9314415507157923, | |
| "grad_norm": 0.00039056732202880085, | |
| "learning_rate": 0.0001257118359580927, | |
| "loss": 0.0003, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 1.9353044338172238, | |
| "grad_norm": 0.0004863255890086293, | |
| "learning_rate": 0.00012525737588695455, | |
| "loss": 0.0003, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 1.9391673169186554, | |
| "grad_norm": 0.00042994500836357474, | |
| "learning_rate": 0.00012480291581581641, | |
| "loss": 0.0003, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 1.943030200020087, | |
| "grad_norm": 0.0004507972626015544, | |
| "learning_rate": 0.00012434845574467828, | |
| "loss": 0.0003, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 1.9468930831215185, | |
| "grad_norm": 0.000748885446228087, | |
| "learning_rate": 0.00012389399567354012, | |
| "loss": 0.0003, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 1.95075596622295, | |
| "grad_norm": 0.0003968368982896209, | |
| "learning_rate": 0.00012343953560240196, | |
| "loss": 0.0003, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 1.954618849324382, | |
| "grad_norm": 0.0004240422276780009, | |
| "learning_rate": 0.00012298507553126382, | |
| "loss": 0.0003, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 1.9584817324258132, | |
| "grad_norm": 0.00044167175656184554, | |
| "learning_rate": 0.00012253061546012566, | |
| "loss": 0.0003, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 1.962344615527245, | |
| "grad_norm": 0.00036484008887782693, | |
| "learning_rate": 0.00012207615538898753, | |
| "loss": 0.0003, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 1.9662074986286764, | |
| "grad_norm": 0.0004044172237627208, | |
| "learning_rate": 0.00012162169531784937, | |
| "loss": 0.0003, | |
| "step": 254500 | |
| }, | |
| { | |
| "epoch": 1.9700703817301082, | |
| "grad_norm": 0.0004093232564628124, | |
| "learning_rate": 0.0001211672352467112, | |
| "loss": 0.0003, | |
| "step": 255000 | |
| }, | |
| { | |
| "epoch": 1.9739332648315395, | |
| "grad_norm": 0.0003724129928741604, | |
| "learning_rate": 0.00012071277517557306, | |
| "loss": 0.0003, | |
| "step": 255500 | |
| }, | |
| { | |
| "epoch": 1.9777961479329713, | |
| "grad_norm": 0.0003869113279506564, | |
| "learning_rate": 0.00012025831510443492, | |
| "loss": 0.0003, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 1.9816590310344029, | |
| "grad_norm": 0.00036866377922706306, | |
| "learning_rate": 0.00011980385503329677, | |
| "loss": 0.0003, | |
| "step": 256500 | |
| }, | |
| { | |
| "epoch": 1.9855219141358345, | |
| "grad_norm": 0.00042447738815099, | |
| "learning_rate": 0.00011934939496215861, | |
| "loss": 0.0003, | |
| "step": 257000 | |
| }, | |
| { | |
| "epoch": 1.989384797237266, | |
| "grad_norm": 0.0003843741142190993, | |
| "learning_rate": 0.00011889493489102046, | |
| "loss": 0.0003, | |
| "step": 257500 | |
| }, | |
| { | |
| "epoch": 1.9932476803386976, | |
| "grad_norm": 0.0004299108113627881, | |
| "learning_rate": 0.00011844047481988232, | |
| "loss": 0.0003, | |
| "step": 258000 | |
| }, | |
| { | |
| "epoch": 1.9971105634401292, | |
| "grad_norm": 0.00039060041308403015, | |
| "learning_rate": 0.00011798601474874417, | |
| "loss": 0.0003, | |
| "step": 258500 | |
| }, | |
| { | |
| "epoch": 2.0009734465415607, | |
| "grad_norm": 0.0004127940337639302, | |
| "learning_rate": 0.00011753155467760602, | |
| "loss": 0.0003, | |
| "step": 259000 | |
| }, | |
| { | |
| "epoch": 2.0048363296429925, | |
| "grad_norm": 0.0003710352466441691, | |
| "learning_rate": 0.00011707709460646786, | |
| "loss": 0.0003, | |
| "step": 259500 | |
| }, | |
| { | |
| "epoch": 2.008699212744424, | |
| "grad_norm": 0.00043169083073735237, | |
| "learning_rate": 0.00011662263453532971, | |
| "loss": 0.0003, | |
| "step": 260000 | |
| }, | |
| { | |
| "epoch": 2.0125620958458557, | |
| "grad_norm": 0.00042857075459323823, | |
| "learning_rate": 0.00011616817446419156, | |
| "loss": 0.0003, | |
| "step": 260500 | |
| }, | |
| { | |
| "epoch": 2.016424978947287, | |
| "grad_norm": 0.00046496940194629133, | |
| "learning_rate": 0.00011571371439305342, | |
| "loss": 0.0003, | |
| "step": 261000 | |
| }, | |
| { | |
| "epoch": 2.020287862048719, | |
| "grad_norm": 0.00039056496461853385, | |
| "learning_rate": 0.00011525925432191527, | |
| "loss": 0.0003, | |
| "step": 261500 | |
| }, | |
| { | |
| "epoch": 2.02415074515015, | |
| "grad_norm": 0.000406107195885852, | |
| "learning_rate": 0.00011480479425077711, | |
| "loss": 0.0003, | |
| "step": 262000 | |
| }, | |
| { | |
| "epoch": 2.028013628251582, | |
| "grad_norm": 0.00039516916149295866, | |
| "learning_rate": 0.00011435033417963896, | |
| "loss": 0.0003, | |
| "step": 262500 | |
| }, | |
| { | |
| "epoch": 2.0318765113530133, | |
| "grad_norm": 0.0003748384478967637, | |
| "learning_rate": 0.00011389587410850081, | |
| "loss": 0.0003, | |
| "step": 263000 | |
| }, | |
| { | |
| "epoch": 2.035739394454445, | |
| "grad_norm": 0.0003946254728361964, | |
| "learning_rate": 0.00011344141403736268, | |
| "loss": 0.0003, | |
| "step": 263500 | |
| }, | |
| { | |
| "epoch": 2.0396022775558764, | |
| "grad_norm": 0.00039266227395273745, | |
| "learning_rate": 0.00011298695396622453, | |
| "loss": 0.0003, | |
| "step": 264000 | |
| }, | |
| { | |
| "epoch": 2.043465160657308, | |
| "grad_norm": 0.0003905179037246853, | |
| "learning_rate": 0.00011253249389508637, | |
| "loss": 0.0003, | |
| "step": 264500 | |
| }, | |
| { | |
| "epoch": 2.0473280437587396, | |
| "grad_norm": 0.00040395421092398465, | |
| "learning_rate": 0.00011207803382394822, | |
| "loss": 0.0003, | |
| "step": 265000 | |
| }, | |
| { | |
| "epoch": 2.0511909268601713, | |
| "grad_norm": 0.00040508355596102774, | |
| "learning_rate": 0.00011162357375281007, | |
| "loss": 0.0003, | |
| "step": 265500 | |
| }, | |
| { | |
| "epoch": 2.055053809961603, | |
| "grad_norm": 0.0004007483657915145, | |
| "learning_rate": 0.00011116911368167192, | |
| "loss": 0.0003, | |
| "step": 266000 | |
| }, | |
| { | |
| "epoch": 2.0589166930630345, | |
| "grad_norm": 0.00040279951645061374, | |
| "learning_rate": 0.00011071465361053378, | |
| "loss": 0.0003, | |
| "step": 266500 | |
| }, | |
| { | |
| "epoch": 2.0627795761644663, | |
| "grad_norm": 0.0004587202565744519, | |
| "learning_rate": 0.00011026019353939561, | |
| "loss": 0.0003, | |
| "step": 267000 | |
| }, | |
| { | |
| "epoch": 2.0666424592658976, | |
| "grad_norm": 0.0003896713024005294, | |
| "learning_rate": 0.00010980573346825747, | |
| "loss": 0.0003, | |
| "step": 267500 | |
| }, | |
| { | |
| "epoch": 2.0705053423673294, | |
| "grad_norm": 0.00039689254481345415, | |
| "learning_rate": 0.00010935127339711932, | |
| "loss": 0.0003, | |
| "step": 268000 | |
| }, | |
| { | |
| "epoch": 2.0743682254687608, | |
| "grad_norm": 0.00041530997259542346, | |
| "learning_rate": 0.00010889681332598117, | |
| "loss": 0.0003, | |
| "step": 268500 | |
| }, | |
| { | |
| "epoch": 2.0782311085701926, | |
| "grad_norm": 0.00047515874030068517, | |
| "learning_rate": 0.00010844235325484301, | |
| "loss": 0.0003, | |
| "step": 269000 | |
| }, | |
| { | |
| "epoch": 2.082093991671624, | |
| "grad_norm": 0.0003935998538509011, | |
| "learning_rate": 0.00010798789318370486, | |
| "loss": 0.0003, | |
| "step": 269500 | |
| }, | |
| { | |
| "epoch": 2.0859568747730557, | |
| "grad_norm": 0.00048738697660155594, | |
| "learning_rate": 0.00010753343311256671, | |
| "loss": 0.0003, | |
| "step": 270000 | |
| }, | |
| { | |
| "epoch": 2.089819757874487, | |
| "grad_norm": 0.00038931367453187704, | |
| "learning_rate": 0.00010707897304142857, | |
| "loss": 0.0003, | |
| "step": 270500 | |
| }, | |
| { | |
| "epoch": 2.093682640975919, | |
| "grad_norm": 0.0004456916358321905, | |
| "learning_rate": 0.00010662451297029043, | |
| "loss": 0.0003, | |
| "step": 271000 | |
| }, | |
| { | |
| "epoch": 2.09754552407735, | |
| "grad_norm": 0.00042469296022318304, | |
| "learning_rate": 0.00010617005289915226, | |
| "loss": 0.0003, | |
| "step": 271500 | |
| }, | |
| { | |
| "epoch": 2.101408407178782, | |
| "grad_norm": 0.0004127691499888897, | |
| "learning_rate": 0.00010571559282801412, | |
| "loss": 0.0003, | |
| "step": 272000 | |
| }, | |
| { | |
| "epoch": 2.1052712902802133, | |
| "grad_norm": 0.0004185699508525431, | |
| "learning_rate": 0.00010526113275687598, | |
| "loss": 0.0003, | |
| "step": 272500 | |
| }, | |
| { | |
| "epoch": 2.109134173381645, | |
| "grad_norm": 0.0004032867436762899, | |
| "learning_rate": 0.00010480667268573783, | |
| "loss": 0.0003, | |
| "step": 273000 | |
| }, | |
| { | |
| "epoch": 2.112997056483077, | |
| "grad_norm": 0.00039662199560552835, | |
| "learning_rate": 0.00010435221261459968, | |
| "loss": 0.0003, | |
| "step": 273500 | |
| }, | |
| { | |
| "epoch": 2.1168599395845082, | |
| "grad_norm": 0.0004110955342184752, | |
| "learning_rate": 0.00010389775254346152, | |
| "loss": 0.0003, | |
| "step": 274000 | |
| }, | |
| { | |
| "epoch": 2.12072282268594, | |
| "grad_norm": 0.0004184690478723496, | |
| "learning_rate": 0.00010344329247232337, | |
| "loss": 0.0003, | |
| "step": 274500 | |
| }, | |
| { | |
| "epoch": 2.1245857057873714, | |
| "grad_norm": 0.0004132980538997799, | |
| "learning_rate": 0.00010298883240118522, | |
| "loss": 0.0003, | |
| "step": 275000 | |
| }, | |
| { | |
| "epoch": 2.128448588888803, | |
| "grad_norm": 0.0004049827402923256, | |
| "learning_rate": 0.00010253437233004707, | |
| "loss": 0.0003, | |
| "step": 275500 | |
| }, | |
| { | |
| "epoch": 2.1323114719902345, | |
| "grad_norm": 0.00039436816587112844, | |
| "learning_rate": 0.00010207991225890893, | |
| "loss": 0.0003, | |
| "step": 276000 | |
| }, | |
| { | |
| "epoch": 2.1361743550916663, | |
| "grad_norm": 0.00044596754014492035, | |
| "learning_rate": 0.00010162545218777077, | |
| "loss": 0.0003, | |
| "step": 276500 | |
| }, | |
| { | |
| "epoch": 2.1400372381930977, | |
| "grad_norm": 0.0004116700729355216, | |
| "learning_rate": 0.00010117099211663262, | |
| "loss": 0.0003, | |
| "step": 277000 | |
| }, | |
| { | |
| "epoch": 2.1439001212945294, | |
| "grad_norm": 0.00036984015605412424, | |
| "learning_rate": 0.00010071653204549447, | |
| "loss": 0.0003, | |
| "step": 277500 | |
| }, | |
| { | |
| "epoch": 2.147763004395961, | |
| "grad_norm": 0.0004131913010496646, | |
| "learning_rate": 0.00010026207197435632, | |
| "loss": 0.0003, | |
| "step": 278000 | |
| }, | |
| { | |
| "epoch": 2.1516258874973926, | |
| "grad_norm": 0.00039576427661813796, | |
| "learning_rate": 9.980761190321819e-05, | |
| "loss": 0.0003, | |
| "step": 278500 | |
| }, | |
| { | |
| "epoch": 2.155488770598824, | |
| "grad_norm": 0.0003931752871721983, | |
| "learning_rate": 9.935315183208001e-05, | |
| "loss": 0.0003, | |
| "step": 279000 | |
| }, | |
| { | |
| "epoch": 2.1593516537002557, | |
| "grad_norm": 0.0004005175724159926, | |
| "learning_rate": 9.889869176094188e-05, | |
| "loss": 0.0003, | |
| "step": 279500 | |
| }, | |
| { | |
| "epoch": 2.1632145368016875, | |
| "grad_norm": 0.00038955482887104154, | |
| "learning_rate": 9.844423168980373e-05, | |
| "loss": 0.0003, | |
| "step": 280000 | |
| }, | |
| { | |
| "epoch": 2.167077419903119, | |
| "grad_norm": 0.0004296032420825213, | |
| "learning_rate": 9.798977161866558e-05, | |
| "loss": 0.0003, | |
| "step": 280500 | |
| }, | |
| { | |
| "epoch": 2.1709403030045507, | |
| "grad_norm": 0.0003988723619841039, | |
| "learning_rate": 9.753531154752743e-05, | |
| "loss": 0.0003, | |
| "step": 281000 | |
| }, | |
| { | |
| "epoch": 2.174803186105982, | |
| "grad_norm": 0.0004284825408831239, | |
| "learning_rate": 9.708085147638927e-05, | |
| "loss": 0.0003, | |
| "step": 281500 | |
| }, | |
| { | |
| "epoch": 2.178666069207414, | |
| "grad_norm": 0.00043784480658359826, | |
| "learning_rate": 9.662639140525113e-05, | |
| "loss": 0.0003, | |
| "step": 282000 | |
| }, | |
| { | |
| "epoch": 2.182528952308845, | |
| "grad_norm": 0.0004204624274279922, | |
| "learning_rate": 9.617193133411298e-05, | |
| "loss": 0.0003, | |
| "step": 282500 | |
| }, | |
| { | |
| "epoch": 2.186391835410277, | |
| "grad_norm": 0.00045462080743163824, | |
| "learning_rate": 9.571747126297483e-05, | |
| "loss": 0.0003, | |
| "step": 283000 | |
| }, | |
| { | |
| "epoch": 2.1902547185117083, | |
| "grad_norm": 0.000426099868491292, | |
| "learning_rate": 9.526301119183667e-05, | |
| "loss": 0.0003, | |
| "step": 283500 | |
| }, | |
| { | |
| "epoch": 2.19411760161314, | |
| "grad_norm": 0.0004363481712061912, | |
| "learning_rate": 9.480855112069852e-05, | |
| "loss": 0.0003, | |
| "step": 284000 | |
| }, | |
| { | |
| "epoch": 2.1979804847145714, | |
| "grad_norm": 0.00041816759039647877, | |
| "learning_rate": 9.435409104956037e-05, | |
| "loss": 0.0003, | |
| "step": 284500 | |
| }, | |
| { | |
| "epoch": 2.201843367816003, | |
| "grad_norm": 0.00039576852577738464, | |
| "learning_rate": 9.389963097842222e-05, | |
| "loss": 0.0003, | |
| "step": 285000 | |
| }, | |
| { | |
| "epoch": 2.2057062509174346, | |
| "grad_norm": 0.0003871625813189894, | |
| "learning_rate": 9.344517090728409e-05, | |
| "loss": 0.0003, | |
| "step": 285500 | |
| }, | |
| { | |
| "epoch": 2.2095691340188663, | |
| "grad_norm": 0.0004058276826981455, | |
| "learning_rate": 9.299071083614592e-05, | |
| "loss": 0.0003, | |
| "step": 286000 | |
| }, | |
| { | |
| "epoch": 2.213432017120298, | |
| "grad_norm": 0.0003888069768436253, | |
| "learning_rate": 9.253625076500778e-05, | |
| "loss": 0.0003, | |
| "step": 286500 | |
| }, | |
| { | |
| "epoch": 2.2172949002217295, | |
| "grad_norm": 0.0003826101601589471, | |
| "learning_rate": 9.208179069386963e-05, | |
| "loss": 0.0003, | |
| "step": 287000 | |
| }, | |
| { | |
| "epoch": 2.2211577833231613, | |
| "grad_norm": 0.0004133163602091372, | |
| "learning_rate": 9.162733062273149e-05, | |
| "loss": 0.0003, | |
| "step": 287500 | |
| }, | |
| { | |
| "epoch": 2.2250206664245926, | |
| "grad_norm": 0.0003799792903009802, | |
| "learning_rate": 9.117287055159334e-05, | |
| "loss": 0.0003, | |
| "step": 288000 | |
| }, | |
| { | |
| "epoch": 2.2288835495260244, | |
| "grad_norm": 0.0004137216310482472, | |
| "learning_rate": 9.071841048045518e-05, | |
| "loss": 0.0003, | |
| "step": 288500 | |
| }, | |
| { | |
| "epoch": 2.2327464326274558, | |
| "grad_norm": 0.0004602668632287532, | |
| "learning_rate": 9.026395040931703e-05, | |
| "loss": 0.0003, | |
| "step": 289000 | |
| }, | |
| { | |
| "epoch": 2.2366093157288875, | |
| "grad_norm": 0.0004129618755541742, | |
| "learning_rate": 8.980949033817888e-05, | |
| "loss": 0.0003, | |
| "step": 289500 | |
| }, | |
| { | |
| "epoch": 2.240472198830319, | |
| "grad_norm": 0.0004122887330595404, | |
| "learning_rate": 8.935503026704073e-05, | |
| "loss": 0.0003, | |
| "step": 290000 | |
| }, | |
| { | |
| "epoch": 2.2443350819317507, | |
| "grad_norm": 0.00042714428855106235, | |
| "learning_rate": 8.890057019590258e-05, | |
| "loss": 0.0003, | |
| "step": 290500 | |
| }, | |
| { | |
| "epoch": 2.248197965033182, | |
| "grad_norm": 0.0004340863088145852, | |
| "learning_rate": 8.844611012476442e-05, | |
| "loss": 0.0003, | |
| "step": 291000 | |
| }, | |
| { | |
| "epoch": 2.252060848134614, | |
| "grad_norm": 0.00038818069151602685, | |
| "learning_rate": 8.799165005362628e-05, | |
| "loss": 0.0003, | |
| "step": 291500 | |
| }, | |
| { | |
| "epoch": 2.255923731236045, | |
| "grad_norm": 0.00039604620542377234, | |
| "learning_rate": 8.753718998248813e-05, | |
| "loss": 0.0003, | |
| "step": 292000 | |
| }, | |
| { | |
| "epoch": 2.259786614337477, | |
| "grad_norm": 0.00037946528755128384, | |
| "learning_rate": 8.708272991134998e-05, | |
| "loss": 0.0003, | |
| "step": 292500 | |
| }, | |
| { | |
| "epoch": 2.2636494974389088, | |
| "grad_norm": 0.00045615085400640965, | |
| "learning_rate": 8.662826984021185e-05, | |
| "loss": 0.0003, | |
| "step": 293000 | |
| }, | |
| { | |
| "epoch": 2.26751238054034, | |
| "grad_norm": 0.0004230223421473056, | |
| "learning_rate": 8.617380976907367e-05, | |
| "loss": 0.0003, | |
| "step": 293500 | |
| }, | |
| { | |
| "epoch": 2.2713752636417714, | |
| "grad_norm": 0.0004022732318844646, | |
| "learning_rate": 8.571934969793554e-05, | |
| "loss": 0.0003, | |
| "step": 294000 | |
| }, | |
| { | |
| "epoch": 2.2752381467432032, | |
| "grad_norm": 0.0004257276304997504, | |
| "learning_rate": 8.526488962679739e-05, | |
| "loss": 0.0003, | |
| "step": 294500 | |
| }, | |
| { | |
| "epoch": 2.279101029844635, | |
| "grad_norm": 0.00045862089609727263, | |
| "learning_rate": 8.481042955565924e-05, | |
| "loss": 0.0003, | |
| "step": 295000 | |
| }, | |
| { | |
| "epoch": 2.2829639129460664, | |
| "grad_norm": 0.0004398869932629168, | |
| "learning_rate": 8.435596948452108e-05, | |
| "loss": 0.0003, | |
| "step": 295500 | |
| }, | |
| { | |
| "epoch": 2.286826796047498, | |
| "grad_norm": 0.00044305986375547945, | |
| "learning_rate": 8.390150941338293e-05, | |
| "loss": 0.0003, | |
| "step": 296000 | |
| }, | |
| { | |
| "epoch": 2.2906896791489295, | |
| "grad_norm": 0.0004642159619834274, | |
| "learning_rate": 8.344704934224478e-05, | |
| "loss": 0.0003, | |
| "step": 296500 | |
| }, | |
| { | |
| "epoch": 2.2945525622503613, | |
| "grad_norm": 0.00043294980423524976, | |
| "learning_rate": 8.299258927110664e-05, | |
| "loss": 0.0003, | |
| "step": 297000 | |
| }, | |
| { | |
| "epoch": 2.2984154453517927, | |
| "grad_norm": 0.0004789868835359812, | |
| "learning_rate": 8.253812919996849e-05, | |
| "loss": 0.0003, | |
| "step": 297500 | |
| }, | |
| { | |
| "epoch": 2.3022783284532244, | |
| "grad_norm": 0.00042497721733525395, | |
| "learning_rate": 8.208366912883033e-05, | |
| "loss": 0.0003, | |
| "step": 298000 | |
| }, | |
| { | |
| "epoch": 2.306141211554656, | |
| "grad_norm": 0.00044182001147419214, | |
| "learning_rate": 8.162920905769218e-05, | |
| "loss": 0.0003, | |
| "step": 298500 | |
| }, | |
| { | |
| "epoch": 2.3100040946560876, | |
| "grad_norm": 0.00042325007962062955, | |
| "learning_rate": 8.117474898655403e-05, | |
| "loss": 0.0003, | |
| "step": 299000 | |
| }, | |
| { | |
| "epoch": 2.313866977757519, | |
| "grad_norm": 0.0004093611496500671, | |
| "learning_rate": 8.072028891541588e-05, | |
| "loss": 0.0003, | |
| "step": 299500 | |
| }, | |
| { | |
| "epoch": 2.3177298608589507, | |
| "grad_norm": 0.00038856887840665877, | |
| "learning_rate": 8.026582884427774e-05, | |
| "loss": 0.0003, | |
| "step": 300000 | |
| }, | |
| { | |
| "epoch": 2.321592743960382, | |
| "grad_norm": 0.00041343894554302096, | |
| "learning_rate": 7.981136877313957e-05, | |
| "loss": 0.0003, | |
| "step": 300500 | |
| }, | |
| { | |
| "epoch": 2.325455627061814, | |
| "grad_norm": 0.0004005722003057599, | |
| "learning_rate": 7.935690870200143e-05, | |
| "loss": 0.0003, | |
| "step": 301000 | |
| }, | |
| { | |
| "epoch": 2.3293185101632456, | |
| "grad_norm": 0.00039640217437408864, | |
| "learning_rate": 7.890244863086329e-05, | |
| "loss": 0.0003, | |
| "step": 301500 | |
| }, | |
| { | |
| "epoch": 2.333181393264677, | |
| "grad_norm": 0.00041457550833001733, | |
| "learning_rate": 7.844798855972514e-05, | |
| "loss": 0.0003, | |
| "step": 302000 | |
| }, | |
| { | |
| "epoch": 2.337044276366109, | |
| "grad_norm": 0.0004301823501009494, | |
| "learning_rate": 7.7993528488587e-05, | |
| "loss": 0.0003, | |
| "step": 302500 | |
| }, | |
| { | |
| "epoch": 2.34090715946754, | |
| "grad_norm": 0.0004285312897991389, | |
| "learning_rate": 7.753906841744883e-05, | |
| "loss": 0.0003, | |
| "step": 303000 | |
| }, | |
| { | |
| "epoch": 2.344770042568972, | |
| "grad_norm": 0.0004082127125002444, | |
| "learning_rate": 7.708460834631069e-05, | |
| "loss": 0.0003, | |
| "step": 303500 | |
| }, | |
| { | |
| "epoch": 2.3486329256704033, | |
| "grad_norm": 0.0004119858785998076, | |
| "learning_rate": 7.663014827517254e-05, | |
| "loss": 0.0003, | |
| "step": 304000 | |
| }, | |
| { | |
| "epoch": 2.352495808771835, | |
| "grad_norm": 0.0004263624723535031, | |
| "learning_rate": 7.617568820403439e-05, | |
| "loss": 0.0003, | |
| "step": 304500 | |
| }, | |
| { | |
| "epoch": 2.3563586918732664, | |
| "grad_norm": 0.00039540533907711506, | |
| "learning_rate": 7.572122813289624e-05, | |
| "loss": 0.0003, | |
| "step": 305000 | |
| }, | |
| { | |
| "epoch": 2.360221574974698, | |
| "grad_norm": 0.0004299231804907322, | |
| "learning_rate": 7.526676806175808e-05, | |
| "loss": 0.0003, | |
| "step": 305500 | |
| }, | |
| { | |
| "epoch": 2.3640844580761295, | |
| "grad_norm": 0.0004845628864131868, | |
| "learning_rate": 7.481230799061993e-05, | |
| "loss": 0.0003, | |
| "step": 306000 | |
| }, | |
| { | |
| "epoch": 2.3679473411775613, | |
| "grad_norm": 0.00041427643736824393, | |
| "learning_rate": 7.435784791948179e-05, | |
| "loss": 0.0003, | |
| "step": 306500 | |
| }, | |
| { | |
| "epoch": 2.3718102242789927, | |
| "grad_norm": 0.0004160687094554305, | |
| "learning_rate": 7.390338784834364e-05, | |
| "loss": 0.0003, | |
| "step": 307000 | |
| }, | |
| { | |
| "epoch": 2.3756731073804245, | |
| "grad_norm": 0.00044749100925400853, | |
| "learning_rate": 7.344892777720549e-05, | |
| "loss": 0.0003, | |
| "step": 307500 | |
| }, | |
| { | |
| "epoch": 2.3795359904818563, | |
| "grad_norm": 0.0004799690213985741, | |
| "learning_rate": 7.299446770606734e-05, | |
| "loss": 0.0003, | |
| "step": 308000 | |
| }, | |
| { | |
| "epoch": 2.3833988735832876, | |
| "grad_norm": 0.0004245197051204741, | |
| "learning_rate": 7.254000763492918e-05, | |
| "loss": 0.0003, | |
| "step": 308500 | |
| }, | |
| { | |
| "epoch": 2.387261756684719, | |
| "grad_norm": 0.00034600042272359133, | |
| "learning_rate": 7.208554756379105e-05, | |
| "loss": 0.0003, | |
| "step": 309000 | |
| }, | |
| { | |
| "epoch": 2.3911246397861508, | |
| "grad_norm": 0.0004245495656505227, | |
| "learning_rate": 7.163108749265289e-05, | |
| "loss": 0.0003, | |
| "step": 309500 | |
| }, | |
| { | |
| "epoch": 2.3949875228875825, | |
| "grad_norm": 0.0004331233212724328, | |
| "learning_rate": 7.117662742151474e-05, | |
| "loss": 0.0003, | |
| "step": 310000 | |
| }, | |
| { | |
| "epoch": 2.398850405989014, | |
| "grad_norm": 0.00041013164445757866, | |
| "learning_rate": 7.072216735037659e-05, | |
| "loss": 0.0003, | |
| "step": 310500 | |
| }, | |
| { | |
| "epoch": 2.4027132890904457, | |
| "grad_norm": 0.00046506704529747367, | |
| "learning_rate": 7.026770727923844e-05, | |
| "loss": 0.0003, | |
| "step": 311000 | |
| }, | |
| { | |
| "epoch": 2.406576172191877, | |
| "grad_norm": 0.0004329345829319209, | |
| "learning_rate": 6.98132472081003e-05, | |
| "loss": 0.0003, | |
| "step": 311500 | |
| }, | |
| { | |
| "epoch": 2.410439055293309, | |
| "grad_norm": 0.00037213502218946815, | |
| "learning_rate": 6.935878713696213e-05, | |
| "loss": 0.0003, | |
| "step": 312000 | |
| }, | |
| { | |
| "epoch": 2.41430193839474, | |
| "grad_norm": 0.0004223994619678706, | |
| "learning_rate": 6.890432706582398e-05, | |
| "loss": 0.0003, | |
| "step": 312500 | |
| }, | |
| { | |
| "epoch": 2.418164821496172, | |
| "grad_norm": 0.00041198599501512945, | |
| "learning_rate": 6.844986699468584e-05, | |
| "loss": 0.0003, | |
| "step": 313000 | |
| }, | |
| { | |
| "epoch": 2.4220277045976033, | |
| "grad_norm": 0.00044434002484194934, | |
| "learning_rate": 6.799540692354769e-05, | |
| "loss": 0.0003, | |
| "step": 313500 | |
| }, | |
| { | |
| "epoch": 2.425890587699035, | |
| "grad_norm": 0.0003673033497761935, | |
| "learning_rate": 6.754094685240954e-05, | |
| "loss": 0.0003, | |
| "step": 314000 | |
| }, | |
| { | |
| "epoch": 2.429753470800467, | |
| "grad_norm": 0.0004261393041815609, | |
| "learning_rate": 6.70864867812714e-05, | |
| "loss": 0.0003, | |
| "step": 314500 | |
| }, | |
| { | |
| "epoch": 2.4336163539018982, | |
| "grad_norm": 0.0004654694930650294, | |
| "learning_rate": 6.663202671013325e-05, | |
| "loss": 0.0003, | |
| "step": 315000 | |
| }, | |
| { | |
| "epoch": 2.4374792370033296, | |
| "grad_norm": 0.00038143552956171334, | |
| "learning_rate": 6.617756663899508e-05, | |
| "loss": 0.0003, | |
| "step": 315500 | |
| }, | |
| { | |
| "epoch": 2.4413421201047614, | |
| "grad_norm": 0.00042159578879363835, | |
| "learning_rate": 6.572310656785694e-05, | |
| "loss": 0.0003, | |
| "step": 316000 | |
| }, | |
| { | |
| "epoch": 2.445205003206193, | |
| "grad_norm": 0.0004413559508975595, | |
| "learning_rate": 6.52686464967188e-05, | |
| "loss": 0.0003, | |
| "step": 316500 | |
| }, | |
| { | |
| "epoch": 2.4490678863076245, | |
| "grad_norm": 0.0005215974524617195, | |
| "learning_rate": 6.481418642558064e-05, | |
| "loss": 0.0003, | |
| "step": 317000 | |
| }, | |
| { | |
| "epoch": 2.4529307694090563, | |
| "grad_norm": 0.000413423782447353, | |
| "learning_rate": 6.435972635444249e-05, | |
| "loss": 0.0003, | |
| "step": 317500 | |
| }, | |
| { | |
| "epoch": 2.4567936525104876, | |
| "grad_norm": 0.00038620055420324206, | |
| "learning_rate": 6.390526628330435e-05, | |
| "loss": 0.0003, | |
| "step": 318000 | |
| }, | |
| { | |
| "epoch": 2.4606565356119194, | |
| "grad_norm": 0.00041074163164012134, | |
| "learning_rate": 6.34508062121662e-05, | |
| "loss": 0.0003, | |
| "step": 318500 | |
| }, | |
| { | |
| "epoch": 2.464519418713351, | |
| "grad_norm": 0.0004115503397770226, | |
| "learning_rate": 6.299634614102805e-05, | |
| "loss": 0.0003, | |
| "step": 319000 | |
| }, | |
| { | |
| "epoch": 2.4683823018147826, | |
| "grad_norm": 0.00046097987797111273, | |
| "learning_rate": 6.254188606988989e-05, | |
| "loss": 0.0003, | |
| "step": 319500 | |
| }, | |
| { | |
| "epoch": 2.472245184916214, | |
| "grad_norm": 0.0004020912747364491, | |
| "learning_rate": 6.208742599875175e-05, | |
| "loss": 0.0003, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 2.4761080680176457, | |
| "grad_norm": 0.000455143628641963, | |
| "learning_rate": 6.163296592761359e-05, | |
| "loss": 0.0003, | |
| "step": 320500 | |
| }, | |
| { | |
| "epoch": 2.479970951119077, | |
| "grad_norm": 0.00041087414138019085, | |
| "learning_rate": 6.117850585647544e-05, | |
| "loss": 0.0003, | |
| "step": 321000 | |
| }, | |
| { | |
| "epoch": 2.483833834220509, | |
| "grad_norm": 0.00041171652264893055, | |
| "learning_rate": 6.072404578533729e-05, | |
| "loss": 0.0003, | |
| "step": 321500 | |
| }, | |
| { | |
| "epoch": 2.48769671732194, | |
| "grad_norm": 0.0004624698485713452, | |
| "learning_rate": 6.026958571419914e-05, | |
| "loss": 0.0003, | |
| "step": 322000 | |
| }, | |
| { | |
| "epoch": 2.491559600423372, | |
| "grad_norm": 0.0004319006693549454, | |
| "learning_rate": 5.9815125643061e-05, | |
| "loss": 0.0003, | |
| "step": 322500 | |
| }, | |
| { | |
| "epoch": 2.495422483524804, | |
| "grad_norm": 0.00045846853754483163, | |
| "learning_rate": 5.9360665571922846e-05, | |
| "loss": 0.0003, | |
| "step": 323000 | |
| }, | |
| { | |
| "epoch": 2.499285366626235, | |
| "grad_norm": 0.00042378256330266595, | |
| "learning_rate": 5.89062055007847e-05, | |
| "loss": 0.0003, | |
| "step": 323500 | |
| }, | |
| { | |
| "epoch": 2.5031482497276665, | |
| "grad_norm": 0.00043979313340969384, | |
| "learning_rate": 5.8451745429646544e-05, | |
| "loss": 0.0003, | |
| "step": 324000 | |
| }, | |
| { | |
| "epoch": 2.5070111328290983, | |
| "grad_norm": 0.00041039736242964864, | |
| "learning_rate": 5.7997285358508396e-05, | |
| "loss": 0.0003, | |
| "step": 324500 | |
| }, | |
| { | |
| "epoch": 2.51087401593053, | |
| "grad_norm": 0.00043092764099128544, | |
| "learning_rate": 5.754282528737025e-05, | |
| "loss": 0.0003, | |
| "step": 325000 | |
| }, | |
| { | |
| "epoch": 2.5147368990319614, | |
| "grad_norm": 0.0004449010593816638, | |
| "learning_rate": 5.7088365216232093e-05, | |
| "loss": 0.0003, | |
| "step": 325500 | |
| }, | |
| { | |
| "epoch": 2.518599782133393, | |
| "grad_norm": 0.00043555087177082896, | |
| "learning_rate": 5.663390514509395e-05, | |
| "loss": 0.0003, | |
| "step": 326000 | |
| }, | |
| { | |
| "epoch": 2.5224626652348245, | |
| "grad_norm": 0.00046992747229523957, | |
| "learning_rate": 5.61794450739558e-05, | |
| "loss": 0.0003, | |
| "step": 326500 | |
| }, | |
| { | |
| "epoch": 2.5263255483362563, | |
| "grad_norm": 0.0004627726739272475, | |
| "learning_rate": 5.572498500281765e-05, | |
| "loss": 0.0003, | |
| "step": 327000 | |
| }, | |
| { | |
| "epoch": 2.5301884314376877, | |
| "grad_norm": 0.0004344623303040862, | |
| "learning_rate": 5.5270524931679495e-05, | |
| "loss": 0.0003, | |
| "step": 327500 | |
| }, | |
| { | |
| "epoch": 2.5340513145391195, | |
| "grad_norm": 0.0004928177804686129, | |
| "learning_rate": 5.481606486054135e-05, | |
| "loss": 0.0003, | |
| "step": 328000 | |
| }, | |
| { | |
| "epoch": 2.537914197640551, | |
| "grad_norm": 0.0004244371084496379, | |
| "learning_rate": 5.43616047894032e-05, | |
| "loss": 0.0003, | |
| "step": 328500 | |
| }, | |
| { | |
| "epoch": 2.5417770807419826, | |
| "grad_norm": 0.0004467975231818855, | |
| "learning_rate": 5.3907144718265045e-05, | |
| "loss": 0.0003, | |
| "step": 329000 | |
| }, | |
| { | |
| "epoch": 2.5456399638434144, | |
| "grad_norm": 0.00041408653487451375, | |
| "learning_rate": 5.34526846471269e-05, | |
| "loss": 0.0003, | |
| "step": 329500 | |
| }, | |
| { | |
| "epoch": 2.5495028469448457, | |
| "grad_norm": 0.00043784506851807237, | |
| "learning_rate": 5.299822457598874e-05, | |
| "loss": 0.0003, | |
| "step": 330000 | |
| }, | |
| { | |
| "epoch": 2.553365730046277, | |
| "grad_norm": 0.00046141675557009876, | |
| "learning_rate": 5.25437645048506e-05, | |
| "loss": 0.0003, | |
| "step": 330500 | |
| }, | |
| { | |
| "epoch": 2.557228613147709, | |
| "grad_norm": 0.0004683432634919882, | |
| "learning_rate": 5.2089304433712454e-05, | |
| "loss": 0.0003, | |
| "step": 331000 | |
| }, | |
| { | |
| "epoch": 2.5610914962491407, | |
| "grad_norm": 0.0003889546496793628, | |
| "learning_rate": 5.16348443625743e-05, | |
| "loss": 0.0003, | |
| "step": 331500 | |
| }, | |
| { | |
| "epoch": 2.564954379350572, | |
| "grad_norm": 0.0004467256658244878, | |
| "learning_rate": 5.118038429143615e-05, | |
| "loss": 0.0003, | |
| "step": 332000 | |
| }, | |
| { | |
| "epoch": 2.568817262452004, | |
| "grad_norm": 0.00038880863576196134, | |
| "learning_rate": 5.0725924220297997e-05, | |
| "loss": 0.0003, | |
| "step": 332500 | |
| }, | |
| { | |
| "epoch": 2.572680145553435, | |
| "grad_norm": 0.00043496862053871155, | |
| "learning_rate": 5.027146414915985e-05, | |
| "loss": 0.0003, | |
| "step": 333000 | |
| }, | |
| { | |
| "epoch": 2.576543028654867, | |
| "grad_norm": 0.0003715400234796107, | |
| "learning_rate": 4.9817004078021694e-05, | |
| "loss": 0.0003, | |
| "step": 333500 | |
| }, | |
| { | |
| "epoch": 2.5804059117562983, | |
| "grad_norm": 0.000468456739326939, | |
| "learning_rate": 4.936254400688355e-05, | |
| "loss": 0.0003, | |
| "step": 334000 | |
| }, | |
| { | |
| "epoch": 2.58426879485773, | |
| "grad_norm": 0.00040985396481119096, | |
| "learning_rate": 4.8908083935745405e-05, | |
| "loss": 0.0003, | |
| "step": 334500 | |
| }, | |
| { | |
| "epoch": 2.5881316779591614, | |
| "grad_norm": 0.0004460928903426975, | |
| "learning_rate": 4.845362386460725e-05, | |
| "loss": 0.0003, | |
| "step": 335000 | |
| }, | |
| { | |
| "epoch": 2.5919945610605932, | |
| "grad_norm": 0.00039911369094625115, | |
| "learning_rate": 4.79991637934691e-05, | |
| "loss": 0.0003, | |
| "step": 335500 | |
| }, | |
| { | |
| "epoch": 2.595857444162025, | |
| "grad_norm": 0.0004065028333570808, | |
| "learning_rate": 4.754470372233095e-05, | |
| "loss": 0.0003, | |
| "step": 336000 | |
| }, | |
| { | |
| "epoch": 2.5997203272634564, | |
| "grad_norm": 0.0004132339672651142, | |
| "learning_rate": 4.70902436511928e-05, | |
| "loss": 0.0003, | |
| "step": 336500 | |
| }, | |
| { | |
| "epoch": 2.6035832103648877, | |
| "grad_norm": 0.00044415410957299173, | |
| "learning_rate": 4.663578358005466e-05, | |
| "loss": 0.0003, | |
| "step": 337000 | |
| }, | |
| { | |
| "epoch": 2.6074460934663195, | |
| "grad_norm": 0.00043628757703118026, | |
| "learning_rate": 4.6181323508916505e-05, | |
| "loss": 0.0003, | |
| "step": 337500 | |
| }, | |
| { | |
| "epoch": 2.6113089765677513, | |
| "grad_norm": 0.0004943295498378575, | |
| "learning_rate": 4.572686343777836e-05, | |
| "loss": 0.0003, | |
| "step": 338000 | |
| }, | |
| { | |
| "epoch": 2.6151718596691826, | |
| "grad_norm": 0.0004152540641371161, | |
| "learning_rate": 4.52724033666402e-05, | |
| "loss": 0.0003, | |
| "step": 338500 | |
| }, | |
| { | |
| "epoch": 2.6190347427706144, | |
| "grad_norm": 0.0004294981772545725, | |
| "learning_rate": 4.4817943295502054e-05, | |
| "loss": 0.0003, | |
| "step": 339000 | |
| }, | |
| { | |
| "epoch": 2.622897625872046, | |
| "grad_norm": 0.00039649149402976036, | |
| "learning_rate": 4.43634832243639e-05, | |
| "loss": 0.0003, | |
| "step": 339500 | |
| }, | |
| { | |
| "epoch": 2.6267605089734776, | |
| "grad_norm": 0.00043368435581214726, | |
| "learning_rate": 4.390902315322575e-05, | |
| "loss": 0.0003, | |
| "step": 340000 | |
| }, | |
| { | |
| "epoch": 2.630623392074909, | |
| "grad_norm": 0.0004430446424521506, | |
| "learning_rate": 4.3454563082087604e-05, | |
| "loss": 0.0003, | |
| "step": 340500 | |
| }, | |
| { | |
| "epoch": 2.6344862751763407, | |
| "grad_norm": 0.00047145935241132975, | |
| "learning_rate": 4.300010301094945e-05, | |
| "loss": 0.0003, | |
| "step": 341000 | |
| }, | |
| { | |
| "epoch": 2.638349158277772, | |
| "grad_norm": 0.0004192702181171626, | |
| "learning_rate": 4.254564293981131e-05, | |
| "loss": 0.0003, | |
| "step": 341500 | |
| }, | |
| { | |
| "epoch": 2.642212041379204, | |
| "grad_norm": 0.00046445822226814926, | |
| "learning_rate": 4.2091182868673154e-05, | |
| "loss": 0.0003, | |
| "step": 342000 | |
| }, | |
| { | |
| "epoch": 2.6460749244806356, | |
| "grad_norm": 0.000462083553429693, | |
| "learning_rate": 4.1636722797535006e-05, | |
| "loss": 0.0003, | |
| "step": 342500 | |
| }, | |
| { | |
| "epoch": 2.649937807582067, | |
| "grad_norm": 0.00038474082248285413, | |
| "learning_rate": 4.118226272639686e-05, | |
| "loss": 0.0003, | |
| "step": 343000 | |
| }, | |
| { | |
| "epoch": 2.6538006906834983, | |
| "grad_norm": 0.00039861336699686944, | |
| "learning_rate": 4.07278026552587e-05, | |
| "loss": 0.0003, | |
| "step": 343500 | |
| }, | |
| { | |
| "epoch": 2.65766357378493, | |
| "grad_norm": 0.0004491112194955349, | |
| "learning_rate": 4.0273342584120555e-05, | |
| "loss": 0.0003, | |
| "step": 344000 | |
| }, | |
| { | |
| "epoch": 2.661526456886362, | |
| "grad_norm": 0.00046189481508918107, | |
| "learning_rate": 3.98188825129824e-05, | |
| "loss": 0.0003, | |
| "step": 344500 | |
| }, | |
| { | |
| "epoch": 2.6653893399877933, | |
| "grad_norm": 0.00038176134694367647, | |
| "learning_rate": 3.936442244184426e-05, | |
| "loss": 0.0003, | |
| "step": 345000 | |
| }, | |
| { | |
| "epoch": 2.6692522230892246, | |
| "grad_norm": 0.00037889971281401813, | |
| "learning_rate": 3.890996237070611e-05, | |
| "loss": 0.0003, | |
| "step": 345500 | |
| }, | |
| { | |
| "epoch": 2.6731151061906564, | |
| "grad_norm": 0.0004248527984600514, | |
| "learning_rate": 3.845550229956796e-05, | |
| "loss": 0.0003, | |
| "step": 346000 | |
| }, | |
| { | |
| "epoch": 2.676977989292088, | |
| "grad_norm": 0.00037932791747152805, | |
| "learning_rate": 3.800104222842981e-05, | |
| "loss": 0.0003, | |
| "step": 346500 | |
| }, | |
| { | |
| "epoch": 2.6808408723935195, | |
| "grad_norm": 0.0003727327275555581, | |
| "learning_rate": 3.7546582157291655e-05, | |
| "loss": 0.0003, | |
| "step": 347000 | |
| }, | |
| { | |
| "epoch": 2.6847037554949513, | |
| "grad_norm": 0.00040110255940817297, | |
| "learning_rate": 3.709212208615351e-05, | |
| "loss": 0.0003, | |
| "step": 347500 | |
| }, | |
| { | |
| "epoch": 2.6885666385963827, | |
| "grad_norm": 0.00042014423524960876, | |
| "learning_rate": 3.663766201501536e-05, | |
| "loss": 0.0003, | |
| "step": 348000 | |
| }, | |
| { | |
| "epoch": 2.6924295216978145, | |
| "grad_norm": 0.00038637884426862, | |
| "learning_rate": 3.6183201943877204e-05, | |
| "loss": 0.0003, | |
| "step": 348500 | |
| }, | |
| { | |
| "epoch": 2.696292404799246, | |
| "grad_norm": 0.0004331624659243971, | |
| "learning_rate": 3.572874187273906e-05, | |
| "loss": 0.0003, | |
| "step": 349000 | |
| }, | |
| { | |
| "epoch": 2.7001552879006776, | |
| "grad_norm": 0.00040115389856509864, | |
| "learning_rate": 3.527428180160091e-05, | |
| "loss": 0.0003, | |
| "step": 349500 | |
| }, | |
| { | |
| "epoch": 2.704018171002109, | |
| "grad_norm": 0.0004400424659252167, | |
| "learning_rate": 3.481982173046276e-05, | |
| "loss": 0.0003, | |
| "step": 350000 | |
| }, | |
| { | |
| "epoch": 2.7078810541035407, | |
| "grad_norm": 0.000458619964774698, | |
| "learning_rate": 3.436536165932461e-05, | |
| "loss": 0.0003, | |
| "step": 350500 | |
| }, | |
| { | |
| "epoch": 2.7117439372049725, | |
| "grad_norm": 0.00042235825094394386, | |
| "learning_rate": 3.391090158818646e-05, | |
| "loss": 0.0003, | |
| "step": 351000 | |
| }, | |
| { | |
| "epoch": 2.715606820306404, | |
| "grad_norm": 0.00045935032540000975, | |
| "learning_rate": 3.345644151704831e-05, | |
| "loss": 0.0003, | |
| "step": 351500 | |
| }, | |
| { | |
| "epoch": 2.7194697034078352, | |
| "grad_norm": 0.0004515291075222194, | |
| "learning_rate": 3.3001981445910156e-05, | |
| "loss": 0.0003, | |
| "step": 352000 | |
| }, | |
| { | |
| "epoch": 2.723332586509267, | |
| "grad_norm": 0.00042773320456035435, | |
| "learning_rate": 3.254752137477201e-05, | |
| "loss": 0.0003, | |
| "step": 352500 | |
| }, | |
| { | |
| "epoch": 2.727195469610699, | |
| "grad_norm": 0.00045925399172119796, | |
| "learning_rate": 3.209306130363386e-05, | |
| "loss": 0.0003, | |
| "step": 353000 | |
| }, | |
| { | |
| "epoch": 2.73105835271213, | |
| "grad_norm": 0.0004555301566142589, | |
| "learning_rate": 3.163860123249571e-05, | |
| "loss": 0.0003, | |
| "step": 353500 | |
| }, | |
| { | |
| "epoch": 2.734921235813562, | |
| "grad_norm": 0.000439466122770682, | |
| "learning_rate": 3.118414116135756e-05, | |
| "loss": 0.0003, | |
| "step": 354000 | |
| }, | |
| { | |
| "epoch": 2.7387841189149933, | |
| "grad_norm": 0.00042543449671939015, | |
| "learning_rate": 3.072968109021941e-05, | |
| "loss": 0.0003, | |
| "step": 354500 | |
| }, | |
| { | |
| "epoch": 2.742647002016425, | |
| "grad_norm": 0.00039303614175878465, | |
| "learning_rate": 3.027522101908126e-05, | |
| "loss": 0.0003, | |
| "step": 355000 | |
| }, | |
| { | |
| "epoch": 2.7465098851178564, | |
| "grad_norm": 0.0003858358832076192, | |
| "learning_rate": 2.9820760947943108e-05, | |
| "loss": 0.0003, | |
| "step": 355500 | |
| }, | |
| { | |
| "epoch": 2.750372768219288, | |
| "grad_norm": 0.0004583084082696587, | |
| "learning_rate": 2.9366300876804963e-05, | |
| "loss": 0.0003, | |
| "step": 356000 | |
| }, | |
| { | |
| "epoch": 2.7542356513207196, | |
| "grad_norm": 0.00042337161721661687, | |
| "learning_rate": 2.8911840805666812e-05, | |
| "loss": 0.0003, | |
| "step": 356500 | |
| }, | |
| { | |
| "epoch": 2.7580985344221514, | |
| "grad_norm": 0.0004135474155191332, | |
| "learning_rate": 2.845738073452866e-05, | |
| "loss": 0.0003, | |
| "step": 357000 | |
| }, | |
| { | |
| "epoch": 2.761961417523583, | |
| "grad_norm": 0.0004276975814718753, | |
| "learning_rate": 2.8002920663390513e-05, | |
| "loss": 0.0003, | |
| "step": 357500 | |
| }, | |
| { | |
| "epoch": 2.7658243006250145, | |
| "grad_norm": 0.00042497014510445297, | |
| "learning_rate": 2.754846059225236e-05, | |
| "loss": 0.0003, | |
| "step": 358000 | |
| }, | |
| { | |
| "epoch": 2.769687183726446, | |
| "grad_norm": 0.0004969782894477248, | |
| "learning_rate": 2.709400052111421e-05, | |
| "loss": 0.0003, | |
| "step": 358500 | |
| }, | |
| { | |
| "epoch": 2.7735500668278776, | |
| "grad_norm": 0.0004229978076182306, | |
| "learning_rate": 2.6639540449976066e-05, | |
| "loss": 0.0003, | |
| "step": 359000 | |
| }, | |
| { | |
| "epoch": 2.7774129499293094, | |
| "grad_norm": 0.0004182982083875686, | |
| "learning_rate": 2.6185080378837915e-05, | |
| "loss": 0.0003, | |
| "step": 359500 | |
| }, | |
| { | |
| "epoch": 2.7812758330307408, | |
| "grad_norm": 0.00044540813541971147, | |
| "learning_rate": 2.5730620307699763e-05, | |
| "loss": 0.0003, | |
| "step": 360000 | |
| }, | |
| { | |
| "epoch": 2.7851387161321726, | |
| "grad_norm": 0.0004250209603924304, | |
| "learning_rate": 2.5276160236561612e-05, | |
| "loss": 0.0003, | |
| "step": 360500 | |
| }, | |
| { | |
| "epoch": 2.789001599233604, | |
| "grad_norm": 0.00039502166328020394, | |
| "learning_rate": 2.482170016542346e-05, | |
| "loss": 0.0003, | |
| "step": 361000 | |
| }, | |
| { | |
| "epoch": 2.7928644823350357, | |
| "grad_norm": 0.0004499274946283549, | |
| "learning_rate": 2.4367240094285313e-05, | |
| "loss": 0.0003, | |
| "step": 361500 | |
| }, | |
| { | |
| "epoch": 2.796727365436467, | |
| "grad_norm": 0.0003957873850595206, | |
| "learning_rate": 2.3912780023147165e-05, | |
| "loss": 0.0003, | |
| "step": 362000 | |
| }, | |
| { | |
| "epoch": 2.800590248537899, | |
| "grad_norm": 0.000493398925755173, | |
| "learning_rate": 2.3458319952009014e-05, | |
| "loss": 0.0003, | |
| "step": 362500 | |
| }, | |
| { | |
| "epoch": 2.80445313163933, | |
| "grad_norm": 0.00042534625390544534, | |
| "learning_rate": 2.3003859880870866e-05, | |
| "loss": 0.0003, | |
| "step": 363000 | |
| }, | |
| { | |
| "epoch": 2.808316014740762, | |
| "grad_norm": 0.0004305491747800261, | |
| "learning_rate": 2.2549399809732715e-05, | |
| "loss": 0.0003, | |
| "step": 363500 | |
| }, | |
| { | |
| "epoch": 2.8121788978421938, | |
| "grad_norm": 0.0004288312920834869, | |
| "learning_rate": 2.2094939738594564e-05, | |
| "loss": 0.0003, | |
| "step": 364000 | |
| }, | |
| { | |
| "epoch": 2.816041780943625, | |
| "grad_norm": 0.00046938960440456867, | |
| "learning_rate": 2.1640479667456416e-05, | |
| "loss": 0.0003, | |
| "step": 364500 | |
| }, | |
| { | |
| "epoch": 2.8199046640450565, | |
| "grad_norm": 0.00041804375359788537, | |
| "learning_rate": 2.1186019596318268e-05, | |
| "loss": 0.0003, | |
| "step": 365000 | |
| }, | |
| { | |
| "epoch": 2.8237675471464883, | |
| "grad_norm": 0.00041626792517490685, | |
| "learning_rate": 2.0731559525180117e-05, | |
| "loss": 0.0003, | |
| "step": 365500 | |
| }, | |
| { | |
| "epoch": 2.82763043024792, | |
| "grad_norm": 0.0004569618031382561, | |
| "learning_rate": 2.0277099454041966e-05, | |
| "loss": 0.0003, | |
| "step": 366000 | |
| }, | |
| { | |
| "epoch": 2.8314933133493514, | |
| "grad_norm": 0.00040743814315646887, | |
| "learning_rate": 1.9822639382903814e-05, | |
| "loss": 0.0003, | |
| "step": 366500 | |
| }, | |
| { | |
| "epoch": 2.8353561964507827, | |
| "grad_norm": 0.0005269908579066396, | |
| "learning_rate": 1.9368179311765666e-05, | |
| "loss": 0.0003, | |
| "step": 367000 | |
| }, | |
| { | |
| "epoch": 2.8392190795522145, | |
| "grad_norm": 0.0003937318397220224, | |
| "learning_rate": 1.891371924062752e-05, | |
| "loss": 0.0003, | |
| "step": 367500 | |
| }, | |
| { | |
| "epoch": 2.8430819626536463, | |
| "grad_norm": 0.0005019407253712416, | |
| "learning_rate": 1.8459259169489367e-05, | |
| "loss": 0.0003, | |
| "step": 368000 | |
| }, | |
| { | |
| "epoch": 2.8469448457550777, | |
| "grad_norm": 0.00042626584763638675, | |
| "learning_rate": 1.8004799098351216e-05, | |
| "loss": 0.0003, | |
| "step": 368500 | |
| }, | |
| { | |
| "epoch": 2.8508077288565095, | |
| "grad_norm": 0.0004206789599265903, | |
| "learning_rate": 1.755033902721307e-05, | |
| "loss": 0.0003, | |
| "step": 369000 | |
| }, | |
| { | |
| "epoch": 2.854670611957941, | |
| "grad_norm": 0.0004782930773217231, | |
| "learning_rate": 1.7095878956074917e-05, | |
| "loss": 0.0003, | |
| "step": 369500 | |
| }, | |
| { | |
| "epoch": 2.8585334950593726, | |
| "grad_norm": 0.00043975317385047674, | |
| "learning_rate": 1.664141888493677e-05, | |
| "loss": 0.0003, | |
| "step": 370000 | |
| }, | |
| { | |
| "epoch": 2.862396378160804, | |
| "grad_norm": 0.00044531343155540526, | |
| "learning_rate": 1.6186958813798618e-05, | |
| "loss": 0.0003, | |
| "step": 370500 | |
| }, | |
| { | |
| "epoch": 2.8662592612622357, | |
| "grad_norm": 0.0004300449218135327, | |
| "learning_rate": 1.5732498742660467e-05, | |
| "loss": 0.0003, | |
| "step": 371000 | |
| }, | |
| { | |
| "epoch": 2.870122144363667, | |
| "grad_norm": 0.0003669953439384699, | |
| "learning_rate": 1.527803867152232e-05, | |
| "loss": 0.0003, | |
| "step": 371500 | |
| }, | |
| { | |
| "epoch": 2.873985027465099, | |
| "grad_norm": 0.00039420899702236056, | |
| "learning_rate": 1.482357860038417e-05, | |
| "loss": 0.0003, | |
| "step": 372000 | |
| }, | |
| { | |
| "epoch": 2.8778479105665307, | |
| "grad_norm": 0.00044955965131521225, | |
| "learning_rate": 1.436911852924602e-05, | |
| "loss": 0.0003, | |
| "step": 372500 | |
| }, | |
| { | |
| "epoch": 2.881710793667962, | |
| "grad_norm": 0.0004604061250574887, | |
| "learning_rate": 1.391465845810787e-05, | |
| "loss": 0.0003, | |
| "step": 373000 | |
| }, | |
| { | |
| "epoch": 2.8855736767693934, | |
| "grad_norm": 0.00042817433131858706, | |
| "learning_rate": 1.3460198386969719e-05, | |
| "loss": 0.0003, | |
| "step": 373500 | |
| }, | |
| { | |
| "epoch": 2.889436559870825, | |
| "grad_norm": 0.0004006045637652278, | |
| "learning_rate": 1.3005738315831571e-05, | |
| "loss": 0.0003, | |
| "step": 374000 | |
| }, | |
| { | |
| "epoch": 2.893299442972257, | |
| "grad_norm": 0.00040681843529455364, | |
| "learning_rate": 1.255127824469342e-05, | |
| "loss": 0.0003, | |
| "step": 374500 | |
| }, | |
| { | |
| "epoch": 2.8971623260736883, | |
| "grad_norm": 0.0004340211162343621, | |
| "learning_rate": 1.209681817355527e-05, | |
| "loss": 0.0003, | |
| "step": 375000 | |
| }, | |
| { | |
| "epoch": 2.90102520917512, | |
| "grad_norm": 0.0004770898667629808, | |
| "learning_rate": 1.1642358102417121e-05, | |
| "loss": 0.0003, | |
| "step": 375500 | |
| }, | |
| { | |
| "epoch": 2.9048880922765514, | |
| "grad_norm": 0.0004441466589923948, | |
| "learning_rate": 1.1187898031278971e-05, | |
| "loss": 0.0003, | |
| "step": 376000 | |
| }, | |
| { | |
| "epoch": 2.908750975377983, | |
| "grad_norm": 0.00045734934974461794, | |
| "learning_rate": 1.073343796014082e-05, | |
| "loss": 0.0003, | |
| "step": 376500 | |
| }, | |
| { | |
| "epoch": 2.9126138584794146, | |
| "grad_norm": 0.0004386783402878791, | |
| "learning_rate": 1.0278977889002672e-05, | |
| "loss": 0.0003, | |
| "step": 377000 | |
| }, | |
| { | |
| "epoch": 2.9164767415808464, | |
| "grad_norm": 0.0003852332301903516, | |
| "learning_rate": 9.824517817864521e-06, | |
| "loss": 0.0003, | |
| "step": 377500 | |
| }, | |
| { | |
| "epoch": 2.9203396246822777, | |
| "grad_norm": 0.0004367204965092242, | |
| "learning_rate": 9.370057746726372e-06, | |
| "loss": 0.0003, | |
| "step": 378000 | |
| }, | |
| { | |
| "epoch": 2.9242025077837095, | |
| "grad_norm": 0.0004893042496405542, | |
| "learning_rate": 8.915597675588222e-06, | |
| "loss": 0.0003, | |
| "step": 378500 | |
| }, | |
| { | |
| "epoch": 2.9280653908851413, | |
| "grad_norm": 0.0004190478939563036, | |
| "learning_rate": 8.461137604450072e-06, | |
| "loss": 0.0003, | |
| "step": 379000 | |
| }, | |
| { | |
| "epoch": 2.9319282739865726, | |
| "grad_norm": 0.0004581400426104665, | |
| "learning_rate": 8.006677533311923e-06, | |
| "loss": 0.0003, | |
| "step": 379500 | |
| }, | |
| { | |
| "epoch": 2.935791157088004, | |
| "grad_norm": 0.00042551904334686697, | |
| "learning_rate": 7.5522174621737725e-06, | |
| "loss": 0.0003, | |
| "step": 380000 | |
| }, | |
| { | |
| "epoch": 2.9396540401894358, | |
| "grad_norm": 0.0004512323357630521, | |
| "learning_rate": 7.097757391035623e-06, | |
| "loss": 0.0003, | |
| "step": 380500 | |
| }, | |
| { | |
| "epoch": 2.9435169232908676, | |
| "grad_norm": 0.0004587092262227088, | |
| "learning_rate": 6.6432973198974734e-06, | |
| "loss": 0.0003, | |
| "step": 381000 | |
| }, | |
| { | |
| "epoch": 2.947379806392299, | |
| "grad_norm": 0.00046226088306866586, | |
| "learning_rate": 6.188837248759323e-06, | |
| "loss": 0.0003, | |
| "step": 381500 | |
| }, | |
| { | |
| "epoch": 2.9512426894937307, | |
| "grad_norm": 0.0004404925275593996, | |
| "learning_rate": 5.7343771776211735e-06, | |
| "loss": 0.0003, | |
| "step": 382000 | |
| }, | |
| { | |
| "epoch": 2.955105572595162, | |
| "grad_norm": 0.00045682353083975613, | |
| "learning_rate": 5.279917106483024e-06, | |
| "loss": 0.0003, | |
| "step": 382500 | |
| }, | |
| { | |
| "epoch": 2.958968455696594, | |
| "grad_norm": 0.0004541674570646137, | |
| "learning_rate": 4.825457035344874e-06, | |
| "loss": 0.0003, | |
| "step": 383000 | |
| }, | |
| { | |
| "epoch": 2.962831338798025, | |
| "grad_norm": 0.00042987189954146743, | |
| "learning_rate": 4.370996964206724e-06, | |
| "loss": 0.0003, | |
| "step": 383500 | |
| }, | |
| { | |
| "epoch": 2.966694221899457, | |
| "grad_norm": 0.00045570009388029575, | |
| "learning_rate": 3.9165368930685745e-06, | |
| "loss": 0.0003, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 2.9705571050008883, | |
| "grad_norm": 0.0004511605075094849, | |
| "learning_rate": 3.462076821930425e-06, | |
| "loss": 0.0003, | |
| "step": 384500 | |
| }, | |
| { | |
| "epoch": 2.97441998810232, | |
| "grad_norm": 0.000448725710157305, | |
| "learning_rate": 3.007616750792275e-06, | |
| "loss": 0.0003, | |
| "step": 385000 | |
| }, | |
| { | |
| "epoch": 2.978282871203752, | |
| "grad_norm": 0.0004488198319450021, | |
| "learning_rate": 2.553156679654125e-06, | |
| "loss": 0.0003, | |
| "step": 385500 | |
| }, | |
| { | |
| "epoch": 2.9821457543051832, | |
| "grad_norm": 0.000420234544435516, | |
| "learning_rate": 2.0986966085159755e-06, | |
| "loss": 0.0003, | |
| "step": 386000 | |
| }, | |
| { | |
| "epoch": 2.9860086374066146, | |
| "grad_norm": 0.00040884234476834536, | |
| "learning_rate": 1.6442365373778258e-06, | |
| "loss": 0.0003, | |
| "step": 386500 | |
| }, | |
| { | |
| "epoch": 2.9898715205080464, | |
| "grad_norm": 0.0005011099274270236, | |
| "learning_rate": 1.189776466239676e-06, | |
| "loss": 0.0003, | |
| "step": 387000 | |
| }, | |
| { | |
| "epoch": 2.993734403609478, | |
| "grad_norm": 0.00048375382903032005, | |
| "learning_rate": 7.353163951015264e-07, | |
| "loss": 0.0003, | |
| "step": 387500 | |
| }, | |
| { | |
| "epoch": 2.9975972867109095, | |
| "grad_norm": 0.00044487029663287103, | |
| "learning_rate": 2.8085632396337657e-07, | |
| "loss": 0.0003, | |
| "step": 388000 | |
| } | |
| ], | |
| "logging_steps": 500, | |
| "max_steps": 388311, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 129437, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 64, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
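
The structure above appears to be the `trainer_state.json` that the Hugging Face `transformers` `Trainer` writes alongside its checkpoints: `log_history` holds one record per `logging_steps` interval (here 500) with `epoch`, `grad_norm`, `learning_rate`, `loss`, and `step`, while the trailing fields capture run-level settings such as `max_steps`, `num_train_epochs`, and `save_steps`. Below is a minimal sketch for loading the file and summarizing those records; the local path `trainer_state.json` is an assumption, and the snippet is illustrative rather than part of the logged state.

```python
import json

# Assumed local path to the state file shown above (adjust as needed).
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history record mirrors the entries above:
# epoch, grad_norm, learning_rate, loss, and step.
history = state["log_history"]

# Summarize the first and last logged records plus the run settings.
first, last = history[0], history[-1]
print(f"logged records : {len(history)} (every {state['logging_steps']} steps)")
print(f"first record   : step={first['step']} loss={first['loss']} lr={first['learning_rate']:.3e}")
print(f"last record    : step={last['step']} loss={last['loss']} lr={last['learning_rate']:.3e}")
print(f"run settings   : max_steps={state['max_steps']}, epochs={state['num_train_epochs']}")
```

Read against the final records, the learning rate decays linearly toward zero as `step` approaches `max_steps` (388311), so the last logged value at step 388000 (about 2.81e-07) is the tail of that schedule; `should_training_stop: true` in `TrainerControl` marks the run as complete.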