{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1316,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0007598784194528875,
      "grad_norm": 18.80578654204849,
      "learning_rate": 5.000000000000001e-07,
      "loss": 1.9397,
      "step": 1
    },
    {
      "epoch": 0.001519756838905775,
      "grad_norm": 19.936979517442442,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 2.0073,
      "step": 2
    },
    {
      "epoch": 0.0022796352583586625,
      "grad_norm": 15.795489225960615,
      "learning_rate": 1.5e-06,
      "loss": 1.866,
      "step": 3
    },
    {
      "epoch": 0.00303951367781155,
      "grad_norm": 15.301926414472275,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.7852,
      "step": 4
    },
    {
      "epoch": 0.003799392097264438,
      "grad_norm": 16.2282739725826,
      "learning_rate": 2.5e-06,
      "loss": 1.8538,
      "step": 5
    },
    {
      "epoch": 0.004559270516717325,
      "grad_norm": 14.636983158892624,
      "learning_rate": 3e-06,
      "loss": 1.784,
      "step": 6
    },
    {
      "epoch": 0.005319148936170213,
      "grad_norm": 12.55853670109212,
      "learning_rate": 3.5e-06,
      "loss": 1.5912,
      "step": 7
    },
    {
      "epoch": 0.0060790273556231,
      "grad_norm": 14.874439567030636,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.6366,
      "step": 8
    },
    {
      "epoch": 0.006838905775075988,
      "grad_norm": 10.859035040626171,
      "learning_rate": 4.5e-06,
      "loss": 1.4395,
      "step": 9
    },
    {
      "epoch": 0.007598784194528876,
      "grad_norm": 4.63340689402763,
      "learning_rate": 5e-06,
      "loss": 1.2645,
      "step": 10
    },
    {
      "epoch": 0.008358662613981762,
      "grad_norm": 3.3358641102100797,
      "learning_rate": 5.500000000000001e-06,
      "loss": 1.2384,
      "step": 11
    },
    {
      "epoch": 0.00911854103343465,
      "grad_norm": 3.374539527535742,
      "learning_rate": 6e-06,
      "loss": 1.1554,
      "step": 12
    },
    {
      "epoch": 0.009878419452887538,
      "grad_norm": 3.697822430187619,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 1.3274,
      "step": 13
    },
    {
      "epoch": 0.010638297872340425,
      "grad_norm": 3.498197000653117,
      "learning_rate": 7e-06,
      "loss": 1.1748,
      "step": 14
    },
    {
      "epoch": 0.011398176291793313,
      "grad_norm": 2.5395733312921087,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.0877,
      "step": 15
    },
    {
      "epoch": 0.0121580547112462,
      "grad_norm": 4.047677386924761,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.2251,
      "step": 16
    },
    {
      "epoch": 0.012917933130699088,
      "grad_norm": 2.7978886070878635,
      "learning_rate": 8.5e-06,
      "loss": 1.2122,
      "step": 17
    },
    {
      "epoch": 0.013677811550151976,
      "grad_norm": 3.269556053301259,
      "learning_rate": 9e-06,
      "loss": 1.2179,
      "step": 18
    },
    {
      "epoch": 0.014437689969604863,
      "grad_norm": 2.7856939670959395,
      "learning_rate": 9.5e-06,
      "loss": 1.1372,
      "step": 19
    },
    {
      "epoch": 0.015197568389057751,
      "grad_norm": 2.0749026483605637,
      "learning_rate": 1e-05,
      "loss": 1.0943,
      "step": 20
    },
    {
      "epoch": 0.015957446808510637,
      "grad_norm": 1.9516051588642298,
      "learning_rate": 1.0500000000000001e-05,
      "loss": 1.0491,
      "step": 21
    },
    {
      "epoch": 0.016717325227963525,
      "grad_norm": 1.9813644412336127,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 1.0888,
      "step": 22
    },
    {
      "epoch": 0.017477203647416412,
      "grad_norm": 2.4226280871398402,
      "learning_rate": 1.15e-05,
      "loss": 1.0475,
      "step": 23
    },
    {
      "epoch": 0.0182370820668693,
      "grad_norm": 2.63750745875809,
      "learning_rate": 1.2e-05,
      "loss": 1.16,
      "step": 24
    },
    {
      "epoch": 0.018996960486322188,
      "grad_norm": 1.7600606328882897,
      "learning_rate": 1.25e-05,
      "loss": 0.9607,
      "step": 25
    },
    {
      "epoch": 0.019756838905775075,
      "grad_norm": 1.9342081421742727,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 1.1469,
      "step": 26
    },
    {
      "epoch": 0.020516717325227963,
      "grad_norm": 1.7373975012995082,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 0.9608,
      "step": 27
    },
    {
      "epoch": 0.02127659574468085,
      "grad_norm": 1.5642247263325297,
      "learning_rate": 1.4e-05,
      "loss": 1.0482,
      "step": 28
    },
    {
      "epoch": 0.022036474164133738,
      "grad_norm": 1.861765100381632,
      "learning_rate": 1.45e-05,
      "loss": 1.1197,
      "step": 29
    },
    {
      "epoch": 0.022796352583586626,
      "grad_norm": 2.1003606297804236,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 1.0171,
      "step": 30
    },
    {
      "epoch": 0.023556231003039513,
      "grad_norm": 1.8389698006880923,
      "learning_rate": 1.55e-05,
      "loss": 1.1011,
      "step": 31
    },
    {
      "epoch": 0.0243161094224924,
      "grad_norm": 1.5524633264729542,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.0089,
      "step": 32
    },
    {
      "epoch": 0.02507598784194529,
      "grad_norm": 1.9676479659023847,
      "learning_rate": 1.65e-05,
      "loss": 1.0741,
      "step": 33
    },
    {
      "epoch": 0.025835866261398176,
      "grad_norm": 2.560829451802257,
      "learning_rate": 1.7e-05,
      "loss": 1.0388,
      "step": 34
    },
    {
      "epoch": 0.026595744680851064,
      "grad_norm": 2.0702414094576223,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 1.0273,
      "step": 35
    },
    {
      "epoch": 0.02735562310030395,
      "grad_norm": 1.9031851017818049,
      "learning_rate": 1.8e-05,
      "loss": 0.9672,
      "step": 36
    },
    {
      "epoch": 0.02811550151975684,
      "grad_norm": 1.5509967869794354,
      "learning_rate": 1.8500000000000002e-05,
      "loss": 0.8937,
      "step": 37
    },
    {
      "epoch": 0.028875379939209727,
      "grad_norm": 2.04534039219301,
      "learning_rate": 1.9e-05,
      "loss": 1.02,
      "step": 38
    },
    {
      "epoch": 0.029635258358662615,
      "grad_norm": 1.7025051455286688,
      "learning_rate": 1.95e-05,
      "loss": 1.0131,
      "step": 39
    },
    {
      "epoch": 0.030395136778115502,
      "grad_norm": 1.5442322349318613,
      "learning_rate": 2e-05,
      "loss": 0.9192,
      "step": 40
    },
    {
      "epoch": 0.03115501519756839,
      "grad_norm": 2.546878971605779,
      "learning_rate": 1.9999969691239106e-05,
      "loss": 1.0129,
      "step": 41
    },
    {
      "epoch": 0.031914893617021274,
      "grad_norm": 2.0851882471381047,
      "learning_rate": 1.999987876514015e-05,
      "loss": 0.8691,
      "step": 42
    },
    {
      "epoch": 0.03267477203647416,
      "grad_norm": 1.6551263289563938,
      "learning_rate": 1.9999727222254298e-05,
      "loss": 0.9416,
      "step": 43
    },
    {
      "epoch": 0.03343465045592705,
      "grad_norm": 1.5537317902737728,
      "learning_rate": 1.999951506350017e-05,
      "loss": 0.9551,
      "step": 44
    },
    {
      "epoch": 0.03419452887537994,
      "grad_norm": 1.9792495738131946,
      "learning_rate": 1.999924229016382e-05,
      "loss": 0.9638,
      "step": 45
    },
    {
      "epoch": 0.034954407294832825,
      "grad_norm": 1.2444365971750562,
      "learning_rate": 1.999890890389873e-05,
      "loss": 0.9352,
      "step": 46
    },
    {
      "epoch": 0.03571428571428571,
      "grad_norm": 1.6183173861380766,
      "learning_rate": 1.9998514906725805e-05,
      "loss": 1.0689,
      "step": 47
    },
    {
      "epoch": 0.0364741641337386,
      "grad_norm": 1.9294426582503834,
      "learning_rate": 1.9998060301033363e-05,
      "loss": 0.903,
      "step": 48
    },
    {
      "epoch": 0.03723404255319149,
      "grad_norm": 1.5431177344979468,
      "learning_rate": 1.9997545089577105e-05,
      "loss": 0.9321,
      "step": 49
    },
    {
      "epoch": 0.037993920972644375,
      "grad_norm": 1.6127246468783536,
      "learning_rate": 1.9996969275480116e-05,
      "loss": 0.8903,
      "step": 50
    },
    {
      "epoch": 0.03875379939209726,
      "grad_norm": 1.701932813930693,
      "learning_rate": 1.9996332862232843e-05,
      "loss": 0.8329,
      "step": 51
    },
    {
      "epoch": 0.03951367781155015,
      "grad_norm": 1.4202165623723457,
      "learning_rate": 1.9995635853693057e-05,
      "loss": 0.8497,
      "step": 52
    },
    {
      "epoch": 0.04027355623100304,
      "grad_norm": 1.5908870733695089,
      "learning_rate": 1.999487825408586e-05,
      "loss": 0.9123,
      "step": 53
    },
    {
      "epoch": 0.041033434650455926,
      "grad_norm": 1.6173689783085115,
      "learning_rate": 1.999406006800363e-05,
      "loss": 0.9647,
      "step": 54
    },
    {
      "epoch": 0.04179331306990881,
      "grad_norm": 1.506047572850385,
      "learning_rate": 1.9993181300406006e-05,
      "loss": 0.9474,
      "step": 55
    },
    {
      "epoch": 0.0425531914893617,
      "grad_norm": 1.734404739733389,
      "learning_rate": 1.999224195661986e-05,
      "loss": 0.9506,
      "step": 56
    },
    {
      "epoch": 0.04331306990881459,
      "grad_norm": 1.5219849562051189,
      "learning_rate": 1.9991242042339265e-05,
      "loss": 0.9364,
      "step": 57
    },
    {
      "epoch": 0.044072948328267476,
      "grad_norm": 2.065850350353832,
      "learning_rate": 1.999018156362545e-05,
      "loss": 0.9313,
      "step": 58
    },
    {
      "epoch": 0.044832826747720364,
      "grad_norm": 1.3320771347319378,
      "learning_rate": 1.998906052690677e-05,
      "loss": 0.9042,
      "step": 59
    },
    {
      "epoch": 0.04559270516717325,
      "grad_norm": 1.5259869083951578,
      "learning_rate": 1.9987878938978684e-05,
      "loss": 0.9441,
      "step": 60
    },
    {
      "epoch": 0.04635258358662614,
      "grad_norm": 1.4153750994381784,
      "learning_rate": 1.9986636807003676e-05,
      "loss": 0.8424,
      "step": 61
    },
    {
      "epoch": 0.04711246200607903,
      "grad_norm": 1.8361063352172935,
      "learning_rate": 1.998533413851124e-05,
      "loss": 0.9499,
      "step": 62
    },
    {
      "epoch": 0.047872340425531915,
      "grad_norm": 1.6803018381682095,
      "learning_rate": 1.9983970941397837e-05,
      "loss": 0.9408,
      "step": 63
    },
    {
      "epoch": 0.0486322188449848,
      "grad_norm": 1.2589759516499712,
      "learning_rate": 1.9982547223926826e-05,
      "loss": 0.8771,
      "step": 64
    },
    {
      "epoch": 0.04939209726443769,
      "grad_norm": 1.5579714941883882,
      "learning_rate": 1.998106299472843e-05,
      "loss": 0.8856,
      "step": 65
    },
    {
      "epoch": 0.05015197568389058,
      "grad_norm": 1.5330331496652656,
      "learning_rate": 1.997951826279968e-05,
      "loss": 0.9142,
      "step": 66
    },
    {
      "epoch": 0.050911854103343465,
      "grad_norm": 1.7871157980642074,
      "learning_rate": 1.9977913037504355e-05,
      "loss": 0.9652,
      "step": 67
    },
    {
      "epoch": 0.05167173252279635,
      "grad_norm": 1.4927076766801692,
      "learning_rate": 1.997624732857294e-05,
      "loss": 0.8576,
      "step": 68
    },
    {
      "epoch": 0.05243161094224924,
      "grad_norm": 1.3902160650547777,
      "learning_rate": 1.9974521146102535e-05,
      "loss": 0.91,
      "step": 69
    },
    {
      "epoch": 0.05319148936170213,
      "grad_norm": 1.4925539365013698,
      "learning_rate": 1.9972734500556847e-05,
      "loss": 0.8894,
      "step": 70
    },
    {
      "epoch": 0.053951367781155016,
      "grad_norm": 1.710259445969903,
      "learning_rate": 1.997088740276607e-05,
      "loss": 0.8301,
      "step": 71
    },
    {
      "epoch": 0.0547112462006079,
      "grad_norm": 1.2857273093969848,
      "learning_rate": 1.9968979863926857e-05,
      "loss": 0.8579,
      "step": 72
    },
    {
      "epoch": 0.05547112462006079,
      "grad_norm": 1.7015365206541173,
      "learning_rate": 1.996701189560223e-05,
      "loss": 0.8885,
      "step": 73
    },
    {
      "epoch": 0.05623100303951368,
      "grad_norm": 1.5683133200998347,
      "learning_rate": 1.9964983509721527e-05,
      "loss": 0.959,
      "step": 74
    },
    {
      "epoch": 0.056990881458966566,
      "grad_norm": 1.6667532459642813,
      "learning_rate": 1.9962894718580325e-05,
      "loss": 0.7551,
      "step": 75
    },
    {
      "epoch": 0.057750759878419454,
      "grad_norm": 1.5771827982991307,
      "learning_rate": 1.9960745534840357e-05,
      "loss": 0.8604,
      "step": 76
    },
    {
      "epoch": 0.05851063829787234,
      "grad_norm": 1.4816031573627018,
      "learning_rate": 1.9958535971529434e-05,
      "loss": 1.0382,
      "step": 77
    },
    {
      "epoch": 0.05927051671732523,
      "grad_norm": 1.4946584443754491,
      "learning_rate": 1.9956266042041394e-05,
      "loss": 0.9434,
      "step": 78
    },
    {
      "epoch": 0.06003039513677812,
      "grad_norm": 1.5771515008850745,
      "learning_rate": 1.995393576013598e-05,
      "loss": 0.909,
      "step": 79
    },
    {
      "epoch": 0.060790273556231005,
      "grad_norm": 1.5062039348637382,
      "learning_rate": 1.995154513993878e-05,
      "loss": 0.8621,
      "step": 80
    },
    {
      "epoch": 0.06155015197568389,
      "grad_norm": 1.6606089352316755,
      "learning_rate": 1.9949094195941152e-05,
      "loss": 0.8573,
      "step": 81
    },
    {
      "epoch": 0.06231003039513678,
      "grad_norm": 2.5006315399495995,
      "learning_rate": 1.99465829430001e-05,
      "loss": 0.8141,
      "step": 82
    },
    {
      "epoch": 0.06306990881458967,
      "grad_norm": 1.4688680295961642,
      "learning_rate": 1.9944011396338223e-05,
      "loss": 0.8935,
      "step": 83
    },
    {
      "epoch": 0.06382978723404255,
      "grad_norm": 1.6575370449467235,
      "learning_rate": 1.9941379571543597e-05,
      "loss": 0.8648,
      "step": 84
    },
    {
      "epoch": 0.06458966565349544,
      "grad_norm": 1.5375840047431035,
      "learning_rate": 1.9938687484569694e-05,
      "loss": 0.7928,
      "step": 85
    },
    {
      "epoch": 0.06534954407294832,
      "grad_norm": 1.454894977961652,
      "learning_rate": 1.993593515173528e-05,
      "loss": 0.9172,
      "step": 86
    },
    {
      "epoch": 0.06610942249240122,
      "grad_norm": 1.5486550998550936,
      "learning_rate": 1.99331225897243e-05,
      "loss": 0.8492,
      "step": 87
    },
    {
      "epoch": 0.0668693009118541,
      "grad_norm": 1.259238646800967,
      "learning_rate": 1.993024981558583e-05,
      "loss": 0.8039,
      "step": 88
    },
    {
      "epoch": 0.067629179331307,
      "grad_norm": 1.3265462884777126,
      "learning_rate": 1.9927316846733902e-05,
      "loss": 0.9073,
      "step": 89
    },
    {
      "epoch": 0.06838905775075987,
      "grad_norm": 1.3099679831388973,
      "learning_rate": 1.9924323700947446e-05,
      "loss": 0.8695,
      "step": 90
    },
    {
      "epoch": 0.06914893617021277,
      "grad_norm": 1.402342068673609,
      "learning_rate": 1.9921270396370175e-05,
      "loss": 0.9743,
      "step": 91
    },
    {
      "epoch": 0.06990881458966565,
      "grad_norm": 1.443374144723623,
      "learning_rate": 1.991815695151046e-05,
      "loss": 0.9119,
      "step": 92
    },
    {
      "epoch": 0.07066869300911854,
      "grad_norm": 1.6278057971167832,
      "learning_rate": 1.9914983385241235e-05,
      "loss": 0.9257,
      "step": 93
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 1.4899411128486526,
      "learning_rate": 1.991174971679987e-05,
      "loss": 0.7992,
      "step": 94
    },
    {
      "epoch": 0.07218844984802432,
      "grad_norm": 1.5524452751872306,
      "learning_rate": 1.990845596578807e-05,
      "loss": 0.8741,
      "step": 95
    },
    {
      "epoch": 0.0729483282674772,
      "grad_norm": 1.4240972388846627,
      "learning_rate": 1.9905102152171728e-05,
      "loss": 0.95,
      "step": 96
    },
    {
      "epoch": 0.0737082066869301,
      "grad_norm": 1.2768170988227376,
      "learning_rate": 1.990168829628083e-05,
      "loss": 0.7848,
      "step": 97
    },
    {
      "epoch": 0.07446808510638298,
      "grad_norm": 1.6873995127360015,
      "learning_rate": 1.989821441880933e-05,
      "loss": 0.9599,
      "step": 98
    },
    {
      "epoch": 0.07522796352583587,
      "grad_norm": 1.644005532727897,
      "learning_rate": 1.989468054081501e-05,
      "loss": 0.8737,
      "step": 99
    },
    {
      "epoch": 0.07598784194528875,
      "grad_norm": 1.4609725243916976,
      "learning_rate": 1.9891086683719362e-05,
      "loss": 1.0085,
      "step": 100
    },
    {
      "epoch": 0.07674772036474165,
      "grad_norm": 1.4415001390289846,
      "learning_rate": 1.988743286930746e-05,
      "loss": 0.8633,
      "step": 101
    },
    {
      "epoch": 0.07750759878419453,
      "grad_norm": 1.6029615916524937,
      "learning_rate": 1.988371911972782e-05,
      "loss": 0.8327,
      "step": 102
    },
    {
      "epoch": 0.07826747720364742,
      "grad_norm": 1.3724153377169241,
      "learning_rate": 1.987994545749227e-05,
      "loss": 0.794,
      "step": 103
    },
    {
      "epoch": 0.0790273556231003,
      "grad_norm": 1.453101692219877,
      "learning_rate": 1.9876111905475816e-05,
      "loss": 0.8374,
      "step": 104
    },
    {
      "epoch": 0.0797872340425532,
      "grad_norm": 1.454307714577757,
      "learning_rate": 1.98722184869165e-05,
      "loss": 0.861,
      "step": 105
    },
    {
      "epoch": 0.08054711246200608,
      "grad_norm": 1.5489455405144372,
      "learning_rate": 1.9868265225415263e-05,
      "loss": 0.8651,
      "step": 106
    },
    {
      "epoch": 0.08130699088145897,
      "grad_norm": 1.4441468156333392,
      "learning_rate": 1.9864252144935795e-05,
      "loss": 0.7947,
      "step": 107
    },
    {
      "epoch": 0.08206686930091185,
      "grad_norm": 1.2836722327153833,
      "learning_rate": 1.9860179269804394e-05,
      "loss": 0.8173,
      "step": 108
    },
    {
      "epoch": 0.08282674772036475,
      "grad_norm": 1.4879254684645808,
      "learning_rate": 1.985604662470982e-05,
      "loss": 0.7065,
      "step": 109
    },
    {
      "epoch": 0.08358662613981763,
      "grad_norm": 1.331174591606538,
      "learning_rate": 1.9851854234703146e-05,
      "loss": 0.9101,
      "step": 110
    },
    {
      "epoch": 0.08434650455927052,
      "grad_norm": 1.3243404899406508,
      "learning_rate": 1.9847602125197597e-05,
      "loss": 0.9144,
      "step": 111
    },
    {
      "epoch": 0.0851063829787234,
      "grad_norm": 1.3169445306756042,
      "learning_rate": 1.984329032196841e-05,
      "loss": 0.9052,
      "step": 112
    },
    {
      "epoch": 0.0858662613981763,
      "grad_norm": 1.4676954485492109,
      "learning_rate": 1.9838918851152668e-05,
      "loss": 0.8369,
      "step": 113
    },
    {
      "epoch": 0.08662613981762918,
      "grad_norm": 1.3212838461574712,
      "learning_rate": 1.9834487739249146e-05,
      "loss": 0.8199,
      "step": 114
    },
    {
      "epoch": 0.08738601823708207,
      "grad_norm": 1.6113731223019465,
      "learning_rate": 1.982999701311814e-05,
      "loss": 0.8978,
      "step": 115
    },
    {
      "epoch": 0.08814589665653495,
      "grad_norm": 1.701349586674783,
      "learning_rate": 1.982544669998132e-05,
      "loss": 0.8744,
      "step": 116
    },
    {
      "epoch": 0.08890577507598785,
      "grad_norm": 1.3719009919541267,
      "learning_rate": 1.982083682742156e-05,
      "loss": 0.842,
      "step": 117
    },
    {
      "epoch": 0.08966565349544073,
      "grad_norm": 1.3618472213922084,
      "learning_rate": 1.9816167423382766e-05,
      "loss": 0.9114,
      "step": 118
    },
    {
      "epoch": 0.09042553191489362,
      "grad_norm": 1.5573781400479945,
      "learning_rate": 1.9811438516169703e-05,
      "loss": 0.9558,
      "step": 119
    },
    {
      "epoch": 0.0911854103343465,
      "grad_norm": 1.3094575881423214,
      "learning_rate": 1.9806650134447837e-05,
      "loss": 0.9026,
      "step": 120
    },
    {
      "epoch": 0.0919452887537994,
      "grad_norm": 1.4256831143444713,
      "learning_rate": 1.9801802307243153e-05,
      "loss": 0.7495,
      "step": 121
    },
    {
      "epoch": 0.09270516717325228,
      "grad_norm": 1.5602629994945898,
      "learning_rate": 1.9796895063941978e-05,
      "loss": 0.8197,
      "step": 122
    },
    {
      "epoch": 0.09346504559270517,
      "grad_norm": 1.421395696096533,
      "learning_rate": 1.97919284342908e-05,
      "loss": 0.8597,
      "step": 123
    },
    {
      "epoch": 0.09422492401215805,
      "grad_norm": 1.4088331723304612,
      "learning_rate": 1.9786902448396102e-05,
      "loss": 0.9317,
      "step": 124
    },
    {
      "epoch": 0.09498480243161095,
      "grad_norm": 1.3938352872597106,
      "learning_rate": 1.9781817136724166e-05,
      "loss": 0.8218,
      "step": 125
    },
    {
      "epoch": 0.09574468085106383,
      "grad_norm": 1.2290780358524915,
      "learning_rate": 1.9776672530100886e-05,
      "loss": 0.8977,
      "step": 126
    },
    {
      "epoch": 0.09650455927051672,
      "grad_norm": 1.2184006058342602,
      "learning_rate": 1.9771468659711595e-05,
      "loss": 0.7648,
      "step": 127
    },
    {
      "epoch": 0.0972644376899696,
      "grad_norm": 1.5603071349947746,
      "learning_rate": 1.976620555710087e-05,
      "loss": 0.8912,
      "step": 128
    },
    {
      "epoch": 0.0980243161094225,
      "grad_norm": 1.7066993665659778,
      "learning_rate": 1.9760883254172327e-05,
      "loss": 0.7951,
      "step": 129
    },
    {
      "epoch": 0.09878419452887538,
      "grad_norm": 1.4670293413202677,
      "learning_rate": 1.975550178318845e-05,
      "loss": 0.8722,
      "step": 130
    },
    {
      "epoch": 0.09954407294832827,
      "grad_norm": 1.2976350541977761,
      "learning_rate": 1.9750061176770385e-05,
      "loss": 0.7345,
      "step": 131
    },
    {
      "epoch": 0.10030395136778116,
      "grad_norm": 1.467154260714796,
      "learning_rate": 1.9744561467897735e-05,
      "loss": 0.7893,
      "step": 132
    },
    {
      "epoch": 0.10106382978723404,
      "grad_norm": 1.3368644928662248,
      "learning_rate": 1.9739002689908377e-05,
      "loss": 0.9025,
      "step": 133
    },
    {
      "epoch": 0.10182370820668693,
      "grad_norm": 1.2842451321782573,
      "learning_rate": 1.9733384876498248e-05,
      "loss": 0.7811,
      "step": 134
    },
    {
      "epoch": 0.10258358662613981,
      "grad_norm": 1.283799339851773,
      "learning_rate": 1.9727708061721132e-05,
      "loss": 0.8634,
      "step": 135
    },
    {
      "epoch": 0.1033434650455927,
      "grad_norm": 1.2741104055349879,
      "learning_rate": 1.972197227998848e-05,
      "loss": 0.8588,
      "step": 136
    },
    {
      "epoch": 0.10410334346504559,
      "grad_norm": 1.5426947015451566,
      "learning_rate": 1.9716177566069174e-05,
      "loss": 0.8995,
      "step": 137
    },
    {
      "epoch": 0.10486322188449848,
      "grad_norm": 1.5697420259183603,
      "learning_rate": 1.9710323955089343e-05,
      "loss": 0.8022,
      "step": 138
    },
    {
      "epoch": 0.10562310030395136,
      "grad_norm": 1.6446364363154833,
      "learning_rate": 1.9704411482532116e-05,
      "loss": 0.7562,
      "step": 139
    },
    {
      "epoch": 0.10638297872340426,
      "grad_norm": 1.2868301569204967,
      "learning_rate": 1.9698440184237442e-05,
      "loss": 0.8642,
      "step": 140
    },
    {
      "epoch": 0.10714285714285714,
      "grad_norm": 1.6612523009559164,
      "learning_rate": 1.9692410096401852e-05,
      "loss": 0.8484,
      "step": 141
    },
    {
      "epoch": 0.10790273556231003,
      "grad_norm": 1.417402864742167,
      "learning_rate": 1.968632125557824e-05,
      "loss": 0.7841,
      "step": 142
    },
    {
      "epoch": 0.10866261398176291,
      "grad_norm": 1.3125256264505836,
      "learning_rate": 1.968017369867565e-05,
      "loss": 0.7826,
      "step": 143
    },
    {
      "epoch": 0.1094224924012158,
      "grad_norm": 1.527206519245764,
      "learning_rate": 1.9673967462959052e-05,
      "loss": 0.8288,
      "step": 144
    },
    {
      "epoch": 0.11018237082066869,
      "grad_norm": 1.4600494939651711,
      "learning_rate": 1.966770258604911e-05,
      "loss": 0.7591,
      "step": 145
    },
    {
      "epoch": 0.11094224924012158,
      "grad_norm": 1.4949730775770051,
      "learning_rate": 1.9661379105921948e-05,
      "loss": 0.8231,
      "step": 146
    },
    {
      "epoch": 0.11170212765957446,
      "grad_norm": 1.3073451814640382,
      "learning_rate": 1.9654997060908946e-05,
      "loss": 0.8329,
      "step": 147
    },
    {
      "epoch": 0.11246200607902736,
      "grad_norm": 1.5942079475100348,
      "learning_rate": 1.9648556489696472e-05,
      "loss": 0.8154,
      "step": 148
    },
    {
      "epoch": 0.11322188449848024,
      "grad_norm": 1.6251063108892145,
      "learning_rate": 1.9642057431325675e-05,
      "loss": 0.8908,
      "step": 149
    },
    {
      "epoch": 0.11398176291793313,
      "grad_norm": 1.453886084945642,
      "learning_rate": 1.963549992519223e-05,
      "loss": 0.8031,
      "step": 150
    },
    {
      "epoch": 0.11474164133738601,
      "grad_norm": 1.599473373671884,
      "learning_rate": 1.9628884011046123e-05,
      "loss": 0.7455,
      "step": 151
    },
    {
      "epoch": 0.11550151975683891,
      "grad_norm": 1.4430712315336243,
      "learning_rate": 1.9622209728991382e-05,
      "loss": 0.8584,
      "step": 152
    },
    {
      "epoch": 0.11626139817629179,
      "grad_norm": 1.3489245641735763,
      "learning_rate": 1.9615477119485855e-05,
      "loss": 0.7848,
      "step": 153
    },
    {
      "epoch": 0.11702127659574468,
      "grad_norm": 1.495708535271893,
      "learning_rate": 1.9608686223340944e-05,
      "loss": 0.8357,
      "step": 154
    },
    {
      "epoch": 0.11778115501519756,
      "grad_norm": 1.2800014212250443,
      "learning_rate": 1.9601837081721387e-05,
      "loss": 0.7878,
      "step": 155
    },
    {
      "epoch": 0.11854103343465046,
      "grad_norm": 1.4003079417858133,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.8799,
      "step": 156
    },
    {
      "epoch": 0.11930091185410334,
      "grad_norm": 1.3522679430413596,
      "learning_rate": 1.958796422848233e-05,
      "loss": 0.6824,
      "step": 157
    },
    {
      "epoch": 0.12006079027355623,
      "grad_norm": 1.2521479635281891,
      "learning_rate": 1.9580940600956636e-05,
      "loss": 0.7808,
      "step": 158
    },
    {
      "epoch": 0.12082066869300911,
      "grad_norm": 1.4833909050209668,
      "learning_rate": 1.9573858896143376e-05,
      "loss": 0.851,
      "step": 159
    },
    {
      "epoch": 0.12158054711246201,
      "grad_norm": 1.4243242003713783,
      "learning_rate": 1.9566719156970095e-05,
      "loss": 0.8802,
      "step": 160
    },
    {
      "epoch": 0.12234042553191489,
      "grad_norm": 1.3166173735126552,
      "learning_rate": 1.955952142671612e-05,
      "loss": 0.8127,
      "step": 161
    },
    {
      "epoch": 0.12310030395136778,
      "grad_norm": 1.3022361391490747,
      "learning_rate": 1.9552265749012306e-05,
      "loss": 0.8083,
      "step": 162
    },
    {
      "epoch": 0.12386018237082067,
      "grad_norm": 1.2644882070813588,
      "learning_rate": 1.9544952167840777e-05,
      "loss": 0.8863,
      "step": 163
    },
    {
      "epoch": 0.12462006079027356,
      "grad_norm": 1.6870073547230005,
      "learning_rate": 1.9537580727534643e-05,
      "loss": 0.8111,
      "step": 164
    },
    {
      "epoch": 0.12537993920972645,
      "grad_norm": 1.4948637199761152,
      "learning_rate": 1.953015147277776e-05,
      "loss": 0.8491,
      "step": 165
    },
    {
      "epoch": 0.12613981762917933,
      "grad_norm": 1.5423308226784462,
      "learning_rate": 1.9522664448604417e-05,
      "loss": 0.9121,
      "step": 166
    },
    {
      "epoch": 0.12689969604863222,
      "grad_norm": 1.5910852358644978,
      "learning_rate": 1.9515119700399107e-05,
      "loss": 0.9078,
      "step": 167
    },
    {
      "epoch": 0.1276595744680851,
      "grad_norm": 1.4245628260455785,
      "learning_rate": 1.9507517273896224e-05,
      "loss": 0.8526,
      "step": 168
    },
    {
      "epoch": 0.128419452887538,
      "grad_norm": 1.5104873529456913,
      "learning_rate": 1.9499857215179788e-05,
      "loss": 0.7717,
      "step": 169
    },
    {
      "epoch": 0.12917933130699089,
      "grad_norm": 1.3345306473632632,
      "learning_rate": 1.949213957068318e-05,
      "loss": 0.8603,
      "step": 170
    },
    {
      "epoch": 0.12993920972644377,
      "grad_norm": 1.7304871687021037,
      "learning_rate": 1.9484364387188848e-05,
      "loss": 0.9014,
      "step": 171
    },
    {
      "epoch": 0.13069908814589665,
      "grad_norm": 1.5383845941425578,
      "learning_rate": 1.9476531711828027e-05,
      "loss": 0.9026,
      "step": 172
    },
    {
      "epoch": 0.13145896656534956,
      "grad_norm": 1.38583638759642,
      "learning_rate": 1.9468641592080452e-05,
      "loss": 0.7653,
      "step": 173
    },
    {
      "epoch": 0.13221884498480244,
      "grad_norm": 1.3049100822500128,
      "learning_rate": 1.9460694075774082e-05,
      "loss": 0.8563,
      "step": 174
    },
    {
      "epoch": 0.13297872340425532,
      "grad_norm": 1.3101347980378846,
      "learning_rate": 1.9452689211084775e-05,
      "loss": 0.7557,
      "step": 175
    },
    {
      "epoch": 0.1337386018237082,
      "grad_norm": 1.5375631224236188,
      "learning_rate": 1.9444627046536055e-05,
      "loss": 0.8439,
      "step": 176
    },
    {
      "epoch": 0.1344984802431611,
      "grad_norm": 1.405013477951534,
      "learning_rate": 1.9436507630998758e-05,
      "loss": 0.7372,
      "step": 177
    },
    {
      "epoch": 0.135258358662614,
      "grad_norm": 1.4935758153732357,
      "learning_rate": 1.9428331013690763e-05,
      "loss": 0.8974,
      "step": 178
    },
    {
      "epoch": 0.13601823708206687,
      "grad_norm": 1.4046757318395755,
      "learning_rate": 1.9420097244176708e-05,
      "loss": 0.8512,
      "step": 179
    },
    {
      "epoch": 0.13677811550151975,
      "grad_norm": 1.6701162334872106,
      "learning_rate": 1.9411806372367656e-05,
      "loss": 0.85,
      "step": 180
    },
    {
      "epoch": 0.13753799392097266,
      "grad_norm": 1.173594383013887,
      "learning_rate": 1.940345844852082e-05,
      "loss": 0.775,
      "step": 181
    },
    {
      "epoch": 0.13829787234042554,
      "grad_norm": 1.1102937334390899,
      "learning_rate": 1.9395053523239243e-05,
      "loss": 0.7863,
      "step": 182
    },
    {
      "epoch": 0.13905775075987842,
      "grad_norm": 1.2639106159008624,
      "learning_rate": 1.9386591647471508e-05,
      "loss": 0.8028,
      "step": 183
    },
    {
      "epoch": 0.1398176291793313,
      "grad_norm": 1.4380480482934568,
      "learning_rate": 1.9378072872511397e-05,
      "loss": 0.8484,
      "step": 184
    },
    {
      "epoch": 0.1405775075987842,
      "grad_norm": 1.1359661124801228,
      "learning_rate": 1.936949724999762e-05,
      "loss": 0.8559,
      "step": 185
    },
    {
      "epoch": 0.1413373860182371,
      "grad_norm": 1.4674955469296993,
      "learning_rate": 1.936086483191347e-05,
      "loss": 0.8156,
      "step": 186
    },
    {
      "epoch": 0.14209726443768997,
      "grad_norm": 1.5543739871966384,
      "learning_rate": 1.9352175670586534e-05,
      "loss": 0.8096,
      "step": 187
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 1.2488863120608902,
      "learning_rate": 1.934342981868835e-05,
      "loss": 0.8411,
      "step": 188
    },
    {
      "epoch": 0.14361702127659576,
      "grad_norm": 1.0926594697309404,
      "learning_rate": 1.93346273292341e-05,
      "loss": 0.7673,
      "step": 189
    },
    {
      "epoch": 0.14437689969604864,
      "grad_norm": 1.6100207988144668,
      "learning_rate": 1.93257682555823e-05,
      "loss": 0.8903,
      "step": 190
    },
    {
      "epoch": 0.14513677811550152,
      "grad_norm": 1.0832889340495964,
      "learning_rate": 1.9316852651434463e-05,
      "loss": 0.8261,
      "step": 191
    },
    {
      "epoch": 0.1458966565349544,
      "grad_norm": 1.3225238983981897,
      "learning_rate": 1.9307880570834762e-05,
      "loss": 0.8363,
      "step": 192
    },
    {
      "epoch": 0.1466565349544073,
      "grad_norm": 1.5927313685711804,
      "learning_rate": 1.929885206816973e-05,
      "loss": 0.9214,
      "step": 193
    },
    {
      "epoch": 0.1474164133738602,
      "grad_norm": 1.31978352032617,
      "learning_rate": 1.9289767198167918e-05,
      "loss": 0.8817,
      "step": 194
    },
    {
      "epoch": 0.14817629179331307,
      "grad_norm": 1.6299049121755003,
      "learning_rate": 1.9280626015899548e-05,
      "loss": 0.701,
      "step": 195
    },
    {
      "epoch": 0.14893617021276595,
      "grad_norm": 1.4036455937106598,
      "learning_rate": 1.9271428576776206e-05,
      "loss": 0.9386,
      "step": 196
    },
    {
      "epoch": 0.14969604863221886,
      "grad_norm": 1.2742299940465347,
      "learning_rate": 1.9262174936550485e-05,
      "loss": 0.8992,
      "step": 197
    },
    {
      "epoch": 0.15045592705167174,
      "grad_norm": 1.3509773588645386,
      "learning_rate": 1.9252865151315667e-05,
      "loss": 0.8405,
      "step": 198
    },
    {
      "epoch": 0.15121580547112462,
      "grad_norm": 1.384128557227047,
      "learning_rate": 1.9243499277505355e-05,
      "loss": 0.7888,
      "step": 199
    },
    {
      "epoch": 0.1519756838905775,
      "grad_norm": 1.1884836896724842,
      "learning_rate": 1.9234077371893156e-05,
      "loss": 0.8602,
      "step": 200
    },
    {
      "epoch": 0.15273556231003038,
      "grad_norm": 1.1797264610457638,
      "learning_rate": 1.922459949159233e-05,
      "loss": 0.7677,
      "step": 201
    },
    {
      "epoch": 0.1534954407294833,
      "grad_norm": 1.266781729261725,
      "learning_rate": 1.921506569405544e-05,
      "loss": 0.8579,
      "step": 202
    },
    {
      "epoch": 0.15425531914893617,
      "grad_norm": 1.3095098923300477,
      "learning_rate": 1.9205476037073997e-05,
      "loss": 0.8669,
      "step": 203
    },
    {
      "epoch": 0.15501519756838905,
      "grad_norm": 1.1987134831913058,
      "learning_rate": 1.9195830578778133e-05,
      "loss": 0.7635,
      "step": 204
    },
    {
      "epoch": 0.15577507598784193,
      "grad_norm": 1.3312872017851871,
      "learning_rate": 1.918612937763622e-05,
      "loss": 0.8335,
      "step": 205
    },
    {
      "epoch": 0.15653495440729484,
      "grad_norm": 1.167582076891394,
      "learning_rate": 1.917637249245454e-05,
      "loss": 0.8597,
      "step": 206
    },
    {
      "epoch": 0.15729483282674772,
      "grad_norm": 1.294076745163314,
      "learning_rate": 1.9166559982376905e-05,
      "loss": 0.9087,
      "step": 207
    },
    {
      "epoch": 0.1580547112462006,
      "grad_norm": 1.3389625539126968,
      "learning_rate": 1.9156691906884327e-05,
      "loss": 0.832,
      "step": 208
    },
    {
      "epoch": 0.15881458966565348,
      "grad_norm": 1.6960580283159512,
      "learning_rate": 1.914676832579463e-05,
      "loss": 0.7974,
      "step": 209
    },
    {
      "epoch": 0.1595744680851064,
      "grad_norm": 1.562939087079936,
      "learning_rate": 1.913678929926211e-05,
      "loss": 0.7893,
      "step": 210
    },
    {
      "epoch": 0.16033434650455927,
      "grad_norm": 1.4513276888079942,
      "learning_rate": 1.912675488777714e-05,
      "loss": 0.7449,
      "step": 211
    },
    {
      "epoch": 0.16109422492401215,
      "grad_norm": 1.3154727836409927,
      "learning_rate": 1.911666515216585e-05,
      "loss": 0.7916,
      "step": 212
    },
    {
      "epoch": 0.16185410334346503,
      "grad_norm": 1.1871513283255706,
      "learning_rate": 1.9106520153589708e-05,
      "loss": 0.9164,
      "step": 213
    },
    {
      "epoch": 0.16261398176291794,
      "grad_norm": 1.4827818964427704,
      "learning_rate": 1.9096319953545186e-05,
      "loss": 0.9302,
      "step": 214
    },
    {
      "epoch": 0.16337386018237082,
      "grad_norm": 1.2717421498382024,
      "learning_rate": 1.9086064613863366e-05,
      "loss": 0.8123,
      "step": 215
    },
    {
      "epoch": 0.1641337386018237,
      "grad_norm": 1.4346204830387652,
      "learning_rate": 1.9075754196709574e-05,
      "loss": 0.8761,
      "step": 216
    },
    {
      "epoch": 0.16489361702127658,
      "grad_norm": 1.3299638698198593,
      "learning_rate": 1.9065388764583003e-05,
      "loss": 0.8366,
      "step": 217
    },
    {
      "epoch": 0.1656534954407295,
      "grad_norm": 1.1564047959009294,
      "learning_rate": 1.9054968380316342e-05,
      "loss": 0.7523,
      "step": 218
    },
    {
      "epoch": 0.16641337386018237,
      "grad_norm": 1.250636977817944,
      "learning_rate": 1.9044493107075367e-05,
      "loss": 0.8059,
      "step": 219
    },
    {
      "epoch": 0.16717325227963525,
      "grad_norm": 1.198174186230645,
      "learning_rate": 1.90339630083586e-05,
      "loss": 0.8386,
      "step": 220
    },
    {
      "epoch": 0.16793313069908813,
      "grad_norm": 1.3258125495764745,
      "learning_rate": 1.902337814799688e-05,
      "loss": 0.9298,
      "step": 221
    },
    {
      "epoch": 0.16869300911854104,
      "grad_norm": 1.3109407927796777,
      "learning_rate": 1.901273859015301e-05,
      "loss": 0.9317,
      "step": 222
    },
    {
      "epoch": 0.16945288753799392,
      "grad_norm": 1.2875742790367803,
      "learning_rate": 1.900204439932136e-05,
      "loss": 0.899,
      "step": 223
    },
    {
      "epoch": 0.1702127659574468,
      "grad_norm": 1.4379052150907687,
      "learning_rate": 1.899129564032745e-05,
      "loss": 0.8466,
      "step": 224
    },
    {
      "epoch": 0.17097264437689969,
      "grad_norm": 1.50788762094593,
      "learning_rate": 1.898049237832761e-05,
      "loss": 0.7985,
      "step": 225
    },
    {
      "epoch": 0.1717325227963526,
      "grad_norm": 1.2190391130721603,
      "learning_rate": 1.8969634678808523e-05,
      "loss": 0.8009,
      "step": 226
    },
    {
      "epoch": 0.17249240121580547,
      "grad_norm": 1.251569378774778,
      "learning_rate": 1.8958722607586883e-05,
      "loss": 0.798,
      "step": 227
    },
    {
      "epoch": 0.17325227963525835,
      "grad_norm": 1.1795940152454232,
      "learning_rate": 1.8947756230808955e-05,
      "loss": 0.8442,
      "step": 228
    },
    {
      "epoch": 0.17401215805471124,
      "grad_norm": 1.49930266500084,
      "learning_rate": 1.8936735614950196e-05,
      "loss": 0.9044,
      "step": 229
    },
    {
      "epoch": 0.17477203647416414,
      "grad_norm": 1.297820058078315,
      "learning_rate": 1.8925660826814855e-05,
      "loss": 0.9109,
      "step": 230
    },
    {
      "epoch": 0.17553191489361702,
      "grad_norm": 1.2127812557089592,
      "learning_rate": 1.8914531933535548e-05,
      "loss": 0.8945,
      "step": 231
    },
    {
      "epoch": 0.1762917933130699,
      "grad_norm": 1.1948475205527067,
      "learning_rate": 1.8903349002572873e-05,
      "loss": 0.9038,
      "step": 232
    },
    {
      "epoch": 0.1770516717325228,
      "grad_norm": 1.2781535794479726,
      "learning_rate": 1.889211210171498e-05,
      "loss": 0.8648,
      "step": 233
    },
    {
      "epoch": 0.1778115501519757,
      "grad_norm": 1.2347916150532912,
      "learning_rate": 1.8880821299077184e-05,
      "loss": 0.8808,
      "step": 234
    },
    {
      "epoch": 0.17857142857142858,
      "grad_norm": 1.4144411483022865,
      "learning_rate": 1.8869476663101525e-05,
      "loss": 0.8211,
      "step": 235
    },
    {
      "epoch": 0.17933130699088146,
      "grad_norm": 1.0509663832273464,
      "learning_rate": 1.885807826255638e-05,
      "loss": 0.8481,
      "step": 236
    },
    {
      "epoch": 0.18009118541033434,
      "grad_norm": 1.133201043785056,
      "learning_rate": 1.8846626166536027e-05,
      "loss": 0.8688,
      "step": 237
    },
    {
      "epoch": 0.18085106382978725,
      "grad_norm": 1.2332969634803688,
      "learning_rate": 1.883512044446023e-05,
      "loss": 0.7612,
      "step": 238
    },
    {
      "epoch": 0.18161094224924013,
      "grad_norm": 1.4536002666037933,
      "learning_rate": 1.882356116607383e-05,
      "loss": 0.8251,
      "step": 239
    },
    {
      "epoch": 0.182370820668693,
      "grad_norm": 1.511800297591881,
      "learning_rate": 1.8811948401446312e-05,
      "loss": 0.8945,
      "step": 240
    },
    {
      "epoch": 0.1831306990881459,
      "grad_norm": 1.2737907825316572,
      "learning_rate": 1.8800282220971368e-05,
      "loss": 0.8461,
      "step": 241
    },
    {
      "epoch": 0.1838905775075988,
      "grad_norm": 1.1256722993502581,
      "learning_rate": 1.8788562695366495e-05,
      "loss": 0.8864,
      "step": 242
    },
    {
      "epoch": 0.18465045592705168,
      "grad_norm": 1.350714935633028,
      "learning_rate": 1.8776789895672557e-05,
      "loss": 0.8372,
      "step": 243
    },
    {
      "epoch": 0.18541033434650456,
      "grad_norm": 1.5637902119657874,
      "learning_rate": 1.8764963893253346e-05,
      "loss": 0.8741,
      "step": 244
    },
    {
      "epoch": 0.18617021276595744,
      "grad_norm": 1.0412302165277234,
      "learning_rate": 1.875308475979516e-05,
      "loss": 0.7989,
      "step": 245
    },
    {
      "epoch": 0.18693009118541035,
      "grad_norm": 1.296947092015986,
      "learning_rate": 1.8741152567306356e-05,
      "loss": 0.7478,
      "step": 246
    },
    {
      "epoch": 0.18768996960486323,
      "grad_norm": 1.395789863014078,
      "learning_rate": 1.8729167388116934e-05,
      "loss": 0.8929,
      "step": 247
    },
    {
      "epoch": 0.1884498480243161,
      "grad_norm": 1.3655891861090963,
      "learning_rate": 1.8717129294878075e-05,
      "loss": 0.8574,
      "step": 248
    },
    {
      "epoch": 0.189209726443769,
      "grad_norm": 1.1534705162915855,
      "learning_rate": 1.8705038360561724e-05,
      "loss": 0.8186,
      "step": 249
    },
    {
      "epoch": 0.1899696048632219,
      "grad_norm": 1.2700568173154652,
      "learning_rate": 1.869289465846012e-05,
      "loss": 0.9538,
      "step": 250
    },
    {
      "epoch": 0.19072948328267478,
      "grad_norm": 1.3279487178301215,
      "learning_rate": 1.868069826218538e-05,
      "loss": 0.8648,
      "step": 251
    },
    {
      "epoch": 0.19148936170212766,
      "grad_norm": 1.4087946294877387,
      "learning_rate": 1.866844924566904e-05,
      "loss": 0.8625,
      "step": 252
    },
    {
      "epoch": 0.19224924012158054,
      "grad_norm": 1.1507808790734904,
      "learning_rate": 1.8656147683161594e-05,
      "loss": 0.8993,
      "step": 253
    },
    {
      "epoch": 0.19300911854103345,
      "grad_norm": 1.4659659290396179,
      "learning_rate": 1.8643793649232072e-05,
      "loss": 0.8219,
      "step": 254
    },
    {
      "epoch": 0.19376899696048633,
      "grad_norm": 1.4291362291545917,
      "learning_rate": 1.8631387218767564e-05,
      "loss": 0.8127,
      "step": 255
    },
    {
      "epoch": 0.1945288753799392,
      "grad_norm": 1.2852842749461337,
      "learning_rate": 1.8618928466972773e-05,
      "loss": 0.8491,
      "step": 256
    },
    {
      "epoch": 0.1952887537993921,
      "grad_norm": 1.2924035100964617,
      "learning_rate": 1.860641746936957e-05,
      "loss": 0.8762,
      "step": 257
    },
    {
      "epoch": 0.196048632218845,
      "grad_norm": 1.5054553449203558,
      "learning_rate": 1.859385430179652e-05,
      "loss": 0.8542,
      "step": 258
    },
    {
      "epoch": 0.19680851063829788,
      "grad_norm": 1.1865388745350964,
      "learning_rate": 1.8581239040408433e-05,
      "loss": 0.7715,
      "step": 259
    },
    {
      "epoch": 0.19756838905775076,
      "grad_norm": 1.1533392420577036,
      "learning_rate": 1.8568571761675893e-05,
      "loss": 0.748,
      "step": 260
    },
    {
      "epoch": 0.19832826747720364,
      "grad_norm": 1.3107725757198427,
      "learning_rate": 1.855585254238481e-05,
      "loss": 0.6938,
      "step": 261
    },
    {
      "epoch": 0.19908814589665655,
      "grad_norm": 1.4610839591846172,
      "learning_rate": 1.8543081459635937e-05,
      "loss": 0.7587,
      "step": 262
    },
    {
      "epoch": 0.19984802431610943,
      "grad_norm": 1.3676762931453232,
      "learning_rate": 1.853025859084441e-05,
      "loss": 0.8067,
      "step": 263
    },
    {
      "epoch": 0.2006079027355623,
      "grad_norm": 1.5314324047574401,
      "learning_rate": 1.8517384013739287e-05,
      "loss": 0.7639,
      "step": 264
    },
    {
      "epoch": 0.2013677811550152,
      "grad_norm": 1.4289589466967434,
      "learning_rate": 1.8504457806363058e-05,
      "loss": 0.7984,
      "step": 265
    },
    {
      "epoch": 0.20212765957446807,
      "grad_norm": 1.3804585194622976,
      "learning_rate": 1.8491480047071192e-05,
      "loss": 0.8321,
      "step": 266
    },
    {
      "epoch": 0.20288753799392098,
      "grad_norm": 1.4231105013966496,
      "learning_rate": 1.847845081453165e-05,
      "loss": 0.8559,
      "step": 267
    },
    {
      "epoch": 0.20364741641337386,
      "grad_norm": 1.4136667653628856,
      "learning_rate": 1.846537018772441e-05,
      "loss": 0.9156,
      "step": 268
    },
    {
      "epoch": 0.20440729483282674,
      "grad_norm": 1.4149693797982272,
      "learning_rate": 1.845223824594099e-05,
      "loss": 0.9681,
      "step": 269
    },
    {
      "epoch": 0.20516717325227962,
      "grad_norm": 1.3704606391582022,
      "learning_rate": 1.8439055068783966e-05,
      "loss": 0.7807,
      "step": 270
    },
    {
      "epoch": 0.20592705167173253,
      "grad_norm": 1.3735139173690476,
      "learning_rate": 1.8425820736166492e-05,
      "loss": 0.8736,
      "step": 271
    },
    {
      "epoch": 0.2066869300911854,
      "grad_norm": 1.3048282964394131,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.8582,
      "step": 272
    },
    {
      "epoch": 0.2074468085106383,
      "grad_norm": 1.2937317704193294,
      "learning_rate": 1.839919892575278e-05,
      "loss": 0.8293,
      "step": 273
    },
    {
      "epoch": 0.20820668693009117,
      "grad_norm": 1.2425267974079122,
      "learning_rate": 1.8385811609331355e-05,
      "loss": 0.826,
      "step": 274
    },
    {
      "epoch": 0.20896656534954408,
      "grad_norm": 1.4252534938774737,
      "learning_rate": 1.837237346019814e-05,
      "loss": 0.8089,
      "step": 275
    },
    {
      "epoch": 0.20972644376899696,
      "grad_norm": 1.4431036258209107,
      "learning_rate": 1.8358884559811855e-05,
      "loss": 0.7481,
      "step": 276
    },
    {
      "epoch": 0.21048632218844984,
      "grad_norm": 1.7528336950139616,
      "learning_rate": 1.834534498993888e-05,
      "loss": 0.8431,
      "step": 277
    },
    {
      "epoch": 0.21124620060790272,
      "grad_norm": 1.2908486046374708,
      "learning_rate": 1.833175483265273e-05,
      "loss": 0.8379,
      "step": 278
    },
    {
      "epoch": 0.21200607902735563,
      "grad_norm": 1.1186462184612969,
      "learning_rate": 1.831811417033357e-05,
      "loss": 0.8473,
      "step": 279
    },
    {
      "epoch": 0.2127659574468085,
      "grad_norm": 1.2504267520618368,
      "learning_rate": 1.8304423085667713e-05,
      "loss": 0.7715,
      "step": 280
    },
    {
      "epoch": 0.2135258358662614,
      "grad_norm": 1.2373605493311197,
      "learning_rate": 1.8290681661647124e-05,
      "loss": 0.7526,
      "step": 281
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 1.3941842298033964,
      "learning_rate": 1.827688998156891e-05,
      "loss": 0.8928,
      "step": 282
    },
    {
      "epoch": 0.21504559270516718,
      "grad_norm": 1.2044041782995714,
      "learning_rate": 1.826304812903481e-05,
      "loss": 0.7805,
      "step": 283
    },
    {
      "epoch": 0.21580547112462006,
      "grad_norm": 1.284073452732963,
      "learning_rate": 1.8249156187950717e-05,
      "loss": 0.8235,
      "step": 284
    },
    {
      "epoch": 0.21656534954407294,
      "grad_norm": 1.1169016480861682,
      "learning_rate": 1.8235214242526125e-05,
      "loss": 0.7493,
      "step": 285
    },
    {
      "epoch": 0.21732522796352582,
      "grad_norm": 1.2746514883016555,
      "learning_rate": 1.8221222377273656e-05,
      "loss": 0.7204,
      "step": 286
    },
    {
      "epoch": 0.21808510638297873,
      "grad_norm": 1.3926990558985881,
      "learning_rate": 1.8207180677008528e-05,
      "loss": 0.8976,
      "step": 287
    },
    {
      "epoch": 0.2188449848024316,
      "grad_norm": 1.3799768294660295,
      "learning_rate": 1.819308922684805e-05,
      "loss": 0.8359,
      "step": 288
    },
    {
      "epoch": 0.2196048632218845,
      "grad_norm": 1.1714038422999915,
      "learning_rate": 1.8178948112211104e-05,
      "loss": 0.7875,
      "step": 289
    },
    {
      "epoch": 0.22036474164133737,
      "grad_norm": 1.1553734086733076,
      "learning_rate": 1.816475741881761e-05,
      "loss": 0.7932,
      "step": 290
    },
    {
      "epoch": 0.22112462006079028,
      "grad_norm": 1.0379144595265826,
      "learning_rate": 1.815051723268805e-05,
      "loss": 0.7805,
      "step": 291
    },
    {
      "epoch": 0.22188449848024316,
      "grad_norm": 1.284317492407556,
      "learning_rate": 1.8136227640142895e-05,
      "loss": 0.7862,
      "step": 292
    },
    {
      "epoch": 0.22264437689969604,
      "grad_norm": 1.2505334653471631,
      "learning_rate": 1.8121888727802113e-05,
      "loss": 0.8702,
      "step": 293
    },
    {
      "epoch": 0.22340425531914893,
      "grad_norm": 1.116117377786764,
      "learning_rate": 1.8107500582584642e-05,
      "loss": 0.7421,
      "step": 294
    },
    {
      "epoch": 0.22416413373860183,
      "grad_norm": 1.1370782643092594,
      "learning_rate": 1.8093063291707847e-05,
      "loss": 0.8259,
      "step": 295
    },
    {
      "epoch": 0.22492401215805471,
      "grad_norm": 1.2762655902042999,
      "learning_rate": 1.807857694268701e-05,
      "loss": 0.6667,
      "step": 296
    },
    {
      "epoch": 0.2256838905775076,
      "grad_norm": 1.2115933668149352,
      "learning_rate": 1.806404162333479e-05,
      "loss": 0.7442,
      "step": 297
    },
    {
      "epoch": 0.22644376899696048,
      "grad_norm": 1.4448277818304922,
      "learning_rate": 1.804945742176069e-05,
      "loss": 0.8664,
      "step": 298
    },
    {
      "epoch": 0.22720364741641338,
      "grad_norm": 1.1974507872816957,
      "learning_rate": 1.8034824426370522e-05,
      "loss": 0.6649,
      "step": 299
    },
    {
      "epoch": 0.22796352583586627,
      "grad_norm": 1.1638490968704065,
      "learning_rate": 1.802014272586589e-05,
      "loss": 0.8103,
      "step": 300
    },
    {
      "epoch": 0.22872340425531915,
      "grad_norm": 1.0563629488128978,
      "learning_rate": 1.8005412409243604e-05,
      "loss": 0.8286,
      "step": 301
    },
    {
      "epoch": 0.22948328267477203,
      "grad_norm": 1.2626988246860673,
      "learning_rate": 1.799063356579521e-05,
      "loss": 0.8281,
      "step": 302
    },
    {
      "epoch": 0.23024316109422494,
      "grad_norm": 1.4107310767725516,
      "learning_rate": 1.797580628510639e-05,
      "loss": 0.7421,
      "step": 303
    },
    {
      "epoch": 0.23100303951367782,
      "grad_norm": 1.4224801133835634,
      "learning_rate": 1.796093065705644e-05,
      "loss": 0.8064,
      "step": 304
    },
    {
      "epoch": 0.2317629179331307,
      "grad_norm": 1.2197629549919267,
      "learning_rate": 1.7946006771817733e-05,
      "loss": 0.7765,
      "step": 305
    },
    {
      "epoch": 0.23252279635258358,
      "grad_norm": 1.3250097886721177,
      "learning_rate": 1.7931034719855166e-05,
      "loss": 0.7311,
      "step": 306
    },
    {
      "epoch": 0.23328267477203649,
      "grad_norm": 1.3474363498870439,
      "learning_rate": 1.7916014591925605e-05,
      "loss": 0.7654,
      "step": 307
    },
    {
      "epoch": 0.23404255319148937,
      "grad_norm": 1.3456354183708066,
      "learning_rate": 1.7900946479077345e-05,
      "loss": 0.8405,
      "step": 308
    },
    {
      "epoch": 0.23480243161094225,
      "grad_norm": 1.3243678971686481,
      "learning_rate": 1.788583047264955e-05,
      "loss": 0.7648,
      "step": 309
    },
    {
      "epoch": 0.23556231003039513,
      "grad_norm": 1.3021926465526452,
      "learning_rate": 1.7870666664271706e-05,
      "loss": 0.8253,
      "step": 310
    },
    {
      "epoch": 0.23632218844984804,
      "grad_norm": 1.1787205133229242,
      "learning_rate": 1.7855455145863064e-05,
      "loss": 0.9029,
      "step": 311
    },
    {
      "epoch": 0.23708206686930092,
      "grad_norm": 1.3269338460448679,
      "learning_rate": 1.784019600963207e-05,
      "loss": 0.6925,
      "step": 312
    },
    {
      "epoch": 0.2378419452887538,
      "grad_norm": 1.3297547464801671,
      "learning_rate": 1.782488934807584e-05,
      "loss": 0.7176,
      "step": 313
    },
    {
      "epoch": 0.23860182370820668,
      "grad_norm": 1.4771961795702464,
      "learning_rate": 1.7809535253979548e-05,
      "loss": 0.7968,
      "step": 314
    },
    {
      "epoch": 0.2393617021276596,
      "grad_norm": 1.2225853213175695,
      "learning_rate": 1.7794133820415916e-05,
      "loss": 0.7802,
      "step": 315
    },
    {
      "epoch": 0.24012158054711247,
      "grad_norm": 1.284238799057952,
      "learning_rate": 1.777868514074462e-05,
      "loss": 0.6772,
      "step": 316
    },
    {
      "epoch": 0.24088145896656535,
      "grad_norm": 1.313342524516629,
      "learning_rate": 1.776318930861172e-05,
      "loss": 0.8222,
      "step": 317
    },
    {
      "epoch": 0.24164133738601823,
      "grad_norm": 1.186923319189149,
      "learning_rate": 1.7747646417949114e-05,
      "loss": 0.8177,
      "step": 318
    },
    {
      "epoch": 0.24240121580547114,
      "grad_norm": 1.3049523248899455,
      "learning_rate": 1.7732056562973956e-05,
      "loss": 0.741,
      "step": 319
    },
    {
      "epoch": 0.24316109422492402,
      "grad_norm": 1.3479058374457298,
      "learning_rate": 1.771641983818808e-05,
      "loss": 0.8117,
      "step": 320
    },
    {
      "epoch": 0.2439209726443769,
      "grad_norm": 1.291375282657644,
      "learning_rate": 1.7700736338377435e-05,
      "loss": 0.8009,
      "step": 321
    },
    {
      "epoch": 0.24468085106382978,
      "grad_norm": 1.2534036633668106,
      "learning_rate": 1.7685006158611514e-05,
      "loss": 0.8268,
      "step": 322
    },
    {
      "epoch": 0.2454407294832827,
      "grad_norm": 1.3880891809195886,
      "learning_rate": 1.7669229394242767e-05,
      "loss": 0.8028,
      "step": 323
    },
    {
      "epoch": 0.24620060790273557,
      "grad_norm": 1.2677842202912035,
      "learning_rate": 1.7653406140906027e-05,
      "loss": 0.8207,
      "step": 324
    },
    {
      "epoch": 0.24696048632218845,
      "grad_norm": 1.2147944761684033,
      "learning_rate": 1.763753649451794e-05,
      "loss": 0.8,
      "step": 325
    },
    {
      "epoch": 0.24772036474164133,
      "grad_norm": 1.251207808846697,
      "learning_rate": 1.7621620551276366e-05,
      "loss": 0.7845,
      "step": 326
    },
    {
      "epoch": 0.24848024316109424,
      "grad_norm": 1.2140722143097946,
      "learning_rate": 1.760565840765981e-05,
      "loss": 0.7846,
      "step": 327
    },
    {
      "epoch": 0.24924012158054712,
      "grad_norm": 1.1783753906102796,
      "learning_rate": 1.758965016042683e-05,
      "loss": 0.7864,
      "step": 328
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.2723774220282704,
      "learning_rate": 1.757359590661545e-05,
      "loss": 0.7213,
      "step": 329
    },
    {
      "epoch": 0.2507598784194529,
      "grad_norm": 1.562103571311114,
      "learning_rate": 1.7557495743542586e-05,
      "loss": 0.8655,
      "step": 330
    },
    {
      "epoch": 0.25151975683890576,
      "grad_norm": 1.2305811275802194,
      "learning_rate": 1.754134976880343e-05,
      "loss": 0.7578,
      "step": 331
    },
    {
      "epoch": 0.25227963525835867,
      "grad_norm": 1.2867558430539152,
      "learning_rate": 1.752515808027088e-05,
      "loss": 0.7986,
      "step": 332
    },
    {
      "epoch": 0.2530395136778115,
      "grad_norm": 1.345738211371174,
      "learning_rate": 1.7508920776094943e-05,
      "loss": 0.8124,
      "step": 333
    },
    {
      "epoch": 0.25379939209726443,
      "grad_norm": 1.2170837887079073,
      "learning_rate": 1.749263795470213e-05,
      "loss": 0.7502,
      "step": 334
    },
    {
      "epoch": 0.25455927051671734,
      "grad_norm": 1.2465628724502962,
      "learning_rate": 1.7476309714794874e-05,
      "loss": 0.7856,
      "step": 335
    },
    {
      "epoch": 0.2553191489361702,
      "grad_norm": 1.3162900718142025,
      "learning_rate": 1.7459936155350908e-05,
      "loss": 0.8461,
      "step": 336
    },
    {
      "epoch": 0.2560790273556231,
      "grad_norm": 1.428892952540523,
      "learning_rate": 1.7443517375622706e-05,
      "loss": 0.7842,
      "step": 337
    },
    {
      "epoch": 0.256838905775076,
      "grad_norm": 1.3809023572111312,
      "learning_rate": 1.742705347513683e-05,
      "loss": 0.8102,
      "step": 338
    },
    {
      "epoch": 0.25759878419452886,
      "grad_norm": 1.4703401771863496,
      "learning_rate": 1.7410544553693368e-05,
      "loss": 0.9086,
      "step": 339
    },
    {
      "epoch": 0.25835866261398177,
      "grad_norm": 1.1634961622200364,
      "learning_rate": 1.7393990711365312e-05,
      "loss": 0.8519,
      "step": 340
    },
    {
      "epoch": 0.2591185410334346,
      "grad_norm": 1.1604153199062972,
| "learning_rate": 1.7377392048497954e-05, | |
| "loss": 0.75, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.25987841945288753, | |
| "grad_norm": 1.3783473441899683, | |
| "learning_rate": 1.7360748665708268e-05, | |
| "loss": 0.778, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.26063829787234044, | |
| "grad_norm": 1.1651447462805493, | |
| "learning_rate": 1.7344060663884325e-05, | |
| "loss": 0.7459, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.2613981762917933, | |
| "grad_norm": 1.465665757400901, | |
| "learning_rate": 1.7327328144184648e-05, | |
| "loss": 0.8115, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.2621580547112462, | |
| "grad_norm": 1.1926180685729153, | |
| "learning_rate": 1.7310551208037627e-05, | |
| "loss": 0.8149, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.2629179331306991, | |
| "grad_norm": 1.2711641713106352, | |
| "learning_rate": 1.729372995714089e-05, | |
| "loss": 0.6643, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.26367781155015196, | |
| "grad_norm": 1.286415016158075, | |
| "learning_rate": 1.7276864493460702e-05, | |
| "loss": 0.8848, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.26443768996960487, | |
| "grad_norm": 1.2362154109376788, | |
| "learning_rate": 1.725995491923131e-05, | |
| "loss": 0.7379, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.2651975683890577, | |
| "grad_norm": 1.419964919116019, | |
| "learning_rate": 1.724300133695437e-05, | |
| "loss": 0.8108, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.26595744680851063, | |
| "grad_norm": 1.5139212586585424, | |
| "learning_rate": 1.7226003849398292e-05, | |
| "loss": 0.8349, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.26671732522796354, | |
| "grad_norm": 1.3077265298770002, | |
| "learning_rate": 1.720896255959764e-05, | |
| "loss": 0.845, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.2674772036474164, | |
| "grad_norm": 1.175770930024848, | |
| "learning_rate": 1.7191877570852482e-05, | |
| "loss": 0.7895, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.2682370820668693, | |
| "grad_norm": 1.128921584054824, | |
| "learning_rate": 1.717474898672779e-05, | |
| "loss": 0.7361, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.2689969604863222, | |
| "grad_norm": 1.2974034151619063, | |
| "learning_rate": 1.7157576911052796e-05, | |
| "loss": 0.8448, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.26975683890577506, | |
| "grad_norm": 1.3100491097963063, | |
| "learning_rate": 1.7140361447920363e-05, | |
| "loss": 0.8267, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.270516717325228, | |
| "grad_norm": 1.118655896785131, | |
| "learning_rate": 1.712310270168637e-05, | |
| "loss": 0.716, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.2712765957446808, | |
| "grad_norm": 1.197590922519875, | |
| "learning_rate": 1.7105800776969053e-05, | |
| "loss": 0.7649, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.27203647416413373, | |
| "grad_norm": 1.112617316003443, | |
| "learning_rate": 1.7088455778648397e-05, | |
| "loss": 0.873, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.27279635258358664, | |
| "grad_norm": 1.2221978291809814, | |
| "learning_rate": 1.7071067811865477e-05, | |
| "loss": 0.7447, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.2735562310030395, | |
| "grad_norm": 1.1061408457374016, | |
| "learning_rate": 1.7053636982021844e-05, | |
| "loss": 0.7463, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.2743161094224924, | |
| "grad_norm": 1.1796701533152332, | |
| "learning_rate": 1.7036163394778865e-05, | |
| "loss": 0.8008, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.2750759878419453, | |
| "grad_norm": 1.2097189458771431, | |
| "learning_rate": 1.7018647156057095e-05, | |
| "loss": 0.7162, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.27583586626139817, | |
| "grad_norm": 1.295258074903886, | |
| "learning_rate": 1.7001088372035637e-05, | |
| "loss": 0.7393, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.2765957446808511, | |
| "grad_norm": 1.3056818955162495, | |
| "learning_rate": 1.6983487149151486e-05, | |
| "loss": 0.7402, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.2773556231003039, | |
| "grad_norm": 1.3228743821690305, | |
| "learning_rate": 1.696584359409889e-05, | |
| "loss": 0.6607, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.27811550151975684, | |
| "grad_norm": 1.35912391584937, | |
| "learning_rate": 1.6948157813828718e-05, | |
| "loss": 0.807, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.27887537993920974, | |
| "grad_norm": 1.4151040401203427, | |
| "learning_rate": 1.693042991554777e-05, | |
| "loss": 0.8491, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.2796352583586626, | |
| "grad_norm": 1.4735408891386483, | |
| "learning_rate": 1.6912660006718186e-05, | |
| "loss": 0.8397, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.2803951367781155, | |
| "grad_norm": 1.3932178888015743, | |
| "learning_rate": 1.6894848195056747e-05, | |
| "loss": 0.8188, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.2811550151975684, | |
| "grad_norm": 1.1942116912001022, | |
| "learning_rate": 1.6876994588534234e-05, | |
| "loss": 0.888, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.28191489361702127, | |
| "grad_norm": 1.4612208567952647, | |
| "learning_rate": 1.685909929537479e-05, | |
| "loss": 0.6872, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.2826747720364742, | |
| "grad_norm": 1.2773064665976277, | |
| "learning_rate": 1.684116242405525e-05, | |
| "loss": 0.7651, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.28343465045592703, | |
| "grad_norm": 1.4168901405134577, | |
| "learning_rate": 1.6823184083304482e-05, | |
| "loss": 0.7424, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.28419452887537994, | |
| "grad_norm": 1.223314361073911, | |
| "learning_rate": 1.680516438210273e-05, | |
| "loss": 0.8284, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.28495440729483285, | |
| "grad_norm": 1.1065740138572266, | |
| "learning_rate": 1.6787103429680955e-05, | |
| "loss": 0.7844, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.2857142857142857, | |
| "grad_norm": 1.2904363907773064, | |
| "learning_rate": 1.676900133552018e-05, | |
| "loss": 0.7732, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.2864741641337386, | |
| "grad_norm": 1.0783774677194193, | |
| "learning_rate": 1.6750858209350808e-05, | |
| "loss": 0.8371, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.2872340425531915, | |
| "grad_norm": 1.2776012277299917, | |
| "learning_rate": 1.673267416115198e-05, | |
| "loss": 0.8364, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.28799392097264437, | |
| "grad_norm": 1.1389838977111437, | |
| "learning_rate": 1.6714449301150883e-05, | |
| "loss": 0.7632, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.2887537993920973, | |
| "grad_norm": 1.1947077012904646, | |
| "learning_rate": 1.6696183739822108e-05, | |
| "loss": 0.8588, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.28951367781155013, | |
| "grad_norm": 1.3162887898638367, | |
| "learning_rate": 1.6677877587886956e-05, | |
| "loss": 0.7821, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.29027355623100304, | |
| "grad_norm": 1.5507878196219265, | |
| "learning_rate": 1.665953095631279e-05, | |
| "loss": 0.7615, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.29103343465045595, | |
| "grad_norm": 1.2980678586798975, | |
| "learning_rate": 1.6641143956312337e-05, | |
| "loss": 0.8447, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.2917933130699088, | |
| "grad_norm": 1.5770389831696154, | |
| "learning_rate": 1.6622716699343032e-05, | |
| "loss": 0.8621, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.2925531914893617, | |
| "grad_norm": 1.2785659748626166, | |
| "learning_rate": 1.660424929710635e-05, | |
| "loss": 0.7836, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.2933130699088146, | |
| "grad_norm": 1.5162199995466439, | |
| "learning_rate": 1.6585741861547102e-05, | |
| "loss": 0.7928, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.29407294832826747, | |
| "grad_norm": 1.118207161482667, | |
| "learning_rate": 1.6567194504852778e-05, | |
| "loss": 0.7509, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.2948328267477204, | |
| "grad_norm": 1.4767698938404044, | |
| "learning_rate": 1.6548607339452853e-05, | |
| "loss": 0.7725, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.29559270516717323, | |
| "grad_norm": 1.3359182156567684, | |
| "learning_rate": 1.652998047801812e-05, | |
| "loss": 0.8298, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.29635258358662614, | |
| "grad_norm": 1.5159797253002374, | |
| "learning_rate": 1.6511314033459994e-05, | |
| "loss": 0.8615, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.29711246200607905, | |
| "grad_norm": 1.3941782547799624, | |
| "learning_rate": 1.649260811892984e-05, | |
| "loss": 0.9382, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.2978723404255319, | |
| "grad_norm": 1.160714007610145, | |
| "learning_rate": 1.647386284781828e-05, | |
| "loss": 0.7484, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.2986322188449848, | |
| "grad_norm": 1.2652754715044188, | |
| "learning_rate": 1.645507833375449e-05, | |
| "loss": 0.7273, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.2993920972644377, | |
| "grad_norm": 1.2079144842063019, | |
| "learning_rate": 1.643625469060555e-05, | |
| "loss": 0.84, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.30015197568389057, | |
| "grad_norm": 1.285436404782802, | |
| "learning_rate": 1.6417392032475715e-05, | |
| "loss": 0.845, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.3009118541033435, | |
| "grad_norm": 1.0284192967003212, | |
| "learning_rate": 1.6398490473705742e-05, | |
| "loss": 0.8467, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.30167173252279633, | |
| "grad_norm": 1.010553668887754, | |
| "learning_rate": 1.6379550128872202e-05, | |
| "loss": 0.7383, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.30243161094224924, | |
| "grad_norm": 1.0332864785215627, | |
| "learning_rate": 1.6360571112786768e-05, | |
| "loss": 0.8021, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.30319148936170215, | |
| "grad_norm": 1.2039495633410224, | |
| "learning_rate": 1.6341553540495533e-05, | |
| "loss": 0.7843, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.303951367781155, | |
| "grad_norm": 1.3372476079109301, | |
| "learning_rate": 1.6322497527278308e-05, | |
| "loss": 0.737, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.3047112462006079, | |
| "grad_norm": 1.106732601574273, | |
| "learning_rate": 1.6303403188647914e-05, | |
| "loss": 0.7748, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.30547112462006076, | |
| "grad_norm": 1.4142126964628685, | |
| "learning_rate": 1.6284270640349516e-05, | |
| "loss": 0.7898, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.30623100303951367, | |
| "grad_norm": 1.041136857598702, | |
| "learning_rate": 1.6265099998359868e-05, | |
| "loss": 0.7893, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.3069908814589666, | |
| "grad_norm": 1.2436133586572014, | |
| "learning_rate": 1.6245891378886655e-05, | |
| "loss": 0.7635, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.30775075987841943, | |
| "grad_norm": 1.053481911998197, | |
| "learning_rate": 1.6226644898367767e-05, | |
| "loss": 0.8193, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.30851063829787234, | |
| "grad_norm": 1.21997792802327, | |
| "learning_rate": 1.62073606734706e-05, | |
| "loss": 0.764, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.30927051671732525, | |
| "grad_norm": 1.0680702822408827, | |
| "learning_rate": 1.6188038821091346e-05, | |
| "loss": 0.7622, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.3100303951367781, | |
| "grad_norm": 1.21182732132355, | |
| "learning_rate": 1.6168679458354284e-05, | |
| "loss": 0.7673, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.310790273556231, | |
| "grad_norm": 1.2965815000869683, | |
| "learning_rate": 1.6149282702611077e-05, | |
| "loss": 0.8008, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.31155015197568386, | |
| "grad_norm": 1.4050961684878454, | |
| "learning_rate": 1.6129848671440047e-05, | |
| "loss": 0.6956, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.3123100303951368, | |
| "grad_norm": 1.1564249926499997, | |
| "learning_rate": 1.611037748264548e-05, | |
| "loss": 0.8352, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.3130699088145897, | |
| "grad_norm": 1.4244190666064218, | |
| "learning_rate": 1.6090869254256892e-05, | |
| "loss": 0.6971, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.31382978723404253, | |
| "grad_norm": 1.3594816229903812, | |
| "learning_rate": 1.6071324104528333e-05, | |
| "loss": 0.8919, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.31458966565349544, | |
| "grad_norm": 1.2385608136654, | |
| "learning_rate": 1.6051742151937655e-05, | |
| "loss": 0.755, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.31534954407294835, | |
| "grad_norm": 1.225868789955997, | |
| "learning_rate": 1.60321235151858e-05, | |
| "loss": 0.8318, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.3161094224924012, | |
| "grad_norm": 1.2020762622047627, | |
| "learning_rate": 1.6012468313196086e-05, | |
| "loss": 0.8348, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.3168693009118541, | |
| "grad_norm": 1.2352639389808124, | |
| "learning_rate": 1.599277666511347e-05, | |
| "loss": 0.8327, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.31762917933130697, | |
| "grad_norm": 1.25713241660854, | |
| "learning_rate": 1.5973048690303848e-05, | |
| "loss": 0.7469, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.3183890577507599, | |
| "grad_norm": 1.184618090887608, | |
| "learning_rate": 1.5953284508353316e-05, | |
| "loss": 0.7427, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.3191489361702128, | |
| "grad_norm": 1.285457183922233, | |
| "learning_rate": 1.593348423906745e-05, | |
| "loss": 0.7762, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.31990881458966564, | |
| "grad_norm": 1.3075545067629168, | |
| "learning_rate": 1.5913648002470562e-05, | |
| "loss": 0.7654, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.32066869300911854, | |
| "grad_norm": 1.4023960203315846, | |
| "learning_rate": 1.589377591880501e-05, | |
| "loss": 0.7736, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.32142857142857145, | |
| "grad_norm": 1.2696855105901457, | |
| "learning_rate": 1.5873868108530443e-05, | |
| "loss": 0.6396, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.3221884498480243, | |
| "grad_norm": 1.3208573319136134, | |
| "learning_rate": 1.585392469232307e-05, | |
| "loss": 0.8724, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.3229483282674772, | |
| "grad_norm": 1.1705760084073848, | |
| "learning_rate": 1.5833945791074943e-05, | |
| "loss": 0.7905, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.32370820668693007, | |
| "grad_norm": 1.4383087273869815, | |
| "learning_rate": 1.58139315258932e-05, | |
| "loss": 0.7733, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.324468085106383, | |
| "grad_norm": 1.3570515516882151, | |
| "learning_rate": 1.5793882018099365e-05, | |
| "loss": 0.8751, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.3252279635258359, | |
| "grad_norm": 1.4171853820482556, | |
| "learning_rate": 1.5773797389228583e-05, | |
| "loss": 0.8455, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.32598784194528874, | |
| "grad_norm": 1.2045408838096334, | |
| "learning_rate": 1.5753677761028896e-05, | |
| "loss": 0.7222, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.32674772036474165, | |
| "grad_norm": 1.2596439537642594, | |
| "learning_rate": 1.5733523255460506e-05, | |
| "loss": 0.704, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.32750759878419455, | |
| "grad_norm": 1.1906580218111138, | |
| "learning_rate": 1.571333399469503e-05, | |
| "loss": 0.6943, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.3282674772036474, | |
| "grad_norm": 1.0501930772430033, | |
| "learning_rate": 1.5693110101114763e-05, | |
| "loss": 0.6897, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.3290273556231003, | |
| "grad_norm": 1.2153768732528925, | |
| "learning_rate": 1.5672851697311935e-05, | |
| "loss": 0.8974, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.32978723404255317, | |
| "grad_norm": 1.0906628741574973, | |
| "learning_rate": 1.565255890608797e-05, | |
| "loss": 0.7363, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.3305471124620061, | |
| "grad_norm": 1.1587847666652895, | |
| "learning_rate": 1.5632231850452745e-05, | |
| "loss": 0.7983, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.331306990881459, | |
| "grad_norm": 1.2881061427604819, | |
| "learning_rate": 1.5611870653623826e-05, | |
| "loss": 0.7911, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.33206686930091184, | |
| "grad_norm": 1.277727737736335, | |
| "learning_rate": 1.5591475439025745e-05, | |
| "loss": 0.8579, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.33282674772036475, | |
| "grad_norm": 1.1575352825422998, | |
| "learning_rate": 1.557104633028924e-05, | |
| "loss": 0.8016, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.33358662613981765, | |
| "grad_norm": 1.2264207653230985, | |
| "learning_rate": 1.5550583451250504e-05, | |
| "loss": 0.7518, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.3343465045592705, | |
| "grad_norm": 1.246867150575764, | |
| "learning_rate": 1.5530086925950435e-05, | |
| "loss": 0.7447, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.3351063829787234, | |
| "grad_norm": 1.3715595586193758, | |
| "learning_rate": 1.5509556878633894e-05, | |
| "loss": 0.8092, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.33586626139817627, | |
| "grad_norm": 1.29350763875836, | |
| "learning_rate": 1.5488993433748944e-05, | |
| "loss": 0.7851, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.3366261398176292, | |
| "grad_norm": 1.259469197331615, | |
| "learning_rate": 1.5468396715946082e-05, | |
| "loss": 0.8206, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.3373860182370821, | |
| "grad_norm": 1.3123999858865867, | |
| "learning_rate": 1.5447766850077517e-05, | |
| "loss": 0.8097, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.33814589665653494, | |
| "grad_norm": 1.2621219789976301, | |
| "learning_rate": 1.5427103961196378e-05, | |
| "loss": 0.718, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.33890577507598785, | |
| "grad_norm": 1.1675594541120748, | |
| "learning_rate": 1.5406408174555978e-05, | |
| "loss": 0.768, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.33966565349544076, | |
| "grad_norm": 1.0854292148917077, | |
| "learning_rate": 1.5385679615609045e-05, | |
| "loss": 0.8413, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.3404255319148936, | |
| "grad_norm": 1.3514284214290195, | |
| "learning_rate": 1.5364918410006967e-05, | |
| "loss": 0.836, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.3411854103343465, | |
| "grad_norm": 1.116477764114369, | |
| "learning_rate": 1.534412468359903e-05, | |
| "loss": 0.7809, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.34194528875379937, | |
| "grad_norm": 1.3861899628664367, | |
| "learning_rate": 1.5323298562431646e-05, | |
| "loss": 0.8396, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.3427051671732523, | |
| "grad_norm": 1.1313937974973802, | |
| "learning_rate": 1.5302440172747606e-05, | |
| "loss": 0.7394, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.3434650455927052, | |
| "grad_norm": 1.3596106384247422, | |
| "learning_rate": 1.5281549640985295e-05, | |
| "loss": 0.7078, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.34422492401215804, | |
| "grad_norm": 1.2313272969648281, | |
| "learning_rate": 1.5260627093777936e-05, | |
| "loss": 0.8325, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.34498480243161095, | |
| "grad_norm": 1.1808264294295199, | |
| "learning_rate": 1.5239672657952833e-05, | |
| "loss": 0.8077, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.34574468085106386, | |
| "grad_norm": 1.33079700527023, | |
| "learning_rate": 1.5218686460530579e-05, | |
| "loss": 0.79, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.3465045592705167, | |
| "grad_norm": 1.46122703710675, | |
| "learning_rate": 1.5197668628724302e-05, | |
| "loss": 0.789, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.3472644376899696, | |
| "grad_norm": 1.3917045313263725, | |
| "learning_rate": 1.517661928993889e-05, | |
| "loss": 0.8668, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.34802431610942247, | |
| "grad_norm": 1.2566442944025935, | |
| "learning_rate": 1.515553857177022e-05, | |
| "loss": 0.773, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.3487841945288754, | |
| "grad_norm": 1.0795922548160497, | |
| "learning_rate": 1.5134426602004378e-05, | |
| "loss": 0.8084, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.3495440729483283, | |
| "grad_norm": 1.485668401096033, | |
| "learning_rate": 1.5113283508616895e-05, | |
| "loss": 0.733, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.35030395136778114, | |
| "grad_norm": 1.1301567654941176, | |
| "learning_rate": 1.5092109419771962e-05, | |
| "loss": 0.7143, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.35106382978723405, | |
| "grad_norm": 1.1978337908610355, | |
| "learning_rate": 1.5070904463821658e-05, | |
| "loss": 0.8646, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.3518237082066869, | |
| "grad_norm": 1.2646367812202728, | |
| "learning_rate": 1.5049668769305172e-05, | |
| "loss": 0.8115, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.3525835866261398, | |
| "grad_norm": 1.1115715158886703, | |
| "learning_rate": 1.5028402464948023e-05, | |
| "loss": 0.724, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.3533434650455927, | |
| "grad_norm": 1.2391821992716336, | |
| "learning_rate": 1.5007105679661276e-05, | |
| "loss": 0.7751, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.3541033434650456, | |
| "grad_norm": 1.318788034218467, | |
| "learning_rate": 1.4985778542540764e-05, | |
| "loss": 0.8098, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.3548632218844985, | |
| "grad_norm": 1.2830783947390734, | |
| "learning_rate": 1.4964421182866312e-05, | |
| "loss": 0.8918, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.3556231003039514, | |
| "grad_norm": 1.151889625527157, | |
| "learning_rate": 1.4943033730100936e-05, | |
| "loss": 0.7168, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.35638297872340424, | |
| "grad_norm": 1.2790849712170538, | |
| "learning_rate": 1.4921616313890073e-05, | |
| "loss": 0.6402, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.35714285714285715, | |
| "grad_norm": 1.0004084794331276, | |
| "learning_rate": 1.4900169064060804e-05, | |
| "loss": 0.7251, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.35790273556231, | |
| "grad_norm": 1.3053536603592948, | |
| "learning_rate": 1.4878692110621028e-05, | |
| "loss": 0.7878, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.3586626139817629, | |
| "grad_norm": 1.1766285093771964, | |
| "learning_rate": 1.4857185583758722e-05, | |
| "loss": 0.7526, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.3594224924012158, | |
| "grad_norm": 1.3487709175466067, | |
| "learning_rate": 1.483564961384112e-05, | |
| "loss": 0.7626, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.3601823708206687, | |
| "grad_norm": 1.356854518892926, | |
| "learning_rate": 1.4814084331413938e-05, | |
| "loss": 0.8229, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.3609422492401216, | |
| "grad_norm": 1.2024661213101453, | |
| "learning_rate": 1.479248986720057e-05, | |
| "loss": 0.794, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.3617021276595745, | |
| "grad_norm": 1.2796551795114366, | |
| "learning_rate": 1.4770866352101308e-05, | |
| "loss": 0.7381, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.36246200607902734, | |
| "grad_norm": 1.1809978963381549, | |
| "learning_rate": 1.474921391719254e-05, | |
| "loss": 0.8248, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.36322188449848025, | |
| "grad_norm": 1.2047012381528968, | |
| "learning_rate": 1.472753269372596e-05, | |
| "loss": 0.7493, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.3639817629179331, | |
| "grad_norm": 1.2204885091911777, | |
| "learning_rate": 1.4705822813127776e-05, | |
| "loss": 0.8332, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.364741641337386, | |
| "grad_norm": 1.0190180051326805, | |
| "learning_rate": 1.4684084406997903e-05, | |
| "loss": 0.8097, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.3655015197568389, | |
| "grad_norm": 1.010916514085341, | |
| "learning_rate": 1.466231760710917e-05, | |
| "loss": 0.7716, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.3662613981762918, | |
| "grad_norm": 1.5092990693529875, | |
| "learning_rate": 1.4640522545406519e-05, | |
| "loss": 0.7726, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.3670212765957447, | |
| "grad_norm": 1.2547701681651005, | |
| "learning_rate": 1.4618699354006223e-05, | |
| "loss": 0.8615, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.3677811550151976, | |
| "grad_norm": 1.1984082141780765, | |
| "learning_rate": 1.4596848165195052e-05, | |
| "loss": 0.6728, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.36854103343465044, | |
| "grad_norm": 1.3140514914690362, | |
| "learning_rate": 1.45749691114295e-05, | |
| "loss": 0.818, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.36930091185410335, | |
| "grad_norm": 1.2008368172985484, | |
| "learning_rate": 1.4553062325334968e-05, | |
| "loss": 0.8202, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.3700607902735562, | |
| "grad_norm": 1.1079257271212097, | |
| "learning_rate": 1.4531127939704965e-05, | |
| "loss": 0.7939, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.3708206686930091, | |
| "grad_norm": 1.1544363853690724, | |
| "learning_rate": 1.4509166087500305e-05, | |
| "loss": 0.8376, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.371580547112462, | |
| "grad_norm": 1.1892686450194994, | |
| "learning_rate": 1.4487176901848285e-05, | |
| "loss": 0.758, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.3723404255319149, | |
| "grad_norm": 1.2764032366914544, | |
| "learning_rate": 1.4465160516041905e-05, | |
| "loss": 0.7926, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.3731003039513678, | |
| "grad_norm": 1.266424268139601, | |
| "learning_rate": 1.4443117063539039e-05, | |
| "loss": 0.7649, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.3738601823708207, | |
| "grad_norm": 1.160935022225602, | |
| "learning_rate": 1.4421046677961627e-05, | |
| "loss": 0.9117, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.37462006079027355, | |
| "grad_norm": 1.2304169504704245, | |
| "learning_rate": 1.439894949309489e-05, | |
| "loss": 0.6983, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.37537993920972645, | |
| "grad_norm": 1.2268734184193484, | |
| "learning_rate": 1.4376825642886473e-05, | |
| "loss": 0.8481, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.3761398176291793, | |
| "grad_norm": 1.1267976015698142, | |
| "learning_rate": 1.435467526144568e-05, | |
| "loss": 0.778, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.3768996960486322, | |
| "grad_norm": 1.1269454528560872, | |
| "learning_rate": 1.4332498483042639e-05, | |
| "loss": 0.7746, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.3776595744680851, | |
| "grad_norm": 1.3741014547464558, | |
| "learning_rate": 1.4310295442107472e-05, | |
| "loss": 0.7902, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.378419452887538, | |
| "grad_norm": 1.2078933820728968, | |
| "learning_rate": 1.428806627322952e-05, | |
| "loss": 0.8955, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.3791793313069909, | |
| "grad_norm": 1.3512670142371217, | |
| "learning_rate": 1.4265811111156491e-05, | |
| "loss": 0.7931, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.3799392097264438, | |
| "grad_norm": 1.1941352054197616, | |
| "learning_rate": 1.4243530090793667e-05, | |
| "loss": 0.766, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.38069908814589665, | |
| "grad_norm": 1.1447725649002876, | |
| "learning_rate": 1.4221223347203067e-05, | |
| "loss": 0.8155, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.38145896656534956, | |
| "grad_norm": 1.1816673282742236, | |
| "learning_rate": 1.4198891015602648e-05, | |
| "loss": 0.8726, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.3822188449848024, | |
| "grad_norm": 1.2141907289664886, | |
| "learning_rate": 1.4176533231365463e-05, | |
| "loss": 0.8084, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.3829787234042553, | |
| "grad_norm": 1.5206267684616313, | |
| "learning_rate": 1.4154150130018867e-05, | |
| "loss": 0.7056, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.3837386018237082, | |
| "grad_norm": 1.2307733684080757, | |
| "learning_rate": 1.4131741847243665e-05, | |
| "loss": 0.83, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.3844984802431611, | |
| "grad_norm": 1.3410942533205799, | |
| "learning_rate": 1.4109308518873321e-05, | |
| "loss": 0.8515, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.385258358662614, | |
| "grad_norm": 1.271440796202491, | |
| "learning_rate": 1.4086850280893107e-05, | |
| "loss": 0.7042, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.3860182370820669, | |
| "grad_norm": 1.303266493940009, | |
| "learning_rate": 1.40643672694393e-05, | |
| "loss": 0.7574, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.38677811550151975, | |
| "grad_norm": 1.1149311188966764, | |
| "learning_rate": 1.4041859620798341e-05, | |
| "loss": 0.7639, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.38753799392097266, | |
| "grad_norm": 1.284236299811628, | |
| "learning_rate": 1.4019327471406021e-05, | |
| "loss": 0.7993, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.3882978723404255, | |
| "grad_norm": 1.2416650873534774, | |
| "learning_rate": 1.3996770957846643e-05, | |
| "loss": 0.7882, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.3890577507598784, | |
| "grad_norm": 1.1946922250305798, | |
| "learning_rate": 1.3974190216852203e-05, | |
| "loss": 0.7896, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.3898176291793313, | |
| "grad_norm": 1.1918737601263765, | |
| "learning_rate": 1.3951585385301557e-05, | |
| "loss": 0.7496, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.3905775075987842, | |
| "grad_norm": 1.2656431867092106, | |
| "learning_rate": 1.3928956600219593e-05, | |
| "loss": 0.7709, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.3913373860182371, | |
| "grad_norm": 1.247575714519452, | |
| "learning_rate": 1.3906303998776392e-05, | |
| "loss": 0.6178, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.39209726443769, | |
| "grad_norm": 1.379298975514798, | |
| "learning_rate": 1.388362771828642e-05, | |
| "loss": 0.8437, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.39285714285714285, | |
| "grad_norm": 1.2868947918073375, | |
| "learning_rate": 1.3860927896207665e-05, | |
| "loss": 0.7108, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.39361702127659576, | |
| "grad_norm": 1.305621041702867, | |
| "learning_rate": 1.383820467014082e-05, | |
| "loss": 0.8491, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.3943768996960486, | |
| "grad_norm": 1.5946129748170261, | |
| "learning_rate": 1.3815458177828455e-05, | |
| "loss": 0.7633, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.3951367781155015, | |
| "grad_norm": 1.1919308843594276, | |
| "learning_rate": 1.3792688557154166e-05, | |
| "loss": 0.7372, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.3958966565349544, | |
| "grad_norm": 1.3255329743332758, | |
| "learning_rate": 1.3769895946141753e-05, | |
| "loss": 0.7961, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.3966565349544073, | |
| "grad_norm": 1.396828064970446, | |
| "learning_rate": 1.3747080482954378e-05, | |
| "loss": 0.8364, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.3974164133738602, | |
| "grad_norm": 1.287250609675237, | |
| "learning_rate": 1.3724242305893716e-05, | |
| "loss": 0.8365, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.3981762917933131, | |
| "grad_norm": 1.1259886717395222, | |
| "learning_rate": 1.3701381553399147e-05, | |
| "loss": 0.7055, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.39893617021276595, | |
| "grad_norm": 1.087304716591874, | |
| "learning_rate": 1.3678498364046877e-05, | |
| "loss": 0.7656, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.39969604863221886, | |
| "grad_norm": 1.0839039613499073, | |
| "learning_rate": 1.3655592876549135e-05, | |
| "loss": 0.7866, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.4004559270516717, | |
| "grad_norm": 1.119797796587366, | |
| "learning_rate": 1.363266522975331e-05, | |
| "loss": 0.7462, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.4012158054711246, | |
| "grad_norm": 1.262582436782854, | |
| "learning_rate": 1.3609715562641116e-05, | |
| "loss": 0.8224, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.40197568389057753, | |
| "grad_norm": 1.4344581984129021, | |
| "learning_rate": 1.358674401432774e-05, | |
| "loss": 0.6018, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.4027355623100304, | |
| "grad_norm": 1.128158674833865, | |
| "learning_rate": 1.3563750724061025e-05, | |
| "loss": 0.7771, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.4034954407294833, | |
| "grad_norm": 1.251640115665959, | |
| "learning_rate": 1.354073583122059e-05, | |
| "loss": 0.7563, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.40425531914893614, | |
| "grad_norm": 1.4284496061204808, | |
| "learning_rate": 1.3517699475317016e-05, | |
| "loss": 0.8184, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.40501519756838905, | |
| "grad_norm": 1.203958098789956, | |
| "learning_rate": 1.3494641795990986e-05, | |
| "loss": 0.7099, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.40577507598784196, | |
| "grad_norm": 1.2291976824414712, | |
| "learning_rate": 1.3471562933012432e-05, | |
| "loss": 0.761, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.4065349544072948, | |
| "grad_norm": 1.1656511582009284, | |
| "learning_rate": 1.3448463026279706e-05, | |
| "loss": 0.702, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.4072948328267477, | |
| "grad_norm": 1.1772122357486738, | |
| "learning_rate": 1.3425342215818718e-05, | |
| "loss": 0.8396, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.40805471124620063, | |
| "grad_norm": 1.3203475489579708, | |
| "learning_rate": 1.3402200641782089e-05, | |
| "loss": 0.6796, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.4088145896656535, | |
| "grad_norm": 7.6653490669251205, | |
| "learning_rate": 1.3379038444448307e-05, | |
| "loss": 0.8066, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.4095744680851064, | |
| "grad_norm": 1.1029819491287283, | |
| "learning_rate": 1.335585576422087e-05, | |
| "loss": 0.8144, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.41033434650455924, | |
| "grad_norm": 1.1450160822088717, | |
| "learning_rate": 1.3332652741627445e-05, | |
| "loss": 0.8413, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.41109422492401215, | |
| "grad_norm": 1.110782821163964, | |
| "learning_rate": 1.3309429517318999e-05, | |
| "loss": 0.6991, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.41185410334346506, | |
| "grad_norm": 1.2364607612487595, | |
| "learning_rate": 1.3286186232068972e-05, | |
| "loss": 0.786, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.4126139817629179, | |
| "grad_norm": 1.1062614363018903, | |
| "learning_rate": 1.326292302677239e-05, | |
| "loss": 0.7893, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.4133738601823708, | |
| "grad_norm": 1.2999062168984064, | |
| "learning_rate": 1.3239640042445037e-05, | |
| "loss": 0.7307, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.41413373860182373, | |
| "grad_norm": 1.107785860573515, | |
| "learning_rate": 1.3216337420222602e-05, | |
| "loss": 0.6911, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.4148936170212766, | |
| "grad_norm": 1.1981709859226055, | |
| "learning_rate": 1.31930153013598e-05, | |
| "loss": 0.8968, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.4156534954407295, | |
| "grad_norm": 1.1692340117547408, | |
| "learning_rate": 1.3169673827229539e-05, | |
| "loss": 0.8105, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.41641337386018235, | |
| "grad_norm": 1.0645771315368144, | |
| "learning_rate": 1.3146313139322051e-05, | |
| "loss": 0.8319, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.41717325227963525, | |
| "grad_norm": 1.1541717096731297, | |
| "learning_rate": 1.3122933379244036e-05, | |
| "loss": 0.7895, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.41793313069908816, | |
| "grad_norm": 1.1392028529560088, | |
| "learning_rate": 1.3099534688717806e-05, | |
| "loss": 0.7282, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.418693009118541, | |
| "grad_norm": 1.0890923762915308, | |
| "learning_rate": 1.3076117209580418e-05, | |
| "loss": 0.8085, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.4194528875379939, | |
| "grad_norm": 1.2704899587569098, | |
| "learning_rate": 1.3052681083782837e-05, | |
| "loss": 0.695, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.42021276595744683, | |
| "grad_norm": 1.5485957723256327, | |
| "learning_rate": 1.3029226453389044e-05, | |
| "loss": 0.9376, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.4209726443768997, | |
| "grad_norm": 1.2272594145149078, | |
| "learning_rate": 1.3005753460575195e-05, | |
| "loss": 0.8046, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.4217325227963526, | |
| "grad_norm": 1.1982599200950923, | |
| "learning_rate": 1.2982262247628758e-05, | |
| "loss": 0.6733, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.42249240121580545, | |
| "grad_norm": 1.6452177912531496, | |
| "learning_rate": 1.2958752956947645e-05, | |
| "loss": 0.7165, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.42325227963525835, | |
| "grad_norm": 1.1795539556661114, | |
| "learning_rate": 1.2935225731039349e-05, | |
| "loss": 0.8882, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.42401215805471126, | |
| "grad_norm": 1.2549839812331454, | |
| "learning_rate": 1.2911680712520082e-05, | |
| "loss": 0.8346, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.4247720364741641, | |
| "grad_norm": 1.2172758156063759, | |
| "learning_rate": 1.2888118044113913e-05, | |
| "loss": 0.7664, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.425531914893617, | |
| "grad_norm": 1.17770634547905, | |
| "learning_rate": 1.2864537868651894e-05, | |
| "loss": 0.7954, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.42629179331306993, | |
| "grad_norm": 1.261564580874549, | |
| "learning_rate": 1.2840940329071213e-05, | |
| "loss": 0.6888, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.4270516717325228, | |
| "grad_norm": 1.2773233328816456, | |
| "learning_rate": 1.2817325568414299e-05, | |
| "loss": 0.7508, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.4278115501519757, | |
| "grad_norm": 1.0987478638545596, | |
| "learning_rate": 1.2793693729827984e-05, | |
| "loss": 0.7558, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.42857142857142855, | |
| "grad_norm": 1.1100818859241546, | |
| "learning_rate": 1.2770044956562613e-05, | |
| "loss": 0.8737, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.42933130699088146, | |
| "grad_norm": 1.3087957935532264, | |
| "learning_rate": 1.2746379391971191e-05, | |
| "loss": 0.6717, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.43009118541033436, | |
| "grad_norm": 1.2369179591486499, | |
| "learning_rate": 1.272269717950851e-05, | |
| "loss": 0.7027, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.4308510638297872, | |
| "grad_norm": 1.2512521772895409, | |
| "learning_rate": 1.2698998462730265e-05, | |
| "loss": 0.7311, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.4316109422492401, | |
| "grad_norm": 1.2292837539985202, | |
| "learning_rate": 1.2675283385292212e-05, | |
| "loss": 0.7307, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.43237082066869303, | |
| "grad_norm": 1.3446333583544032, | |
| "learning_rate": 1.2651552090949265e-05, | |
| "loss": 0.7973, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.4331306990881459, | |
| "grad_norm": 1.1129588109454025, | |
| "learning_rate": 1.2627804723554653e-05, | |
| "loss": 0.8397, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.4338905775075988, | |
| "grad_norm": 1.1505313444956853, | |
| "learning_rate": 1.2604041427059037e-05, | |
| "loss": 0.8551, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.43465045592705165, | |
| "grad_norm": 1.180024296724761, | |
| "learning_rate": 1.2580262345509621e-05, | |
| "loss": 0.7406, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.43541033434650456, | |
| "grad_norm": 1.1840554015750546, | |
| "learning_rate": 1.2556467623049313e-05, | |
| "loss": 0.7765, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.43617021276595747, | |
| "grad_norm": 1.2993663570236609, | |
| "learning_rate": 1.2532657403915821e-05, | |
| "loss": 0.7663, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.4369300911854103, | |
| "grad_norm": 1.1938711645351547, | |
| "learning_rate": 1.2508831832440795e-05, | |
| "loss": 0.8068, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.4376899696048632, | |
| "grad_norm": 1.3111377224776326, | |
| "learning_rate": 1.248499105304894e-05, | |
| "loss": 0.8097, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.43844984802431614, | |
| "grad_norm": 1.3136341369823523, | |
| "learning_rate": 1.2461135210257156e-05, | |
| "loss": 0.5929, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.439209726443769, | |
| "grad_norm": 1.6018645897007295, | |
| "learning_rate": 1.2437264448673647e-05, | |
| "loss": 0.7632, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.4399696048632219, | |
| "grad_norm": 1.113843255117769, | |
| "learning_rate": 1.2413378912997058e-05, | |
| "loss": 0.7563, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.44072948328267475, | |
| "grad_norm": 1.287498381668339, | |
| "learning_rate": 1.2389478748015584e-05, | |
| "loss": 0.7723, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.44148936170212766, | |
| "grad_norm": 1.4604588617616656, | |
| "learning_rate": 1.2365564098606103e-05, | |
| "loss": 0.7165, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.44224924012158057, | |
| "grad_norm": 1.1843362242996534, | |
| "learning_rate": 1.2341635109733292e-05, | |
| "loss": 0.7036, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.4430091185410334, | |
| "grad_norm": 1.285540078880297, | |
| "learning_rate": 1.2317691926448753e-05, | |
| "loss": 0.6935, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.44376899696048633, | |
| "grad_norm": 1.2139986195688235, | |
| "learning_rate": 1.2293734693890131e-05, | |
| "loss": 0.6651, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.44452887537993924, | |
| "grad_norm": 1.0300723266411738, | |
| "learning_rate": 1.226976355728023e-05, | |
| "loss": 0.698, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.4452887537993921, | |
| "grad_norm": 1.1373620307966847, | |
| "learning_rate": 1.2245778661926138e-05, | |
| "loss": 0.7876, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.446048632218845, | |
| "grad_norm": 1.1842911118510402, | |
| "learning_rate": 1.222178015321835e-05, | |
| "loss": 0.7987, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.44680851063829785, | |
| "grad_norm": 1.0283022156168402, | |
| "learning_rate": 1.2197768176629876e-05, | |
| "loss": 0.7757, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.44756838905775076, | |
| "grad_norm": 1.1270799464982473, | |
| "learning_rate": 1.2173742877715374e-05, | |
| "loss": 0.7772, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.44832826747720367, | |
| "grad_norm": 1.3241482244789882, | |
| "learning_rate": 1.2149704402110243e-05, | |
| "loss": 0.7538, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.4490881458966565, | |
| "grad_norm": 1.3803962121662587, | |
| "learning_rate": 1.2125652895529766e-05, | |
| "loss": 0.7377, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.44984802431610943, | |
| "grad_norm": 1.1358520780175547, | |
| "learning_rate": 1.2101588503768224e-05, | |
| "loss": 0.8042, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.4506079027355623, | |
| "grad_norm": 1.0308326337505642, | |
| "learning_rate": 1.2077511372697986e-05, | |
| "loss": 0.6726, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.4513677811550152, | |
| "grad_norm": 1.4827344701166258, | |
| "learning_rate": 1.2053421648268661e-05, | |
| "loss": 0.752, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.4521276595744681, | |
| "grad_norm": 1.2152094731239294, | |
| "learning_rate": 1.2029319476506183e-05, | |
| "loss": 0.8397, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.45288753799392095, | |
| "grad_norm": 1.2095466858294417, | |
| "learning_rate": 1.2005205003511948e-05, | |
| "loss": 0.6973, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.45364741641337386, | |
| "grad_norm": 1.128641784906088, | |
| "learning_rate": 1.1981078375461917e-05, | |
| "loss": 0.8055, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.45440729483282677, | |
| "grad_norm": 1.2832265052414376, | |
| "learning_rate": 1.1956939738605722e-05, | |
| "loss": 0.8485, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.4551671732522796, | |
| "grad_norm": 1.320094564842837, | |
| "learning_rate": 1.1932789239265803e-05, | |
| "loss": 0.748, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.45592705167173253, | |
| "grad_norm": 1.0328846656573532, | |
| "learning_rate": 1.1908627023836504e-05, | |
| "loss": 0.7968, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.4566869300911854, | |
| "grad_norm": 1.0695862445910687, | |
| "learning_rate": 1.1884453238783185e-05, | |
| "loss": 0.6985, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.4574468085106383, | |
| "grad_norm": 1.203684585681864, | |
| "learning_rate": 1.1860268030641337e-05, | |
| "loss": 0.6677, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.4582066869300912, | |
| "grad_norm": 1.2239685532776878, | |
| "learning_rate": 1.1836071546015704e-05, | |
| "loss": 0.7315, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.45896656534954405, | |
| "grad_norm": 1.1174496254300246, | |
| "learning_rate": 1.1811863931579377e-05, | |
| "loss": 0.7735, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.45972644376899696, | |
| "grad_norm": 1.274228428297079, | |
| "learning_rate": 1.1787645334072912e-05, | |
| "loss": 0.6979, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.46048632218844987, | |
| "grad_norm": 1.1216074493958696, | |
| "learning_rate": 1.176341590030345e-05, | |
| "loss": 0.8083, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.4612462006079027, | |
| "grad_norm": 1.3308989498544481, | |
| "learning_rate": 1.1739175777143813e-05, | |
| "loss": 0.7616, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.46200607902735563, | |
| "grad_norm": 1.2402103381684604, | |
| "learning_rate": 1.1714925111531619e-05, | |
| "loss": 0.6652, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.4627659574468085, | |
| "grad_norm": 1.3473899099322548, | |
| "learning_rate": 1.169066405046839e-05, | |
| "loss": 0.6977, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.4635258358662614, | |
| "grad_norm": 1.2969368805538337, | |
| "learning_rate": 1.1666392741018675e-05, | |
| "loss": 0.839, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.4642857142857143, | |
| "grad_norm": 1.178244400612319, | |
| "learning_rate": 1.1642111330309129e-05, | |
| "loss": 0.772, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.46504559270516715, | |
| "grad_norm": 1.0724552547882107, | |
| "learning_rate": 1.161781996552765e-05, | |
| "loss": 0.8022, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.46580547112462006, | |
| "grad_norm": 1.4001345648503962, | |
| "learning_rate": 1.159351879392247e-05, | |
| "loss": 0.8061, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.46656534954407297, | |
| "grad_norm": 1.280097664667771, | |
| "learning_rate": 1.1569207962801264e-05, | |
| "loss": 0.8004, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.4673252279635258, | |
| "grad_norm": 1.0329186509706394, | |
| "learning_rate": 1.1544887619530277e-05, | |
| "loss": 0.8188, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.46808510638297873, | |
| "grad_norm": 1.0373041164313308, | |
| "learning_rate": 1.152055791153339e-05, | |
| "loss": 0.779, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.4688449848024316, | |
| "grad_norm": 1.2728125022474566, | |
| "learning_rate": 1.1496218986291274e-05, | |
| "loss": 0.727, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.4696048632218845, | |
| "grad_norm": 1.1466689080234558, | |
| "learning_rate": 1.1471870991340459e-05, | |
| "loss": 0.7259, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.4703647416413374, | |
| "grad_norm": 1.3931059662125809, | |
| "learning_rate": 1.1447514074272452e-05, | |
| "loss": 0.9308, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.47112462006079026, | |
| "grad_norm": 1.1058280371702607, | |
| "learning_rate": 1.1423148382732854e-05, | |
| "loss": 0.6694, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.47188449848024316, | |
| "grad_norm": 1.1591511819217963, | |
| "learning_rate": 1.1398774064420444e-05, | |
| "loss": 0.768, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.4726443768996961, | |
| "grad_norm": 1.1389602231832467, | |
| "learning_rate": 1.1374391267086301e-05, | |
| "loss": 0.7817, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.4734042553191489, | |
| "grad_norm": 1.2020933841404111, | |
| "learning_rate": 1.1350000138532902e-05, | |
| "loss": 0.8292, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.47416413373860183, | |
| "grad_norm": 1.2901415761432122, | |
| "learning_rate": 1.1325600826613221e-05, | |
| "loss": 0.7412, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.4749240121580547, | |
| "grad_norm": 1.0264813424311687, | |
| "learning_rate": 1.1301193479229842e-05, | |
| "loss": 0.7589, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.4756838905775076, | |
| "grad_norm": 1.223866530284065, | |
| "learning_rate": 1.1276778244334055e-05, | |
| "loss": 0.8515, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.4764437689969605, | |
| "grad_norm": 1.2268165414487393, | |
| "learning_rate": 1.1252355269924965e-05, | |
| "loss": 0.7997, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.47720364741641336, | |
| "grad_norm": 1.1019572366525499, | |
| "learning_rate": 1.1227924704048585e-05, | |
| "loss": 0.7186, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.47796352583586627, | |
| "grad_norm": 1.248231761757093, | |
| "learning_rate": 1.1203486694796956e-05, | |
| "loss": 0.8022, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.4787234042553192, | |
| "grad_norm": 1.5037452462748153, | |
| "learning_rate": 1.1179041390307235e-05, | |
| "loss": 0.8366, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.479483282674772, | |
| "grad_norm": 1.1461301244558273, | |
| "learning_rate": 1.1154588938760795e-05, | |
| "loss": 0.8648, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.48024316109422494, | |
| "grad_norm": 1.0844433988145132, | |
| "learning_rate": 1.1130129488382341e-05, | |
| "loss": 0.8469, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.4810030395136778, | |
| "grad_norm": 1.1842970009991822, | |
| "learning_rate": 1.1105663187438997e-05, | |
| "loss": 0.7498, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.4817629179331307, | |
| "grad_norm": 1.1112248823980015, | |
| "learning_rate": 1.1081190184239418e-05, | |
| "loss": 0.7086, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.4825227963525836, | |
| "grad_norm": 1.292640779965634, | |
| "learning_rate": 1.1056710627132885e-05, | |
| "loss": 0.7176, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.48328267477203646, | |
| "grad_norm": 1.223731288345465, | |
| "learning_rate": 1.1032224664508406e-05, | |
| "loss": 0.7102, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.48404255319148937, | |
| "grad_norm": 1.346688117427275, | |
| "learning_rate": 1.1007732444793815e-05, | |
| "loss": 0.779, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.4848024316109423, | |
| "grad_norm": 1.33804298287085, | |
| "learning_rate": 1.0983234116454885e-05, | |
| "loss": 0.7339, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.48556231003039513, | |
| "grad_norm": 1.3252570001274584, | |
| "learning_rate": 1.0958729827994406e-05, | |
| "loss": 0.8626, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.48632218844984804, | |
| "grad_norm": 1.1068256432262975, | |
| "learning_rate": 1.09342197279513e-05, | |
| "loss": 0.7626, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.4870820668693009, | |
| "grad_norm": 1.3117562526125528, | |
| "learning_rate": 1.090970396489973e-05, | |
| "loss": 0.7296, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.4878419452887538, | |
| "grad_norm": 1.2986040450725451, | |
| "learning_rate": 1.0885182687448162e-05, | |
| "loss": 0.7432, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.4886018237082067, | |
| "grad_norm": 1.1291196795567748, | |
| "learning_rate": 1.086065604423851e-05, | |
| "loss": 0.7813, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.48936170212765956, | |
| "grad_norm": 1.0478325418689176, | |
| "learning_rate": 1.0836124183945209e-05, | |
| "loss": 0.7989, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.49012158054711247, | |
| "grad_norm": 1.3229442097838244, | |
| "learning_rate": 1.0811587255274313e-05, | |
| "loss": 0.7641, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.4908814589665654, | |
| "grad_norm": 1.2805025362981792, | |
| "learning_rate": 1.0787045406962605e-05, | |
| "loss": 0.7312, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.49164133738601823, | |
| "grad_norm": 1.2029011049332292, | |
| "learning_rate": 1.0762498787776688e-05, | |
| "loss": 0.809, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.49240121580547114, | |
| "grad_norm": 1.197970046333178, | |
| "learning_rate": 1.073794754651208e-05, | |
| "loss": 0.7452, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.493161094224924, | |
| "grad_norm": 1.0976660287981335, | |
| "learning_rate": 1.0713391831992324e-05, | |
| "loss": 0.8028, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.4939209726443769, | |
| "grad_norm": 1.5306488565665117, | |
| "learning_rate": 1.0688831793068078e-05, | |
| "loss": 0.7682, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.4946808510638298, | |
| "grad_norm": 1.2705138796077722, | |
| "learning_rate": 1.0664267578616208e-05, | |
| "loss": 0.71, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.49544072948328266, | |
| "grad_norm": 1.1684471011081035, | |
| "learning_rate": 1.0639699337538897e-05, | |
| "loss": 0.7128, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.49620060790273557, | |
| "grad_norm": 1.1267505527793704, | |
| "learning_rate": 1.0615127218762733e-05, | |
| "loss": 0.6616, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.4969604863221885, | |
| "grad_norm": 1.2432539863921401, | |
| "learning_rate": 1.059055137123781e-05, | |
| "loss": 0.6311, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.49772036474164133, | |
| "grad_norm": 1.1566444920397811, | |
| "learning_rate": 1.0565971943936826e-05, | |
| "loss": 0.6663, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.49848024316109424, | |
| "grad_norm": 1.1357974741818124, | |
| "learning_rate": 1.0541389085854177e-05, | |
| "loss": 0.7914, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.4992401215805471, | |
| "grad_norm": 1.111614770399477, | |
| "learning_rate": 1.0516802946005059e-05, | |
| "loss": 0.7244, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.1437148132630974, | |
| "learning_rate": 1.0492213673424554e-05, | |
| "loss": 0.7567, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.5007598784194529, | |
| "grad_norm": 1.19755067515104, | |
| "learning_rate": 1.0467621417166744e-05, | |
| "loss": 0.8892, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.5015197568389058, | |
| "grad_norm": 1.3967990832542359, | |
| "learning_rate": 1.0443026326303789e-05, | |
| "loss": 0.8066, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.5022796352583586, | |
| "grad_norm": 1.193444269626328, | |
| "learning_rate": 1.0418428549925033e-05, | |
| "loss": 0.7158, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.5030395136778115, | |
| "grad_norm": 1.124127018307968, | |
| "learning_rate": 1.0393828237136108e-05, | |
| "loss": 0.7884, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.5037993920972644, | |
| "grad_norm": 1.3355063829990172, | |
| "learning_rate": 1.0369225537058004e-05, | |
| "loss": 0.8913, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.5045592705167173, | |
| "grad_norm": 1.0653967941928015, | |
| "learning_rate": 1.0344620598826199e-05, | |
| "loss": 0.6553, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.5053191489361702, | |
| "grad_norm": 1.2647551086192201, | |
| "learning_rate": 1.0320013571589727e-05, | |
| "loss": 0.7184, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.506079027355623, | |
| "grad_norm": 1.4772313312357277, | |
| "learning_rate": 1.0295404604510286e-05, | |
| "loss": 0.7727, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.506838905775076, | |
| "grad_norm": 1.3041568464394342, | |
| "learning_rate": 1.0270793846761347e-05, | |
| "loss": 0.7942, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.5075987841945289, | |
| "grad_norm": 1.158146011205988, | |
| "learning_rate": 1.0246181447527213e-05, | |
| "loss": 0.7395, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.5083586626139818, | |
| "grad_norm": 1.2073953008814802, | |
| "learning_rate": 1.0221567556002154e-05, | |
| "loss": 0.8264, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.5091185410334347, | |
| "grad_norm": 1.076413159175656, | |
| "learning_rate": 1.0196952321389482e-05, | |
| "loss": 0.7775, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.5098784194528876, | |
| "grad_norm": 1.1525840662045868, | |
| "learning_rate": 1.0172335892900645e-05, | |
| "loss": 0.7242, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.5106382978723404, | |
| "grad_norm": 1.271664069473118, | |
| "learning_rate": 1.0147718419754335e-05, | |
| "loss": 0.7416, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.5113981762917933, | |
| "grad_norm": 1.3572568498543502, | |
| "learning_rate": 1.0123100051175567e-05, | |
| "loss": 0.7729, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.5121580547112462, | |
| "grad_norm": 1.2803969575530498, | |
| "learning_rate": 1.0098480936394801e-05, | |
| "loss": 0.7177, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.5129179331306991, | |
| "grad_norm": 1.4891572575085688, | |
| "learning_rate": 1.0073861224647002e-05, | |
| "loss": 0.7062, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.513677811550152, | |
| "grad_norm": 1.2164307032985082, | |
| "learning_rate": 1.0049241065170765e-05, | |
| "loss": 0.8279, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.5144376899696048, | |
| "grad_norm": 1.2141790444746627, | |
| "learning_rate": 1.0024620607207393e-05, | |
| "loss": 0.7174, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.5151975683890577, | |
| "grad_norm": 0.9763701464494112, | |
| "learning_rate": 1e-05, | |
| "loss": 0.6775, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.5159574468085106, | |
| "grad_norm": 1.1763198867421552, | |
| "learning_rate": 9.97537939279261e-06, | |
| "loss": 0.7799, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.5167173252279635, | |
| "grad_norm": 1.610529899867949, | |
| "learning_rate": 9.950758934829242e-06, | |
| "loss": 0.6313, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.5174772036474165, | |
| "grad_norm": 1.2810564241843232, | |
| "learning_rate": 9.926138775352998e-06, | |
| "loss": 0.7065, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.5182370820668692, | |
| "grad_norm": 1.3325397022094134, | |
| "learning_rate": 9.901519063605204e-06, | |
| "loss": 0.8092, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.5189969604863222, | |
| "grad_norm": 1.0322497821219359, | |
| "learning_rate": 9.876899948824435e-06, | |
| "loss": 0.7264, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.5197568389057751, | |
| "grad_norm": 1.037271529807826, | |
| "learning_rate": 9.85228158024567e-06, | |
| "loss": 0.7442, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.520516717325228, | |
| "grad_norm": 1.1242545110647684, | |
| "learning_rate": 9.82766410709936e-06, | |
| "loss": 0.7711, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.5212765957446809, | |
| "grad_norm": 1.1670125333793142, | |
| "learning_rate": 9.80304767861052e-06, | |
| "loss": 0.6906, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.5220364741641338, | |
| "grad_norm": 1.2469119033780844, | |
| "learning_rate": 9.77843244399785e-06, | |
| "loss": 0.7755, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.5227963525835866, | |
| "grad_norm": 1.3233282548779752, | |
| "learning_rate": 9.75381855247279e-06, | |
| "loss": 0.8425, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.5235562310030395, | |
| "grad_norm": 0.9984402836686759, | |
| "learning_rate": 9.729206153238658e-06, | |
| "loss": 0.7236, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.5243161094224924, | |
| "grad_norm": 1.067583065176986, | |
| "learning_rate": 9.704595395489714e-06, | |
| "loss": 0.7604, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.5250759878419453, | |
| "grad_norm": 1.422303271953774, | |
| "learning_rate": 9.679986428410276e-06, | |
| "loss": 0.5904, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.5258358662613982, | |
| "grad_norm": 1.1232075982204, | |
| "learning_rate": 9.655379401173804e-06, | |
| "loss": 0.8013, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.526595744680851, | |
| "grad_norm": 1.4687380554303027, | |
| "learning_rate": 9.630774462942002e-06, | |
| "loss": 0.6882, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.5273556231003039, | |
| "grad_norm": 1.1167886952971091, | |
| "learning_rate": 9.606171762863899e-06, | |
| "loss": 0.7441, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.5281155015197568, | |
| "grad_norm": 1.2919843371274333, | |
| "learning_rate": 9.581571450074969e-06, | |
| "loss": 0.7971, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.5288753799392097, | |
| "grad_norm": 1.2441787920133387, | |
| "learning_rate": 9.556973673696214e-06, | |
| "loss": 0.662, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.5296352583586627, | |
| "grad_norm": 1.0798674336998493, | |
| "learning_rate": 9.53237858283326e-06, | |
| "loss": 0.8314, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.5303951367781155, | |
| "grad_norm": 1.3353578792940557, | |
| "learning_rate": 9.507786326575451e-06, | |
| "loss": 0.8175, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.5311550151975684, | |
| "grad_norm": 1.1487082413093388, | |
| "learning_rate": 9.483197053994948e-06, | |
| "loss": 0.7941, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.5319148936170213, | |
| "grad_norm": 1.023779043189916, | |
| "learning_rate": 9.458610914145826e-06, | |
| "loss": 0.7281, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.5326747720364742, | |
| "grad_norm": 1.183848923188154, | |
| "learning_rate": 9.434028056063178e-06, | |
| "loss": 0.773, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.5334346504559271, | |
| "grad_norm": 1.238781958359031, | |
| "learning_rate": 9.409448628762194e-06, | |
| "loss": 0.7676, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.53419452887538, | |
| "grad_norm": 1.2892673674980164, | |
| "learning_rate": 9.38487278123727e-06, | |
| "loss": 0.703, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.5349544072948328, | |
| "grad_norm": 1.4894690682074119, | |
| "learning_rate": 9.360300662461104e-06, | |
| "loss": 0.6824, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.5357142857142857, | |
| "grad_norm": 1.1256762983865307, | |
| "learning_rate": 9.335732421383794e-06, | |
| "loss": 0.7852, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.5364741641337386, | |
| "grad_norm": 1.2723192671736578, | |
| "learning_rate": 9.311168206931925e-06, | |
| "loss": 0.8694, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.5372340425531915, | |
| "grad_norm": 1.2077420371153875, | |
| "learning_rate": 9.286608168007678e-06, | |
| "loss": 0.678, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.5379939209726444, | |
| "grad_norm": 1.1904858031472638, | |
| "learning_rate": 9.262052453487924e-06, | |
| "loss": 0.6702, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.5387537993920972, | |
| "grad_norm": 1.2872241733120375, | |
| "learning_rate": 9.237501212223314e-06, | |
| "loss": 0.7629, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.5395136778115501, | |
| "grad_norm": 1.100700489965183, | |
| "learning_rate": 9.212954593037396e-06, | |
| "loss": 0.7134, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.540273556231003, | |
| "grad_norm": 1.377996218710173, | |
| "learning_rate": 9.18841274472569e-06, | |
| "loss": 0.7417, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.541033434650456, | |
| "grad_norm": 0.9989934252342503, | |
| "learning_rate": 9.163875816054795e-06, | |
| "loss": 0.8055, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.5417933130699089, | |
| "grad_norm": 1.1049868517525634, | |
| "learning_rate": 9.139343955761493e-06, | |
| "loss": 0.7453, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.5425531914893617, | |
| "grad_norm": 1.312278020298743, | |
| "learning_rate": 9.11481731255184e-06, | |
| "loss": 0.824, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.5433130699088146, | |
| "grad_norm": 1.1527438621026842, | |
| "learning_rate": 9.090296035100275e-06, | |
| "loss": 0.7723, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.5440729483282675, | |
| "grad_norm": 1.1884373944120221, | |
| "learning_rate": 9.065780272048703e-06, | |
| "loss": 0.7317, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.5448328267477204, | |
| "grad_norm": 1.1310529125737931, | |
| "learning_rate": 9.041270172005599e-06, | |
| "loss": 0.8618, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.5455927051671733, | |
| "grad_norm": 1.1442035900331595, | |
| "learning_rate": 9.016765883545116e-06, | |
| "loss": 0.7905, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.5463525835866262, | |
| "grad_norm": 1.1098710882653915, | |
| "learning_rate": 8.992267555206185e-06, | |
| "loss": 0.7975, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.547112462006079, | |
| "grad_norm": 1.2115450660429272, | |
| "learning_rate": 8.967775335491596e-06, | |
| "loss": 0.8021, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.5478723404255319, | |
| "grad_norm": 1.135452356618314, | |
| "learning_rate": 8.943289372867118e-06, | |
| "loss": 0.7123, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.5486322188449848, | |
| "grad_norm": 1.1521644116434293, | |
| "learning_rate": 8.918809815760585e-06, | |
| "loss": 0.7418, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.5493920972644377, | |
| "grad_norm": 1.1718004849681887, | |
| "learning_rate": 8.894336812561006e-06, | |
| "loss": 0.7325, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.5501519756838906, | |
| "grad_norm": 1.1072009229962188, | |
| "learning_rate": 8.869870511617662e-06, | |
| "loss": 0.7613, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.5509118541033434, | |
| "grad_norm": 1.257614504759535, | |
| "learning_rate": 8.845411061239208e-06, | |
| "loss": 0.7556, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.5516717325227963, | |
| "grad_norm": 1.0849417345257606, | |
| "learning_rate": 8.82095860969277e-06, | |
| "loss": 0.7497, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.5524316109422492, | |
| "grad_norm": 1.1031272827034502, | |
| "learning_rate": 8.796513305203049e-06, | |
| "loss": 0.7658, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.5531914893617021, | |
| "grad_norm": 1.3775242365484175, | |
| "learning_rate": 8.772075295951416e-06, | |
| "loss": 0.6591, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.5539513677811551, | |
| "grad_norm": 0.9888383719831942, | |
| "learning_rate": 8.74764473007504e-06, | |
| "loss": 0.64, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.5547112462006079, | |
| "grad_norm": 1.2075124227748892, | |
| "learning_rate": 8.723221755665948e-06, | |
| "loss": 0.7865, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.5554711246200608, | |
| "grad_norm": 1.009882422048455, | |
| "learning_rate": 8.698806520770161e-06, | |
| "loss": 0.7017, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.5562310030395137, | |
| "grad_norm": 1.089098732822153, | |
| "learning_rate": 8.674399173386779e-06, | |
| "loss": 0.7653, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.5569908814589666, | |
| "grad_norm": 0.9066239491102479, | |
| "learning_rate": 8.6499998614671e-06, | |
| "loss": 0.7083, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.5577507598784195, | |
| "grad_norm": 1.290321055194373, | |
| "learning_rate": 8.625608732913702e-06, | |
| "loss": 0.7564, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.5585106382978723, | |
| "grad_norm": 1.154000793793768, | |
| "learning_rate": 8.60122593557956e-06, | |
| "loss": 0.7446, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.5592705167173252, | |
| "grad_norm": 1.2153379132551656, | |
| "learning_rate": 8.576851617267151e-06, | |
| "loss": 0.6685, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.5600303951367781, | |
| "grad_norm": 1.0516830600721567, | |
| "learning_rate": 8.55248592572755e-06, | |
| "loss": 0.73, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.560790273556231, | |
| "grad_norm": 1.17632375499647, | |
| "learning_rate": 8.528129008659543e-06, | |
| "loss": 0.8865, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.5615501519756839, | |
| "grad_norm": 1.3855197095555856, | |
| "learning_rate": 8.50378101370873e-06, | |
| "loss": 0.7419, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.5623100303951368, | |
| "grad_norm": 1.1253254334471747, | |
| "learning_rate": 8.479442088466612e-06, | |
| "loss": 0.6986, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.5630699088145896, | |
| "grad_norm": 1.4826086172156083, | |
| "learning_rate": 8.455112380469728e-06, | |
| "loss": 0.7005, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.5638297872340425, | |
| "grad_norm": 1.3145066783010508, | |
| "learning_rate": 8.430792037198738e-06, | |
| "loss": 0.6741, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.5645896656534954, | |
| "grad_norm": 0.9905077751895227, | |
| "learning_rate": 8.406481206077535e-06, | |
| "loss": 0.6934, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.5653495440729484, | |
| "grad_norm": 1.3094343297026505, | |
| "learning_rate": 8.382180034472353e-06, | |
| "loss": 0.7489, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.5661094224924013, | |
| "grad_norm": 1.1882315623861832, | |
| "learning_rate": 8.357888669690876e-06, | |
| "loss": 0.7956, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.5668693009118541, | |
| "grad_norm": 1.4457022450712465, | |
| "learning_rate": 8.333607258981328e-06, | |
| "loss": 0.6947, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.567629179331307, | |
| "grad_norm": 1.2995638243894103, | |
| "learning_rate": 8.309335949531609e-06, | |
| "loss": 0.7834, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.5683890577507599, | |
| "grad_norm": 1.0908875056110399, | |
| "learning_rate": 8.285074888468385e-06, | |
| "loss": 0.6873, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.5691489361702128, | |
| "grad_norm": 1.1786022384380306, | |
| "learning_rate": 8.260824222856189e-06, | |
| "loss": 0.7755, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.5699088145896657, | |
| "grad_norm": 1.2349021091009151, | |
| "learning_rate": 8.236584099696553e-06, | |
| "loss": 0.6424, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.5706686930091185, | |
| "grad_norm": 1.3167496080043644, | |
| "learning_rate": 8.212354665927088e-06, | |
| "loss": 0.8825, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.5714285714285714, | |
| "grad_norm": 1.1979007085518116, | |
| "learning_rate": 8.188136068420627e-06, | |
| "loss": 0.7896, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.5721884498480243, | |
| "grad_norm": 1.3420823833862474, | |
| "learning_rate": 8.163928453984298e-06, | |
| "loss": 0.7029, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.5729483282674772, | |
| "grad_norm": 1.188625125713616, | |
| "learning_rate": 8.139731969358665e-06, | |
| "loss": 0.6724, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.5737082066869301, | |
| "grad_norm": 1.2215274657228343, | |
| "learning_rate": 8.115546761216822e-06, | |
| "loss": 0.7618, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.574468085106383, | |
| "grad_norm": 1.3006757448582367, | |
| "learning_rate": 8.091372976163496e-06, | |
| "loss": 0.8029, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.5752279635258358, | |
| "grad_norm": 1.270532871907162, | |
| "learning_rate": 8.067210760734199e-06, | |
| "loss": 0.7028, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.5759878419452887, | |
| "grad_norm": 1.2247997142405294, | |
| "learning_rate": 8.043060261394283e-06, | |
| "loss": 0.7048, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.5767477203647416, | |
| "grad_norm": 1.1244333436374239, | |
| "learning_rate": 8.01892162453809e-06, | |
| "loss": 0.7624, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.5775075987841946, | |
| "grad_norm": 0.9927880065563417, | |
| "learning_rate": 7.994794996488055e-06, | |
| "loss": 0.7434, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.5782674772036475, | |
| "grad_norm": 1.5224981732373568, | |
| "learning_rate": 7.970680523493817e-06, | |
| "loss": 0.8262, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.5790273556231003, | |
| "grad_norm": 1.2602357374852493, | |
| "learning_rate": 7.94657835173134e-06, | |
| "loss": 0.7799, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.5797872340425532, | |
| "grad_norm": 1.1866650538052121, | |
| "learning_rate": 7.922488627302016e-06, | |
| "loss": 0.8345, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.5805471124620061, | |
| "grad_norm": 1.0768432555260514, | |
| "learning_rate": 7.898411496231781e-06, | |
| "loss": 0.661, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.581306990881459, | |
| "grad_norm": 1.2649825945795095, | |
| "learning_rate": 7.874347104470234e-06, | |
| "loss": 0.7474, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.5820668693009119, | |
| "grad_norm": 1.0346170500149776, | |
| "learning_rate": 7.85029559788976e-06, | |
| "loss": 0.7371, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.5828267477203647, | |
| "grad_norm": 1.1406449706486834, | |
| "learning_rate": 7.82625712228463e-06, | |
| "loss": 0.6572, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.5835866261398176, | |
| "grad_norm": 1.2227968856980136, | |
| "learning_rate": 7.802231823370126e-06, | |
| "loss": 0.7175, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.5843465045592705, | |
| "grad_norm": 1.285320478106064, | |
| "learning_rate": 7.778219846781654e-06, | |
| "loss": 0.6638, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.5851063829787234, | |
| "grad_norm": 1.3183662162925285, | |
| "learning_rate": 7.754221338073863e-06, | |
| "loss": 0.7502, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.5858662613981763, | |
| "grad_norm": 1.2309123026301343, | |
| "learning_rate": 7.730236442719774e-06, | |
| "loss": 0.9027, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.5866261398176292, | |
| "grad_norm": 1.0844541062351467, | |
| "learning_rate": 7.706265306109872e-06, | |
| "loss": 0.7491, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.587386018237082, | |
| "grad_norm": 1.1852086990944102, | |
| "learning_rate": 7.682308073551252e-06, | |
| "loss": 0.8136, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.5881458966565349, | |
| "grad_norm": 1.1115228878509833, | |
| "learning_rate": 7.658364890266713e-06, | |
| "loss": 0.712, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.5889057750759878, | |
| "grad_norm": 1.1923503953131462, | |
| "learning_rate": 7.634435901393899e-06, | |
| "loss": 0.6844, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.5896656534954408, | |
| "grad_norm": 1.1692443969564128, | |
| "learning_rate": 7.6105212519844195e-06, | |
| "loss": 0.849, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.5904255319148937, | |
| "grad_norm": 1.24834798177526, | |
| "learning_rate": 7.586621087002945e-06, | |
| "loss": 0.8051, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.5911854103343465, | |
| "grad_norm": 1.3194672849667002, | |
| "learning_rate": 7.5627355513263545e-06, | |
| "loss": 0.6844, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.5919452887537994, | |
| "grad_norm": 1.3435025152474591, | |
| "learning_rate": 7.5388647897428445e-06, | |
| "loss": 0.7459, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.5927051671732523, | |
| "grad_norm": 1.1119729702866108, | |
| "learning_rate": 7.51500894695106e-06, | |
| "loss": 0.774, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.5934650455927052, | |
| "grad_norm": 1.0500200542615716, | |
| "learning_rate": 7.491168167559208e-06, | |
| "loss": 0.8613, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.5942249240121581, | |
| "grad_norm": 1.2721940570037045, | |
| "learning_rate": 7.467342596084179e-06, | |
| "loss": 0.6858, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.5949848024316109, | |
| "grad_norm": 1.264473615900847, | |
| "learning_rate": 7.443532376950688e-06, | |
| "loss": 0.7055, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.5957446808510638, | |
| "grad_norm": 1.2822923733704512, | |
| "learning_rate": 7.419737654490379e-06, | |
| "loss": 0.6221, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.5965045592705167, | |
| "grad_norm": 1.2198141319049045, | |
| "learning_rate": 7.3959585729409664e-06, | |
| "loss": 0.8043, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.5972644376899696, | |
| "grad_norm": 1.1915245660350762, | |
| "learning_rate": 7.372195276445349e-06, | |
| "loss": 0.7993, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.5980243161094225, | |
| "grad_norm": 1.1390719963097689, | |
| "learning_rate": 7.34844790905074e-06, | |
| "loss": 0.6499, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.5987841945288754, | |
| "grad_norm": 1.0307818422771091, | |
| "learning_rate": 7.324716614707794e-06, | |
| "loss": 0.7866, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.5995440729483282, | |
| "grad_norm": 1.2070328881455736, | |
| "learning_rate": 7.301001537269736e-06, | |
| "loss": 0.7185, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.6003039513677811, | |
| "grad_norm": 1.2634060585798093, | |
| "learning_rate": 7.277302820491492e-06, | |
| "loss": 0.6698, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.601063829787234, | |
| "grad_norm": 0.9920596477729915, | |
| "learning_rate": 7.253620608028811e-06, | |
| "loss": 0.684, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.601823708206687, | |
| "grad_norm": 1.4082709858032185, | |
| "learning_rate": 7.229955043437391e-06, | |
| "loss": 0.7587, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.6025835866261399, | |
| "grad_norm": 1.2100454605908713, | |
| "learning_rate": 7.206306270172019e-06, | |
| "loss": 0.6983, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.6033434650455927, | |
| "grad_norm": 1.2123839067384503, | |
| "learning_rate": 7.182674431585703e-06, | |
| "loss": 0.7878, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.6041033434650456, | |
| "grad_norm": 1.3832244686124757, | |
| "learning_rate": 7.1590596709287905e-06, | |
| "loss": 0.8391, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.6048632218844985, | |
| "grad_norm": 1.0293844200828812, | |
| "learning_rate": 7.135462131348107e-06, | |
| "loss": 0.6714, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.6056231003039514, | |
| "grad_norm": 1.097796641984852, | |
| "learning_rate": 7.111881955886093e-06, | |
| "loss": 0.7537, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.6063829787234043, | |
| "grad_norm": 1.2278466192430812, | |
| "learning_rate": 7.088319287479919e-06, | |
| "loss": 0.7397, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.6071428571428571, | |
| "grad_norm": 1.2153016660952125, | |
| "learning_rate": 7.064774268960654e-06, | |
| "loss": 0.7042, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.60790273556231, | |
| "grad_norm": 1.2555748053142877, | |
| "learning_rate": 7.041247043052357e-06, | |
| "loss": 0.7871, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.6086626139817629, | |
| "grad_norm": 1.167886711573254, | |
| "learning_rate": 7.017737752371243e-06, | |
| "loss": 0.735, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.6094224924012158, | |
| "grad_norm": 1.1702855638638483, | |
| "learning_rate": 6.994246539424809e-06, | |
| "loss": 0.783, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.6101823708206687, | |
| "grad_norm": 1.0941107779659658, | |
| "learning_rate": 6.970773546610958e-06, | |
| "loss": 0.6766, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.6109422492401215, | |
| "grad_norm": 1.0437913865718234, | |
| "learning_rate": 6.947318916217167e-06, | |
| "loss": 0.7592, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.6117021276595744, | |
| "grad_norm": 1.310576788570406, | |
| "learning_rate": 6.923882790419585e-06, | |
| "loss": 0.7761, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.6124620060790273, | |
| "grad_norm": 1.311391687703879, | |
| "learning_rate": 6.9004653112822e-06, | |
| "loss": 0.7195, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.6132218844984803, | |
| "grad_norm": 1.3595579761137682, | |
| "learning_rate": 6.87706662075597e-06, | |
| "loss": 0.7924, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.6139817629179332, | |
| "grad_norm": 1.1956999532609014, | |
| "learning_rate": 6.853686860677949e-06, | |
| "loss": 0.7289, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.6147416413373861, | |
| "grad_norm": 1.0923603201365821, | |
| "learning_rate": 6.830326172770463e-06, | |
| "loss": 0.7275, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.6155015197568389, | |
| "grad_norm": 1.2916270282134616, | |
| "learning_rate": 6.806984698640202e-06, | |
| "loss": 0.8375, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.6162613981762918, | |
| "grad_norm": 1.2736014176912895, | |
| "learning_rate": 6.783662579777402e-06, | |
| "loss": 0.7025, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.6170212765957447, | |
| "grad_norm": 1.2196965360665537, | |
| "learning_rate": 6.760359957554964e-06, | |
| "loss": 0.6562, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.6177811550151976, | |
| "grad_norm": 1.088609258008166, | |
| "learning_rate": 6.737076973227614e-06, | |
| "loss": 0.7689, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.6185410334346505, | |
| "grad_norm": 1.1774600538835474, | |
| "learning_rate": 6.713813767931032e-06, | |
| "loss": 0.6945, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.6193009118541033, | |
| "grad_norm": 1.2913778597854488, | |
| "learning_rate": 6.690570482681003e-06, | |
| "loss": 0.6553, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.6200607902735562, | |
| "grad_norm": 1.1168090608246355, | |
| "learning_rate": 6.66734725837256e-06, | |
| "loss": 0.7582, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.6208206686930091, | |
| "grad_norm": 1.1744984936995, | |
| "learning_rate": 6.6441442357791315e-06, | |
| "loss": 0.6204, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.621580547112462, | |
| "grad_norm": 1.2042594690499488, | |
| "learning_rate": 6.620961555551697e-06, | |
| "loss": 0.6974, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.6223404255319149, | |
| "grad_norm": 1.1503150036311205, | |
| "learning_rate": 6.597799358217915e-06, | |
| "loss": 0.6136, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.6231003039513677, | |
| "grad_norm": 1.165428483340169, | |
| "learning_rate": 6.574657784181287e-06, | |
| "loss": 0.6935, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.6238601823708206, | |
| "grad_norm": 1.1042604059305257, | |
| "learning_rate": 6.551536973720298e-06, | |
| "loss": 0.7891, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.6246200607902735, | |
| "grad_norm": 1.2253231220041532, | |
| "learning_rate": 6.5284370669875706e-06, | |
| "loss": 0.714, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.6253799392097265, | |
| "grad_norm": 1.1629047141638622, | |
| "learning_rate": 6.505358204009018e-06, | |
| "loss": 0.6389, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.6261398176291794, | |
| "grad_norm": 1.2546825120560545, | |
| "learning_rate": 6.482300524682985e-06, | |
| "loss": 0.8111, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.6268996960486323, | |
| "grad_norm": 1.0911600242647146, | |
| "learning_rate": 6.459264168779416e-06, | |
| "loss": 0.7797, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.6276595744680851, | |
| "grad_norm": 1.2818923892644603, | |
| "learning_rate": 6.436249275938977e-06, | |
| "loss": 0.7596, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.628419452887538, | |
| "grad_norm": 1.3150602427065075, | |
| "learning_rate": 6.413255985672262e-06, | |
| "loss": 0.6692, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.6291793313069909, | |
| "grad_norm": 1.0980390599670196, | |
| "learning_rate": 6.390284437358889e-06, | |
| "loss": 0.7509, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.6299392097264438, | |
| "grad_norm": 1.2321048428120058, | |
| "learning_rate": 6.367334770246692e-06, | |
| "loss": 0.6638, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.6306990881458967, | |
| "grad_norm": 1.4312551669335765, | |
| "learning_rate": 6.344407123450867e-06, | |
| "loss": 0.7158, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.6314589665653495, | |
| "grad_norm": 1.4602813993317219, | |
| "learning_rate": 6.321501635953124e-06, | |
| "loss": 0.73, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.6322188449848024, | |
| "grad_norm": 1.1853692940585625, | |
| "learning_rate": 6.298618446600856e-06, | |
| "loss": 0.6758, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.6329787234042553, | |
| "grad_norm": 1.3909305621367731, | |
| "learning_rate": 6.275757694106286e-06, | |
| "loss": 0.7104, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.6337386018237082, | |
| "grad_norm": 1.1481560361251812, | |
| "learning_rate": 6.252919517045626e-06, | |
| "loss": 0.7532, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.6344984802431611, | |
| "grad_norm": 1.2378067238807093, | |
| "learning_rate": 6.230104053858248e-06, | |
| "loss": 0.7815, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.6352583586626139, | |
| "grad_norm": 1.253493828906761, | |
| "learning_rate": 6.207311442845834e-06, | |
| "loss": 0.7082, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.6360182370820668, | |
| "grad_norm": 1.1573953215101316, | |
| "learning_rate": 6.1845418221715455e-06, | |
| "loss": 0.8078, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.6367781155015197, | |
| "grad_norm": 1.2040263280617385, | |
| "learning_rate": 6.161795329859184e-06, | |
| "loss": 0.6491, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.6375379939209727, | |
| "grad_norm": 1.1484083716803917, | |
| "learning_rate": 6.13907210379234e-06, | |
| "loss": 0.7016, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.6382978723404256, | |
| "grad_norm": 1.260925617274379, | |
| "learning_rate": 6.116372281713581e-06, | |
| "loss": 0.7287, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.6390577507598785, | |
| "grad_norm": 1.2607710682230502, | |
| "learning_rate": 6.093696001223609e-06, | |
| "loss": 0.7815, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.6398176291793313, | |
| "grad_norm": 1.259986817129464, | |
| "learning_rate": 6.071043399780412e-06, | |
| "loss": 0.7167, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.6405775075987842, | |
| "grad_norm": 1.2126297153287937, | |
| "learning_rate": 6.0484146146984475e-06, | |
| "loss": 0.7402, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.6413373860182371, | |
| "grad_norm": 0.9768707453059862, | |
| "learning_rate": 6.025809783147803e-06, | |
| "loss": 0.7075, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.64209726443769, | |
| "grad_norm": 1.3626717412315223, | |
| "learning_rate": 6.00322904215336e-06, | |
| "loss": 0.7943, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.6428571428571429, | |
| "grad_norm": 1.167698443663192, | |
| "learning_rate": 5.980672528593981e-06, | |
| "loss": 0.7003, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.6436170212765957, | |
| "grad_norm": 1.3176515684465449, | |
| "learning_rate": 5.95814037920166e-06, | |
| "loss": 0.7383, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.6443768996960486, | |
| "grad_norm": 1.0153318496532626, | |
| "learning_rate": 5.935632730560702e-06, | |
| "loss": 0.7938, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.6451367781155015, | |
| "grad_norm": 1.0562907000174848, | |
| "learning_rate": 5.913149719106896e-06, | |
| "loss": 0.6615, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.6458966565349544, | |
| "grad_norm": 1.1208567735144297, | |
| "learning_rate": 5.89069148112668e-06, | |
| "loss": 0.7588, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.6466565349544073, | |
| "grad_norm": 1.3042757593347651, | |
| "learning_rate": 5.868258152756336e-06, | |
| "loss": 0.8241, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.6474164133738601, | |
| "grad_norm": 1.155165161224719, | |
| "learning_rate": 5.845849869981137e-06, | |
| "loss": 0.6846, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.648176291793313, | |
| "grad_norm": 1.14891850218874, | |
| "learning_rate": 5.823466768634538e-06, | |
| "loss": 0.7196, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.648936170212766, | |
| "grad_norm": 1.3595946591316925, | |
| "learning_rate": 5.801108984397355e-06, | |
| "loss": 0.6461, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.6496960486322189, | |
| "grad_norm": 0.9633615668787747, | |
| "learning_rate": 5.778776652796936e-06, | |
| "loss": 0.6759, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.6504559270516718, | |
| "grad_norm": 1.046352027263796, | |
| "learning_rate": 5.7564699092063345e-06, | |
| "loss": 0.731, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.6512158054711246, | |
| "grad_norm": 1.2997372702293264, | |
| "learning_rate": 5.734188888843512e-06, | |
| "loss": 0.6852, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.6519756838905775, | |
| "grad_norm": 1.043062250160439, | |
| "learning_rate": 5.711933726770487e-06, | |
| "loss": 0.6772, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.6527355623100304, | |
| "grad_norm": 1.1614935358410903, | |
| "learning_rate": 5.689704557892528e-06, | |
| "loss": 0.727, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.6534954407294833, | |
| "grad_norm": 1.0958750355422942, | |
| "learning_rate": 5.667501516957365e-06, | |
| "loss": 0.7178, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.6542553191489362, | |
| "grad_norm": 1.0427986764230532, | |
| "learning_rate": 5.645324738554321e-06, | |
| "loss": 0.776, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.6550151975683891, | |
| "grad_norm": 1.2036971137615013, | |
| "learning_rate": 5.623174357113528e-06, | |
| "loss": 0.8008, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.6557750759878419, | |
| "grad_norm": 1.2658851178060568, | |
| "learning_rate": 5.601050506905114e-06, | |
| "loss": 0.8018, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.6565349544072948, | |
| "grad_norm": 1.1341797714148785, | |
| "learning_rate": 5.578953322038372e-06, | |
| "loss": 0.7381, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.6572948328267477, | |
| "grad_norm": 1.2554290593978301, | |
| "learning_rate": 5.556882936460966e-06, | |
| "loss": 0.7147, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.6580547112462006, | |
| "grad_norm": 1.137623406060332, | |
| "learning_rate": 5.5348394839580986e-06, | |
| "loss": 0.7121, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.6588145896656535, | |
| "grad_norm": 1.116518554405759, | |
| "learning_rate": 5.51282309815172e-06, | |
| "loss": 0.6151, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.6595744680851063, | |
| "grad_norm": 1.196121149255991, | |
| "learning_rate": 5.4908339124997005e-06, | |
| "loss": 0.771, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.6603343465045592, | |
| "grad_norm": 1.2332323260533635, | |
| "learning_rate": 5.468872060295034e-06, | |
| "loss": 0.7407, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.6610942249240122, | |
| "grad_norm": 1.1332751696640067, | |
| "learning_rate": 5.446937674665034e-06, | |
| "loss": 0.6792, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.6618541033434651, | |
| "grad_norm": 1.2039372349859874, | |
| "learning_rate": 5.425030888570506e-06, | |
| "loss": 0.7186, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.662613981762918, | |
| "grad_norm": 1.2857664229482582, | |
| "learning_rate": 5.403151834804951e-06, | |
| "loss": 0.6679, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.6633738601823708, | |
| "grad_norm": 1.1201361713120777, | |
| "learning_rate": 5.381300645993779e-06, | |
| "loss": 0.7633, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.6641337386018237, | |
| "grad_norm": 1.1751970613758527, | |
| "learning_rate": 5.359477454593483e-06, | |
| "loss": 0.8086, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.6648936170212766, | |
| "grad_norm": 1.1304475782119534, | |
| "learning_rate": 5.337682392890832e-06, | |
| "loss": 0.7016, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.6656534954407295, | |
| "grad_norm": 1.19847443022572, | |
| "learning_rate": 5.3159155930021e-06, | |
| "loss": 0.81, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.6664133738601824, | |
| "grad_norm": 1.4329627129931448, | |
| "learning_rate": 5.294177186872227e-06, | |
| "loss": 0.6243, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.6671732522796353, | |
| "grad_norm": 1.2445918388568802, | |
| "learning_rate": 5.27246730627404e-06, | |
| "loss": 0.7642, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.6679331306990881, | |
| "grad_norm": 1.2668801988474625, | |
| "learning_rate": 5.250786082807462e-06, | |
| "loss": 0.8208, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.668693009118541, | |
| "grad_norm": 1.2102376823655343, | |
| "learning_rate": 5.229133647898696e-06, | |
| "loss": 0.8348, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.6694528875379939, | |
| "grad_norm": 1.223374929199789, | |
| "learning_rate": 5.207510132799436e-06, | |
| "loss": 0.7789, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.6702127659574468, | |
| "grad_norm": 1.0234540918241117, | |
| "learning_rate": 5.185915668586066e-06, | |
| "loss": 0.6679, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.6709726443768997, | |
| "grad_norm": 1.0322760369617117, | |
| "learning_rate": 5.164350386158881e-06, | |
| "loss": 0.6943, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.6717325227963525, | |
| "grad_norm": 1.173841948267802, | |
| "learning_rate": 5.1428144162412815e-06, | |
| "loss": 0.7495, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.6724924012158054, | |
| "grad_norm": 1.124404073797988, | |
| "learning_rate": 5.121307889378975e-06, | |
| "loss": 0.7738, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.6732522796352584, | |
| "grad_norm": 1.2371565869329573, | |
| "learning_rate": 5.099830935939203e-06, | |
| "loss": 0.7004, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.6740121580547113, | |
| "grad_norm": 1.2873847039390403, | |
| "learning_rate": 5.078383686109927e-06, | |
| "loss": 0.6904, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.6747720364741642, | |
| "grad_norm": 1.1013369935703738, | |
| "learning_rate": 5.05696626989907e-06, | |
| "loss": 0.7856, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.675531914893617, | |
| "grad_norm": 1.3064934842705425, | |
| "learning_rate": 5.035578817133692e-06, | |
| "loss": 0.7212, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.6762917933130699, | |
| "grad_norm": 1.1639760405567843, | |
| "learning_rate": 5.01422145745924e-06, | |
| "loss": 0.6847, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.6770516717325228, | |
| "grad_norm": 1.2728283131263647, | |
| "learning_rate": 4.992894320338727e-06, | |
| "loss": 0.7246, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.6778115501519757, | |
| "grad_norm": 1.4427024265465846, | |
| "learning_rate": 4.971597535051977e-06, | |
| "loss": 0.8047, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.6785714285714286, | |
| "grad_norm": 1.2535533501701592, | |
| "learning_rate": 4.9503312306948294e-06, | |
| "loss": 0.6972, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.6793313069908815, | |
| "grad_norm": 1.2597979840226237, | |
| "learning_rate": 4.929095536178347e-06, | |
| "loss": 0.7193, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.6800911854103343, | |
| "grad_norm": 1.3536845161136342, | |
| "learning_rate": 4.907890580228042e-06, | |
| "loss": 0.7364, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.6808510638297872, | |
| "grad_norm": 1.000387252276347, | |
| "learning_rate": 4.886716491383111e-06, | |
| "loss": 0.7312, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.6816109422492401, | |
| "grad_norm": 1.2185791533591015, | |
| "learning_rate": 4.865573397995626e-06, | |
| "loss": 0.7753, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.682370820668693, | |
| "grad_norm": 1.1540470206905242, | |
| "learning_rate": 4.844461428229782e-06, | |
| "loss": 0.6833, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.6831306990881459, | |
| "grad_norm": 1.0847038779391882, | |
| "learning_rate": 4.823380710061112e-06, | |
| "loss": 0.8202, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.6838905775075987, | |
| "grad_norm": 1.0624676376276718, | |
| "learning_rate": 4.802331371275703e-06, | |
| "loss": 0.7318, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.6846504559270516, | |
| "grad_norm": 1.1056352177211737, | |
| "learning_rate": 4.7813135394694235e-06, | |
| "loss": 0.6753, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.6854103343465046, | |
| "grad_norm": 1.027012418921038, | |
| "learning_rate": 4.760327342047167e-06, | |
| "loss": 0.6543, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.6861702127659575, | |
| "grad_norm": 1.1073964546196238, | |
| "learning_rate": 4.739372906222066e-06, | |
| "loss": 0.7075, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.6869300911854104, | |
| "grad_norm": 1.8976558771197096, | |
| "learning_rate": 4.718450359014713e-06, | |
| "loss": 0.7041, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.6876899696048632, | |
| "grad_norm": 1.1314716261386006, | |
| "learning_rate": 4.697559827252398e-06, | |
| "loss": 0.8258, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.6884498480243161, | |
| "grad_norm": 1.3752285695001372, | |
| "learning_rate": 4.676701437568354e-06, | |
| "loss": 0.7319, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.689209726443769, | |
| "grad_norm": 1.1153606176445532, | |
| "learning_rate": 4.655875316400974e-06, | |
| "loss": 0.6818, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.6899696048632219, | |
| "grad_norm": 1.247540301365437, | |
| "learning_rate": 4.635081589993033e-06, | |
| "loss": 0.666, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.6907294832826748, | |
| "grad_norm": 1.3806793377343507, | |
| "learning_rate": 4.614320384390959e-06, | |
| "loss": 0.7438, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.6914893617021277, | |
| "grad_norm": 1.2113224958655249, | |
| "learning_rate": 4.593591825444028e-06, | |
| "loss": 0.7667, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.6922492401215805, | |
| "grad_norm": 1.0754934888283707, | |
| "learning_rate": 4.572896038803622e-06, | |
| "loss": 0.7109, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.6930091185410334, | |
| "grad_norm": 1.139499393916106, | |
| "learning_rate": 4.552233149922483e-06, | |
| "loss": 0.7562, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.6937689969604863, | |
| "grad_norm": 1.0580722657468165, | |
| "learning_rate": 4.531603284053919e-06, | |
| "loss": 0.7017, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.6945288753799392, | |
| "grad_norm": 1.2681318218952722, | |
| "learning_rate": 4.51100656625106e-06, | |
| "loss": 0.7563, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.6952887537993921, | |
| "grad_norm": 1.1917920726999245, | |
| "learning_rate": 4.490443121366105e-06, | |
| "loss": 0.7806, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.6960486322188449, | |
| "grad_norm": 1.4256348736999676, | |
| "learning_rate": 4.4699130740495675e-06, | |
| "loss": 0.7653, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.6968085106382979, | |
| "grad_norm": 1.238249074703773, | |
| "learning_rate": 4.4494165487495035e-06, | |
| "loss": 0.714, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.6975683890577508, | |
| "grad_norm": 1.0639121224826957, | |
| "learning_rate": 4.428953669710764e-06, | |
| "loss": 0.7211, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.6983282674772037, | |
| "grad_norm": 1.077834657052238, | |
| "learning_rate": 4.40852456097426e-06, | |
| "loss": 0.7209, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.6990881458966566, | |
| "grad_norm": 1.2088067711137636, | |
| "learning_rate": 4.388129346376177e-06, | |
| "loss": 0.7891, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.6998480243161094, | |
| "grad_norm": 1.2614349989290463, | |
| "learning_rate": 4.367768149547256e-06, | |
| "loss": 0.8736, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.7006079027355623, | |
| "grad_norm": 1.1455490935379877, | |
| "learning_rate": 4.34744109391203e-06, | |
| "loss": 0.7103, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.7013677811550152, | |
| "grad_norm": 1.21220672817328, | |
| "learning_rate": 4.327148302688069e-06, | |
| "loss": 0.6477, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.7021276595744681, | |
| "grad_norm": 1.0089156867085385, | |
| "learning_rate": 4.306889898885241e-06, | |
| "loss": 0.6833, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.702887537993921, | |
| "grad_norm": 1.0830616921028922, | |
| "learning_rate": 4.286666005304971e-06, | |
| "loss": 0.6662, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.7036474164133738, | |
| "grad_norm": 1.2425754022522661, | |
| "learning_rate": 4.2664767445394965e-06, | |
| "loss": 0.7193, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.7044072948328267, | |
| "grad_norm": 0.9571870661730567, | |
| "learning_rate": 4.2463222389711045e-06, | |
| "loss": 0.7479, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.7051671732522796, | |
| "grad_norm": 1.2546613281824888, | |
| "learning_rate": 4.226202610771419e-06, | |
| "loss": 0.699, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.7059270516717325, | |
| "grad_norm": 1.203177011744604, | |
| "learning_rate": 4.206117981900636e-06, | |
| "loss": 0.7034, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.7066869300911854, | |
| "grad_norm": 1.4614456984954713, | |
| "learning_rate": 4.186068474106803e-06, | |
| "loss": 0.6281, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.7074468085106383, | |
| "grad_norm": 1.1112564161122254, | |
| "learning_rate": 4.16605420892506e-06, | |
| "loss": 0.6175, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.7082066869300911, | |
| "grad_norm": 1.4870586973587263, | |
| "learning_rate": 4.146075307676932e-06, | |
| "loss": 0.7443, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.708966565349544, | |
| "grad_norm": 1.1685460561679732, | |
| "learning_rate": 4.126131891469561e-06, | |
| "loss": 0.766, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.709726443768997, | |
| "grad_norm": 1.1755461287338689, | |
| "learning_rate": 4.10622408119499e-06, | |
| "loss": 0.7658, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.7104863221884499, | |
| "grad_norm": 1.227657821346577, | |
| "learning_rate": 4.086351997529441e-06, | |
| "loss": 0.7387, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.7112462006079028, | |
| "grad_norm": 1.1643470687339084, | |
| "learning_rate": 4.0665157609325565e-06, | |
| "loss": 0.6491, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.7120060790273556, | |
| "grad_norm": 1.0157944068621902, | |
| "learning_rate": 4.0467154916466835e-06, | |
| "loss": 0.7, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.7127659574468085, | |
| "grad_norm": 1.0922637554367447, | |
| "learning_rate": 4.026951309696152e-06, | |
| "loss": 0.8437, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.7135258358662614, | |
| "grad_norm": 1.2498441436632612, | |
| "learning_rate": 4.007223334886531e-06, | |
| "loss": 0.6498, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.7142857142857143, | |
| "grad_norm": 1.0885169001945274, | |
| "learning_rate": 3.98753168680392e-06, | |
| "loss": 0.8014, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.7150455927051672, | |
| "grad_norm": 1.2108303101901503, | |
| "learning_rate": 3.967876484814202e-06, | |
| "loss": 0.7179, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.71580547112462, | |
| "grad_norm": 1.2960693072143772, | |
| "learning_rate": 3.948257848062351e-06, | |
| "loss": 0.7397, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.7165653495440729, | |
| "grad_norm": 1.2860279517462083, | |
| "learning_rate": 3.92867589547167e-06, | |
| "loss": 0.7084, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.7173252279635258, | |
| "grad_norm": 1.1440482244947314, | |
| "learning_rate": 3.909130745743108e-06, | |
| "loss": 0.6967, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.7180851063829787, | |
| "grad_norm": 1.3027229388934198, | |
| "learning_rate": 3.889622517354523e-06, | |
| "loss": 0.735, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.7188449848024316, | |
| "grad_norm": 1.1589577616208895, | |
| "learning_rate": 3.870151328559956e-06, | |
| "loss": 0.7304, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.7196048632218845, | |
| "grad_norm": 1.2995337832311984, | |
| "learning_rate": 3.850717297388926e-06, | |
| "loss": 0.7018, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.7203647416413373, | |
| "grad_norm": 1.2818598901209235, | |
| "learning_rate": 3.831320541645717e-06, | |
| "loss": 0.7521, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.7211246200607903, | |
| "grad_norm": 1.1605357185557341, | |
| "learning_rate": 3.8119611789086576e-06, | |
| "loss": 0.781, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.7218844984802432, | |
| "grad_norm": 1.170912809986882, | |
| "learning_rate": 3.7926393265294016e-06, | |
| "loss": 0.7718, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.7226443768996961, | |
| "grad_norm": 1.2036775520143437, | |
| "learning_rate": 3.773355101632236e-06, | |
| "loss": 0.6547, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.723404255319149, | |
| "grad_norm": 1.1361160158286099, | |
| "learning_rate": 3.7541086211133504e-06, | |
| "loss": 0.7342, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.7241641337386018, | |
| "grad_norm": 1.369796822699439, | |
| "learning_rate": 3.734900001640135e-06, | |
| "loss": 0.7327, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.7249240121580547, | |
| "grad_norm": 1.3191104749352578, | |
| "learning_rate": 3.7157293596504863e-06, | |
| "loss": 0.668, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.7256838905775076, | |
| "grad_norm": 1.1615283279964927, | |
| "learning_rate": 3.696596811352087e-06, | |
| "loss": 0.6831, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.7264437689969605, | |
| "grad_norm": 1.1375417525773701, | |
| "learning_rate": 3.677502472721699e-06, | |
| "loss": 0.6982, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.7272036474164134, | |
| "grad_norm": 1.1141893215821372, | |
| "learning_rate": 3.6584464595044713e-06, | |
| "loss": 0.7249, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.7279635258358662, | |
| "grad_norm": 1.2133415778375047, | |
| "learning_rate": 3.6394288872132335e-06, | |
| "loss": 0.6245, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.7287234042553191, | |
| "grad_norm": 1.0069146427513542, | |
| "learning_rate": 3.6204498711278014e-06, | |
| "loss": 0.8016, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.729483282674772, | |
| "grad_norm": 1.2486337650913875, | |
| "learning_rate": 3.6015095262942578e-06, | |
| "loss": 0.7875, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.7302431610942249, | |
| "grad_norm": 1.1715884497406666, | |
| "learning_rate": 3.5826079675242897e-06, | |
| "loss": 0.7151, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.7310030395136778, | |
| "grad_norm": 1.2868137779725173, | |
| "learning_rate": 3.563745309394452e-06, | |
| "loss": 0.8482, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.7317629179331308, | |
| "grad_norm": 1.2996331194254636, | |
| "learning_rate": 3.54492166624551e-06, | |
| "loss": 0.7029, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.7325227963525835, | |
| "grad_norm": 1.5180714540265228, | |
| "learning_rate": 3.5261371521817247e-06, | |
| "loss": 0.8199, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.7332826747720365, | |
| "grad_norm": 1.1831367421632952, | |
| "learning_rate": 3.507391881070161e-06, | |
| "loss": 0.7105, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.7340425531914894, | |
| "grad_norm": 1.1780401558346056, | |
| "learning_rate": 3.4886859665400075e-06, | |
| "loss": 0.6662, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.7348024316109423, | |
| "grad_norm": 1.289600801527965, | |
| "learning_rate": 3.470019521981882e-06, | |
| "loss": 0.7387, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.7355623100303952, | |
| "grad_norm": 1.4131039933070813, | |
| "learning_rate": 3.4513926605471504e-06, | |
| "loss": 0.7043, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.736322188449848, | |
| "grad_norm": 1.3353693809977703, | |
| "learning_rate": 3.4328054951472267e-06, | |
| "loss": 0.6913, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.7370820668693009, | |
| "grad_norm": 1.2903869977751055, | |
| "learning_rate": 3.4142581384528984e-06, | |
| "loss": 0.7231, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.7378419452887538, | |
| "grad_norm": 1.260612601806717, | |
| "learning_rate": 3.395750702893651e-06, | |
| "loss": 0.8436, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.7386018237082067, | |
| "grad_norm": 1.227985828608111, | |
| "learning_rate": 3.377283300656967e-06, | |
| "loss": 0.6392, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.7393617021276596, | |
| "grad_norm": 1.194236961902421, | |
| "learning_rate": 3.358856043687666e-06, | |
| "loss": 0.6824, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.7401215805471124, | |
| "grad_norm": 1.2605763662562228, | |
| "learning_rate": 3.340469043687213e-06, | |
| "loss": 0.7581, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.7408814589665653, | |
| "grad_norm": 1.118925286121007, | |
| "learning_rate": 3.322122412113047e-06, | |
| "loss": 0.7133, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.7416413373860182, | |
| "grad_norm": 1.3377810603981355, | |
| "learning_rate": 3.3038162601778944e-06, | |
| "loss": 0.7413, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 0.7424012158054711, | |
| "grad_norm": 1.2318066578086257, | |
| "learning_rate": 3.285550698849117e-06, | |
| "loss": 0.7611, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 0.743161094224924, | |
| "grad_norm": 1.2599491451130518, | |
| "learning_rate": 3.2673258388480235e-06, | |
| "loss": 0.6505, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 0.743920972644377, | |
| "grad_norm": 1.2486984071112337, | |
| "learning_rate": 3.2491417906491916e-06, | |
| "loss": 0.6405, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 0.7446808510638298, | |
| "grad_norm": 1.2272933803159967, | |
| "learning_rate": 3.230998664479823e-06, | |
| "loss": 0.6533, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.7454407294832827, | |
| "grad_norm": 1.0912951800923494, | |
| "learning_rate": 3.212896570319045e-06, | |
| "loss": 0.7034, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 0.7462006079027356, | |
| "grad_norm": 1.2143540627567597, | |
| "learning_rate": 3.194835617897273e-06, | |
| "loss": 0.6325, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 0.7469604863221885, | |
| "grad_norm": 1.2925860196438805, | |
| "learning_rate": 3.176815916695518e-06, | |
| "loss": 0.7295, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 0.7477203647416414, | |
| "grad_norm": 1.2053005722857089, | |
| "learning_rate": 3.158837575944751e-06, | |
| "loss": 0.7487, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 0.7484802431610942, | |
| "grad_norm": 1.4304331652404048, | |
| "learning_rate": 3.1409007046252114e-06, | |
| "loss": 0.6941, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.7492401215805471, | |
| "grad_norm": 1.4116481758566786, | |
| "learning_rate": 3.123005411465766e-06, | |
| "loss": 0.7205, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.1123499901998892, | |
| "learning_rate": 3.105151804943256e-06, | |
| "loss": 0.7295, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 0.7507598784194529, | |
| "grad_norm": 1.2996425591351362, | |
| "learning_rate": 3.087339993281816e-06, | |
| "loss": 0.6778, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 0.7515197568389058, | |
| "grad_norm": 1.0787781457697156, | |
| "learning_rate": 3.06957008445223e-06, | |
| "loss": 0.6926, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 0.7522796352583586, | |
| "grad_norm": 1.1147980887577802, | |
| "learning_rate": 3.0518421861712845e-06, | |
| "loss": 0.7623, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.7530395136778115, | |
| "grad_norm": 1.0683957932667802, | |
| "learning_rate": 3.0341564059011086e-06, | |
| "loss": 0.6594, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 0.7537993920972644, | |
| "grad_norm": 1.1656144776647168, | |
| "learning_rate": 3.0165128508485166e-06, | |
| "loss": 0.7066, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 0.7545592705167173, | |
| "grad_norm": 1.0580108589989574, | |
| "learning_rate": 2.9989116279643637e-06, | |
| "loss": 0.6096, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 0.7553191489361702, | |
| "grad_norm": 1.1284597442612554, | |
| "learning_rate": 2.9813528439429074e-06, | |
| "loss": 0.7591, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 0.756079027355623, | |
| "grad_norm": 1.318033685959179, | |
| "learning_rate": 2.9638366052211387e-06, | |
| "loss": 0.7558, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.756838905775076, | |
| "grad_norm": 1.3881057337826526, | |
| "learning_rate": 2.946363017978159e-06, | |
| "loss": 0.7554, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 0.7575987841945289, | |
| "grad_norm": 1.3245947059991736, | |
| "learning_rate": 2.9289321881345257e-06, | |
| "loss": 0.8152, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 0.7583586626139818, | |
| "grad_norm": 1.2707332363810775, | |
| "learning_rate": 2.911544221351608e-06, | |
| "loss": 0.6816, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 0.7591185410334347, | |
| "grad_norm": 1.0916738472645218, | |
| "learning_rate": 2.894199223030948e-06, | |
| "loss": 0.7912, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 0.7598784194528876, | |
| "grad_norm": 1.1517704317865625, | |
| "learning_rate": 2.87689729831363e-06, | |
| "loss": 0.6742, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.7606382978723404, | |
| "grad_norm": 1.0066925047615187, | |
| "learning_rate": 2.8596385520796365e-06, | |
| "loss": 0.6901, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 0.7613981762917933, | |
| "grad_norm": 1.2349655633120382, | |
| "learning_rate": 2.8424230889472047e-06, | |
| "loss": 0.6515, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 0.7621580547112462, | |
| "grad_norm": 1.3035635090512518, | |
| "learning_rate": 2.825251013272212e-06, | |
| "loss": 0.8545, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 0.7629179331306991, | |
| "grad_norm": 1.2820436196834362, | |
| "learning_rate": 2.8081224291475216e-06, | |
| "loss": 0.7799, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 0.763677811550152, | |
| "grad_norm": 1.2688390563574587, | |
| "learning_rate": 2.7910374404023634e-06, | |
| "loss": 0.6498, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.7644376899696048, | |
| "grad_norm": 1.0870955368736057, | |
| "learning_rate": 2.7739961506017075e-06, | |
| "loss": 0.733, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 0.7651975683890577, | |
| "grad_norm": 1.212560017988284, | |
| "learning_rate": 2.7569986630456334e-06, | |
| "loss": 0.6793, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 0.7659574468085106, | |
| "grad_norm": 1.016734646959674, | |
| "learning_rate": 2.740045080768694e-06, | |
| "loss": 0.713, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 0.7667173252279635, | |
| "grad_norm": 3.2134435348071664, | |
| "learning_rate": 2.7231355065392996e-06, | |
| "loss": 0.7609, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 0.7674772036474165, | |
| "grad_norm": 1.1610505850006103, | |
| "learning_rate": 2.706270042859108e-06, | |
| "loss": 0.7852, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.7682370820668692, | |
| "grad_norm": 1.0150686550499242, | |
| "learning_rate": 2.689448791962377e-06, | |
| "loss": 0.8489, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 0.7689969604863222, | |
| "grad_norm": 1.174208707707502, | |
| "learning_rate": 2.672671855815355e-06, | |
| "loss": 0.8007, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 0.7697568389057751, | |
| "grad_norm": 1.2724657285938812, | |
| "learning_rate": 2.6559393361156804e-06, | |
| "loss": 0.6549, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 0.770516717325228, | |
| "grad_norm": 1.1203430563853727, | |
| "learning_rate": 2.6392513342917327e-06, | |
| "loss": 0.665, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 0.7712765957446809, | |
| "grad_norm": 1.1008242233442587, | |
| "learning_rate": 2.6226079515020507e-06, | |
| "loss": 0.7485, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.7720364741641338, | |
| "grad_norm": 1.2491730086944808, | |
| "learning_rate": 2.606009288634689e-06, | |
| "loss": 0.6911, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 0.7727963525835866, | |
| "grad_norm": 1.099717158455084, | |
| "learning_rate": 2.5894554463066356e-06, | |
| "loss": 0.838, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 0.7735562310030395, | |
| "grad_norm": 1.1248949051678279, | |
| "learning_rate": 2.5729465248631733e-06, | |
| "loss": 0.7271, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 0.7743161094224924, | |
| "grad_norm": 1.1874313455652248, | |
| "learning_rate": 2.5564826243772965e-06, | |
| "loss": 0.7437, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 0.7750759878419453, | |
| "grad_norm": 1.1669464020145164, | |
| "learning_rate": 2.540063844649092e-06, | |
| "loss": 0.7192, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.7758358662613982, | |
| "grad_norm": 1.347838161058927, | |
| "learning_rate": 2.5236902852051314e-06, | |
| "loss": 0.6615, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 0.776595744680851, | |
| "grad_norm": 1.011115738383394, | |
| "learning_rate": 2.5073620452978708e-06, | |
| "loss": 0.7356, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 0.7773556231003039, | |
| "grad_norm": 1.2277769452354566, | |
| "learning_rate": 2.4910792239050575e-06, | |
| "loss": 0.7106, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 0.7781155015197568, | |
| "grad_norm": 1.4149928236566394, | |
| "learning_rate": 2.474841919729122e-06, | |
| "loss": 0.6847, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.7788753799392097, | |
| "grad_norm": 1.2582840049051507, | |
| "learning_rate": 2.458650231196572e-06, | |
| "loss": 0.7228, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.7796352583586627, | |
| "grad_norm": 1.143891253434914, | |
| "learning_rate": 2.4425042564574186e-06, | |
| "loss": 0.8087, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 0.7803951367781155, | |
| "grad_norm": 1.1424830410542548, | |
| "learning_rate": 2.4264040933845535e-06, | |
| "loss": 0.6357, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 0.7811550151975684, | |
| "grad_norm": 1.1213313864189427, | |
| "learning_rate": 2.410349839573175e-06, | |
| "loss": 0.7159, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 0.7819148936170213, | |
| "grad_norm": 1.2073235115642222, | |
| "learning_rate": 2.3943415923401923e-06, | |
| "loss": 0.6767, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 0.7826747720364742, | |
| "grad_norm": 1.242224091217183, | |
| "learning_rate": 2.3783794487236367e-06, | |
| "loss": 0.743, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.7834346504559271, | |
| "grad_norm": 1.1606870550362645, | |
| "learning_rate": 2.3624635054820633e-06, | |
| "loss": 0.7305, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 0.78419452887538, | |
| "grad_norm": 1.1279251371855563, | |
| "learning_rate": 2.346593859093974e-06, | |
| "loss": 0.7277, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 0.7849544072948328, | |
| "grad_norm": 1.1091723818584946, | |
| "learning_rate": 2.3307706057572354e-06, | |
| "loss": 0.7723, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 0.7857142857142857, | |
| "grad_norm": 1.005594148402337, | |
| "learning_rate": 2.3149938413884886e-06, | |
| "loss": 0.7327, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 0.7864741641337386, | |
| "grad_norm": 1.31360566780803, | |
| "learning_rate": 2.299263661622566e-06, | |
| "loss": 0.7552, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.7872340425531915, | |
| "grad_norm": 1.1464047056806725, | |
| "learning_rate": 2.2835801618119247e-06, | |
| "loss": 0.7453, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 0.7879939209726444, | |
| "grad_norm": 1.234696595716303, | |
| "learning_rate": 2.2679434370260457e-06, | |
| "loss": 0.657, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 0.7887537993920972, | |
| "grad_norm": 1.1788368602627377, | |
| "learning_rate": 2.2523535820508844e-06, | |
| "loss": 0.7874, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 0.7895136778115501, | |
| "grad_norm": 1.2154194669660434, | |
| "learning_rate": 2.2368106913882814e-06, | |
| "loss": 0.7317, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 0.790273556231003, | |
| "grad_norm": 1.3384825550557304, | |
| "learning_rate": 2.2213148592553847e-06, | |
| "loss": 0.6931, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.791033434650456, | |
| "grad_norm": 1.1740637829377065, | |
| "learning_rate": 2.205866179584084e-06, | |
| "loss": 0.6781, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 0.7917933130699089, | |
| "grad_norm": 1.278140285067226, | |
| "learning_rate": 2.190464746020452e-06, | |
| "loss": 0.7619, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 0.7925531914893617, | |
| "grad_norm": 1.0503911014434641, | |
| "learning_rate": 2.175110651924165e-06, | |
| "loss": 0.6628, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 0.7933130699088146, | |
| "grad_norm": 1.070947082050874, | |
| "learning_rate": 2.159803990367931e-06, | |
| "loss": 0.7243, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 0.7940729483282675, | |
| "grad_norm": 1.264263843996111, | |
| "learning_rate": 2.1445448541369396e-06, | |
| "loss": 0.7442, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.7948328267477204, | |
| "grad_norm": 1.1784957636314484, | |
| "learning_rate": 2.1293333357282954e-06, | |
| "loss": 0.7736, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 0.7955927051671733, | |
| "grad_norm": 1.2424604365979213, | |
| "learning_rate": 2.1141695273504503e-06, | |
| "loss": 0.7312, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 0.7963525835866262, | |
| "grad_norm": 1.3554995592661905, | |
| "learning_rate": 2.0990535209226547e-06, | |
| "loss": 0.8187, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 0.797112462006079, | |
| "grad_norm": 1.0975113033457113, | |
| "learning_rate": 2.083985408074396e-06, | |
| "loss": 0.747, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 0.7978723404255319, | |
| "grad_norm": 1.2355431683743945, | |
| "learning_rate": 2.068965280144837e-06, | |
| "loss": 0.6862, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.7986322188449848, | |
| "grad_norm": 1.334300030887364, | |
| "learning_rate": 2.0539932281822685e-06, | |
| "loss": 0.7611, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 0.7993920972644377, | |
| "grad_norm": 1.1897441427021445, | |
| "learning_rate": 2.0390693429435626e-06, | |
| "loss": 0.6955, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 0.8001519756838906, | |
| "grad_norm": 1.2428562040846367, | |
| "learning_rate": 2.024193714893614e-06, | |
| "loss": 0.7769, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 0.8009118541033434, | |
| "grad_norm": 1.2282682323170688, | |
| "learning_rate": 2.0093664342047903e-06, | |
| "loss": 0.7257, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 0.8016717325227963, | |
| "grad_norm": 1.1312051417179814, | |
| "learning_rate": 1.994587590756397e-06, | |
| "loss": 0.6742, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.8024316109422492, | |
| "grad_norm": 1.0949925321618537, | |
| "learning_rate": 1.979857274134115e-06, | |
| "loss": 0.6961, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 0.8031914893617021, | |
| "grad_norm": 1.1308446799141856, | |
| "learning_rate": 1.9651755736294785e-06, | |
| "loss": 0.6769, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 0.8039513677811551, | |
| "grad_norm": 1.2539871694443214, | |
| "learning_rate": 1.9505425782393117e-06, | |
| "loss": 0.775, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 0.8047112462006079, | |
| "grad_norm": 1.1471416294920718, | |
| "learning_rate": 1.9359583766652135e-06, | |
| "loss": 0.7379, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 0.8054711246200608, | |
| "grad_norm": 1.0191594422525745, | |
| "learning_rate": 1.9214230573129944e-06, | |
| "loss": 0.7257, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.8062310030395137, | |
| "grad_norm": 1.0362403221876686, | |
| "learning_rate": 1.9069367082921542e-06, | |
| "loss": 0.7224, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 0.8069908814589666, | |
| "grad_norm": 1.270369766920126, | |
| "learning_rate": 1.892499417415362e-06, | |
| "loss": 0.6208, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 0.8077507598784195, | |
| "grad_norm": 0.9932743461699112, | |
| "learning_rate": 1.87811127219789e-06, | |
| "loss": 0.7166, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 0.8085106382978723, | |
| "grad_norm": 1.07194345647361, | |
| "learning_rate": 1.8637723598571078e-06, | |
| "loss": 0.7073, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 0.8092705167173252, | |
| "grad_norm": 1.46794964875636, | |
| "learning_rate": 1.849482767311953e-06, | |
| "loss": 0.7569, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.8100303951367781, | |
| "grad_norm": 1.1325075299835068, | |
| "learning_rate": 1.8352425811823893e-06, | |
| "loss": 0.5534, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 0.810790273556231, | |
| "grad_norm": 1.168880513565578, | |
| "learning_rate": 1.8210518877889016e-06, | |
| "loss": 0.709, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 0.8115501519756839, | |
| "grad_norm": 1.3044177088530167, | |
| "learning_rate": 1.8069107731519507e-06, | |
| "loss": 0.7846, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 0.8123100303951368, | |
| "grad_norm": 1.2700692620966523, | |
| "learning_rate": 1.7928193229914747e-06, | |
| "loss": 0.72, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 0.8130699088145896, | |
| "grad_norm": 1.0305509033742357, | |
| "learning_rate": 1.7787776227263464e-06, | |
| "loss": 0.5598, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.8138297872340425, | |
| "grad_norm": 1.42695527168762, | |
| "learning_rate": 1.7647857574738759e-06, | |
| "loss": 0.79, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 0.8145896656534954, | |
| "grad_norm": 1.2829056363535667, | |
| "learning_rate": 1.7508438120492864e-06, | |
| "loss": 0.5728, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 0.8153495440729484, | |
| "grad_norm": 1.138479838661748, | |
| "learning_rate": 1.7369518709651923e-06, | |
| "loss": 0.7494, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 0.8161094224924013, | |
| "grad_norm": 1.2061615937485768, | |
| "learning_rate": 1.7231100184310955e-06, | |
| "loss": 0.6656, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 0.8168693009118541, | |
| "grad_norm": 1.16307766999232, | |
| "learning_rate": 1.7093183383528776e-06, | |
| "loss": 0.6251, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.817629179331307, | |
| "grad_norm": 1.1047341036277805, | |
| "learning_rate": 1.6955769143322898e-06, | |
| "loss": 0.4832, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 0.8183890577507599, | |
| "grad_norm": 1.025541554743181, | |
| "learning_rate": 1.681885829666432e-06, | |
| "loss": 0.7241, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 0.8191489361702128, | |
| "grad_norm": 1.071944221847291, | |
| "learning_rate": 1.6682451673472734e-06, | |
| "loss": 0.6414, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 0.8199088145896657, | |
| "grad_norm": 1.1831682465406441, | |
| "learning_rate": 1.6546550100611237e-06, | |
| "loss": 0.7464, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 0.8206686930091185, | |
| "grad_norm": 1.1702009090623584, | |
| "learning_rate": 1.641115440188148e-06, | |
| "loss": 0.7093, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.8214285714285714, | |
| "grad_norm": 1.2293603269766884, | |
| "learning_rate": 1.6276265398018642e-06, | |
| "loss": 0.7024, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 0.8221884498480243, | |
| "grad_norm": 1.459648231339271, | |
| "learning_rate": 1.6141883906686484e-06, | |
| "loss": 0.7497, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 0.8229483282674772, | |
| "grad_norm": 1.221371250137426, | |
| "learning_rate": 1.6008010742472257e-06, | |
| "loss": 0.7866, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 0.8237082066869301, | |
| "grad_norm": 1.0648109611853667, | |
| "learning_rate": 1.587464671688187e-06, | |
| "loss": 0.7446, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 0.824468085106383, | |
| "grad_norm": 1.2040981115892886, | |
| "learning_rate": 1.5741792638335096e-06, | |
| "loss": 0.7132, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.8252279635258358, | |
| "grad_norm": 1.113338000614422, | |
| "learning_rate": 1.5609449312160364e-06, | |
| "loss": 0.6036, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 0.8259878419452887, | |
| "grad_norm": 1.2292065356977784, | |
| "learning_rate": 1.5477617540590128e-06, | |
| "loss": 0.7186, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 0.8267477203647416, | |
| "grad_norm": 1.0813897999802748, | |
| "learning_rate": 1.5346298122755932e-06, | |
| "loss": 0.6756, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 0.8275075987841946, | |
| "grad_norm": 1.1457749750130126, | |
| "learning_rate": 1.5215491854683529e-06, | |
| "loss": 0.7823, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 0.8282674772036475, | |
| "grad_norm": 1.5224007008610663, | |
| "learning_rate": 1.5085199529288097e-06, | |
| "loss": 0.7143, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.8290273556231003, | |
| "grad_norm": 1.268370527472653, | |
| "learning_rate": 1.4955421936369452e-06, | |
| "loss": 0.6856, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 0.8297872340425532, | |
| "grad_norm": 1.3042010852889585, | |
| "learning_rate": 1.4826159862607182e-06, | |
| "loss": 0.6793, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 0.8305471124620061, | |
| "grad_norm": 1.1794958091081331, | |
| "learning_rate": 1.4697414091555918e-06, | |
| "loss": 0.7991, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 0.831306990881459, | |
| "grad_norm": 1.0337903614904165, | |
| "learning_rate": 1.456918540364065e-06, | |
| "loss": 0.7581, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 0.8320668693009119, | |
| "grad_norm": 1.347003269461538, | |
| "learning_rate": 1.4441474576151915e-06, | |
| "loss": 0.6636, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.8328267477203647, | |
| "grad_norm": 1.2887023908011925, | |
| "learning_rate": 1.4314282383241097e-06, | |
| "loss": 0.7098, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 0.8335866261398176, | |
| "grad_norm": 1.3429794284743277, | |
| "learning_rate": 1.4187609595915697e-06, | |
| "loss": 0.7319, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 0.8343465045592705, | |
| "grad_norm": 1.252983768917628, | |
| "learning_rate": 1.4061456982034816e-06, | |
| "loss": 0.746, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 0.8351063829787234, | |
| "grad_norm": 1.1706039184977501, | |
| "learning_rate": 1.3935825306304329e-06, | |
| "loss": 0.7149, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 0.8358662613981763, | |
| "grad_norm": 1.1666813075662807, | |
| "learning_rate": 1.3810715330272285e-06, | |
| "loss": 0.7461, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.8366261398176292, | |
| "grad_norm": 1.1761426888453945, | |
| "learning_rate": 1.36861278123244e-06, | |
| "loss": 0.7042, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 0.837386018237082, | |
| "grad_norm": 1.1136282339569321, | |
| "learning_rate": 1.3562063507679323e-06, | |
| "loss": 0.6249, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 0.8381458966565349, | |
| "grad_norm": 1.316844828978894, | |
| "learning_rate": 1.3438523168384076e-06, | |
| "loss": 0.7023, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 0.8389057750759878, | |
| "grad_norm": 1.1003360730458955, | |
| "learning_rate": 1.3315507543309624e-06, | |
| "loss": 0.7811, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 0.8396656534954408, | |
| "grad_norm": 1.1602180838064797, | |
| "learning_rate": 1.319301737814621e-06, | |
| "loss": 0.6946, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.8404255319148937, | |
| "grad_norm": 1.170774245339673, | |
| "learning_rate": 1.3071053415398816e-06, | |
| "loss": 0.7957, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 0.8411854103343465, | |
| "grad_norm": 1.106487474924346, | |
| "learning_rate": 1.2949616394382802e-06, | |
| "loss": 0.744, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 0.8419452887537994, | |
| "grad_norm": 0.9635878193333491, | |
| "learning_rate": 1.2828707051219257e-06, | |
| "loss": 0.7089, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 0.8427051671732523, | |
| "grad_norm": 1.1291250011571128, | |
| "learning_rate": 1.2708326118830706e-06, | |
| "loss": 0.7499, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 0.8434650455927052, | |
| "grad_norm": 1.2022806160147763, | |
| "learning_rate": 1.2588474326936461e-06, | |
| "loss": 0.7518, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.8442249240121581, | |
| "grad_norm": 1.3829627678401308, | |
| "learning_rate": 1.2469152402048446e-06, | |
| "loss": 0.7626, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 0.8449848024316109, | |
| "grad_norm": 1.1783563090840374, | |
| "learning_rate": 1.2350361067466554e-06, | |
| "loss": 0.7586, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 0.8457446808510638, | |
| "grad_norm": 1.2748421149251483, | |
| "learning_rate": 1.2232101043274437e-06, | |
| "loss": 0.7966, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 0.8465045592705167, | |
| "grad_norm": 1.1416666291036888, | |
| "learning_rate": 1.2114373046335059e-06, | |
| "loss": 0.7399, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 0.8472644376899696, | |
| "grad_norm": 1.2905772189645583, | |
| "learning_rate": 1.1997177790286362e-06, | |
| "loss": 0.6386, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.8480243161094225, | |
| "grad_norm": 1.0801246082097555, | |
| "learning_rate": 1.1880515985536911e-06, | |
| "loss": 0.7456, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 0.8487841945288754, | |
| "grad_norm": 1.1333612857017454, | |
| "learning_rate": 1.176438833926169e-06, | |
| "loss": 0.7343, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 0.8495440729483282, | |
| "grad_norm": 1.2015074957886327, | |
| "learning_rate": 1.1648795555397719e-06, | |
| "loss": 0.5921, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 0.8503039513677811, | |
| "grad_norm": 1.0789670081746559, | |
| "learning_rate": 1.1533738334639787e-06, | |
| "loss": 0.6823, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 0.851063829787234, | |
| "grad_norm": 1.2053742749328062, | |
| "learning_rate": 1.1419217374436231e-06, | |
| "loss": 0.6012, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.851823708206687, | |
| "grad_norm": 1.1866990967583508, | |
| "learning_rate": 1.1305233368984792e-06, | |
| "loss": 0.6803, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 0.8525835866261399, | |
| "grad_norm": 1.1083532920492467, | |
| "learning_rate": 1.1191787009228194e-06, | |
| "loss": 0.7521, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 0.8533434650455927, | |
| "grad_norm": 1.2928824661717089, | |
| "learning_rate": 1.1078878982850193e-06, | |
| "loss": 0.6828, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 0.8541033434650456, | |
| "grad_norm": 1.1379951557810357, | |
| "learning_rate": 1.0966509974271289e-06, | |
| "loss": 0.7959, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 0.8548632218844985, | |
| "grad_norm": 1.3030282765580852, | |
| "learning_rate": 1.0854680664644534e-06, | |
| "loss": 0.7352, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.8556231003039514, | |
| "grad_norm": 1.2489059405752996, | |
| "learning_rate": 1.0743391731851471e-06, | |
| "loss": 0.66, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 0.8563829787234043, | |
| "grad_norm": 1.0870366243136975, | |
| "learning_rate": 1.0632643850498048e-06, | |
| "loss": 0.7482, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 0.8571428571428571, | |
| "grad_norm": 1.328893270272176, | |
| "learning_rate": 1.0522437691910493e-06, | |
| "loss": 0.7383, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 0.85790273556231, | |
| "grad_norm": 1.2686744988780967, | |
| "learning_rate": 1.0412773924131202e-06, | |
| "loss": 0.7787, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 0.8586626139817629, | |
| "grad_norm": 1.2895707204562605, | |
| "learning_rate": 1.0303653211914788e-06, | |
| "loss": 0.6886, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.8594224924012158, | |
| "grad_norm": 1.2779137612603786, | |
| "learning_rate": 1.0195076216723931e-06, | |
| "loss": 0.8103, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 0.8601823708206687, | |
| "grad_norm": 1.096372081021282, | |
| "learning_rate": 1.008704359672551e-06, | |
| "loss": 0.7135, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 0.8609422492401215, | |
| "grad_norm": 1.3234467176898201, | |
| "learning_rate": 9.97955600678644e-07, | |
| "loss": 0.6988, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 0.8617021276595744, | |
| "grad_norm": 1.1209608216004165, | |
| "learning_rate": 9.872614098469912e-07, | |
| "loss": 0.7817, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 0.8624620060790273, | |
| "grad_norm": 1.1397833603058156, | |
| "learning_rate": 9.766218520031234e-07, | |
| "loss": 0.7541, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.8632218844984803, | |
| "grad_norm": 1.034320455729016, | |
| "learning_rate": 9.660369916414015e-07, | |
| "loss": 0.6678, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 0.8639817629179332, | |
| "grad_norm": 1.2296530710233435, | |
| "learning_rate": 9.555068929246324e-07, | |
| "loss": 0.7136, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 0.8647416413373861, | |
| "grad_norm": 1.2527301663230523, | |
| "learning_rate": 9.450316196836618e-07, | |
| "loss": 0.7636, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 0.8655015197568389, | |
| "grad_norm": 1.1293246428106032, | |
| "learning_rate": 9.346112354169978e-07, | |
| "loss": 0.7786, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 0.8662613981762918, | |
| "grad_norm": 1.1033905454979154, | |
| "learning_rate": 9.242458032904311e-07, | |
| "loss": 0.7522, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.8670212765957447, | |
| "grad_norm": 1.0998877115636188, | |
| "learning_rate": 9.139353861366385e-07, | |
| "loss": 0.6357, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 0.8677811550151976, | |
| "grad_norm": 1.1220569394959372, | |
| "learning_rate": 9.036800464548157e-07, | |
| "loss": 0.7298, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 0.8685410334346505, | |
| "grad_norm": 0.9377229606864954, | |
| "learning_rate": 8.934798464102923e-07, | |
| "loss": 0.7743, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 0.8693009118541033, | |
| "grad_norm": 1.4040998613086915, | |
| "learning_rate": 8.833348478341519e-07, | |
| "loss": 0.6949, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 0.8700607902735562, | |
| "grad_norm": 1.264722195112047, | |
| "learning_rate": 8.732451122228592e-07, | |
| "loss": 0.7638, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.8708206686930091, | |
| "grad_norm": 1.2605234663077025, | |
| "learning_rate": 8.632107007378932e-07, | |
| "loss": 0.7058, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 0.871580547112462, | |
| "grad_norm": 1.1261574149190048, | |
| "learning_rate": 8.532316742053715e-07, | |
| "loss": 0.7339, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 0.8723404255319149, | |
| "grad_norm": 1.0991804296035141, | |
| "learning_rate": 8.433080931156767e-07, | |
| "loss": 0.7465, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 0.8731003039513677, | |
| "grad_norm": 1.259133970961514, | |
| "learning_rate": 8.334400176230983e-07, | |
| "loss": 0.6702, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 0.8738601823708206, | |
| "grad_norm": 1.3136670718052346, | |
| "learning_rate": 8.236275075454647e-07, | |
| "loss": 0.6648, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.8746200607902735, | |
| "grad_norm": 1.277392389644452, | |
| "learning_rate": 8.138706223637827e-07, | |
| "loss": 0.6864, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 0.8753799392097265, | |
| "grad_norm": 1.3876976866133421, | |
| "learning_rate": 8.041694212218698e-07, | |
| "loss": 0.7385, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 0.8761398176291794, | |
| "grad_norm": 1.4747082319995537, | |
| "learning_rate": 7.945239629260037e-07, | |
| "loss": 0.7746, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 0.8768996960486323, | |
| "grad_norm": 1.1371939027639884, | |
| "learning_rate": 7.849343059445635e-07, | |
| "loss": 0.741, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 0.8776595744680851, | |
| "grad_norm": 1.309602353479703, | |
| "learning_rate": 7.75400508407671e-07, | |
| "loss": 0.7401, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.878419452887538, | |
| "grad_norm": 1.1564042356426667, | |
| "learning_rate": 7.659226281068444e-07, | |
| "loss": 0.7205, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 0.8791793313069909, | |
| "grad_norm": 1.156740460459362, | |
| "learning_rate": 7.565007224946486e-07, | |
| "loss": 0.7415, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 0.8799392097264438, | |
| "grad_norm": 1.2647908147510765, | |
| "learning_rate": 7.471348486843355e-07, | |
| "loss": 0.7918, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 0.8806990881458967, | |
| "grad_norm": 1.3143112798378256, | |
| "learning_rate": 7.378250634495144e-07, | |
| "loss": 0.774, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 0.8814589665653495, | |
| "grad_norm": 1.1449424447218988, | |
| "learning_rate": 7.285714232237972e-07, | |
| "loss": 0.6843, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.8822188449848024, | |
| "grad_norm": 1.3532484966962761, | |
| "learning_rate": 7.193739841004565e-07, | |
| "loss": 0.7039, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 0.8829787234042553, | |
| "grad_norm": 1.2936366905589973, | |
| "learning_rate": 7.102328018320859e-07, | |
| "loss": 0.7873, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 0.8837386018237082, | |
| "grad_norm": 0.9717974113949969, | |
| "learning_rate": 7.011479318302716e-07, | |
| "loss": 0.7699, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 0.8844984802431611, | |
| "grad_norm": 1.1715802959793402, | |
| "learning_rate": 6.9211942916524e-07, | |
| "loss": 0.5883, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 0.8852583586626139, | |
| "grad_norm": 1.3086636343348252, | |
| "learning_rate": 6.831473485655393e-07, | |
| "loss": 0.7256, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.8860182370820668, | |
| "grad_norm": 1.208842753014264, | |
| "learning_rate": 6.742317444176994e-07, | |
| "loss": 0.7851, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 0.8867781155015197, | |
| "grad_norm": 1.1799112115283237, | |
| "learning_rate": 6.653726707659014e-07, | |
| "loss": 0.6623, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 0.8875379939209727, | |
| "grad_norm": 1.2068774142243552, | |
| "learning_rate": 6.565701813116543e-07, | |
| "loss": 0.7625, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 0.8882978723404256, | |
| "grad_norm": 1.2319741329101992, | |
| "learning_rate": 6.478243294134678e-07, | |
| "loss": 0.7044, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 0.8890577507598785, | |
| "grad_norm": 1.1223217263876877, | |
| "learning_rate": 6.39135168086531e-07, | |
| "loss": 0.6951, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.8898176291793313, | |
| "grad_norm": 1.050821124506145, | |
| "learning_rate": 6.305027500023841e-07, | |
| "loss": 0.6536, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 0.8905775075987842, | |
| "grad_norm": 1.3232626776904932, | |
| "learning_rate": 6.219271274886052e-07, | |
| "loss": 0.4959, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 0.8913373860182371, | |
| "grad_norm": 0.9236612849856684, | |
| "learning_rate": 6.13408352528495e-07, | |
| "loss": 0.7861, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 0.89209726443769, | |
| "grad_norm": 1.192884000865282, | |
| "learning_rate": 6.04946476760756e-07, | |
| "loss": 0.5997, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 0.8928571428571429, | |
| "grad_norm": 1.1576671265770457, | |
| "learning_rate": 5.965415514791817e-07, | |
| "loss": 0.772, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.8936170212765957, | |
| "grad_norm": 1.529144350057864, | |
| "learning_rate": 5.881936276323463e-07, | |
| "loss": 0.7293, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 0.8943768996960486, | |
| "grad_norm": 0.9054911996951112, | |
| "learning_rate": 5.79902755823295e-07, | |
| "loss": 0.7576, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 0.8951367781155015, | |
| "grad_norm": 1.0259473352886155, | |
| "learning_rate": 5.716689863092362e-07, | |
| "loss": 0.6183, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 0.8958966565349544, | |
| "grad_norm": 1.174079780058221, | |
| "learning_rate": 5.634923690012451e-07, | |
| "loss": 0.7321, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 0.8966565349544073, | |
| "grad_norm": 1.2330332627506773, | |
| "learning_rate": 5.553729534639463e-07, | |
| "loss": 0.5713, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.8974164133738601, | |
| "grad_norm": 1.1076288366775002, | |
| "learning_rate": 5.473107889152241e-07, | |
| "loss": 0.6634, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 0.898176291793313, | |
| "grad_norm": 1.044467700270493, | |
| "learning_rate": 5.393059242259236e-07, | |
| "loss": 0.7181, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 0.898936170212766, | |
| "grad_norm": 1.1039285982158928, | |
| "learning_rate": 5.313584079195488e-07, | |
| "loss": 0.7007, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 0.8996960486322189, | |
| "grad_norm": 1.1206888701324598, | |
| "learning_rate": 5.234682881719766e-07, | |
| "loss": 0.8018, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 0.9004559270516718, | |
| "grad_norm": 1.2788167988761892, | |
| "learning_rate": 5.156356128111551e-07, | |
| "loss": 0.734, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.9012158054711246, | |
| "grad_norm": 1.161556996711126, | |
| "learning_rate": 5.078604293168232e-07, | |
| "loss": 0.6582, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 0.9019756838905775, | |
| "grad_norm": 1.4668452746754073, | |
| "learning_rate": 5.001427848202145e-07, | |
| "loss": 0.6544, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 0.9027355623100304, | |
| "grad_norm": 1.157465848912087, | |
| "learning_rate": 4.924827261037779e-07, | |
| "loss": 0.695, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 0.9034954407294833, | |
| "grad_norm": 1.2516317056350037, | |
| "learning_rate": 4.848802996008928e-07, | |
| "loss": 0.6245, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 0.9042553191489362, | |
| "grad_norm": 1.2747114150041692, | |
| "learning_rate": 4.773355513955847e-07, | |
| "loss": 0.7333, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.9050151975683891, | |
| "grad_norm": 1.2619667168592554, | |
| "learning_rate": 4.6984852722224307e-07, | |
| "loss": 0.7702, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 0.9057750759878419, | |
| "grad_norm": 0.9219825124620067, | |
| "learning_rate": 4.6241927246535645e-07, | |
| "loss": 0.5749, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 0.9065349544072948, | |
| "grad_norm": 1.110094346430556, | |
| "learning_rate": 4.5504783215922775e-07, | |
| "loss": 0.6984, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 0.9072948328267477, | |
| "grad_norm": 1.1407430182639415, | |
| "learning_rate": 4.4773425098769697e-07, | |
| "loss": 0.7996, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 0.9080547112462006, | |
| "grad_norm": 1.0802363485046098, | |
| "learning_rate": 4.4047857328388457e-07, | |
| "loss": 0.7322, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.9088145896656535, | |
| "grad_norm": 1.1856070794492464, | |
| "learning_rate": 4.332808430299085e-07, | |
| "loss": 0.7228, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 0.9095744680851063, | |
| "grad_norm": 1.2226551116509903, | |
| "learning_rate": 4.2614110385662544e-07, | |
| "loss": 0.6992, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 0.9103343465045592, | |
| "grad_norm": 1.3205637153900702, | |
| "learning_rate": 4.190593990433656e-07, | |
| "loss": 0.7009, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 0.9110942249240122, | |
| "grad_norm": 1.095834319653727, | |
| "learning_rate": 4.1203577151767037e-07, | |
| "loss": 0.7305, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 0.9118541033434651, | |
| "grad_norm": 1.2495250897776518, | |
| "learning_rate": 4.0507026385502747e-07, | |
| "loss": 0.6482, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.912613981762918, | |
| "grad_norm": 1.297832348549546, | |
| "learning_rate": 3.981629182786162e-07, | |
| "loss": 0.6437, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 0.9133738601823708, | |
| "grad_norm": 1.2692776625110178, | |
| "learning_rate": 3.913137766590569e-07, | |
| "loss": 0.6882, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 0.9141337386018237, | |
| "grad_norm": 1.1654468543511542, | |
| "learning_rate": 3.8452288051414765e-07, | |
| "loss": 0.6392, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 0.9148936170212766, | |
| "grad_norm": 1.1132826975460226, | |
| "learning_rate": 3.7779027100861785e-07, | |
| "loss": 0.8161, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 0.9156534954407295, | |
| "grad_norm": 1.1830454463138378, | |
| "learning_rate": 3.711159889538774e-07, | |
| "loss": 0.7455, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.9164133738601824, | |
| "grad_norm": 1.1107355529496095, | |
| "learning_rate": 3.645000748077709e-07, | |
| "loss": 0.6665, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 0.9171732522796353, | |
| "grad_norm": 1.4043982405524962, | |
| "learning_rate": 3.5794256867432985e-07, | |
| "loss": 0.7543, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 0.9179331306990881, | |
| "grad_norm": 1.1072791466616974, | |
| "learning_rate": 3.5144351030353077e-07, | |
| "loss": 0.6181, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 0.918693009118541, | |
| "grad_norm": 1.3604325527469041, | |
| "learning_rate": 3.45002939091057e-07, | |
| "loss": 0.64, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 0.9194528875379939, | |
| "grad_norm": 1.1613277958259418, | |
| "learning_rate": 3.386208940780522e-07, | |
| "loss": 0.7014, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.9202127659574468, | |
| "grad_norm": 1.2883166806221475, | |
| "learning_rate": 3.3229741395089276e-07, | |
| "loss": 0.6231, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 0.9209726443768997, | |
| "grad_norm": 1.07106874447568, | |
| "learning_rate": 3.260325370409501e-07, | |
| "loss": 0.756, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 0.9217325227963525, | |
| "grad_norm": 1.1402182304655575, | |
| "learning_rate": 3.19826301324353e-07, | |
| "loss": 0.6856, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 0.9224924012158054, | |
| "grad_norm": 1.2245489779536751, | |
| "learning_rate": 3.1367874442176485e-07, | |
| "loss": 0.772, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 0.9232522796352584, | |
| "grad_norm": 1.0692493602926023, | |
| "learning_rate": 3.075899035981533e-07, | |
| "loss": 0.8044, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.9240121580547113, | |
| "grad_norm": 1.086270725074644, | |
| "learning_rate": 3.0155981576255986e-07, | |
| "loss": 0.7422, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 0.9247720364741642, | |
| "grad_norm": 1.1256467316810377, | |
| "learning_rate": 2.955885174678852e-07, | |
| "loss": 0.7003, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 0.925531914893617, | |
| "grad_norm": 1.0791349620959785, | |
| "learning_rate": 2.896760449106606e-07, | |
| "loss": 0.7397, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 0.9262917933130699, | |
| "grad_norm": 1.3221673630842614, | |
| "learning_rate": 2.83822433930826e-07, | |
| "loss": 0.7822, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 0.9270516717325228, | |
| "grad_norm": 1.3827967682000326, | |
| "learning_rate": 2.7802772001152224e-07, | |
| "loss": 0.6561, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.9278115501519757, | |
| "grad_norm": 1.1647372123284827, | |
| "learning_rate": 2.7229193827886913e-07, | |
| "loss": 0.7444, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 0.9285714285714286, | |
| "grad_norm": 1.0338958717107452, | |
| "learning_rate": 2.6661512350175556e-07, | |
| "loss": 0.7402, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 0.9293313069908815, | |
| "grad_norm": 1.208529153577792, | |
| "learning_rate": 2.609973100916241e-07, | |
| "loss": 0.7045, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 0.9300911854103343, | |
| "grad_norm": 1.1059287905966912, | |
| "learning_rate": 2.5543853210226566e-07, | |
| "loss": 0.7058, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 0.9308510638297872, | |
| "grad_norm": 1.0272188031192246, | |
| "learning_rate": 2.499388232296174e-07, | |
| "loss": 0.714, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.9316109422492401, | |
| "grad_norm": 1.109569180689212, | |
| "learning_rate": 2.4449821681155284e-07, | |
| "loss": 0.7372, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 0.932370820668693, | |
| "grad_norm": 1.0941332032262718, | |
| "learning_rate": 2.3911674582767553e-07, | |
| "loss": 0.7376, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 0.9331306990881459, | |
| "grad_norm": 1.2668138684154482, | |
| "learning_rate": 2.3379444289913344e-07, | |
| "loss": 0.7441, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 0.9338905775075987, | |
| "grad_norm": 1.2926666290047633, | |
| "learning_rate": 2.2853134028840594e-07, | |
| "loss": 0.6299, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 0.9346504559270516, | |
| "grad_norm": 1.2652491690280863, | |
| "learning_rate": 2.233274698991139e-07, | |
| "loss": 0.7062, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.9354103343465046, | |
| "grad_norm": 1.16999575279007, | |
| "learning_rate": 2.1818286327583537e-07, | |
| "loss": 0.7384, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 0.9361702127659575, | |
| "grad_norm": 1.199752072256382, | |
| "learning_rate": 2.13097551603898e-07, | |
| "loss": 0.7242, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 0.9369300911854104, | |
| "grad_norm": 1.1854955098902373, | |
| "learning_rate": 2.0807156570920027e-07, | |
| "loss": 0.711, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 0.9376899696048632, | |
| "grad_norm": 1.2437547291546114, | |
| "learning_rate": 2.0310493605802395e-07, | |
| "loss": 0.6667, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 0.9384498480243161, | |
| "grad_norm": 1.2926290227382389, | |
| "learning_rate": 1.9819769275684742e-07, | |
| "loss": 0.6911, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.939209726443769, | |
| "grad_norm": 1.0840825819471644, | |
| "learning_rate": 1.9334986555216374e-07, | |
| "loss": 0.7342, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 0.9399696048632219, | |
| "grad_norm": 1.2538090748428903, | |
| "learning_rate": 1.8856148383029848e-07, | |
| "loss": 0.6757, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 0.9407294832826748, | |
| "grad_norm": 1.4126784652360704, | |
| "learning_rate": 1.8383257661723664e-07, | |
| "loss": 0.6853, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 0.9414893617021277, | |
| "grad_norm": 1.318452356380669, | |
| "learning_rate": 1.791631725784404e-07, | |
| "loss": 0.7256, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 0.9422492401215805, | |
| "grad_norm": 1.0696438846259557, | |
| "learning_rate": 1.7455330001868054e-07, | |
| "loss": 0.7606, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.9430091185410334, | |
| "grad_norm": 1.1814938712612333, | |
| "learning_rate": 1.7000298688186312e-07, | |
| "loss": 0.6582, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 0.9437689969604863, | |
| "grad_norm": 1.026864506838113, | |
| "learning_rate": 1.6551226075085746e-07, | |
| "loss": 0.7409, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 0.9445288753799392, | |
| "grad_norm": 1.2904611830419945, | |
| "learning_rate": 1.6108114884733184e-07, | |
| "loss": 0.8411, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 0.9452887537993921, | |
| "grad_norm": 1.1710363239047947, | |
| "learning_rate": 1.567096780315891e-07, | |
| "loss": 0.7208, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 0.9460486322188449, | |
| "grad_norm": 1.3850457624197534, | |
| "learning_rate": 1.5239787480240353e-07, | |
| "loss": 0.6515, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.9468085106382979, | |
| "grad_norm": 1.2129885090216885, | |
| "learning_rate": 1.4814576529685543e-07, | |
| "loss": 0.6777, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 0.9475683890577508, | |
| "grad_norm": 1.0550076920801514, | |
| "learning_rate": 1.4395337529018116e-07, | |
| "loss": 0.6414, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 0.9483282674772037, | |
| "grad_norm": 1.1847891838061086, | |
| "learning_rate": 1.3982073019560782e-07, | |
| "loss": 0.7128, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 0.9490881458966566, | |
| "grad_norm": 1.0692036625822556, | |
| "learning_rate": 1.3574785506420773e-07, | |
| "loss": 0.7368, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 0.9498480243161094, | |
| "grad_norm": 1.2475853419725542, | |
| "learning_rate": 1.317347745847386e-07, | |
| "loss": 0.726, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.9506079027355623, | |
| "grad_norm": 1.0022200242497838, | |
| "learning_rate": 1.277815130835014e-07, | |
| "loss": 0.6144, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 0.9513677811550152, | |
| "grad_norm": 1.1201478240829223, | |
| "learning_rate": 1.2388809452418716e-07, | |
| "loss": 0.701, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 0.9521276595744681, | |
| "grad_norm": 1.0702737120375463, | |
| "learning_rate": 1.2005454250773262e-07, | |
| "loss": 0.7577, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 0.952887537993921, | |
| "grad_norm": 1.4573738041400115, | |
| "learning_rate": 1.1628088027218265e-07, | |
| "loss": 0.8345, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 0.9536474164133738, | |
| "grad_norm": 1.1243415983178697, | |
| "learning_rate": 1.1256713069254244e-07, | |
| "loss": 0.6961, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.9544072948328267, | |
| "grad_norm": 1.3520110451299268, | |
| "learning_rate": 1.0891331628063884e-07, | |
| "loss": 0.7706, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 0.9551671732522796, | |
| "grad_norm": 1.1624917314586243, | |
| "learning_rate": 1.0531945918499265e-07, | |
| "loss": 0.677, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 0.9559270516717325, | |
| "grad_norm": 1.1657708074236401, | |
| "learning_rate": 1.0178558119067316e-07, | |
| "loss": 0.7009, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 0.9566869300911854, | |
| "grad_norm": 1.0079180440845639, | |
| "learning_rate": 9.831170371917276e-08, | |
| "loss": 0.7824, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 0.9574468085106383, | |
| "grad_norm": 1.1149105837624373, | |
| "learning_rate": 9.489784782827582e-08, | |
| "loss": 0.6843, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.9582066869300911, | |
| "grad_norm": 1.1927508569696441, | |
| "learning_rate": 9.154403421193226e-08, | |
| "loss": 0.6613, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 0.958966565349544, | |
| "grad_norm": 1.1122068978430713, | |
| "learning_rate": 8.825028320012752e-08, | |
| "loss": 0.6781, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 0.959726443768997, | |
| "grad_norm": 1.2334169187567947, | |
| "learning_rate": 8.5016614758765e-08, | |
| "loss": 0.737, | |
| "step": 1263 | |
    },
    {
      "epoch": 0.9604863221884499,
      "grad_norm": 1.2323786538778407,
      "learning_rate": 8.18430484895405e-08,
      "loss": 0.7317,
      "step": 1264
    },
    {
      "epoch": 0.9612462006079028,
      "grad_norm": 1.0924386889255142,
      "learning_rate": 7.872960362982684e-08,
      "loss": 0.6551,
      "step": 1265
    },
    {
      "epoch": 0.9620060790273556,
      "grad_norm": 1.1517888170333437,
      "learning_rate": 7.567629905255503e-08,
      "loss": 0.5987,
      "step": 1266
    },
    {
      "epoch": 0.9627659574468085,
      "grad_norm": 1.1833039652242365,
      "learning_rate": 7.268315326609988e-08,
      "loss": 0.7391,
      "step": 1267
    },
    {
      "epoch": 0.9635258358662614,
      "grad_norm": 1.0840543214834377,
      "learning_rate": 6.975018441417126e-08,
      "loss": 0.7182,
      "step": 1268
    },
    {
      "epoch": 0.9642857142857143,
      "grad_norm": 1.1852995405165008,
      "learning_rate": 6.68774102756975e-08,
      "loss": 0.7569,
      "step": 1269
    },
    {
      "epoch": 0.9650455927051672,
      "grad_norm": 1.137582652300508,
      "learning_rate": 6.406484826472547e-08,
      "loss": 0.7312,
      "step": 1270
    },
    {
      "epoch": 0.96580547112462,
      "grad_norm": 1.3374720384775478,
      "learning_rate": 6.131251543030848e-08,
      "loss": 0.6992,
      "step": 1271
    },
    {
      "epoch": 0.9665653495440729,
      "grad_norm": 1.121757330959547,
      "learning_rate": 5.862042845640403e-08,
      "loss": 0.637,
      "step": 1272
    },
    {
      "epoch": 0.9673252279635258,
      "grad_norm": 1.4347726775747547,
      "learning_rate": 5.59886036617785e-08,
      "loss": 0.6933,
      "step": 1273
    },
    {
      "epoch": 0.9680851063829787,
      "grad_norm": 1.301304876118703,
      "learning_rate": 5.3417056999901515e-08,
      "loss": 0.8316,
      "step": 1274
    },
    {
      "epoch": 0.9688449848024316,
      "grad_norm": 1.1803649101402443,
      "learning_rate": 5.0905804058850595e-08,
      "loss": 0.7101,
      "step": 1275
    },
    {
      "epoch": 0.9696048632218845,
      "grad_norm": 1.1060712259940828,
      "learning_rate": 4.845486006121891e-08,
      "loss": 0.7412,
      "step": 1276
    },
    {
      "epoch": 0.9703647416413373,
      "grad_norm": 1.1273428567457309,
      "learning_rate": 4.6064239864020975e-08,
      "loss": 0.6744,
      "step": 1277
    },
    {
      "epoch": 0.9711246200607903,
      "grad_norm": 1.1100062168839253,
      "learning_rate": 4.3733957958607134e-08,
      "loss": 0.6931,
      "step": 1278
    },
    {
      "epoch": 0.9718844984802432,
      "grad_norm": 1.2938991096262489,
      "learning_rate": 4.146402847056474e-08,
      "loss": 0.6328,
      "step": 1279
    },
    {
      "epoch": 0.9726443768996961,
      "grad_norm": 1.2112214335798759,
      "learning_rate": 3.925446515964604e-08,
      "loss": 0.7165,
      "step": 1280
    },
    {
      "epoch": 0.973404255319149,
      "grad_norm": 1.1618411263834023,
      "learning_rate": 3.7105281419675954e-08,
      "loss": 0.7141,
      "step": 1281
    },
    {
      "epoch": 0.9741641337386018,
      "grad_norm": 1.1427140269686802,
      "learning_rate": 3.5016490278473316e-08,
      "loss": 0.761,
      "step": 1282
    },
    {
      "epoch": 0.9749240121580547,
      "grad_norm": 0.9924491441266435,
      "learning_rate": 3.2988104397773115e-08,
      "loss": 0.7008,
      "step": 1283
    },
    {
      "epoch": 0.9756838905775076,
      "grad_norm": 1.2012943111283463,
      "learning_rate": 3.1020136073146575e-08,
      "loss": 0.7781,
      "step": 1284
    },
    {
      "epoch": 0.9764437689969605,
      "grad_norm": 1.2944128870216616,
      "learning_rate": 2.9112597233931228e-08,
      "loss": 0.7978,
      "step": 1285
    },
    {
      "epoch": 0.9772036474164134,
      "grad_norm": 1.1282632728937483,
      "learning_rate": 2.7265499443154264e-08,
      "loss": 0.7739,
      "step": 1286
    },
    {
      "epoch": 0.9779635258358662,
      "grad_norm": 1.0343667532817375,
      "learning_rate": 2.547885389746485e-08,
      "loss": 0.7328,
      "step": 1287
    },
    {
      "epoch": 0.9787234042553191,
      "grad_norm": 1.0549463196082776,
      "learning_rate": 2.3752671427065276e-08,
      "loss": 0.6895,
      "step": 1288
    },
    {
      "epoch": 0.979483282674772,
      "grad_norm": 1.1956658219495757,
      "learning_rate": 2.208696249564657e-08,
      "loss": 0.6392,
      "step": 1289
    },
    {
      "epoch": 0.9802431610942249,
      "grad_norm": 1.0671859520814504,
      "learning_rate": 2.048173720032298e-08,
      "loss": 0.7815,
      "step": 1290
    },
    {
      "epoch": 0.9810030395136778,
      "grad_norm": 1.1658326414440146,
      "learning_rate": 1.8937005271572052e-08,
      "loss": 0.5811,
      "step": 1291
    },
    {
      "epoch": 0.9817629179331308,
      "grad_norm": 1.5023725416363705,
      "learning_rate": 1.7452776073175748e-08,
      "loss": 0.7484,
      "step": 1292
    },
    {
      "epoch": 0.9825227963525835,
      "grad_norm": 1.4744222575867239,
      "learning_rate": 1.602905860216497e-08,
      "loss": 0.8648,
      "step": 1293
    },
    {
      "epoch": 0.9832826747720365,
      "grad_norm": 1.1608932376778889,
      "learning_rate": 1.4665861488761813e-08,
      "loss": 0.7665,
      "step": 1294
    },
    {
      "epoch": 0.9840425531914894,
      "grad_norm": 1.266250554987892,
      "learning_rate": 1.3363192996328488e-08,
      "loss": 0.7226,
      "step": 1295
    },
    {
      "epoch": 0.9848024316109423,
      "grad_norm": 1.1542555891752557,
      "learning_rate": 1.2121061021318492e-08,
      "loss": 0.727,
      "step": 1296
    },
    {
      "epoch": 0.9855623100303952,
      "grad_norm": 1.2333369808039218,
      "learning_rate": 1.0939473093229958e-08,
      "loss": 0.7792,
      "step": 1297
    },
    {
      "epoch": 0.986322188449848,
      "grad_norm": 1.240270342137932,
      "learning_rate": 9.818436374553487e-09,
      "loss": 0.6631,
      "step": 1298
    },
    {
      "epoch": 0.9870820668693009,
      "grad_norm": 1.1511006266063406,
      "learning_rate": 8.757957660737726e-09,
      "loss": 0.6575,
      "step": 1299
    },
    {
      "epoch": 0.9878419452887538,
      "grad_norm": 1.2681975928568516,
      "learning_rate": 7.758043380140523e-09,
      "loss": 0.7211,
      "step": 1300
    },
    {
      "epoch": 0.9886018237082067,
      "grad_norm": 1.4786173263579545,
      "learning_rate": 6.818699593996725e-09,
      "loss": 0.7583,
      "step": 1301
    },
    {
      "epoch": 0.9893617021276596,
      "grad_norm": 1.1860985334495093,
      "learning_rate": 5.939931996372661e-09,
      "loss": 0.7429,
      "step": 1302
    },
    {
      "epoch": 0.9901215805471124,
      "grad_norm": 1.3025416632127267,
      "learning_rate": 5.1217459141406074e-09,
      "loss": 0.7154,
      "step": 1303
    },
    {
      "epoch": 0.9908814589665653,
      "grad_norm": 1.0711539271109787,
      "learning_rate": 4.364146306943262e-09,
      "loss": 0.7368,
      "step": 1304
    },
    {
      "epoch": 0.9916413373860182,
      "grad_norm": 1.0623996484897302,
      "learning_rate": 3.6671377671604337e-09,
      "loss": 0.7121,
      "step": 1305
    },
    {
      "epoch": 0.9924012158054711,
      "grad_norm": 1.4452068992708094,
      "learning_rate": 3.0307245198857303e-09,
      "loss": 0.7666,
      "step": 1306
    },
    {
      "epoch": 0.993161094224924,
      "grad_norm": 0.9712828674325977,
      "learning_rate": 2.454910422897694e-09,
      "loss": 0.7135,
      "step": 1307
    },
    {
      "epoch": 0.993920972644377,
      "grad_norm": 1.0144768297644253,
      "learning_rate": 1.9396989666398137e-09,
      "loss": 0.5925,
      "step": 1308
    },
    {
      "epoch": 0.9946808510638298,
      "grad_norm": 1.244985893773935,
      "learning_rate": 1.485093274194993e-09,
      "loss": 0.6732,
      "step": 1309
    },
    {
      "epoch": 0.9954407294832827,
      "grad_norm": 1.2630809566730536,
      "learning_rate": 1.0910961012711163e-09,
      "loss": 0.7794,
      "step": 1310
    },
    {
      "epoch": 0.9962006079027356,
      "grad_norm": 1.183867059470281,
      "learning_rate": 7.577098361810641e-10,
      "loss": 0.6929,
      "step": 1311
    },
    {
      "epoch": 0.9969604863221885,
      "grad_norm": 1.1831588120436123,
      "learning_rate": 4.849364998305017e-10,
      "loss": 0.6337,
      "step": 1312
    },
    {
      "epoch": 0.9977203647416414,
      "grad_norm": 1.2159665430915092,
      "learning_rate": 2.7277774570233507e-10,
      "loss": 0.6908,
      "step": 1313
    },
    {
      "epoch": 0.9984802431610942,
      "grad_norm": 1.1989958423457108,
      "learning_rate": 1.2123485985227058e-10,
      "loss": 0.6538,
      "step": 1314
    },
    {
      "epoch": 0.9992401215805471,
      "grad_norm": 1.354617619619747,
      "learning_rate": 3.030876089438195e-11,
      "loss": 0.5996,
      "step": 1315
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.200818594693258,
      "learning_rate": 0.0,
      "loss": 0.6751,
      "step": 1316
    },
    {
      "epoch": 1.0,
      "step": 1316,
      "total_flos": 141830150160384.0,
      "train_loss": 0.7797298631135453,
      "train_runtime": 5250.0566,
      "train_samples_per_second": 16.037,
      "train_steps_per_second": 0.251
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 1316,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 395,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 141830150160384.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}