{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.999447004608295,
  "eval_steps": 500,
  "global_step": 5424,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005529953917050691,
      "grad_norm": 6.0903776907776255,
      "learning_rate": 1.8416206261510132e-07,
      "loss": 0.8665,
      "step": 10
    },
    {
      "epoch": 0.011059907834101382,
      "grad_norm": 5.453075517812696,
      "learning_rate": 3.6832412523020263e-07,
      "loss": 0.8531,
      "step": 20
    },
    {
      "epoch": 0.016589861751152075,
      "grad_norm": 3.537343733022185,
      "learning_rate": 5.524861878453039e-07,
      "loss": 0.8281,
      "step": 30
    },
    {
      "epoch": 0.022119815668202765,
      "grad_norm": 1.7050757720044358,
      "learning_rate": 7.366482504604053e-07,
      "loss": 0.7589,
      "step": 40
    },
    {
      "epoch": 0.027649769585253458,
      "grad_norm": 1.4012843364617489,
      "learning_rate": 9.208103130755065e-07,
      "loss": 0.715,
      "step": 50
    },
    {
      "epoch": 0.03317972350230415,
      "grad_norm": 0.8011478929925271,
      "learning_rate": 1.1049723756906078e-06,
      "loss": 0.6922,
      "step": 60
    },
    {
      "epoch": 0.03870967741935484,
      "grad_norm": 0.5699780949895114,
      "learning_rate": 1.289134438305709e-06,
      "loss": 0.6497,
      "step": 70
    },
    {
      "epoch": 0.04423963133640553,
      "grad_norm": 0.4520674784173067,
      "learning_rate": 1.4732965009208105e-06,
      "loss": 0.6281,
      "step": 80
    },
    {
      "epoch": 0.04976958525345622,
      "grad_norm": 0.39365714402711754,
      "learning_rate": 1.6574585635359118e-06,
      "loss": 0.611,
      "step": 90
    },
    {
      "epoch": 0.055299539170506916,
      "grad_norm": 0.4304728776273806,
      "learning_rate": 1.841620626151013e-06,
      "loss": 0.594,
      "step": 100
    },
    {
      "epoch": 0.060829493087557605,
      "grad_norm": 0.36773866506240127,
      "learning_rate": 2.0257826887661147e-06,
      "loss": 0.5946,
      "step": 110
    },
    {
      "epoch": 0.0663594470046083,
      "grad_norm": 0.40144795349227336,
      "learning_rate": 2.2099447513812157e-06,
      "loss": 0.5758,
      "step": 120
    },
    {
      "epoch": 0.07188940092165899,
      "grad_norm": 0.3650630134248591,
      "learning_rate": 2.394106813996317e-06,
      "loss": 0.5774,
      "step": 130
    },
    {
      "epoch": 0.07741935483870968,
      "grad_norm": 0.40858596339929243,
      "learning_rate": 2.578268876611418e-06,
      "loss": 0.5811,
      "step": 140
    },
    {
      "epoch": 0.08294930875576037,
      "grad_norm": 0.3915180921441105,
      "learning_rate": 2.7624309392265196e-06,
      "loss": 0.5567,
      "step": 150
    },
    {
      "epoch": 0.08847926267281106,
      "grad_norm": 0.38045253569441917,
      "learning_rate": 2.946593001841621e-06,
      "loss": 0.5558,
      "step": 160
    },
    {
      "epoch": 0.09400921658986175,
      "grad_norm": 0.37413606476610706,
      "learning_rate": 3.130755064456722e-06,
      "loss": 0.5493,
      "step": 170
    },
    {
      "epoch": 0.09953917050691244,
      "grad_norm": 0.42201408210941044,
      "learning_rate": 3.3149171270718235e-06,
      "loss": 0.5517,
      "step": 180
    },
    {
      "epoch": 0.10506912442396313,
      "grad_norm": 0.4095688421864769,
      "learning_rate": 3.4990791896869245e-06,
      "loss": 0.5463,
      "step": 190
    },
    {
      "epoch": 0.11059907834101383,
      "grad_norm": 0.4463086469391698,
      "learning_rate": 3.683241252302026e-06,
      "loss": 0.547,
      "step": 200
    },
    {
      "epoch": 0.11612903225806452,
      "grad_norm": 0.3760139550501188,
      "learning_rate": 3.867403314917128e-06,
      "loss": 0.5393,
      "step": 210
    },
    {
      "epoch": 0.12165898617511521,
      "grad_norm": 0.4078492489838482,
      "learning_rate": 4.051565377532229e-06,
      "loss": 0.5329,
      "step": 220
    },
    {
      "epoch": 0.1271889400921659,
      "grad_norm": 0.3908887647799372,
      "learning_rate": 4.23572744014733e-06,
      "loss": 0.5431,
      "step": 230
    },
    {
      "epoch": 0.1327188940092166,
      "grad_norm": 0.3761589181214343,
      "learning_rate": 4.419889502762431e-06,
      "loss": 0.5381,
      "step": 240
    },
    {
      "epoch": 0.1382488479262673,
      "grad_norm": 0.41027742174158294,
      "learning_rate": 4.604051565377533e-06,
      "loss": 0.5339,
      "step": 250
    },
    {
      "epoch": 0.14377880184331798,
      "grad_norm": 0.3993047304895881,
      "learning_rate": 4.788213627992634e-06,
      "loss": 0.5337,
      "step": 260
    },
    {
      "epoch": 0.14930875576036867,
      "grad_norm": 0.4267384725056861,
      "learning_rate": 4.972375690607736e-06,
      "loss": 0.5325,
      "step": 270
    },
    {
      "epoch": 0.15483870967741936,
      "grad_norm": 0.3741112789490821,
      "learning_rate": 5.156537753222836e-06,
      "loss": 0.5347,
      "step": 280
    },
    {
      "epoch": 0.16036866359447005,
      "grad_norm": 0.5001650422449634,
      "learning_rate": 5.340699815837938e-06,
      "loss": 0.5264,
      "step": 290
    },
    {
      "epoch": 0.16589861751152074,
      "grad_norm": 0.40870225399677257,
      "learning_rate": 5.524861878453039e-06,
      "loss": 0.5295,
      "step": 300
    },
    {
      "epoch": 0.17142857142857143,
      "grad_norm": 0.4489579942156596,
      "learning_rate": 5.709023941068141e-06,
      "loss": 0.5267,
      "step": 310
    },
    {
      "epoch": 0.17695852534562212,
      "grad_norm": 0.4576001899146063,
      "learning_rate": 5.893186003683242e-06,
      "loss": 0.5208,
      "step": 320
    },
    {
      "epoch": 0.1824884792626728,
      "grad_norm": 0.42765306743716647,
      "learning_rate": 6.077348066298343e-06,
      "loss": 0.5188,
      "step": 330
    },
    {
      "epoch": 0.1880184331797235,
      "grad_norm": 0.43147440269952797,
      "learning_rate": 6.261510128913444e-06,
      "loss": 0.5154,
      "step": 340
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 0.4497751095330459,
      "learning_rate": 6.445672191528546e-06,
      "loss": 0.5048,
      "step": 350
    },
    {
      "epoch": 0.19907834101382488,
      "grad_norm": 0.5327826362213908,
      "learning_rate": 6.629834254143647e-06,
      "loss": 0.5128,
      "step": 360
    },
    {
      "epoch": 0.20460829493087557,
      "grad_norm": 0.4115996357259951,
      "learning_rate": 6.8139963167587485e-06,
      "loss": 0.5178,
      "step": 370
    },
    {
      "epoch": 0.21013824884792626,
      "grad_norm": 0.40196263236255503,
      "learning_rate": 6.998158379373849e-06,
      "loss": 0.514,
      "step": 380
    },
    {
      "epoch": 0.21566820276497695,
      "grad_norm": 0.42296446460219156,
      "learning_rate": 7.182320441988951e-06,
      "loss": 0.5085,
      "step": 390
    },
    {
      "epoch": 0.22119815668202766,
      "grad_norm": 0.45986207995548617,
      "learning_rate": 7.366482504604052e-06,
      "loss": 0.5139,
      "step": 400
    },
    {
      "epoch": 0.22672811059907835,
      "grad_norm": 0.44973185296884877,
      "learning_rate": 7.5506445672191534e-06,
      "loss": 0.5103,
      "step": 410
    },
    {
      "epoch": 0.23225806451612904,
      "grad_norm": 0.5306307382931725,
      "learning_rate": 7.734806629834256e-06,
      "loss": 0.506,
      "step": 420
    },
    {
      "epoch": 0.23778801843317973,
      "grad_norm": 0.42387585926266613,
      "learning_rate": 7.918968692449355e-06,
      "loss": 0.508,
      "step": 430
    },
    {
      "epoch": 0.24331797235023042,
      "grad_norm": 0.4600172141091088,
      "learning_rate": 8.103130755064459e-06,
      "loss": 0.5053,
      "step": 440
    },
    {
      "epoch": 0.2488479262672811,
      "grad_norm": 0.5063886096298784,
      "learning_rate": 8.287292817679558e-06,
      "loss": 0.505,
      "step": 450
    },
    {
      "epoch": 0.2543778801843318,
      "grad_norm": 0.45595110131670297,
      "learning_rate": 8.47145488029466e-06,
      "loss": 0.4994,
      "step": 460
    },
    {
      "epoch": 0.25990783410138246,
      "grad_norm": 0.3979219166043402,
      "learning_rate": 8.655616942909761e-06,
      "loss": 0.5003,
      "step": 470
    },
    {
      "epoch": 0.2654377880184332,
      "grad_norm": 0.47071002002813517,
      "learning_rate": 8.839779005524863e-06,
      "loss": 0.4971,
      "step": 480
    },
    {
      "epoch": 0.2709677419354839,
      "grad_norm": 0.4204629206638486,
      "learning_rate": 9.023941068139964e-06,
      "loss": 0.5032,
      "step": 490
    },
    {
      "epoch": 0.2764976958525346,
      "grad_norm": 0.4755494443427242,
      "learning_rate": 9.208103130755066e-06,
      "loss": 0.4999,
      "step": 500
    },
    {
      "epoch": 0.2820276497695853,
      "grad_norm": 0.41495435521999285,
      "learning_rate": 9.392265193370167e-06,
      "loss": 0.5035,
      "step": 510
    },
    {
      "epoch": 0.28755760368663597,
      "grad_norm": 0.405511787171205,
      "learning_rate": 9.576427255985269e-06,
      "loss": 0.4999,
      "step": 520
    },
    {
      "epoch": 0.29308755760368665,
      "grad_norm": 0.47131810224352805,
      "learning_rate": 9.760589318600368e-06,
      "loss": 0.5089,
      "step": 530
    },
    {
      "epoch": 0.29861751152073734,
      "grad_norm": 0.4504585902565694,
      "learning_rate": 9.944751381215471e-06,
      "loss": 0.499,
      "step": 540
    },
    {
      "epoch": 0.30414746543778803,
      "grad_norm": 0.5477004156365718,
      "learning_rate": 9.999949252169092e-06,
      "loss": 0.5006,
      "step": 550
    },
    {
      "epoch": 0.3096774193548387,
      "grad_norm": 0.5531038596543296,
      "learning_rate": 9.999700693844523e-06,
      "loss": 0.4882,
      "step": 560
    },
    {
      "epoch": 0.3152073732718894,
      "grad_norm": 0.5330112235228269,
      "learning_rate": 9.99924501428021e-06,
      "loss": 0.4916,
      "step": 570
    },
    {
      "epoch": 0.3207373271889401,
      "grad_norm": 0.4577766765611679,
      "learning_rate": 9.99858223235347e-06,
      "loss": 0.4924,
      "step": 580
    },
    {
      "epoch": 0.3262672811059908,
      "grad_norm": 0.40765286650009175,
      "learning_rate": 9.997712375521187e-06,
      "loss": 0.4895,
      "step": 590
    },
    {
      "epoch": 0.3317972350230415,
      "grad_norm": 0.47974015373170437,
      "learning_rate": 9.996635479818683e-06,
      "loss": 0.4933,
      "step": 600
    },
    {
      "epoch": 0.33732718894009217,
      "grad_norm": 0.44956075853602034,
      "learning_rate": 9.995351589858227e-06,
      "loss": 0.4954,
      "step": 610
    },
    {
      "epoch": 0.34285714285714286,
      "grad_norm": 0.4619350325798451,
      "learning_rate": 9.993860758827171e-06,
      "loss": 0.4918,
      "step": 620
    },
    {
      "epoch": 0.34838709677419355,
      "grad_norm": 0.49495275870753425,
      "learning_rate": 9.992163048485776e-06,
      "loss": 0.4928,
      "step": 630
    },
    {
      "epoch": 0.35391705069124424,
      "grad_norm": 0.3640423564274247,
      "learning_rate": 9.990258529164618e-06,
      "loss": 0.4838,
      "step": 640
    },
    {
      "epoch": 0.35944700460829493,
      "grad_norm": 0.4420987239801644,
      "learning_rate": 9.988147279761706e-06,
      "loss": 0.488,
      "step": 650
    },
    {
      "epoch": 0.3649769585253456,
      "grad_norm": 0.4511577371212687,
      "learning_rate": 9.985829387739192e-06,
      "loss": 0.4985,
      "step": 660
    },
    {
      "epoch": 0.3705069124423963,
      "grad_norm": 0.4396075419107915,
      "learning_rate": 9.98330494911976e-06,
      "loss": 0.4915,
      "step": 670
    },
    {
      "epoch": 0.376036866359447,
      "grad_norm": 0.4704848369571105,
      "learning_rate": 9.980574068482635e-06,
      "loss": 0.4878,
      "step": 680
    },
    {
      "epoch": 0.3815668202764977,
      "grad_norm": 0.4763102579576698,
      "learning_rate": 9.977636858959274e-06,
      "loss": 0.4957,
      "step": 690
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 0.45436384650049316,
      "learning_rate": 9.974493442228653e-06,
      "loss": 0.4871,
      "step": 700
    },
    {
      "epoch": 0.39262672811059907,
      "grad_norm": 0.4546113951664281,
      "learning_rate": 9.971143948512239e-06,
      "loss": 0.4839,
      "step": 710
    },
    {
      "epoch": 0.39815668202764976,
      "grad_norm": 0.3942291564886981,
      "learning_rate": 9.967588516568601e-06,
      "loss": 0.4817,
      "step": 720
    },
    {
      "epoch": 0.40368663594470044,
      "grad_norm": 0.42410341959833237,
      "learning_rate": 9.963827293687648e-06,
      "loss": 0.4926,
      "step": 730
    },
    {
      "epoch": 0.40921658986175113,
      "grad_norm": 0.5249360145309199,
      "learning_rate": 9.959860435684534e-06,
      "loss": 0.4855,
      "step": 740
    },
    {
      "epoch": 0.4147465437788018,
      "grad_norm": 0.44333787753063203,
      "learning_rate": 9.95568810689321e-06,
      "loss": 0.4909,
      "step": 750
    },
    {
      "epoch": 0.4202764976958525,
      "grad_norm": 0.4571044299213202,
      "learning_rate": 9.951310480159605e-06,
      "loss": 0.4808,
      "step": 760
    },
    {
      "epoch": 0.4258064516129032,
      "grad_norm": 0.404103839654976,
      "learning_rate": 9.946727736834467e-06,
      "loss": 0.4816,
      "step": 770
    },
    {
      "epoch": 0.4313364055299539,
      "grad_norm": 0.4725053041297357,
      "learning_rate": 9.94194006676586e-06,
      "loss": 0.4888,
      "step": 780
    },
    {
      "epoch": 0.4368663594470046,
      "grad_norm": 0.46985417927228335,
      "learning_rate": 9.936947668291284e-06,
      "loss": 0.4877,
      "step": 790
    },
    {
      "epoch": 0.4423963133640553,
      "grad_norm": 0.49996822002708025,
      "learning_rate": 9.931750748229475e-06,
      "loss": 0.4804,
      "step": 800
    },
    {
      "epoch": 0.447926267281106,
      "grad_norm": 0.4531834191607307,
      "learning_rate": 9.926349521871824e-06,
      "loss": 0.4795,
      "step": 810
    },
    {
      "epoch": 0.4534562211981567,
      "grad_norm": 0.37802138341323005,
      "learning_rate": 9.920744212973468e-06,
      "loss": 0.4756,
      "step": 820
    },
    {
      "epoch": 0.4589861751152074,
      "grad_norm": 0.4243052863598972,
      "learning_rate": 9.91493505374401e-06,
      "loss": 0.4852,
      "step": 830
    },
    {
      "epoch": 0.4645161290322581,
      "grad_norm": 0.38897153286113667,
      "learning_rate": 9.908922284837911e-06,
      "loss": 0.4816,
      "step": 840
    },
    {
      "epoch": 0.4700460829493088,
      "grad_norm": 0.39229328262545654,
      "learning_rate": 9.90270615534451e-06,
      "loss": 0.485,
      "step": 850
    },
    {
      "epoch": 0.47557603686635946,
      "grad_norm": 0.4195538643886297,
      "learning_rate": 9.89628692277771e-06,
      "loss": 0.4791,
      "step": 860
    },
    {
      "epoch": 0.48110599078341015,
      "grad_norm": 0.45215319555762606,
      "learning_rate": 9.889664853065315e-06,
      "loss": 0.4749,
      "step": 870
    },
    {
      "epoch": 0.48663594470046084,
      "grad_norm": 0.3934282464522341,
      "learning_rate": 9.882840220538002e-06,
      "loss": 0.4847,
      "step": 880
    },
    {
      "epoch": 0.49216589861751153,
      "grad_norm": 0.45882453815542523,
      "learning_rate": 9.875813307917966e-06,
      "loss": 0.4823,
      "step": 890
    },
    {
      "epoch": 0.4976958525345622,
      "grad_norm": 0.4171659335530635,
      "learning_rate": 9.8685844063072e-06,
      "loss": 0.4791,
      "step": 900
    },
    {
      "epoch": 0.5032258064516129,
      "grad_norm": 0.36877001843415697,
      "learning_rate": 9.86115381517545e-06,
      "loss": 0.48,
      "step": 910
    },
    {
      "epoch": 0.5087557603686635,
      "grad_norm": 0.4248437155717216,
      "learning_rate": 9.853521842347787e-06,
      "loss": 0.4821,
      "step": 920
    },
    {
      "epoch": 0.5142857142857142,
      "grad_norm": 0.3865550402068234,
      "learning_rate": 9.845688803991873e-06,
      "loss": 0.4778,
      "step": 930
    },
    {
      "epoch": 0.5198156682027649,
      "grad_norm": 0.4048712896727712,
      "learning_rate": 9.837655024604856e-06,
      "loss": 0.4771,
      "step": 940
    },
    {
      "epoch": 0.5253456221198156,
      "grad_norm": 0.3928080223641722,
      "learning_rate": 9.82942083699993e-06,
      "loss": 0.4711,
      "step": 950
    },
    {
      "epoch": 0.5308755760368664,
      "grad_norm": 0.4073506281198024,
      "learning_rate": 9.82098658229254e-06,
      "loss": 0.4773,
      "step": 960
    },
    {
      "epoch": 0.5364055299539171,
      "grad_norm": 0.3799265930738453,
      "learning_rate": 9.812352609886261e-06,
      "loss": 0.4633,
      "step": 970
    },
    {
      "epoch": 0.5419354838709678,
      "grad_norm": 0.3791754783762012,
      "learning_rate": 9.803519277458323e-06,
      "loss": 0.4786,
      "step": 980
    },
    {
      "epoch": 0.5474654377880185,
      "grad_norm": 0.3724177446979329,
      "learning_rate": 9.794486950944775e-06,
      "loss": 0.4706,
      "step": 990
    },
    {
      "epoch": 0.5529953917050692,
      "grad_norm": 0.39011612493597525,
      "learning_rate": 9.785256004525357e-06,
      "loss": 0.472,
      "step": 1000
    },
    {
      "epoch": 0.5585253456221199,
      "grad_norm": 0.4159910634501067,
      "learning_rate": 9.775826820607967e-06,
      "loss": 0.4684,
      "step": 1010
    },
    {
      "epoch": 0.5640552995391706,
      "grad_norm": 0.35305967243893827,
      "learning_rate": 9.766199789812845e-06,
      "loss": 0.4705,
      "step": 1020
    },
    {
      "epoch": 0.5695852534562212,
      "grad_norm": 0.3826755145391677,
      "learning_rate": 9.756375310956375e-06,
      "loss": 0.4759,
      "step": 1030
    },
    {
      "epoch": 0.5751152073732719,
      "grad_norm": 0.44126945362012004,
      "learning_rate": 9.746353791034566e-06,
      "loss": 0.468,
      "step": 1040
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 0.3594904638993689,
      "learning_rate": 9.736135645206198e-06,
      "loss": 0.4718,
      "step": 1050
    },
    {
      "epoch": 0.5861751152073733,
      "grad_norm": 0.3757559330794908,
      "learning_rate": 9.725721296775616e-06,
      "loss": 0.4727,
      "step": 1060
    },
    {
      "epoch": 0.591705069124424,
      "grad_norm": 0.39116446398772703,
      "learning_rate": 9.7151111771752e-06,
      "loss": 0.4697,
      "step": 1070
    },
    {
      "epoch": 0.5972350230414747,
      "grad_norm": 0.3968279194826454,
      "learning_rate": 9.704305725947483e-06,
      "loss": 0.4698,
      "step": 1080
    },
    {
      "epoch": 0.6027649769585254,
      "grad_norm": 0.40587146268322655,
      "learning_rate": 9.693305390726952e-06,
      "loss": 0.4685,
      "step": 1090
    },
    {
      "epoch": 0.6082949308755761,
      "grad_norm": 0.377035786368524,
      "learning_rate": 9.682110627221503e-06,
      "loss": 0.4654,
      "step": 1100
    },
    {
      "epoch": 0.6138248847926268,
      "grad_norm": 0.4003895929403935,
      "learning_rate": 9.670721899193556e-06,
      "loss": 0.4769,
      "step": 1110
    },
    {
      "epoch": 0.6193548387096774,
      "grad_norm": 0.45185844229175914,
      "learning_rate": 9.659139678440845e-06,
      "loss": 0.4684,
      "step": 1120
    },
    {
      "epoch": 0.6248847926267281,
      "grad_norm": 0.37936262857073433,
      "learning_rate": 9.647364444776877e-06,
      "loss": 0.478,
      "step": 1130
    },
    {
      "epoch": 0.6304147465437788,
      "grad_norm": 0.42159273587503454,
      "learning_rate": 9.635396686011052e-06,
      "loss": 0.4731,
      "step": 1140
    },
    {
      "epoch": 0.6359447004608295,
      "grad_norm": 0.43046479239329904,
      "learning_rate": 9.623236897928456e-06,
      "loss": 0.4685,
      "step": 1150
    },
    {
      "epoch": 0.6414746543778802,
      "grad_norm": 0.4148842353858753,
      "learning_rate": 9.61088558426932e-06,
      "loss": 0.4659,
      "step": 1160
    },
    {
      "epoch": 0.6470046082949309,
      "grad_norm": 0.39473737868525427,
      "learning_rate": 9.59834325670815e-06,
      "loss": 0.4709,
      "step": 1170
    },
    {
      "epoch": 0.6525345622119816,
      "grad_norm": 0.4015353344240955,
      "learning_rate": 9.58561043483254e-06,
      "loss": 0.4751,
      "step": 1180
    },
    {
      "epoch": 0.6580645161290323,
      "grad_norm": 0.42994188947795353,
      "learning_rate": 9.572687646121632e-06,
      "loss": 0.4732,
      "step": 1190
    },
    {
      "epoch": 0.663594470046083,
      "grad_norm": 0.42736852075339316,
      "learning_rate": 9.559575425924279e-06,
      "loss": 0.4665,
      "step": 1200
    },
    {
      "epoch": 0.6691244239631337,
      "grad_norm": 0.34722978632130697,
      "learning_rate": 9.546274317436858e-06,
      "loss": 0.4608,
      "step": 1210
    },
    {
      "epoch": 0.6746543778801843,
      "grad_norm": 0.37549780094613294,
      "learning_rate": 9.53278487168077e-06,
      "loss": 0.4759,
      "step": 1220
    },
    {
      "epoch": 0.680184331797235,
      "grad_norm": 0.5132407412202136,
      "learning_rate": 9.519107647479609e-06,
      "loss": 0.4662,
      "step": 1230
    },
    {
      "epoch": 0.6857142857142857,
      "grad_norm": 0.4174412166764788,
      "learning_rate": 9.505243211436023e-06,
      "loss": 0.4594,
      "step": 1240
    },
    {
      "epoch": 0.6912442396313364,
      "grad_norm": 0.3916564363860535,
      "learning_rate": 9.49119213790823e-06,
      "loss": 0.4715,
      "step": 1250
    },
    {
      "epoch": 0.6967741935483871,
      "grad_norm": 0.3997293166106522,
      "learning_rate": 9.476955008986228e-06,
      "loss": 0.4683,
      "step": 1260
    },
    {
      "epoch": 0.7023041474654378,
      "grad_norm": 0.3308300953282631,
      "learning_rate": 9.46253241446768e-06,
      "loss": 0.4633,
      "step": 1270
    },
    {
      "epoch": 0.7078341013824885,
      "grad_norm": 0.43602935959437467,
      "learning_rate": 9.447924951833483e-06,
      "loss": 0.4656,
      "step": 1280
    },
    {
      "epoch": 0.7133640552995392,
      "grad_norm": 0.36016668450990325,
      "learning_rate": 9.433133226223018e-06,
      "loss": 0.4768,
      "step": 1290
    },
    {
      "epoch": 0.7188940092165899,
      "grad_norm": 0.3965741821184055,
      "learning_rate": 9.418157850409075e-06,
      "loss": 0.4669,
      "step": 1300
    },
    {
      "epoch": 0.7244239631336405,
      "grad_norm": 0.38734403985392707,
      "learning_rate": 9.40299944477247e-06,
      "loss": 0.4765,
      "step": 1310
    },
    {
      "epoch": 0.7299539170506912,
      "grad_norm": 0.3563445657533695,
      "learning_rate": 9.387658637276348e-06,
      "loss": 0.4655,
      "step": 1320
    },
    {
      "epoch": 0.7354838709677419,
      "grad_norm": 0.3410984943251049,
      "learning_rate": 9.372136063440165e-06,
      "loss": 0.4669,
      "step": 1330
    },
    {
      "epoch": 0.7410138248847926,
      "grad_norm": 0.43850058902455336,
      "learning_rate": 9.356432366313362e-06,
      "loss": 0.4601,
      "step": 1340
    },
    {
      "epoch": 0.7465437788018433,
      "grad_norm": 0.39491440886088125,
      "learning_rate": 9.340548196448729e-06,
      "loss": 0.4762,
      "step": 1350
    },
    {
      "epoch": 0.752073732718894,
      "grad_norm": 0.3843586922714567,
      "learning_rate": 9.324484211875442e-06,
      "loss": 0.4639,
      "step": 1360
    },
    {
      "epoch": 0.7576036866359447,
      "grad_norm": 0.37491204324406624,
      "learning_rate": 9.30824107807182e-06,
      "loss": 0.4654,
      "step": 1370
    },
    {
      "epoch": 0.7631336405529954,
      "grad_norm": 0.4576626954126291,
      "learning_rate": 9.291819467937746e-06,
      "loss": 0.4611,
      "step": 1380
    },
    {
      "epoch": 0.7686635944700461,
      "grad_norm": 0.4394756523970031,
      "learning_rate": 9.275220061766793e-06,
      "loss": 0.4619,
      "step": 1390
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 0.3687712612876861,
      "learning_rate": 9.258443547218041e-06,
      "loss": 0.4601,
      "step": 1400
    },
    {
      "epoch": 0.7797235023041474,
      "grad_norm": 0.37939730644811315,
      "learning_rate": 9.241490619287593e-06,
      "loss": 0.4671,
      "step": 1410
    },
    {
      "epoch": 0.7852534562211981,
      "grad_norm": 0.4503285510527914,
      "learning_rate": 9.224361980279779e-06,
      "loss": 0.4631,
      "step": 1420
    },
    {
      "epoch": 0.7907834101382488,
      "grad_norm": 0.45715075255423665,
      "learning_rate": 9.207058339778065e-06,
      "loss": 0.4681,
      "step": 1430
    },
    {
      "epoch": 0.7963133640552995,
      "grad_norm": 0.3541259878976316,
      "learning_rate": 9.189580414615658e-06,
      "loss": 0.457,
      "step": 1440
    },
    {
      "epoch": 0.8018433179723502,
      "grad_norm": 0.4085052745840183,
      "learning_rate": 9.171928928845802e-06,
      "loss": 0.4541,
      "step": 1450
    },
    {
      "epoch": 0.8073732718894009,
      "grad_norm": 0.3616899823860361,
      "learning_rate": 9.154104613711798e-06,
      "loss": 0.4575,
      "step": 1460
    },
    {
      "epoch": 0.8129032258064516,
      "grad_norm": 0.39971875774962656,
      "learning_rate": 9.136108207616694e-06,
      "loss": 0.4695,
      "step": 1470
    },
    {
      "epoch": 0.8184331797235023,
      "grad_norm": 0.4179584638146379,
      "learning_rate": 9.117940456092706e-06,
      "loss": 0.4659,
      "step": 1480
    },
    {
      "epoch": 0.823963133640553,
      "grad_norm": 0.3835155006911313,
      "learning_rate": 9.099602111770336e-06,
      "loss": 0.458,
      "step": 1490
    },
    {
      "epoch": 0.8294930875576036,
      "grad_norm": 0.36547715603099723,
      "learning_rate": 9.081093934347178e-06,
      "loss": 0.4646,
      "step": 1500
    },
    {
      "epoch": 0.8350230414746543,
      "grad_norm": 0.3295307717464045,
      "learning_rate": 9.062416690556463e-06,
      "loss": 0.4602,
      "step": 1510
    },
    {
      "epoch": 0.840552995391705,
      "grad_norm": 0.34026406181637314,
      "learning_rate": 9.043571154135285e-06,
      "loss": 0.4543,
      "step": 1520
    },
    {
      "epoch": 0.8460829493087557,
      "grad_norm": 0.40280573682285203,
      "learning_rate": 9.02455810579255e-06,
      "loss": 0.4669,
      "step": 1530
    },
    {
      "epoch": 0.8516129032258064,
      "grad_norm": 0.34345937973887686,
      "learning_rate": 9.005378333176637e-06,
      "loss": 0.4616,
      "step": 1540
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.34245350990620255,
      "learning_rate": 8.986032630842767e-06,
      "loss": 0.4623,
      "step": 1550
    },
    {
      "epoch": 0.8626728110599078,
      "grad_norm": 0.3852027478756217,
      "learning_rate": 8.966521800220084e-06,
      "loss": 0.4629,
      "step": 1560
    },
    {
      "epoch": 0.8682027649769585,
      "grad_norm": 0.4686461690105208,
      "learning_rate": 8.946846649578457e-06,
      "loss": 0.4606,
      "step": 1570
    },
    {
      "epoch": 0.8737327188940092,
      "grad_norm": 0.3405054745038256,
      "learning_rate": 8.927007993994997e-06,
      "loss": 0.4562,
      "step": 1580
    },
    {
      "epoch": 0.8792626728110599,
      "grad_norm": 0.3692693675496781,
      "learning_rate": 8.907006655320287e-06,
      "loss": 0.4558,
      "step": 1590
    },
    {
      "epoch": 0.8847926267281107,
      "grad_norm": 0.3379877106135158,
      "learning_rate": 8.886843462144343e-06,
      "loss": 0.4605,
      "step": 1600
    },
    {
      "epoch": 0.8903225806451613,
      "grad_norm": 0.36146892614586007,
      "learning_rate": 8.866519249762275e-06,
      "loss": 0.464,
      "step": 1610
    },
    {
      "epoch": 0.895852534562212,
      "grad_norm": 0.3979151729935212,
      "learning_rate": 8.846034860139706e-06,
      "loss": 0.465,
      "step": 1620
    },
    {
      "epoch": 0.9013824884792627,
      "grad_norm": 0.3488713134537118,
      "learning_rate": 8.82539114187786e-06,
      "loss": 0.4545,
      "step": 1630
    },
    {
      "epoch": 0.9069124423963134,
      "grad_norm": 0.37006007621420706,
      "learning_rate": 8.804588950178439e-06,
      "loss": 0.4592,
      "step": 1640
    },
    {
      "epoch": 0.9124423963133641,
      "grad_norm": 0.4218977786755043,
      "learning_rate": 8.783629146808175e-06,
      "loss": 0.4623,
      "step": 1650
    },
    {
      "epoch": 0.9179723502304148,
      "grad_norm": 0.36142612348312186,
      "learning_rate": 8.762512600063136e-06,
      "loss": 0.4597,
      "step": 1660
    },
    {
      "epoch": 0.9235023041474655,
      "grad_norm": 0.37743894605389894,
      "learning_rate": 8.74124018473276e-06,
      "loss": 0.4538,
      "step": 1670
    },
    {
      "epoch": 0.9290322580645162,
      "grad_norm": 0.37370963533147783,
      "learning_rate": 8.719812782063603e-06,
      "loss": 0.4547,
      "step": 1680
    },
    {
      "epoch": 0.9345622119815669,
      "grad_norm": 0.40809993391643884,
      "learning_rate": 8.698231279722845e-06,
      "loss": 0.462,
      "step": 1690
    },
    {
      "epoch": 0.9400921658986175,
      "grad_norm": 0.3756284623011763,
      "learning_rate": 8.676496571761507e-06,
      "loss": 0.4513,
      "step": 1700
    },
    {
      "epoch": 0.9456221198156682,
      "grad_norm": 0.3390495467924666,
      "learning_rate": 8.65460955857742e-06,
      "loss": 0.4574,
      "step": 1710
    },
    {
      "epoch": 0.9511520737327189,
      "grad_norm": 0.3968064162366307,
      "learning_rate": 8.632571146877924e-06,
      "loss": 0.4587,
      "step": 1720
    },
    {
      "epoch": 0.9566820276497696,
      "grad_norm": 0.37489260177502365,
      "learning_rate": 8.6103822496423e-06,
      "loss": 0.4614,
      "step": 1730
    },
    {
      "epoch": 0.9622119815668203,
      "grad_norm": 0.3468923282438075,
      "learning_rate": 8.588043786083952e-06,
      "loss": 0.4561,
      "step": 1740
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 0.3742022627413655,
      "learning_rate": 8.565556681612335e-06,
      "loss": 0.4521,
      "step": 1750
    },
    {
      "epoch": 0.9732718894009217,
      "grad_norm": 0.35327369319565305,
      "learning_rate": 8.542921867794597e-06,
      "loss": 0.4534,
      "step": 1760
    },
    {
      "epoch": 0.9788018433179724,
      "grad_norm": 0.39149761577819314,
      "learning_rate": 8.520140282317018e-06,
      "loss": 0.4561,
      "step": 1770
    },
    {
      "epoch": 0.9843317972350231,
      "grad_norm": 0.42419710853826387,
      "learning_rate": 8.497212868946132e-06,
      "loss": 0.4496,
      "step": 1780
    },
    {
      "epoch": 0.9898617511520738,
      "grad_norm": 0.370513055308333,
      "learning_rate": 8.474140577489652e-06,
      "loss": 0.4512,
      "step": 1790
    },
    {
      "epoch": 0.9953917050691244,
      "grad_norm": 0.46731006937418296,
      "learning_rate": 8.45092436375712e-06,
      "loss": 0.447,
      "step": 1800
    },
    {
      "epoch": 1.0009216589861751,
      "grad_norm": 0.43527476672720355,
      "learning_rate": 8.4275651895203e-06,
      "loss": 0.4858,
      "step": 1810
    },
    {
      "epoch": 1.0064516129032257,
      "grad_norm": 0.3927852344244657,
      "learning_rate": 8.404064022473344e-06,
      "loss": 0.433,
      "step": 1820
    },
    {
      "epoch": 1.0119815668202765,
      "grad_norm": 0.44790042998448304,
      "learning_rate": 8.380421836192705e-06,
      "loss": 0.4299,
      "step": 1830
    },
    {
      "epoch": 1.017511520737327,
      "grad_norm": 0.36665409180864517,
      "learning_rate": 8.356639610096799e-06,
      "loss": 0.4372,
      "step": 1840
    },
    {
      "epoch": 1.023041474654378,
      "grad_norm": 0.4154548719085105,
      "learning_rate": 8.33271832940543e-06,
      "loss": 0.4153,
      "step": 1850
    },
    {
      "epoch": 1.0285714285714285,
      "grad_norm": 0.3498424106524037,
      "learning_rate": 8.308658985098983e-06,
      "loss": 0.4365,
      "step": 1860
    },
    {
      "epoch": 1.0341013824884793,
      "grad_norm": 0.3527496476806808,
      "learning_rate": 8.284462573877367e-06,
      "loss": 0.4414,
      "step": 1870
    },
    {
      "epoch": 1.0396313364055298,
      "grad_norm": 0.34095264970889066,
      "learning_rate": 8.260130098118724e-06,
      "loss": 0.4373,
      "step": 1880
    },
    {
      "epoch": 1.0451612903225806,
      "grad_norm": 0.3525020261906649,
      "learning_rate": 8.235662565837901e-06,
      "loss": 0.446,
      "step": 1890
    },
    {
      "epoch": 1.0506912442396312,
      "grad_norm": 0.30559116702498945,
      "learning_rate": 8.211060990644699e-06,
      "loss": 0.4293,
      "step": 1900
    },
    {
      "epoch": 1.056221198156682,
      "grad_norm": 0.37541049203915616,
      "learning_rate": 8.18632639170188e-06,
      "loss": 0.4306,
      "step": 1910
    },
    {
      "epoch": 1.0617511520737328,
      "grad_norm": 0.3896683499442952,
      "learning_rate": 8.161459793682937e-06,
      "loss": 0.4338,
      "step": 1920
    },
    {
      "epoch": 1.0672811059907834,
      "grad_norm": 0.35556240560090524,
      "learning_rate": 8.136462226729663e-06,
      "loss": 0.4273,
      "step": 1930
    },
    {
      "epoch": 1.072811059907834,
      "grad_norm": 0.33870684965626063,
      "learning_rate": 8.111334726409453e-06,
      "loss": 0.4275,
      "step": 1940
    },
    {
      "epoch": 1.0783410138248848,
      "grad_norm": 0.32599049999370167,
      "learning_rate": 8.08607833367243e-06,
      "loss": 0.4344,
      "step": 1950
    },
    {
      "epoch": 1.0838709677419356,
      "grad_norm": 0.37446391533889434,
      "learning_rate": 8.060694094808295e-06,
      "loss": 0.4376,
      "step": 1960
    },
    {
      "epoch": 1.0894009216589862,
      "grad_norm": 0.33002930959463256,
      "learning_rate": 8.035183061403006e-06,
      "loss": 0.434,
      "step": 1970
    },
    {
      "epoch": 1.094930875576037,
      "grad_norm": 0.35423865601253207,
      "learning_rate": 8.0095462902952e-06,
      "loss": 0.4201,
      "step": 1980
    },
    {
      "epoch": 1.1004608294930875,
      "grad_norm": 0.3248427195938627,
      "learning_rate": 7.983784843532415e-06,
      "loss": 0.4253,
      "step": 1990
    },
    {
      "epoch": 1.1059907834101383,
      "grad_norm": 0.34623653882886374,
      "learning_rate": 7.957899788327092e-06,
      "loss": 0.4323,
      "step": 2000
    },
    {
      "epoch": 1.111520737327189,
      "grad_norm": 0.3599295805789168,
      "learning_rate": 7.93189219701237e-06,
      "loss": 0.4259,
      "step": 2010
    },
    {
      "epoch": 1.1170506912442397,
      "grad_norm": 0.38414283548649564,
      "learning_rate": 7.905763146997652e-06,
      "loss": 0.4335,
      "step": 2020
    },
    {
      "epoch": 1.1225806451612903,
      "grad_norm": 0.32382155481213737,
      "learning_rate": 7.879513720723984e-06,
      "loss": 0.4395,
      "step": 2030
    },
    {
      "epoch": 1.128110599078341,
      "grad_norm": 0.32042378851107606,
      "learning_rate": 7.853145005619199e-06,
      "loss": 0.426,
      "step": 2040
    },
    {
      "epoch": 1.1336405529953917,
      "grad_norm": 0.33755347609978986,
      "learning_rate": 7.826658094052884e-06,
      "loss": 0.4362,
      "step": 2050
    },
    {
      "epoch": 1.1391705069124425,
      "grad_norm": 0.39877451031067934,
      "learning_rate": 7.800054083291114e-06,
      "loss": 0.428,
      "step": 2060
    },
    {
      "epoch": 1.144700460829493,
      "grad_norm": 0.31738937325882993,
      "learning_rate": 7.773334075451e-06,
      "loss": 0.4413,
      "step": 2070
    },
    {
      "epoch": 1.1502304147465439,
      "grad_norm": 0.34047775678025793,
      "learning_rate": 7.746499177455036e-06,
      "loss": 0.4309,
      "step": 2080
    },
    {
      "epoch": 1.1557603686635944,
      "grad_norm": 0.324338242448013,
      "learning_rate": 7.719550500985234e-06,
      "loss": 0.4315,
      "step": 2090
    },
    {
      "epoch": 1.1612903225806452,
      "grad_norm": 0.33430643506460095,
      "learning_rate": 7.69248916243708e-06,
      "loss": 0.4329,
      "step": 2100
    },
    {
      "epoch": 1.1668202764976958,
      "grad_norm": 0.3641478085283545,
      "learning_rate": 7.665316282873281e-06,
      "loss": 0.4286,
      "step": 2110
    },
    {
      "epoch": 1.1723502304147466,
      "grad_norm": 0.43045384768424416,
      "learning_rate": 7.638032987977322e-06,
      "loss": 0.4247,
      "step": 2120
    },
    {
      "epoch": 1.1778801843317972,
      "grad_norm": 0.4742558814490241,
      "learning_rate": 7.610640408006832e-06,
      "loss": 0.4358,
      "step": 2130
    },
    {
      "epoch": 1.183410138248848,
      "grad_norm": 0.3728801367095553,
      "learning_rate": 7.583139677746769e-06,
      "loss": 0.4261,
      "step": 2140
    },
    {
      "epoch": 1.1889400921658986,
      "grad_norm": 0.3946467119158356,
      "learning_rate": 7.555531936462398e-06,
      "loss": 0.4215,
      "step": 2150
    },
    {
      "epoch": 1.1944700460829494,
      "grad_norm": 0.42194525138871064,
      "learning_rate": 7.527818327852101e-06,
      "loss": 0.4413,
      "step": 2160
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.3362981808853216,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.4268,
      "step": 2170
    },
    {
      "epoch": 1.2055299539170508,
      "grad_norm": 0.3603451029762717,
      "learning_rate": 7.472078105328391e-06,
      "loss": 0.4264,
      "step": 2180
    },
    {
      "epoch": 1.2110599078341013,
      "grad_norm": 0.4138525995577666,
      "learning_rate": 7.444053800550004e-06,
      "loss": 0.4366,
      "step": 2190
    },
    {
      "epoch": 1.2165898617511521,
      "grad_norm": 0.39448869972439937,
      "learning_rate": 7.415928246620086e-06,
      "loss": 0.4294,
      "step": 2200
    },
    {
      "epoch": 1.2221198156682027,
      "grad_norm": 0.33343622855975236,
      "learning_rate": 7.387702608688302e-06,
      "loss": 0.42,
      "step": 2210
    },
    {
      "epoch": 1.2276497695852535,
      "grad_norm": 0.33564499515032403,
      "learning_rate": 7.359378056050472e-06,
      "loss": 0.4382,
      "step": 2220
    },
    {
      "epoch": 1.233179723502304,
      "grad_norm": 0.3305094552462621,
      "learning_rate": 7.3309557621001295e-06,
      "loss": 0.4362,
      "step": 2230
    },
    {
      "epoch": 1.238709677419355,
      "grad_norm": 0.3505064540471358,
      "learning_rate": 7.3024369042799094e-06,
      "loss": 0.4315,
      "step": 2240
    },
    {
      "epoch": 1.2442396313364055,
      "grad_norm": 0.4175232059230619,
      "learning_rate": 7.273822664032771e-06,
      "loss": 0.4333,
      "step": 2250
    },
    {
      "epoch": 1.2497695852534563,
      "grad_norm": 0.31440206686096517,
      "learning_rate": 7.245114226753055e-06,
      "loss": 0.4302,
      "step": 2260
    },
    {
      "epoch": 1.2552995391705069,
      "grad_norm": 0.3799967479589324,
      "learning_rate": 7.2163127817373815e-06,
      "loss": 0.4416,
      "step": 2270
    },
    {
      "epoch": 1.2608294930875577,
      "grad_norm": 0.34246345630017244,
      "learning_rate": 7.1874195221353706e-06,
      "loss": 0.4315,
      "step": 2280
    },
    {
      "epoch": 1.2663594470046082,
      "grad_norm": 0.2952208188222985,
      "learning_rate": 7.158435644900226e-06,
      "loss": 0.4308,
      "step": 2290
    },
    {
      "epoch": 1.271889400921659,
      "grad_norm": 0.3364601276348195,
      "learning_rate": 7.129362350739138e-06,
      "loss": 0.4271,
      "step": 2300
    },
    {
      "epoch": 1.2774193548387096,
      "grad_norm": 0.3474139278148847,
      "learning_rate": 7.1002008440635515e-06,
      "loss": 0.4305,
      "step": 2310
    },
    {
      "epoch": 1.2829493087557604,
      "grad_norm": 0.3280092320348997,
      "learning_rate": 7.070952332939266e-06,
      "loss": 0.4331,
      "step": 2320
    },
    {
      "epoch": 1.288479262672811,
      "grad_norm": 0.4223833613636633,
      "learning_rate": 7.04161802903639e-06,
      "loss": 0.4327,
      "step": 2330
    },
    {
      "epoch": 1.2940092165898618,
      "grad_norm": 0.3448536879815269,
      "learning_rate": 7.012199147579146e-06,
      "loss": 0.4246,
      "step": 2340
    },
    {
      "epoch": 1.2995391705069124,
      "grad_norm": 0.3207441765420925,
      "learning_rate": 6.98269690729553e-06,
      "loss": 0.4301,
      "step": 2350
    },
    {
      "epoch": 1.3050691244239632,
      "grad_norm": 0.3165222151173208,
      "learning_rate": 6.953112530366818e-06,
      "loss": 0.4261,
      "step": 2360
    },
    {
      "epoch": 1.3105990783410137,
      "grad_norm": 0.3876615485028844,
      "learning_rate": 6.923447242376942e-06,
      "loss": 0.4316,
      "step": 2370
    },
    {
      "epoch": 1.3161290322580645,
      "grad_norm": 0.37232768341887623,
      "learning_rate": 6.893702272261711e-06,
      "loss": 0.4212,
      "step": 2380
    },
    {
      "epoch": 1.3216589861751151,
      "grad_norm": 0.34637683615909776,
      "learning_rate": 6.863878852257908e-06,
      "loss": 0.4334,
      "step": 2390
    },
    {
      "epoch": 1.327188940092166,
      "grad_norm": 0.35731944832213297,
      "learning_rate": 6.833978217852233e-06,
      "loss": 0.4225,
      "step": 2400
    },
    {
      "epoch": 1.3327188940092167,
      "grad_norm": 0.31517156848331396,
      "learning_rate": 6.80400160773013e-06,
      "loss": 0.4254,
      "step": 2410
    },
    {
      "epoch": 1.3382488479262673,
      "grad_norm": 0.31675935891661117,
      "learning_rate": 6.773950263724467e-06,
      "loss": 0.4267,
      "step": 2420
    },
    {
      "epoch": 1.3437788018433179,
      "grad_norm": 0.35820405964245244,
      "learning_rate": 6.743825430764091e-06,
      "loss": 0.4369,
      "step": 2430
    },
    {
      "epoch": 1.3493087557603687,
      "grad_norm": 0.3198735130417677,
      "learning_rate": 6.713628356822259e-06,
      "loss": 0.4334,
      "step": 2440
    },
    {
      "epoch": 1.3548387096774195,
      "grad_norm": 0.35391376983158945,
      "learning_rate": 6.683360292864933e-06,
      "loss": 0.4298,
      "step": 2450
    },
    {
      "epoch": 1.36036866359447,
      "grad_norm": 0.3447181141751271,
      "learning_rate": 6.653022492798959e-06,
      "loss": 0.429,
      "step": 2460
    },
    {
      "epoch": 1.3658986175115206,
      "grad_norm": 0.32591032190478314,
      "learning_rate": 6.622616213420125e-06,
      "loss": 0.4242,
      "step": 2470
    },
    {
      "epoch": 1.3714285714285714,
      "grad_norm": 0.35079358158009544,
      "learning_rate": 6.592142714361085e-06,
      "loss": 0.4299,
      "step": 2480
    },
    {
      "epoch": 1.3769585253456222,
      "grad_norm": 0.3105061790757026,
      "learning_rate": 6.561603258039195e-06,
      "loss": 0.4315,
      "step": 2490
    },
    {
      "epoch": 1.3824884792626728,
      "grad_norm": 0.366393325245765,
      "learning_rate": 6.530999109604197e-06,
      "loss": 0.4244,
      "step": 2500
    },
    {
      "epoch": 1.3880184331797234,
      "grad_norm": 0.354855687477892,
      "learning_rate": 6.500331536885819e-06,
      "loss": 0.425,
      "step": 2510
    },
    {
      "epoch": 1.3935483870967742,
      "grad_norm": 0.29867754767506527,
      "learning_rate": 6.469601810341247e-06,
      "loss": 0.4314,
      "step": 2520
    },
    {
      "epoch": 1.399078341013825,
      "grad_norm": 0.3631093513648162,
      "learning_rate": 6.438811203002499e-06,
      "loss": 0.4246,
      "step": 2530
    },
    {
      "epoch": 1.4046082949308756,
      "grad_norm": 0.3467860388407693,
      "learning_rate": 6.407960990423683e-06,
      "loss": 0.4344,
      "step": 2540
    },
    {
      "epoch": 1.4101382488479262,
      "grad_norm": 0.31828964825920697,
      "learning_rate": 6.377052450628159e-06,
      "loss": 0.4158,
      "step": 2550
    },
    {
      "epoch": 1.415668202764977,
      "grad_norm": 0.31023813410711815,
      "learning_rate": 6.346086864055594e-06,
      "loss": 0.4356,
      "step": 2560
    },
    {
      "epoch": 1.4211981566820278,
      "grad_norm": 0.332473917922917,
      "learning_rate": 6.31506551350891e-06,
      "loss": 0.43,
      "step": 2570
    },
    {
      "epoch": 1.4267281105990783,
      "grad_norm": 0.3409141756767579,
      "learning_rate": 6.283989684101155e-06,
      "loss": 0.4296,
      "step": 2580
    },
    {
      "epoch": 1.432258064516129,
      "grad_norm": 0.3385052993526194,
      "learning_rate": 6.252860663202254e-06,
      "loss": 0.4297,
      "step": 2590
    },
    {
      "epoch": 1.4377880184331797,
      "grad_norm": 0.32236622569856693,
      "learning_rate": 6.221679740385684e-06,
      "loss": 0.4274,
      "step": 2600
    },
    {
      "epoch": 1.4433179723502305,
      "grad_norm": 0.315316876619347,
      "learning_rate": 6.190448207375046e-06,
      "loss": 0.4162,
      "step": 2610
    },
    {
      "epoch": 1.448847926267281,
      "grad_norm": 0.3376657547389814,
      "learning_rate": 6.159167357990555e-06,
      "loss": 0.433,
      "step": 2620
    },
    {
      "epoch": 1.4543778801843317,
      "grad_norm": 0.32358782865643143,
      "learning_rate": 6.127838488095448e-06,
      "loss": 0.4267,
      "step": 2630
    },
    {
      "epoch": 1.4599078341013825,
      "grad_norm": 0.3756530225009165,
      "learning_rate": 6.096462895542288e-06,
      "loss": 0.4373,
      "step": 2640
    },
    {
      "epoch": 1.4654377880184333,
      "grad_norm": 0.38682543540126985,
      "learning_rate": 6.065041880119209e-06,
      "loss": 0.4227,
      "step": 2650
    },
    {
      "epoch": 1.4709677419354839,
      "grad_norm": 0.38616555341944436,
      "learning_rate": 6.0335767434960625e-06,
      "loss": 0.4185,
      "step": 2660
    },
    {
      "epoch": 1.4764976958525344,
      "grad_norm": 0.3602473989547172,
      "learning_rate": 6.002068789170497e-06,
      "loss": 0.4229,
      "step": 2670
    },
    {
      "epoch": 1.4820276497695852,
      "grad_norm": 0.34024391453280106,
      "learning_rate": 5.970519322413965e-06,
      "loss": 0.4325,
      "step": 2680
    },
    {
      "epoch": 1.487557603686636,
      "grad_norm": 0.334874165318193,
      "learning_rate": 5.938929650217636e-06,
      "loss": 0.4325,
      "step": 2690
    },
    {
      "epoch": 1.4930875576036866,
      "grad_norm": 0.29562689064173814,
      "learning_rate": 5.9073010812382595e-06,
      "loss": 0.4171,
      "step": 2700
    },
    {
      "epoch": 1.4986175115207372,
      "grad_norm": 0.30805696197429483,
      "learning_rate": 5.875634925743955e-06,
      "loss": 0.428,
      "step": 2710
    },
    {
      "epoch": 1.504147465437788,
      "grad_norm": 0.32622309094002283,
      "learning_rate": 5.843932495559925e-06,
      "loss": 0.4181,
      "step": 2720
    },
    {
      "epoch": 1.5096774193548388,
      "grad_norm": 0.33140067437976467,
      "learning_rate": 5.812195104014119e-06,
      "loss": 0.4359,
      "step": 2730
    },
    {
      "epoch": 1.5152073732718894,
      "grad_norm": 0.3233776164901754,
      "learning_rate": 5.780424065882817e-06,
      "loss": 0.4241,
      "step": 2740
    },
    {
      "epoch": 1.52073732718894,
      "grad_norm": 0.3295175080953441,
      "learning_rate": 5.748620697336165e-06,
      "loss": 0.4276,
      "step": 2750
    },
    {
      "epoch": 1.5262672811059907,
      "grad_norm": 0.3051493556807249,
      "learning_rate": 5.716786315883657e-06,
      "loss": 0.4332,
      "step": 2760
    },
    {
      "epoch": 1.5317972350230415,
      "grad_norm": 0.32518469758621127,
      "learning_rate": 5.6849222403195455e-06,
      "loss": 0.427,
      "step": 2770
    },
    {
      "epoch": 1.5373271889400921,
      "grad_norm": 0.3502785279432566,
      "learning_rate": 5.653029790668221e-06,
      "loss": 0.4228,
      "step": 2780
    },
    {
      "epoch": 1.5428571428571427,
      "grad_norm": 0.328626841799296,
      "learning_rate": 5.621110288129509e-06,
      "loss": 0.4254,
      "step": 2790
    },
    {
      "epoch": 1.5483870967741935,
      "grad_norm": 0.34919486039336417,
      "learning_rate": 5.5891650550239555e-06,
      "loss": 0.4333,
      "step": 2800
    },
    {
      "epoch": 1.5539170506912443,
      "grad_norm": 0.31813515025326766,
      "learning_rate": 5.5571954147380355e-06,
      "loss": 0.4246,
      "step": 2810
    },
    {
      "epoch": 1.5594470046082949,
      "grad_norm": 0.33770710120288266,
      "learning_rate": 5.525202691669335e-06,
      "loss": 0.4289,
      "step": 2820
    },
    {
      "epoch": 1.5649769585253455,
      "grad_norm": 0.3169850413623026,
      "learning_rate": 5.493188211171688e-06,
      "loss": 0.4303,
      "step": 2830
    },
    {
      "epoch": 1.5705069124423963,
      "grad_norm": 0.3593536785787455,
      "learning_rate": 5.461153299500261e-06,
      "loss": 0.4286,
      "step": 2840
    },
    {
      "epoch": 1.576036866359447,
      "grad_norm": 0.30079605129375003,
      "learning_rate": 5.429099283756618e-06,
      "loss": 0.4299,
      "step": 2850
    },
    {
      "epoch": 1.5815668202764976,
      "grad_norm": 0.3239103063739578,
      "learning_rate": 5.3970274918337464e-06,
      "loss": 0.4162,
      "step": 2860
    },
    {
      "epoch": 1.5870967741935482,
      "grad_norm": 0.3255485670024084,
      "learning_rate": 5.364939252361041e-06,
      "loss": 0.4276,
      "step": 2870
    },
    {
      "epoch": 1.592626728110599,
      "grad_norm": 0.3301274211586159,
      "learning_rate": 5.3328358946492634e-06,
      "loss": 0.4192,
      "step": 2880
    },
    {
      "epoch": 1.5981566820276498,
      "grad_norm": 0.316100975989548,
      "learning_rate": 5.3007187486354735e-06,
      "loss": 0.4283,
      "step": 2890
    },
    {
      "epoch": 1.6036866359447006,
      "grad_norm": 0.3340521867756924,
      "learning_rate": 5.268589144827939e-06,
      "loss": 0.4387,
      "step": 2900
    },
    {
      "epoch": 1.6092165898617512,
      "grad_norm": 0.298422914640259,
      "learning_rate": 5.236448414251012e-06,
      "loss": 0.4332,
      "step": 2910
    },
    {
      "epoch": 1.6147465437788018,
      "grad_norm": 0.3397319884556222,
      "learning_rate": 5.204297888389988e-06,
      "loss": 0.4248,
      "step": 2920
    },
    {
      "epoch": 1.6202764976958526,
      "grad_norm": 0.30762817141809806,
      "learning_rate": 5.17213889913595e-06,
      "loss": 0.4207,
      "step": 2930
    },
    {
      "epoch": 1.6258064516129034,
      "grad_norm": 0.30525042043948947,
      "learning_rate": 5.139972778730593e-06,
      "loss": 0.4232,
      "step": 2940
    },
    {
      "epoch": 1.631336405529954,
      "grad_norm": 0.38997729074598075,
      "learning_rate": 5.107800859711032e-06,
      "loss": 0.4271,
      "step": 2950
    },
    {
      "epoch": 1.6368663594470045,
      "grad_norm": 0.36592419956378963,
      "learning_rate": 5.075624474854599e-06,
      "loss": 0.4247,
      "step": 2960
    },
    {
      "epoch": 1.6423963133640553,
      "grad_norm": 0.29950858149943177,
      "learning_rate": 5.0434449571236314e-06,
      "loss": 0.4168,
      "step": 2970
    },
    {
      "epoch": 1.6479262672811061,
      "grad_norm": 0.32408588211542844,
      "learning_rate": 5.01126363961025e-06,
      "loss": 0.4287,
      "step": 2980
    },
    {
      "epoch": 1.6534562211981567,
      "grad_norm": 0.3199409766199922,
      "learning_rate": 4.979081855481136e-06,
      "loss": 0.4223,
      "step": 2990
    },
    {
      "epoch": 1.6589861751152073,
      "grad_norm": 0.3308561884228066,
      "learning_rate": 4.946900937922302e-06,
      "loss": 0.4219,
      "step": 3000
    },
    {
      "epoch": 1.664516129032258,
      "grad_norm": 0.316626858385731,
      "learning_rate": 4.914722220083859e-06,
      "loss": 0.4331,
      "step": 3010
    },
    {
      "epoch": 1.670046082949309,
      "grad_norm": 0.40665900945253314,
      "learning_rate": 4.8825470350247925e-06,
      "loss": 0.4246,
      "step": 3020
    },
    {
      "epoch": 1.6755760368663595,
      "grad_norm": 0.3780339341762518,
      "learning_rate": 4.850376715657736e-06,
      "loss": 0.4295,
      "step": 3030
    },
    {
      "epoch": 1.68110599078341,
      "grad_norm": 0.33553921665146924,
      "learning_rate": 4.81821259469375e-06,
      "loss": 0.4298,
      "step": 3040
    },
    {
      "epoch": 1.6866359447004609,
      "grad_norm": 0.34374298733619857,
      "learning_rate": 4.786056004587125e-06,
      "loss": 0.423,
      "step": 3050
    },
    {
      "epoch": 1.6921658986175117,
      "grad_norm": 0.3151480951742569,
      "learning_rate": 4.753908277480162e-06,
      "loss": 0.413,
      "step": 3060
    },
    {
      "epoch": 1.6976958525345622,
      "grad_norm": 0.30161164918082994,
      "learning_rate": 4.721770745148003e-06,
      "loss": 0.4278,
      "step": 3070
    },
    {
      "epoch": 1.7032258064516128,
      "grad_norm": 0.31750652806344676,
      "learning_rate": 4.689644738943451e-06,
      "loss": 0.4245,
      "step": 3080
    },
    {
      "epoch": 1.7087557603686636,
      "grad_norm": 0.3494801701281207,
      "learning_rate": 4.657531589741822e-06,
      "loss": 0.4236,
      "step": 3090
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 0.3449639569803028,
      "learning_rate": 4.6254326278858056e-06,
      "loss": 0.4261,
      "step": 3100
    },
    {
      "epoch": 1.719815668202765,
      "grad_norm": 0.32166503784245665,
      "learning_rate": 4.593349183130359e-06,
      "loss": 0.4255,
      "step": 3110
    },
    {
      "epoch": 1.7253456221198156,
      "grad_norm": 0.3318579047371131,
      "learning_rate": 4.561282584587612e-06,
      "loss": 0.4319,
      "step": 3120
    },
    {
      "epoch": 1.7308755760368664,
      "grad_norm": 0.3194685879222433,
      "learning_rate": 4.529234160671814e-06,
      "loss": 0.4206,
      "step": 3130
    },
    {
      "epoch": 1.7364055299539172,
      "grad_norm": 0.2942895639419189,
      "learning_rate": 4.497205239044305e-06,
      "loss": 0.4228,
      "step": 3140
    },
    {
      "epoch": 1.7419354838709677,
      "grad_norm": 0.357982132475268,
      "learning_rate": 4.465197146558498e-06,
      "loss": 0.426,
      "step": 3150
    },
    {
      "epoch": 1.7474654377880183,
      "grad_norm": 0.3165760814177771,
      "learning_rate": 4.433211209204928e-06,
      "loss": 0.4259,
      "step": 3160
    },
    {
      "epoch": 1.7529953917050691,
      "grad_norm": 0.3275533345265724,
      "learning_rate": 4.401248752056317e-06,
      "loss": 0.419,
      "step": 3170
    },
    {
      "epoch": 1.75852534562212,
      "grad_norm": 0.35208363382480495,
      "learning_rate": 4.369311099212676e-06,
      "loss": 0.4285,
      "step": 3180
    },
    {
      "epoch": 1.7640552995391705,
      "grad_norm": 0.3322400582949709,
      "learning_rate": 4.337399573746457e-06,
      "loss": 0.4227,
      "step": 3190
    },
    {
      "epoch": 1.769585253456221,
      "grad_norm": 0.3318593706128403,
      "learning_rate": 4.305515497647739e-06,
      "loss": 0.4169,
      "step": 3200
    },
    {
      "epoch": 1.7751152073732719,
      "grad_norm": 0.3342118085075894,
      "learning_rate": 4.273660191769463e-06,
      "loss": 0.4156,
      "step": 3210
    },
    {
      "epoch": 1.7806451612903227,
      "grad_norm": 0.3243982851428786,
      "learning_rate": 4.241834975772715e-06,
      "loss": 0.4296,
      "step": 3220
    },
    {
      "epoch": 1.7861751152073733,
      "grad_norm": 0.30024745550956594,
      "learning_rate": 4.210041168072055e-06,
      "loss": 0.4155,
      "step": 3230
    },
    {
      "epoch": 1.7917050691244238,
      "grad_norm": 0.31137799931917143,
      "learning_rate": 4.1782800857809025e-06,
      "loss": 0.423,
      "step": 3240
    },
    {
      "epoch": 1.7972350230414746,
      "grad_norm": 0.34187401190050304,
      "learning_rate": 4.146553044656967e-06,
      "loss": 0.4222,
      "step": 3250
    },
    {
      "epoch": 1.8027649769585254,
      "grad_norm": 0.3233685519538971,
      "learning_rate": 4.114861359047744e-06,
      "loss": 0.411,
      "step": 3260
    },
    {
      "epoch": 1.808294930875576,
      "grad_norm": 0.31178793164096785,
      "learning_rate": 4.083206341836069e-06,
      "loss": 0.4244,
      "step": 3270
    },
    {
      "epoch": 1.8138248847926266,
      "grad_norm": 0.31269926393343633,
      "learning_rate": 4.051589304385723e-06,
      "loss": 0.4163,
      "step": 3280
    },
    {
      "epoch": 1.8193548387096774,
      "grad_norm": 0.3184909825619447,
      "learning_rate": 4.02001155648711e-06,
      "loss": 0.4292,
      "step": 3290
    },
    {
      "epoch": 1.8248847926267282,
      "grad_norm": 0.3028954525575807,
      "learning_rate": 3.988474406302995e-06,
      "loss": 0.4258,
      "step": 3300
    },
    {
      "epoch": 1.8304147465437788,
      "grad_norm": 0.3155454356334078,
      "learning_rate": 3.956979160314318e-06,
      "loss": 0.4131,
      "step": 3310
    },
    {
      "epoch": 1.8359447004608294,
      "grad_norm": 0.28622990953618355,
      "learning_rate": 3.925527123266059e-06,
      "loss": 0.4161,
      "step": 3320
    },
    {
      "epoch": 1.8414746543778802,
      "grad_norm": 0.2945156841734928,
      "learning_rate": 3.894119598113196e-06,
      "loss": 0.4298,
      "step": 3330
    },
    {
      "epoch": 1.847004608294931,
      "grad_norm": 0.33440108206239455,
      "learning_rate": 3.862757885966726e-06,
      "loss": 0.4281,
      "step": 3340
    },
    {
      "epoch": 1.8525345622119815,
      "grad_norm": 0.31936860050567883,
      "learning_rate": 3.83144328603976e-06,
      "loss": 0.4212,
      "step": 3350
    },
    {
      "epoch": 1.8580645161290321,
      "grad_norm": 0.35312174535698115,
      "learning_rate": 3.800177095593706e-06,
      "loss": 0.4227,
      "step": 3360
    },
    {
      "epoch": 1.863594470046083,
      "grad_norm": 0.34861457106320726,
      "learning_rate": 3.7689606098845264e-06,
      "loss": 0.431,
      "step": 3370
    },
    {
      "epoch": 1.8691244239631337,
      "grad_norm": 0.27435811268943505,
      "learning_rate": 3.737795122109075e-06,
      "loss": 0.408,
      "step": 3380
    },
    {
      "epoch": 1.8746543778801843,
| "grad_norm": 0.2838590192840328, | |
| "learning_rate": 3.706681923351533e-06, | |
| "loss": 0.4181, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.8801843317972349, | |
| "grad_norm": 0.2829660341227658, | |
| "learning_rate": 3.675622302529914e-06, | |
| "loss": 0.4221, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.8857142857142857, | |
| "grad_norm": 0.3274364986529272, | |
| "learning_rate": 3.644617546342678e-06, | |
| "loss": 0.4269, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.8912442396313365, | |
| "grad_norm": 0.30334659787980706, | |
| "learning_rate": 3.6136689392154186e-06, | |
| "loss": 0.4218, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.896774193548387, | |
| "grad_norm": 0.2997371229134624, | |
| "learning_rate": 3.582777763247659e-06, | |
| "loss": 0.4271, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.9023041474654376, | |
| "grad_norm": 0.2912936491731307, | |
| "learning_rate": 3.5519452981597386e-06, | |
| "loss": 0.4261, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.9078341013824884, | |
| "grad_norm": 0.31304519994194124, | |
| "learning_rate": 3.521172821239796e-06, | |
| "loss": 0.4237, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.9133640552995392, | |
| "grad_norm": 0.29223645004148463, | |
| "learning_rate": 3.490461607290857e-06, | |
| "loss": 0.4206, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.9188940092165898, | |
| "grad_norm": 0.2767982303480485, | |
| "learning_rate": 3.4598129285780214e-06, | |
| "loss": 0.4239, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.9244239631336404, | |
| "grad_norm": 0.2857900663137054, | |
| "learning_rate": 3.4292280547757586e-06, | |
| "loss": 0.4274, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.9299539170506912, | |
| "grad_norm": 0.30735872690033156, | |
| "learning_rate": 3.398708252915312e-06, | |
| "loss": 0.4249, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.935483870967742, | |
| "grad_norm": 0.35018152107887573, | |
| "learning_rate": 3.368254787332206e-06, | |
| "loss": 0.4224, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.9410138248847926, | |
| "grad_norm": 0.32130245581445155, | |
| "learning_rate": 3.337868919613869e-06, | |
| "loss": 0.4273, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.9465437788018434, | |
| "grad_norm": 0.3399875563122416, | |
| "learning_rate": 3.3075519085473746e-06, | |
| "loss": 0.416, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.952073732718894, | |
| "grad_norm": 0.2975964678653707, | |
| "learning_rate": 3.277305010067282e-06, | |
| "loss": 0.4161, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.9576036866359448, | |
| "grad_norm": 0.32352875000727493, | |
| "learning_rate": 3.2471294772036287e-06, | |
| "loss": 0.4338, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.9631336405529956, | |
| "grad_norm": 0.26327310896150496, | |
| "learning_rate": 3.2170265600299977e-06, | |
| "loss": 0.4231, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.9686635944700461, | |
| "grad_norm": 0.30483497487890593, | |
| "learning_rate": 3.186997505611745e-06, | |
| "loss": 0.4244, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.9741935483870967, | |
| "grad_norm": 0.33975972239987273, | |
| "learning_rate": 3.1570435579543333e-06, | |
| "loss": 0.4238, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.9797235023041475, | |
| "grad_norm": 0.3449206346574356, | |
| "learning_rate": 3.1271659579518e-06, | |
| "loss": 0.4251, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 1.9852534562211983, | |
| "grad_norm": 0.28831583716824516, | |
| "learning_rate": 3.097365943335347e-06, | |
| "loss": 0.4216, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 1.9907834101382489, | |
| "grad_norm": 0.3144096706683747, | |
| "learning_rate": 3.0676447486220705e-06, | |
| "loss": 0.4285, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.9963133640552995, | |
| "grad_norm": 0.290045183909965, | |
| "learning_rate": 3.0380036050638106e-06, | |
| "loss": 0.4197, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 2.0018433179723503, | |
| "grad_norm": 0.3238458773243826, | |
| "learning_rate": 3.008443740596153e-06, | |
| "loss": 0.4474, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 2.007373271889401, | |
| "grad_norm": 0.3287019922620765, | |
| "learning_rate": 2.9789663797875614e-06, | |
| "loss": 0.4023, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 2.0129032258064514, | |
| "grad_norm": 0.29141383184538444, | |
| "learning_rate": 2.9495727437886355e-06, | |
| "loss": 0.3943, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 2.0184331797235022, | |
| "grad_norm": 0.29646707994500565, | |
| "learning_rate": 2.920264050281533e-06, | |
| "loss": 0.4023, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 2.023963133640553, | |
| "grad_norm": 0.30655398704765174, | |
| "learning_rate": 2.8910415134295216e-06, | |
| "loss": 0.3968, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 2.029493087557604, | |
| "grad_norm": 0.2833005227781744, | |
| "learning_rate": 2.8619063438266846e-06, | |
| "loss": 0.4101, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 2.035023041474654, | |
| "grad_norm": 0.2968009733215101, | |
| "learning_rate": 2.8328597484477582e-06, | |
| "loss": 0.3935, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 2.040552995391705, | |
| "grad_norm": 0.2839483708587392, | |
| "learning_rate": 2.803902930598144e-06, | |
| "loss": 0.4034, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 2.046082949308756, | |
| "grad_norm": 0.2707519826472345, | |
| "learning_rate": 2.775037089864054e-06, | |
| "loss": 0.3941, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 2.0516129032258066, | |
| "grad_norm": 0.30059219330904746, | |
| "learning_rate": 2.7462634220628146e-06, | |
| "loss": 0.3981, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 2.057142857142857, | |
| "grad_norm": 0.29397070675360604, | |
| "learning_rate": 2.7175831191933275e-06, | |
| "loss": 0.4004, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 2.0626728110599077, | |
| "grad_norm": 0.3320584432373214, | |
| "learning_rate": 2.688997369386698e-06, | |
| "loss": 0.3934, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 2.0682027649769585, | |
| "grad_norm": 0.31494300922463103, | |
| "learning_rate": 2.6605073568569993e-06, | |
| "loss": 0.394, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 2.0737327188940093, | |
| "grad_norm": 0.29583460527636174, | |
| "learning_rate": 2.6321142618522288e-06, | |
| "loss": 0.3989, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 2.0792626728110597, | |
| "grad_norm": 0.32390817110689435, | |
| "learning_rate": 2.603819260605399e-06, | |
| "loss": 0.4019, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 2.0847926267281105, | |
| "grad_norm": 0.2672110867976394, | |
| "learning_rate": 2.5756235252858288e-06, | |
| "loss": 0.4003, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 2.0903225806451613, | |
| "grad_norm": 0.3123455250583991, | |
| "learning_rate": 2.5475282239505685e-06, | |
| "loss": 0.4018, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 2.095852534562212, | |
| "grad_norm": 0.27691159250970976, | |
| "learning_rate": 2.5195345204960196e-06, | |
| "loss": 0.4008, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 2.1013824884792625, | |
| "grad_norm": 0.26123816601956984, | |
| "learning_rate": 2.4916435746097166e-06, | |
| "loss": 0.3876, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 2.1069124423963133, | |
| "grad_norm": 0.30637053965231187, | |
| "learning_rate": 2.4638565417222816e-06, | |
| "loss": 0.3982, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 2.112442396313364, | |
| "grad_norm": 0.3315750567953371, | |
| "learning_rate": 2.436174572959561e-06, | |
| "loss": 0.4073, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 2.117972350230415, | |
| "grad_norm": 0.3105020636320244, | |
| "learning_rate": 2.408598815094944e-06, | |
| "loss": 0.399, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 2.1235023041474657, | |
| "grad_norm": 0.27435494573021874, | |
| "learning_rate": 2.381130410501845e-06, | |
| "loss": 0.4009, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 2.129032258064516, | |
| "grad_norm": 0.2663389329232196, | |
| "learning_rate": 2.353770497106381e-06, | |
| "loss": 0.3967, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 2.134562211981567, | |
| "grad_norm": 0.3194906899085194, | |
| "learning_rate": 2.3265202083402376e-06, | |
| "loss": 0.4001, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 2.1400921658986176, | |
| "grad_norm": 0.31197437534194744, | |
| "learning_rate": 2.299380673093712e-06, | |
| "loss": 0.4033, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 2.145622119815668, | |
| "grad_norm": 0.29924705453864847, | |
| "learning_rate": 2.272353015668942e-06, | |
| "loss": 0.3994, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 2.1511520737327188, | |
| "grad_norm": 0.29115026537537425, | |
| "learning_rate": 2.2454383557333358e-06, | |
| "loss": 0.4014, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 2.1566820276497696, | |
| "grad_norm": 0.2834889760546468, | |
| "learning_rate": 2.218637808273184e-06, | |
| "loss": 0.3998, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 2.1622119815668204, | |
| "grad_norm": 0.2910305450411384, | |
| "learning_rate": 2.1919524835474713e-06, | |
| "loss": 0.405, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 2.167741935483871, | |
| "grad_norm": 0.289932930612974, | |
| "learning_rate": 2.1653834870418807e-06, | |
| "loss": 0.3896, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 2.1732718894009215, | |
| "grad_norm": 0.2722298839273463, | |
| "learning_rate": 2.1389319194230017e-06, | |
| "loss": 0.4003, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 2.1788018433179723, | |
| "grad_norm": 0.28486359542042644, | |
| "learning_rate": 2.1125988764927225e-06, | |
| "loss": 0.4013, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 2.184331797235023, | |
| "grad_norm": 0.29750089176475963, | |
| "learning_rate": 2.0863854491428454e-06, | |
| "loss": 0.3989, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 2.189861751152074, | |
| "grad_norm": 0.2766502298353407, | |
| "learning_rate": 2.0602927233098908e-06, | |
| "loss": 0.3921, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 2.1953917050691243, | |
| "grad_norm": 0.2789326319161608, | |
| "learning_rate": 2.034321779930109e-06, | |
| "loss": 0.4001, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 2.200921658986175, | |
| "grad_norm": 0.3371618622489858, | |
| "learning_rate": 2.0084736948947027e-06, | |
| "loss": 0.4049, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 2.206451612903226, | |
| "grad_norm": 0.2783901912695157, | |
| "learning_rate": 1.982749539005254e-06, | |
| "loss": 0.4041, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 2.2119815668202767, | |
| "grad_norm": 0.279176756791974, | |
| "learning_rate": 1.9571503779293683e-06, | |
| "loss": 0.3978, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 2.217511520737327, | |
| "grad_norm": 0.30608460679313365, | |
| "learning_rate": 1.93167727215652e-06, | |
| "loss": 0.406, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 2.223041474654378, | |
| "grad_norm": 0.3151983208092032, | |
| "learning_rate": 1.9063312769541348e-06, | |
| "loss": 0.3956, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 2.2285714285714286, | |
| "grad_norm": 0.3005770395178785, | |
| "learning_rate": 1.8811134423238513e-06, | |
| "loss": 0.3966, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 2.2341013824884794, | |
| "grad_norm": 0.2940499233107073, | |
| "learning_rate": 1.8560248129580422e-06, | |
| "loss": 0.4043, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 2.23963133640553, | |
| "grad_norm": 0.28429612401283494, | |
| "learning_rate": 1.8310664281965268e-06, | |
| "loss": 0.4076, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 2.2451612903225806, | |
| "grad_norm": 0.3064049873744565, | |
| "learning_rate": 1.8062393219835173e-06, | |
| "loss": 0.3972, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 2.2506912442396314, | |
| "grad_norm": 0.29946204785644764, | |
| "learning_rate": 1.7815445228247851e-06, | |
| "loss": 0.3945, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 2.256221198156682, | |
| "grad_norm": 0.2715437304149103, | |
| "learning_rate": 1.7569830537450533e-06, | |
| "loss": 0.4033, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 2.2617511520737326, | |
| "grad_norm": 0.2854830351871705, | |
| "learning_rate": 1.732555932245616e-06, | |
| "loss": 0.3955, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 2.2672811059907834, | |
| "grad_norm": 0.3147639419603036, | |
| "learning_rate": 1.7082641702621856e-06, | |
| "loss": 0.3953, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 2.272811059907834, | |
| "grad_norm": 0.27518159279482673, | |
| "learning_rate": 1.6841087741229745e-06, | |
| "loss": 0.4021, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 2.278341013824885, | |
| "grad_norm": 0.3039597096572996, | |
| "learning_rate": 1.660090744507003e-06, | |
| "loss": 0.4013, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 2.2838709677419353, | |
| "grad_norm": 0.2998615031450226, | |
| "learning_rate": 1.6362110764026473e-06, | |
| "loss": 0.4112, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 2.289400921658986, | |
| "grad_norm": 0.3022483293545665, | |
| "learning_rate": 1.6124707590664168e-06, | |
| "loss": 0.3993, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 2.294930875576037, | |
| "grad_norm": 0.2724443752633973, | |
| "learning_rate": 1.5888707759819766e-06, | |
| "loss": 0.4078, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 2.3004608294930877, | |
| "grad_norm": 0.30699585205563196, | |
| "learning_rate": 1.5654121048194016e-06, | |
| "loss": 0.3966, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 2.305990783410138, | |
| "grad_norm": 0.2774026863139645, | |
| "learning_rate": 1.5420957173946772e-06, | |
| "loss": 0.3931, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 2.311520737327189, | |
| "grad_norm": 0.30520288048797833, | |
| "learning_rate": 1.5189225796294383e-06, | |
| "loss": 0.3963, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 2.3170506912442397, | |
| "grad_norm": 0.25456512857000035, | |
| "learning_rate": 1.4958936515109551e-06, | |
| "loss": 0.3937, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 2.3225806451612905, | |
| "grad_norm": 0.26955539407811574, | |
| "learning_rate": 1.4730098870523652e-06, | |
| "loss": 0.4004, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 2.328110599078341, | |
| "grad_norm": 0.2793395228272683, | |
| "learning_rate": 1.450272234253149e-06, | |
| "loss": 0.4014, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 2.3336405529953916, | |
| "grad_norm": 0.2670008731741953, | |
| "learning_rate": 1.427681635059861e-06, | |
| "loss": 0.399, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 2.3391705069124424, | |
| "grad_norm": 0.26366634919045073, | |
| "learning_rate": 1.4052390253271037e-06, | |
| "loss": 0.3967, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 2.3447004608294932, | |
| "grad_norm": 0.2736550342611439, | |
| "learning_rate": 1.3829453347787626e-06, | |
| "loss": 0.4053, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 2.3502304147465436, | |
| "grad_norm": 0.2658041185832838, | |
| "learning_rate": 1.3608014869694869e-06, | |
| "loss": 0.3965, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 2.3557603686635944, | |
| "grad_norm": 0.2663003739913603, | |
| "learning_rate": 1.3388083992464335e-06, | |
| "loss": 0.408, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 2.361290322580645, | |
| "grad_norm": 0.3046466052962007, | |
| "learning_rate": 1.3169669827112603e-06, | |
| "loss": 0.4036, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 2.366820276497696, | |
| "grad_norm": 0.2895481931800935, | |
| "learning_rate": 1.2952781421823846e-06, | |
| "loss": 0.3936, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 2.3723502304147464, | |
| "grad_norm": 0.2677071356544069, | |
| "learning_rate": 1.2737427761575006e-06, | |
| "loss": 0.4006, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 2.377880184331797, | |
| "grad_norm": 0.29306704953880125, | |
| "learning_rate": 1.2523617767763535e-06, | |
| "loss": 0.3994, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 2.383410138248848, | |
| "grad_norm": 0.2739530738988769, | |
| "learning_rate": 1.2311360297837849e-06, | |
| "loss": 0.4001, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 2.3889400921658988, | |
| "grad_norm": 0.27681408852313116, | |
| "learning_rate": 1.210066414493039e-06, | |
| "loss": 0.3924, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 2.394470046082949, | |
| "grad_norm": 0.30545245080294625, | |
| "learning_rate": 1.1891538037493322e-06, | |
| "loss": 0.4044, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "grad_norm": 0.26864664635574714, | |
| "learning_rate": 1.1683990638936981e-06, | |
| "loss": 0.4017, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 2.4055299539170507, | |
| "grad_norm": 0.2944152586026279, | |
| "learning_rate": 1.147803054727095e-06, | |
| "loss": 0.401, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 2.4110599078341015, | |
| "grad_norm": 0.282595000728746, | |
| "learning_rate": 1.1273666294747886e-06, | |
| "loss": 0.4003, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 2.4165898617511523, | |
| "grad_norm": 0.28429008222935853, | |
| "learning_rate": 1.1070906347510051e-06, | |
| "loss": 0.3955, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 2.4221198156682027, | |
| "grad_norm": 0.2831830240980828, | |
| "learning_rate": 1.0869759105238592e-06, | |
| "loss": 0.3981, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 2.4276497695852535, | |
| "grad_norm": 0.30047729151565855, | |
| "learning_rate": 1.0670232900805561e-06, | |
| "loss": 0.3979, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 2.4331797235023043, | |
| "grad_norm": 0.27151153552568635, | |
| "learning_rate": 1.0472335999928712e-06, | |
| "loss": 0.398, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 2.4387096774193546, | |
| "grad_norm": 0.26079113997988657, | |
| "learning_rate": 1.0276076600829094e-06, | |
| "loss": 0.3928, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 2.4442396313364054, | |
| "grad_norm": 0.2943190568488448, | |
| "learning_rate": 1.008146283389142e-06, | |
| "loss": 0.4051, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 2.4497695852534562, | |
| "grad_norm": 0.29471175081496465, | |
| "learning_rate": 9.888502761327235e-07, | |
| "loss": 0.4016, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 2.455299539170507, | |
| "grad_norm": 0.2828161001369082, | |
| "learning_rate": 9.697204376840936e-07, | |
| "loss": 0.3955, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 2.460829493087558, | |
| "grad_norm": 0.2700457496469629, | |
| "learning_rate": 9.50757560529863e-07, | |
| "loss": 0.3999, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 2.466359447004608, | |
| "grad_norm": 0.28527113567956547, | |
| "learning_rate": 9.319624302399827e-07, | |
| "loss": 0.3976, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 2.471889400921659, | |
| "grad_norm": 0.2913598279022557, | |
| "learning_rate": 9.133358254351982e-07, | |
| "loss": 0.3959, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 2.47741935483871, | |
| "grad_norm": 0.2528128242967444, | |
| "learning_rate": 8.948785177547975e-07, | |
| "loss": 0.3999, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 2.48294930875576, | |
| "grad_norm": 0.28379973900510885, | |
| "learning_rate": 8.765912718246423e-07, | |
| "loss": 0.3943, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 2.488479262672811, | |
| "grad_norm": 0.3067123055839093, | |
| "learning_rate": 8.584748452254888e-07, | |
| "loss": 0.3979, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 2.4940092165898617, | |
| "grad_norm": 0.2753166288210943, | |
| "learning_rate": 8.405299884616142e-07, | |
| "loss": 0.3942, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 2.4995391705069125, | |
| "grad_norm": 0.28727455911235195, | |
| "learning_rate": 8.227574449297137e-07, | |
| "loss": 0.4003, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 2.5050691244239633, | |
| "grad_norm": 0.2743598195044324, | |
| "learning_rate": 8.051579508881107e-07, | |
| "loss": 0.3952, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 2.5105990783410137, | |
| "grad_norm": 0.2693576075557049, | |
| "learning_rate": 7.877322354262545e-07, | |
| "loss": 0.4033, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 2.5161290322580645, | |
| "grad_norm": 0.2896358229000151, | |
| "learning_rate": 7.704810204345154e-07, | |
| "loss": 0.4005, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 2.5216589861751153, | |
| "grad_norm": 0.28471183419181173, | |
| "learning_rate": 7.534050205742827e-07, | |
| "loss": 0.3914, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 2.5271889400921657, | |
| "grad_norm": 0.2594303056396339, | |
| "learning_rate": 7.365049432483529e-07, | |
| "loss": 0.4032, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 2.5327188940092165, | |
| "grad_norm": 0.29975502440359814, | |
| "learning_rate": 7.19781488571632e-07, | |
| "loss": 0.4039, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 2.5382488479262673, | |
| "grad_norm": 0.272206139015925, | |
| "learning_rate": 7.032353493421213e-07, | |
| "loss": 0.403, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 2.543778801843318, | |
| "grad_norm": 0.27442194973354395, | |
| "learning_rate": 6.868672110122271e-07, | |
| "loss": 0.4, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 2.549308755760369, | |
| "grad_norm": 0.26622819466668635, | |
| "learning_rate": 6.706777516603636e-07, | |
| "loss": 0.4064, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 2.554838709677419, | |
| "grad_norm": 0.2921603411199283, | |
| "learning_rate": 6.546676419628545e-07, | |
| "loss": 0.398, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 2.56036866359447, | |
| "grad_norm": 0.2634896902237433, | |
| "learning_rate": 6.388375451661578e-07, | |
| "loss": 0.3958, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 2.565898617511521, | |
| "grad_norm": 0.2578581966184642, | |
| "learning_rate": 6.231881170593828e-07, | |
| "loss": 0.3983, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 2.571428571428571, | |
| "grad_norm": 0.2816419136865501, | |
| "learning_rate": 6.077200059471289e-07, | |
| "loss": 0.3993, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 2.576958525345622, | |
| "grad_norm": 0.25487527822128436, | |
| "learning_rate": 5.924338526226259e-07, | |
| "loss": 0.3856, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 2.5824884792626728, | |
| "grad_norm": 0.25615420713688475, | |
| "learning_rate": 5.773302903411848e-07, | |
| "loss": 0.3943, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 2.5880184331797236, | |
| "grad_norm": 0.25869788749142564, | |
| "learning_rate": 5.624099447939696e-07, | |
| "loss": 0.3997, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 2.5935483870967744, | |
| "grad_norm": 0.27630938101238434, | |
| "learning_rate": 5.476734340820738e-07, | |
| "loss": 0.3914, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 2.5990783410138247, | |
| "grad_norm": 0.29080902856797114, | |
| "learning_rate": 5.331213686909159e-07, | |
| "loss": 0.4052, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 2.6046082949308755, | |
| "grad_norm": 0.27150428918656966, | |
| "learning_rate": 5.187543514649479e-07, | |
| "loss": 0.3987, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 2.6101382488479263, | |
| "grad_norm": 0.2814113138950482, | |
| "learning_rate": 5.045729775826818e-07, | |
| "loss": 0.4, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 2.6156682027649767, | |
| "grad_norm": 0.26628699037750947, | |
| "learning_rate": 4.905778345320339e-07, | |
| "loss": 0.4046, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 2.6211981566820275, | |
| "grad_norm": 0.2679284474349017, | |
| "learning_rate": 4.767695020859847e-07, | |
| "loss": 0.3989, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 2.6267281105990783, | |
| "grad_norm": 0.304861228416287, | |
| "learning_rate": 4.6314855227856505e-07, | |
| "loss": 0.3966, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 2.632258064516129, | |
| "grad_norm": 0.28378340208874464, | |
| "learning_rate": 4.497155493811539e-07, | |
| "loss": 0.3923, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 2.63778801843318, | |
| "grad_norm": 0.2720375532229136, | |
| "learning_rate": 4.3647104987910636e-07, | |
| "loss": 0.3983, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 2.6433179723502302, | |
| "grad_norm": 0.2648120126986214, | |
| "learning_rate": 4.2341560244869797e-07, | |
| "loss": 0.3925, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 2.648847926267281, | |
| "grad_norm": 0.30311062145568735, | |
| "learning_rate": 4.1054974793439504e-07, | |
| "loss": 0.3986, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 2.654377880184332, | |
| "grad_norm": 0.26568936508620244, | |
| "learning_rate": 3.978740193264524e-07, | |
| "loss": 0.3921, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 2.6599078341013827, | |
| "grad_norm": 0.2710800781363938, | |
| "learning_rate": 3.853889417388279e-07, | |
| "loss": 0.3984, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 2.6654377880184335, | |
| "grad_norm": 0.2676349044854644, | |
| "learning_rate": 3.730950323874322e-07, | |
| "loss": 0.3977, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 2.670967741935484, | |
| "grad_norm": 0.28364404186426295, | |
| "learning_rate": 3.6099280056870136e-07, | |
| "loss": 0.4008, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 2.6764976958525346, | |
| "grad_norm": 0.27479432043235197, | |
| "learning_rate": 3.490827476385006e-07, | |
| "loss": 0.4006, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 2.6820276497695854, | |
| "grad_norm": 0.2801115377310389, | |
| "learning_rate": 3.373653669913479e-07, | |
| "loss": 0.3973, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 2.6875576036866358, | |
| "grad_norm": 0.26687736410074, | |
| "learning_rate": 3.258411440399839e-07, | |
| "loss": 0.3921, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 2.6930875576036866, | |
| "grad_norm": 0.26749077132763405, | |
| "learning_rate": 3.1451055619525495e-07, | |
| "loss": 0.401, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 2.6986175115207374, | |
| "grad_norm": 0.2477585356057615, | |
| "learning_rate": 3.0337407284634023e-07, | |
| "loss": 0.4003, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 2.704147465437788, | |
| "grad_norm": 0.2577396288674699, | |
| "learning_rate": 2.924321553413029e-07, | |
| "loss": 0.4003, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 2.709677419354839, | |
| "grad_norm": 0.25584827001987803, | |
| "learning_rate": 2.8168525696798287e-07, | |
| "loss": 0.3932, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 2.7152073732718893, | |
| "grad_norm": 0.24937840132532715, | |
| "learning_rate": 2.7113382293521285e-07, | |
| "loss": 0.3935, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 2.72073732718894, | |
| "grad_norm": 0.27907536990926873, | |
| "learning_rate": 2.607782903543782e-07, | |
| "loss": 0.4059, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 2.726267281105991, | |
| "grad_norm": 0.2646543562986845, | |
| "learning_rate": 2.5061908822131e-07, | |
| "loss": 0.3842, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 2.7317972350230413, | |
| "grad_norm": 0.2471190703893049, | |
| "learning_rate": 2.406566373985075e-07, | |
| "loss": 0.3952, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 2.737327188940092, | |
| "grad_norm": 0.31897120592126615, | |
| "learning_rate": 2.3089135059771007e-07, | |
| "loss": 0.4097, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 2.742857142857143, | |
| "grad_norm": 0.2805170269928884, | |
| "learning_rate": 2.2132363236279654e-07, | |
| "loss": 0.402, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 2.7483870967741937, | |
| "grad_norm": 0.2653861937229007, | |
| "learning_rate": 2.1195387905302511e-07, | |
| "loss": 0.3986, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 2.7539170506912445, | |
| "grad_norm": 0.2730399737644054, | |
| "learning_rate": 2.0278247882661584e-07, | |
| "loss": 0.3925, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 2.759447004608295, | |
| "grad_norm": 0.26909939634669794, | |
| "learning_rate": 1.9380981162466895e-07, | |
| "loss": 0.3967, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 2.7649769585253456, | |
| "grad_norm": 0.25520919828788585, | |
| "learning_rate": 1.8503624915542805e-07, | |
| "loss": 0.3958, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 2.7705069124423964, | |
| "grad_norm": 0.2671702254098107, | |
| "learning_rate": 1.7646215487887587e-07, | |
| "loss": 0.3972, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 2.776036866359447, | |
| "grad_norm": 0.27339509483158747, | |
| "learning_rate": 1.680878839916833e-07, | |
| "loss": 0.3954, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 2.7815668202764976, | |
| "grad_norm": 0.26926037978444856, | |
| "learning_rate": 1.5991378341249032e-07, | |
| "loss": 0.3909, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 2.7870967741935484, | |
| "grad_norm": 0.2829092539377384, | |
| "learning_rate": 1.5194019176753615e-07, | |
| "loss": 0.3987, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 2.792626728110599, | |
| "grad_norm": 0.3050896133507036, | |
| "learning_rate": 1.441674393766318e-07, | |
| "loss": 0.4025, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 2.79815668202765, | |
| "grad_norm": 0.2856518392105625, | |
| "learning_rate": 1.3659584823947524e-07, | |
| "loss": 0.3959, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 2.8036866359447004, | |
| "grad_norm": 0.2687721431412885, | |
| "learning_rate": 1.2922573202231114e-07, | |
| "loss": 0.3981, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 2.809216589861751, | |
| "grad_norm": 0.26981891331331803, | |
| "learning_rate": 1.2205739604493838e-07, | |
| "loss": 0.4043, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 2.814746543778802, | |
| "grad_norm": 0.27405104128549934, | |
| "learning_rate": 1.1509113726805965e-07, | |
| "loss": 0.3903, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 2.8202764976958523, | |
| "grad_norm": 0.28208244890965906, | |
| "learning_rate": 1.0832724428098185e-07, | |
| "loss": 0.3964, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 2.825806451612903, | |
| "grad_norm": 0.2763730332681255, | |
| "learning_rate": 1.0176599728965842e-07, | |
| "loss": 0.3975, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 2.831336405529954, | |
| "grad_norm": 0.2678730622209392, | |
| "learning_rate": 9.540766810508196e-08, | |
| "loss": 0.3966, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 2.8368663594470047, | |
| "grad_norm": 0.2701400907907813, | |
| "learning_rate": 8.925252013202545e-08, | |
| "loss": 0.3974, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 2.8423963133640555, | |
| "grad_norm": 0.2585458146463293, | |
| "learning_rate": 8.330080835812826e-08, | |
| "loss": 0.3915, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 2.847926267281106, | |
| "grad_norm": 0.23930472824369428, | |
| "learning_rate": 7.75527793433345e-08, | |
| "loss": 0.3959, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 2.8534562211981567, | |
| "grad_norm": 0.27899399555881804, | |
| "learning_rate": 7.20086712096768e-08, | |
| "loss": 0.4048, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 2.8589861751152075, | |
| "grad_norm": 0.834759001453546, | |
| "learning_rate": 6.666871363141426e-08, | |
| "loss": 0.4008, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 2.864516129032258, | |
| "grad_norm": 0.26729552891819924, | |
| "learning_rate": 6.153312782551546e-08, | |
| "loss": 0.3943, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 2.8700460829493086, | |
| "grad_norm": 0.28300940951846, | |
| "learning_rate": 5.6602126542496525e-08, | |
| "loss": 0.4068, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 2.8755760368663594, | |
| "grad_norm": 0.2863970268789953, | |
| "learning_rate": 5.187591405760528e-08, | |
| "loss": 0.3978, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 2.8811059907834102, | |
| "grad_norm": 0.25831402266128894, | |
| "learning_rate": 4.7354686162359165e-08, | |
| "loss": 0.4013, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 2.886635944700461, | |
| "grad_norm": 0.28023244054969493, | |
| "learning_rate": 4.3038630156436166e-08, | |
| "loss": 0.3905, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 2.8921658986175114, | |
| "grad_norm": 0.2661529733653034, | |
| "learning_rate": 3.8927924839913257e-08, | |
| "loss": 0.393, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 2.897695852534562, | |
| "grad_norm": 0.25493956535145407, | |
| "learning_rate": 3.502274050586063e-08, | |
| "loss": 0.4006, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 2.903225806451613, | |
| "grad_norm": 0.26543797268631775, | |
| "learning_rate": 3.1323238933286814e-08, | |
| "loss": 0.3978, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 2.9087557603686633, | |
| "grad_norm": 0.26274051264290293, | |
| "learning_rate": 2.7829573380436793e-08, | |
| "loss": 0.4029, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 2.914285714285714, | |
| "grad_norm": 0.281124942017121, | |
| "learning_rate": 2.4541888578442085e-08, | |
| "loss": 0.3915, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 2.919815668202765, | |
| "grad_norm": 0.2560745200044448, | |
| "learning_rate": 2.1460320725326113e-08, | |
| "loss": 0.392, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 2.9253456221198157, | |
| "grad_norm": 0.2679757306919012, | |
| "learning_rate": 1.8584997480361467e-08, | |
| "loss": 0.4023, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 2.9308755760368665, | |
| "grad_norm": 0.26744986991724706, | |
| "learning_rate": 1.5916037958781938e-08, | |
| "loss": 0.3966, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 2.936405529953917, | |
| "grad_norm": 0.25508086605530733, | |
| "learning_rate": 1.3453552726847008e-08, | |
| "loss": 0.4018, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 2.9419354838709677, | |
| "grad_norm": 0.27916128301046206, | |
| "learning_rate": 1.1197643797261626e-08, | |
| "loss": 0.4027, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 2.9474654377880185, | |
| "grad_norm": 0.2750137090266509, | |
| "learning_rate": 9.148404624951812e-09, | |
| "loss": 0.4033, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 2.952995391705069, | |
| "grad_norm": 0.2725446012014818, | |
| "learning_rate": 7.3059201031899786e-09, | |
| "loss": 0.3962, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 2.9585253456221197, | |
| "grad_norm": 0.2815517448254806, | |
| "learning_rate": 5.670266560081628e-09, | |
| "loss": 0.4014, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 2.9640552995391705, | |
| "grad_norm": 0.27490789785492287, | |
| "learning_rate": 4.241511755400662e-09, | |
| "loss": 0.4113, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 2.9695852534562213, | |
| "grad_norm": 0.2939191489301333, | |
| "learning_rate": 3.0197148777838524e-09, | |
| "loss": 0.3967, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 2.975115207373272, | |
| "grad_norm": 0.271422595495557, | |
| "learning_rate": 2.0049265422772414e-09, | |
| "loss": 0.3993, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 2.9806451612903224, | |
| "grad_norm": 0.26327553337631326, | |
| "learning_rate": 1.1971887882405997e-09, | |
| "loss": 0.4024, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 2.986175115207373, | |
| "grad_norm": 0.2805409873853394, | |
| "learning_rate": 5.965350776071521e-10, | |
| "loss": 0.4049, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 2.991705069124424, | |
| "grad_norm": 0.25201401300926546, | |
| "learning_rate": 2.029902934941319e-10, | |
| "loss": 0.3993, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 2.9972350230414744, | |
| "grad_norm": 0.2495374923869934, | |
| "learning_rate": 1.6570739174714967e-11, | |
| "loss": 0.3964, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 2.999447004608295, | |
| "step": 5424, | |
| "total_flos": 8192265561309184.0, | |
| "train_loss": 0.4416485577162388, | |
| "train_runtime": 91348.8221, | |
| "train_samples_per_second": 5.7, | |
| "train_steps_per_second": 0.059 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 5424, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8192265561309184.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
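
The JSON above is the full trainer state that `transformers.Trainer` writes at the end of a run: a `log_history` of per-step records followed by a run summary and the trainer configuration. Below is a minimal sketch of how one might consume it, assuming the state is saved as `trainer_state.json` in the working directory and that matplotlib is available; both the filename and the plotting library are assumptions for illustration, not part of the original artifact.

```python
# Minimal sketch: parse the trainer state above and plot the logged training loss.
# Assumptions: the JSON is saved as "trainer_state.json"; matplotlib is installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step log entries; the final record is the run summary
# (total_flos, train_runtime, ...) and carries "train_loss", not "loss".
logs = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"{state['max_steps']} steps over {state['num_train_epochs']} epochs")
plt.savefig("loss_curve.png")
```

As a sanity check on the summary fields: `train_steps_per_second` (0.059) times `train_runtime` (91348.8 s) recovers roughly the 5424 recorded steps, and `train_samples_per_second / train_steps_per_second` is about 96, which suggests an effective batch of roughly 96 samples per optimizer step even though `train_batch_size` is 1 per device, presumably via gradient accumulation and/or data parallelism (an inference from the numbers, not something stated in the file).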