{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0015384615384615385,
      "grad_norm": 5.063921474779319,
      "learning_rate": 2e-05,
      "loss": 0.9605,
      "step": 1
    },
    {
      "epoch": 0.003076923076923077,
      "grad_norm": 3.369623660244633,
      "learning_rate": 1.999997080000119e-05,
      "loss": 0.8245,
      "step": 2
    },
    {
      "epoch": 0.004615384615384616,
      "grad_norm": 2.686003070114147,
      "learning_rate": 1.9999883200175286e-05,
      "loss": 0.7138,
      "step": 3
    },
    {
      "epoch": 0.006153846153846154,
      "grad_norm": 1.5234580549826773,
      "learning_rate": 1.9999737201033877e-05,
      "loss": 0.6496,
      "step": 4
    },
    {
      "epoch": 0.007692307692307693,
      "grad_norm": 2.609918856467969,
      "learning_rate": 1.999953280342959e-05,
      "loss": 0.6476,
      "step": 5
    },
    {
      "epoch": 0.009230769230769232,
      "grad_norm": 1.6701564603819716,
      "learning_rate": 1.9999270008556108e-05,
      "loss": 0.6071,
      "step": 6
    },
    {
      "epoch": 0.010769230769230769,
      "grad_norm": 0.9106505494488014,
      "learning_rate": 1.9998948817948157e-05,
      "loss": 0.545,
      "step": 7
    },
    {
      "epoch": 0.012307692307692308,
      "grad_norm": 1.018082854225675,
      "learning_rate": 1.999856923348149e-05,
      "loss": 0.5326,
      "step": 8
    },
    {
      "epoch": 0.013846153846153847,
      "grad_norm": 0.7909606612418608,
      "learning_rate": 1.9998131257372878e-05,
      "loss": 0.524,
      "step": 9
    },
    {
      "epoch": 0.015384615384615385,
      "grad_norm": 0.7555909222636807,
      "learning_rate": 1.99976348921801e-05,
      "loss": 0.5163,
      "step": 10
    },
    {
      "epoch": 0.016923076923076923,
      "grad_norm": 0.7450235308921992,
      "learning_rate": 1.9997080140801932e-05,
      "loss": 0.5088,
      "step": 11
    },
    {
      "epoch": 0.018461538461538463,
      "grad_norm": 0.7134021367422554,
      "learning_rate": 1.999646700647812e-05,
      "loss": 0.4993,
      "step": 12
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.5759662130624531,
      "learning_rate": 1.9995795492789368e-05,
      "loss": 0.4851,
      "step": 13
    },
    {
      "epoch": 0.021538461538461538,
      "grad_norm": 0.5606778035425145,
      "learning_rate": 1.9995065603657317e-05,
      "loss": 0.4666,
      "step": 14
    },
    {
      "epoch": 0.023076923076923078,
      "grad_norm": 0.5398744265890032,
      "learning_rate": 1.999427734334452e-05,
      "loss": 0.4691,
      "step": 15
    },
    {
      "epoch": 0.024615384615384615,
      "grad_norm": 0.5156262642068832,
      "learning_rate": 1.9993430716454415e-05,
      "loss": 0.4605,
      "step": 16
    },
    {
      "epoch": 0.026153846153846153,
      "grad_norm": 0.5268519764536594,
      "learning_rate": 1.9992525727931303e-05,
      "loss": 0.4564,
      "step": 17
    },
    {
      "epoch": 0.027692307692307693,
      "grad_norm": 0.5454016980521769,
      "learning_rate": 1.9991562383060316e-05,
      "loss": 0.4659,
      "step": 18
    },
    {
      "epoch": 0.02923076923076923,
      "grad_norm": 0.4816040745018356,
      "learning_rate": 1.9990540687467394e-05,
      "loss": 0.4565,
      "step": 19
    },
    {
      "epoch": 0.03076923076923077,
      "grad_norm": 0.4590572903497101,
      "learning_rate": 1.9989460647119232e-05,
      "loss": 0.4407,
      "step": 20
    },
    {
      "epoch": 0.03230769230769231,
      "grad_norm": 0.4226998877202937,
      "learning_rate": 1.998832226832327e-05,
      "loss": 0.4373,
      "step": 21
    },
    {
      "epoch": 0.033846153846153845,
      "grad_norm": 0.4536363868403153,
      "learning_rate": 1.9987125557727633e-05,
      "loss": 0.4313,
      "step": 22
    },
    {
      "epoch": 0.03538461538461538,
      "grad_norm": 0.42868619210170616,
      "learning_rate": 1.9985870522321118e-05,
      "loss": 0.4553,
      "step": 23
    },
    {
      "epoch": 0.036923076923076927,
      "grad_norm": 0.45896932342495994,
      "learning_rate": 1.9984557169433126e-05,
      "loss": 0.441,
      "step": 24
    },
    {
      "epoch": 0.038461538461538464,
      "grad_norm": 0.4294926658833097,
      "learning_rate": 1.9983185506733643e-05,
      "loss": 0.4318,
      "step": 25
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.44540317006888985,
      "learning_rate": 1.9981755542233175e-05,
      "loss": 0.4308,
      "step": 26
    },
    {
      "epoch": 0.04153846153846154,
      "grad_norm": 0.4559389600389272,
      "learning_rate": 1.9980267284282718e-05,
      "loss": 0.4278,
      "step": 27
    },
    {
      "epoch": 0.043076923076923075,
      "grad_norm": 0.37690300997098264,
      "learning_rate": 1.9978720741573693e-05,
      "loss": 0.4405,
      "step": 28
    },
    {
      "epoch": 0.04461538461538461,
      "grad_norm": 0.439464993843592,
      "learning_rate": 1.9977115923137912e-05,
      "loss": 0.4387,
      "step": 29
    },
    {
      "epoch": 0.046153846153846156,
      "grad_norm": 0.43264990079260623,
      "learning_rate": 1.9975452838347513e-05,
      "loss": 0.4362,
      "step": 30
    },
    {
      "epoch": 0.047692307692307694,
      "grad_norm": 0.4644507012498486,
      "learning_rate": 1.9973731496914914e-05,
      "loss": 0.4179,
      "step": 31
    },
    {
      "epoch": 0.04923076923076923,
      "grad_norm": 0.4572720391818459,
      "learning_rate": 1.9971951908892743e-05,
      "loss": 0.4092,
      "step": 32
    },
    {
      "epoch": 0.05076923076923077,
      "grad_norm": 0.46432612449175953,
      "learning_rate": 1.9970114084673796e-05,
      "loss": 0.4146,
      "step": 33
    },
    {
      "epoch": 0.052307692307692305,
      "grad_norm": 0.4129940856967706,
      "learning_rate": 1.996821803499097e-05,
      "loss": 0.4297,
      "step": 34
    },
    {
      "epoch": 0.05384615384615385,
      "grad_norm": 0.4042181784594487,
      "learning_rate": 1.9966263770917192e-05,
      "loss": 0.4112,
      "step": 35
    },
    {
      "epoch": 0.055384615384615386,
      "grad_norm": 0.3906521043350921,
      "learning_rate": 1.9964251303865362e-05,
      "loss": 0.4161,
      "step": 36
    },
    {
      "epoch": 0.05692307692307692,
      "grad_norm": 0.4418884612703507,
      "learning_rate": 1.996218064558829e-05,
      "loss": 0.4101,
      "step": 37
    },
    {
      "epoch": 0.05846153846153846,
      "grad_norm": 0.42008732000288845,
      "learning_rate": 1.9960051808178616e-05,
      "loss": 0.4043,
      "step": 38
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.43296733708514057,
      "learning_rate": 1.9957864804068752e-05,
      "loss": 0.4128,
      "step": 39
    },
    {
      "epoch": 0.06153846153846154,
      "grad_norm": 0.45169856277574605,
      "learning_rate": 1.99556196460308e-05,
      "loss": 0.3995,
      "step": 40
    },
    {
      "epoch": 0.06307692307692307,
      "grad_norm": 0.4164452322138832,
      "learning_rate": 1.995331634717649e-05,
      "loss": 0.4112,
      "step": 41
    },
    {
      "epoch": 0.06461538461538462,
      "grad_norm": 0.4099581368464527,
      "learning_rate": 1.9950954920957074e-05,
      "loss": 0.4119,
      "step": 42
    },
    {
      "epoch": 0.06615384615384616,
      "grad_norm": 0.44696899014004116,
      "learning_rate": 1.994853538116329e-05,
      "loss": 0.4146,
      "step": 43
    },
    {
      "epoch": 0.06769230769230769,
      "grad_norm": 0.4055013281834995,
      "learning_rate": 1.994605774192525e-05,
      "loss": 0.4093,
      "step": 44
    },
    {
      "epoch": 0.06923076923076923,
      "grad_norm": 0.40395767432429797,
      "learning_rate": 1.994352201771236e-05,
      "loss": 0.4063,
      "step": 45
    },
    {
      "epoch": 0.07076923076923076,
      "grad_norm": 0.4116346989910219,
      "learning_rate": 1.9940928223333254e-05,
      "loss": 0.393,
      "step": 46
    },
    {
      "epoch": 0.07230769230769231,
      "grad_norm": 0.3591126773969262,
      "learning_rate": 1.9938276373935688e-05,
      "loss": 0.4039,
      "step": 47
    },
    {
      "epoch": 0.07384615384615385,
      "grad_norm": 0.46456735467456856,
      "learning_rate": 1.9935566485006464e-05,
      "loss": 0.3851,
      "step": 48
    },
    {
      "epoch": 0.07538461538461538,
      "grad_norm": 0.4268493349476963,
      "learning_rate": 1.993279857237133e-05,
      "loss": 0.4044,
      "step": 49
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 0.38063003139174306,
      "learning_rate": 1.99299726521949e-05,
      "loss": 0.404,
      "step": 50
    },
    {
      "epoch": 0.07846153846153846,
      "grad_norm": 0.4497110041221097,
      "learning_rate": 1.992708874098054e-05,
      "loss": 0.41,
      "step": 51
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.4157742963507939,
      "learning_rate": 1.9924146855570298e-05,
      "loss": 0.3941,
      "step": 52
    },
    {
      "epoch": 0.08153846153846153,
      "grad_norm": 0.4414354437213974,
      "learning_rate": 1.9921147013144782e-05,
      "loss": 0.3993,
      "step": 53
    },
    {
      "epoch": 0.08307692307692308,
      "grad_norm": 0.43571701128684,
      "learning_rate": 1.9918089231223066e-05,
      "loss": 0.3959,
      "step": 54
    },
    {
      "epoch": 0.08461538461538462,
      "grad_norm": 0.4371043287551301,
      "learning_rate": 1.99149735276626e-05,
      "loss": 0.3986,
      "step": 55
    },
    {
      "epoch": 0.08615384615384615,
      "grad_norm": 0.4137823130711333,
      "learning_rate": 1.9911799920659093e-05,
      "loss": 0.3891,
      "step": 56
    },
    {
      "epoch": 0.0876923076923077,
      "grad_norm": 0.43271326063346954,
      "learning_rate": 1.9908568428746408e-05,
      "loss": 0.408,
      "step": 57
    },
    {
      "epoch": 0.08923076923076922,
      "grad_norm": 0.3809934064411492,
      "learning_rate": 1.9905279070796454e-05,
      "loss": 0.3983,
      "step": 58
    },
    {
      "epoch": 0.09076923076923077,
      "grad_norm": 0.4431656521313663,
      "learning_rate": 1.9901931866019087e-05,
      "loss": 0.3915,
      "step": 59
    },
    {
      "epoch": 0.09230769230769231,
      "grad_norm": 0.4382895852653331,
      "learning_rate": 1.989852683396198e-05,
      "loss": 0.3896,
      "step": 60
    },
    {
      "epoch": 0.09384615384615384,
      "grad_norm": 0.43423688769031565,
      "learning_rate": 1.9895063994510512e-05,
      "loss": 0.4006,
      "step": 61
    },
    {
      "epoch": 0.09538461538461539,
      "grad_norm": 0.4564712962684036,
      "learning_rate": 1.9891543367887675e-05,
      "loss": 0.3936,
      "step": 62
    },
    {
      "epoch": 0.09692307692307692,
      "grad_norm": 0.4079034300130273,
      "learning_rate": 1.988796497465392e-05,
      "loss": 0.4032,
      "step": 63
    },
    {
      "epoch": 0.09846153846153846,
      "grad_norm": 0.4405646045189855,
      "learning_rate": 1.988432883570707e-05,
      "loss": 0.3913,
      "step": 64
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.41717807318047573,
      "learning_rate": 1.9880634972282168e-05,
      "loss": 0.3876,
      "step": 65
    },
    {
      "epoch": 0.10153846153846154,
      "grad_norm": 0.4684874071170093,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 0.3989,
      "step": 66
    },
    {
      "epoch": 0.10307692307692308,
      "grad_norm": 0.41741113705795535,
      "learning_rate": 1.987307415862385e-05,
      "loss": 0.3853,
      "step": 67
    },
    {
      "epoch": 0.10461538461538461,
      "grad_norm": 0.42913698621302665,
      "learning_rate": 1.9869207252545582e-05,
      "loss": 0.389,
      "step": 68
    },
    {
      "epoch": 0.10615384615384615,
      "grad_norm": 0.4135673359860317,
      "learning_rate": 1.986528271029931e-05,
      "loss": 0.3991,
      "step": 69
    },
    {
      "epoch": 0.1076923076923077,
      "grad_norm": 0.3824388045772453,
      "learning_rate": 1.9861300554804357e-05,
      "loss": 0.3862,
      "step": 70
    },
    {
      "epoch": 0.10923076923076923,
      "grad_norm": 0.4088649251683559,
      "learning_rate": 1.985726080931651e-05,
      "loss": 0.3861,
      "step": 71
    },
    {
      "epoch": 0.11076923076923077,
      "grad_norm": 0.41188429042311264,
      "learning_rate": 1.9853163497427885e-05,
      "loss": 0.3933,
      "step": 72
    },
    {
      "epoch": 0.1123076923076923,
      "grad_norm": 0.4042211920009733,
      "learning_rate": 1.9849008643066774e-05,
      "loss": 0.3801,
      "step": 73
    },
    {
      "epoch": 0.11384615384615385,
      "grad_norm": 0.4349260424024348,
      "learning_rate": 1.984479627049753e-05,
      "loss": 0.3972,
      "step": 74
    },
    {
      "epoch": 0.11538461538461539,
      "grad_norm": 0.42179852404385665,
      "learning_rate": 1.9840526404320415e-05,
      "loss": 0.3814,
      "step": 75
    },
    {
      "epoch": 0.11692307692307692,
      "grad_norm": 0.4361915252579203,
      "learning_rate": 1.983619906947144e-05,
      "loss": 0.3682,
      "step": 76
    },
    {
      "epoch": 0.11846153846153847,
      "grad_norm": 0.41443723031355584,
      "learning_rate": 1.9831814291222233e-05,
      "loss": 0.3974,
      "step": 77
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.4713684797848488,
      "learning_rate": 1.982737209517991e-05,
      "loss": 0.3853,
      "step": 78
    },
    {
      "epoch": 0.12153846153846154,
      "grad_norm": 0.4319359759319244,
      "learning_rate": 1.982287250728689e-05,
      "loss": 0.382,
      "step": 79
    },
    {
      "epoch": 0.12307692307692308,
      "grad_norm": 0.46173603703218374,
      "learning_rate": 1.981831555382076e-05,
      "loss": 0.3986,
      "step": 80
    },
    {
      "epoch": 0.12461538461538461,
      "grad_norm": 0.47085778692524854,
      "learning_rate": 1.9813701261394136e-05,
      "loss": 0.3787,
      "step": 81
    },
    {
      "epoch": 0.12615384615384614,
      "grad_norm": 0.48130879868276283,
      "learning_rate": 1.980902965695448e-05,
      "loss": 0.368,
      "step": 82
    },
    {
      "epoch": 0.1276923076923077,
      "grad_norm": 0.4375676704104233,
      "learning_rate": 1.9804300767783958e-05,
      "loss": 0.3889,
      "step": 83
    },
    {
      "epoch": 0.12923076923076923,
      "grad_norm": 0.47219005186638796,
      "learning_rate": 1.979951462149929e-05,
      "loss": 0.3875,
      "step": 84
    },
    {
      "epoch": 0.13076923076923078,
      "grad_norm": 0.3831973173530425,
      "learning_rate": 1.979467124605156e-05,
      "loss": 0.3856,
      "step": 85
    },
    {
      "epoch": 0.13230769230769232,
      "grad_norm": 0.4442260067203606,
      "learning_rate": 1.9789770669726088e-05,
      "loss": 0.3888,
      "step": 86
    },
    {
      "epoch": 0.13384615384615384,
      "grad_norm": 0.5019617632101833,
      "learning_rate": 1.9784812921142232e-05,
      "loss": 0.3828,
      "step": 87
    },
    {
      "epoch": 0.13538461538461538,
      "grad_norm": 0.43270240554403,
      "learning_rate": 1.977979802925324e-05,
      "loss": 0.3918,
      "step": 88
    },
    {
      "epoch": 0.13692307692307693,
      "grad_norm": 0.5121748308328352,
      "learning_rate": 1.977472602334609e-05,
      "loss": 0.3842,
      "step": 89
    },
    {
      "epoch": 0.13846153846153847,
      "grad_norm": 0.4285419146768387,
      "learning_rate": 1.976959693304129e-05,
      "loss": 0.3646,
      "step": 90
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.4443754327929979,
      "learning_rate": 1.9764410788292724e-05,
      "loss": 0.3839,
      "step": 91
    },
    {
      "epoch": 0.14153846153846153,
      "grad_norm": 0.44335647718942606,
      "learning_rate": 1.9759167619387474e-05,
      "loss": 0.3755,
      "step": 92
    },
    {
      "epoch": 0.14307692307692307,
      "grad_norm": 0.4485326069844085,
      "learning_rate": 1.9753867456945653e-05,
      "loss": 0.353,
      "step": 93
    },
    {
      "epoch": 0.14461538461538462,
      "grad_norm": 0.41159403443114567,
      "learning_rate": 1.9748510331920204e-05,
      "loss": 0.3861,
      "step": 94
    },
    {
      "epoch": 0.14615384615384616,
      "grad_norm": 0.4507383444906886,
      "learning_rate": 1.9743096275596735e-05,
      "loss": 0.3942,
      "step": 95
    },
    {
      "epoch": 0.1476923076923077,
      "grad_norm": 0.39621991179394783,
      "learning_rate": 1.9737625319593338e-05,
      "loss": 0.3891,
      "step": 96
    },
    {
      "epoch": 0.14923076923076922,
      "grad_norm": 0.40824533675854696,
      "learning_rate": 1.9732097495860388e-05,
      "loss": 0.3773,
      "step": 97
    },
    {
      "epoch": 0.15076923076923077,
      "grad_norm": 0.4036362707197561,
      "learning_rate": 1.972651283668038e-05,
      "loss": 0.3673,
      "step": 98
    },
    {
      "epoch": 0.1523076923076923,
      "grad_norm": 0.39196000792144625,
      "learning_rate": 1.9720871374667714e-05,
      "loss": 0.3742,
      "step": 99
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 0.45342715962137886,
      "learning_rate": 1.971517314276854e-05,
      "loss": 0.3843,
      "step": 100
    },
    {
      "epoch": 0.15538461538461537,
      "grad_norm": 0.39932262509282124,
      "learning_rate": 1.9709418174260523e-05,
      "loss": 0.3869,
      "step": 101
    },
    {
      "epoch": 0.15692307692307692,
      "grad_norm": 0.37268042255848166,
      "learning_rate": 1.9703606502752674e-05,
      "loss": 0.3831,
      "step": 102
    },
    {
      "epoch": 0.15846153846153846,
      "grad_norm": 0.3997668194053964,
      "learning_rate": 1.9697738162185163e-05,
      "loss": 0.3755,
      "step": 103
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.40502233507726826,
      "learning_rate": 1.969181318682909e-05,
      "loss": 0.3764,
      "step": 104
    },
    {
      "epoch": 0.16153846153846155,
      "grad_norm": 0.439443980401936,
      "learning_rate": 1.9685831611286312e-05,
      "loss": 0.374,
      "step": 105
    },
    {
      "epoch": 0.16307692307692306,
      "grad_norm": 0.40103661213841674,
      "learning_rate": 1.967979347048923e-05,
      "loss": 0.3806,
      "step": 106
    },
    {
      "epoch": 0.1646153846153846,
      "grad_norm": 0.4211147893705695,
      "learning_rate": 1.9673698799700582e-05,
      "loss": 0.3839,
      "step": 107
    },
    {
      "epoch": 0.16615384615384615,
      "grad_norm": 0.38205382927654796,
      "learning_rate": 1.9667547634513248e-05,
      "loss": 0.3647,
      "step": 108
    },
    {
      "epoch": 0.1676923076923077,
      "grad_norm": 0.37836543863414746,
      "learning_rate": 1.9661340010850025e-05,
      "loss": 0.361,
      "step": 109
    },
    {
      "epoch": 0.16923076923076924,
      "grad_norm": 0.41111158527489766,
      "learning_rate": 1.9655075964963443e-05,
      "loss": 0.3766,
      "step": 110
    },
    {
      "epoch": 0.17076923076923076,
      "grad_norm": 0.3885346262736236,
      "learning_rate": 1.9648755533435517e-05,
      "loss": 0.3845,
      "step": 111
    },
    {
      "epoch": 0.1723076923076923,
      "grad_norm": 0.4039456045891746,
      "learning_rate": 1.9642378753177573e-05,
      "loss": 0.3715,
      "step": 112
    },
    {
      "epoch": 0.17384615384615384,
      "grad_norm": 0.3951420777639365,
      "learning_rate": 1.9635945661430006e-05,
      "loss": 0.3753,
      "step": 113
    },
    {
      "epoch": 0.1753846153846154,
      "grad_norm": 0.4173309904873508,
      "learning_rate": 1.9629456295762067e-05,
      "loss": 0.3725,
      "step": 114
    },
    {
      "epoch": 0.17692307692307693,
      "grad_norm": 0.3972068234381968,
      "learning_rate": 1.9622910694071654e-05,
      "loss": 0.3783,
      "step": 115
    },
    {
      "epoch": 0.17846153846153845,
      "grad_norm": 0.4396990125524382,
      "learning_rate": 1.9616308894585078e-05,
      "loss": 0.3787,
      "step": 116
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.3931184722966462,
      "learning_rate": 1.9609650935856847e-05,
      "loss": 0.3838,
      "step": 117
    },
    {
      "epoch": 0.18153846153846154,
      "grad_norm": 0.42394116553785294,
      "learning_rate": 1.9602936856769432e-05,
      "loss": 0.3757,
      "step": 118
    },
    {
      "epoch": 0.18307692307692308,
      "grad_norm": 0.3872711590188605,
      "learning_rate": 1.9596166696533062e-05,
      "loss": 0.3622,
      "step": 119
    },
    {
      "epoch": 0.18461538461538463,
      "grad_norm": 0.43209247940136025,
      "learning_rate": 1.9589340494685464e-05,
      "loss": 0.3747,
      "step": 120
    },
    {
      "epoch": 0.18615384615384614,
      "grad_norm": 0.378527901108427,
      "learning_rate": 1.9582458291091664e-05,
      "loss": 0.3501,
      "step": 121
    },
    {
      "epoch": 0.18769230769230769,
      "grad_norm": 0.4372506951423749,
      "learning_rate": 1.957552012594372e-05,
      "loss": 0.3769,
      "step": 122
    },
    {
      "epoch": 0.18923076923076923,
      "grad_norm": 0.42034408938336454,
      "learning_rate": 1.956852603976052e-05,
      "loss": 0.3799,
      "step": 123
    },
    {
      "epoch": 0.19076923076923077,
      "grad_norm": 0.413661131437597,
      "learning_rate": 1.9561476073387527e-05,
      "loss": 0.3694,
      "step": 124
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 0.4225946538123462,
      "learning_rate": 1.9554370267996537e-05,
      "loss": 0.3698,
      "step": 125
    },
    {
      "epoch": 0.19384615384615383,
      "grad_norm": 0.4211824517053695,
      "learning_rate": 1.954720866508546e-05,
      "loss": 0.3723,
      "step": 126
    },
    {
      "epoch": 0.19538461538461538,
      "grad_norm": 0.441349567520454,
      "learning_rate": 1.9539991306478046e-05,
      "loss": 0.3776,
      "step": 127
    },
    {
      "epoch": 0.19692307692307692,
      "grad_norm": 0.38925256704611244,
      "learning_rate": 1.953271823432367e-05,
      "loss": 0.3771,
      "step": 128
    },
    {
      "epoch": 0.19846153846153847,
      "grad_norm": 0.44208731465380763,
      "learning_rate": 1.952538949109708e-05,
      "loss": 0.3817,
      "step": 129
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.37980861881568023,
      "learning_rate": 1.9518005119598124e-05,
      "loss": 0.3956,
      "step": 130
    },
    {
      "epoch": 0.20153846153846153,
      "grad_norm": 0.43746564826656514,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 0.383,
      "step": 131
    },
    {
      "epoch": 0.20307692307692307,
      "grad_norm": 0.37554151931595203,
      "learning_rate": 1.9503069664606663e-05,
      "loss": 0.3812,
      "step": 132
    },
    {
      "epoch": 0.20461538461538462,
      "grad_norm": 0.3938512517910276,
      "learning_rate": 1.9495518668337204e-05,
      "loss": 0.3755,
      "step": 133
    },
    {
      "epoch": 0.20615384615384616,
      "grad_norm": 0.4315786747382278,
      "learning_rate": 1.9487912218240983e-05,
      "loss": 0.3769,
      "step": 134
    },
    {
      "epoch": 0.2076923076923077,
      "grad_norm": 0.38364198110864106,
      "learning_rate": 1.9480250358739667e-05,
      "loss": 0.3562,
      "step": 135
    },
    {
      "epoch": 0.20923076923076922,
      "grad_norm": 0.44967743929701914,
      "learning_rate": 1.947253313457851e-05,
      "loss": 0.36,
      "step": 136
    },
    {
      "epoch": 0.21076923076923076,
      "grad_norm": 0.39255599974719213,
      "learning_rate": 1.94647605908261e-05,
      "loss": 0.3693,
      "step": 137
    },
    {
      "epoch": 0.2123076923076923,
      "grad_norm": 0.43233859547333486,
      "learning_rate": 1.9456932772874092e-05,
      "loss": 0.3747,
      "step": 138
    },
    {
      "epoch": 0.21384615384615385,
      "grad_norm": 0.4544360118997912,
      "learning_rate": 1.944904972643694e-05,
      "loss": 0.3705,
      "step": 139
    },
    {
      "epoch": 0.2153846153846154,
      "grad_norm": 0.4569398610982909,
      "learning_rate": 1.944111149755164e-05,
      "loss": 0.3663,
      "step": 140
    },
    {
      "epoch": 0.2169230769230769,
      "grad_norm": 0.4164699887840533,
      "learning_rate": 1.9433118132577432e-05,
      "loss": 0.3582,
      "step": 141
    },
    {
      "epoch": 0.21846153846153846,
      "grad_norm": 0.4013888688937234,
      "learning_rate": 1.9425069678195577e-05,
      "loss": 0.3687,
      "step": 142
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.4505585773925497,
      "learning_rate": 1.9416966181409047e-05,
      "loss": 0.3715,
      "step": 143
    },
    {
      "epoch": 0.22153846153846155,
      "grad_norm": 0.44354218271689405,
      "learning_rate": 1.9408807689542257e-05,
      "loss": 0.3863,
      "step": 144
    },
    {
      "epoch": 0.2230769230769231,
      "grad_norm": 0.46031650050587875,
      "learning_rate": 1.94005942502408e-05,
      "loss": 0.376,
      "step": 145
    },
    {
      "epoch": 0.2246153846153846,
      "grad_norm": 0.4248853811443392,
      "learning_rate": 1.9392325911471154e-05,
      "loss": 0.3753,
      "step": 146
    },
    {
      "epoch": 0.22615384615384615,
      "grad_norm": 0.4185179395452762,
      "learning_rate": 1.9384002721520423e-05,
      "loss": 0.3631,
      "step": 147
    },
    {
      "epoch": 0.2276923076923077,
      "grad_norm": 0.4008997188426561,
      "learning_rate": 1.937562472899603e-05,
      "loss": 0.3608,
      "step": 148
    },
    {
      "epoch": 0.22923076923076924,
      "grad_norm": 0.34580398387061256,
      "learning_rate": 1.936719198282545e-05,
      "loss": 0.374,
      "step": 149
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 0.38734751172931087,
      "learning_rate": 1.935870453225592e-05,
      "loss": 0.3717,
      "step": 150
    },
    {
      "epoch": 0.2323076923076923,
      "grad_norm": 0.3960233199543072,
      "learning_rate": 1.9350162426854152e-05,
      "loss": 0.3723,
      "step": 151
    },
    {
      "epoch": 0.23384615384615384,
      "grad_norm": 0.4002038595149067,
      "learning_rate": 1.934156571650603e-05,
      "loss": 0.3645,
      "step": 152
    },
    {
      "epoch": 0.2353846153846154,
      "grad_norm": 0.40387269153898225,
      "learning_rate": 1.933291445141635e-05,
      "loss": 0.3693,
      "step": 153
    },
    {
      "epoch": 0.23692307692307693,
      "grad_norm": 0.3948395913072729,
      "learning_rate": 1.9324208682108493e-05,
      "loss": 0.3638,
      "step": 154
    },
    {
      "epoch": 0.23846153846153847,
      "grad_norm": 0.45422980198428947,
      "learning_rate": 1.931544845942415e-05,
      "loss": 0.3593,
      "step": 155
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.3944506938629949,
      "learning_rate": 1.9306633834523022e-05,
      "loss": 0.3637,
      "step": 156
    },
    {
      "epoch": 0.24153846153846154,
      "grad_norm": 0.37482301609081387,
      "learning_rate": 1.9297764858882516e-05,
      "loss": 0.3577,
      "step": 157
    },
    {
      "epoch": 0.24307692307692308,
      "grad_norm": 0.41539320799014184,
      "learning_rate": 1.9288841584297445e-05,
      "loss": 0.3603,
      "step": 158
    },
    {
      "epoch": 0.24461538461538462,
      "grad_norm": 0.3812817138489635,
      "learning_rate": 1.927986406287973e-05,
      "loss": 0.3758,
      "step": 159
    },
    {
      "epoch": 0.24615384615384617,
      "grad_norm": 0.4090759124668495,
      "learning_rate": 1.92708323470581e-05,
      "loss": 0.3758,
      "step": 160
    },
    {
      "epoch": 0.24769230769230768,
      "grad_norm": 0.37892023888017984,
      "learning_rate": 1.9261746489577767e-05,
      "loss": 0.3776,
      "step": 161
    },
    {
      "epoch": 0.24923076923076923,
      "grad_norm": 0.44478751408219885,
      "learning_rate": 1.925260654350014e-05,
      "loss": 0.3589,
      "step": 162
    },
    {
      "epoch": 0.25076923076923074,
      "grad_norm": 0.3648405453963439,
      "learning_rate": 1.92434125622025e-05,
      "loss": 0.3577,
      "step": 163
    },
    {
      "epoch": 0.2523076923076923,
      "grad_norm": 0.4879484783466178,
      "learning_rate": 1.9234164599377692e-05,
      "loss": 0.3531,
      "step": 164
    },
    {
      "epoch": 0.25384615384615383,
      "grad_norm": 0.3731118065527373,
      "learning_rate": 1.9224862709033823e-05,
      "loss": 0.3607,
      "step": 165
    },
    {
      "epoch": 0.2553846153846154,
      "grad_norm": 0.4097056211741247,
      "learning_rate": 1.9215506945493933e-05,
      "loss": 0.3637,
      "step": 166
    },
    {
      "epoch": 0.2569230769230769,
      "grad_norm": 0.45284781505380334,
      "learning_rate": 1.9206097363395668e-05,
      "loss": 0.3732,
      "step": 167
    },
    {
      "epoch": 0.25846153846153846,
      "grad_norm": 0.4062016992258126,
      "learning_rate": 1.9196634017690993e-05,
      "loss": 0.361,
      "step": 168
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.4282966491275036,
      "learning_rate": 1.9187116963645845e-05,
      "loss": 0.3715,
      "step": 169
    },
    {
      "epoch": 0.26153846153846155,
      "grad_norm": 0.4242618592829376,
      "learning_rate": 1.9177546256839814e-05,
      "loss": 0.3491,
      "step": 170
    },
    {
      "epoch": 0.2630769230769231,
      "grad_norm": 0.46549729776801385,
      "learning_rate": 1.9167921953165827e-05,
      "loss": 0.3708,
      "step": 171
    },
    {
      "epoch": 0.26461538461538464,
      "grad_norm": 0.39329336860390796,
      "learning_rate": 1.9158244108829815e-05,
      "loss": 0.3653,
      "step": 172
    },
    {
      "epoch": 0.26615384615384613,
      "grad_norm": 0.42072110375926625,
      "learning_rate": 1.9148512780350384e-05,
      "loss": 0.3658,
      "step": 173
    },
    {
      "epoch": 0.2676923076923077,
      "grad_norm": 0.40327284519428425,
      "learning_rate": 1.9138728024558494e-05,
      "loss": 0.375,
      "step": 174
    },
    {
      "epoch": 0.2692307692307692,
      "grad_norm": 0.4008125375412133,
      "learning_rate": 1.9128889898597117e-05,
      "loss": 0.3681,
      "step": 175
    },
    {
      "epoch": 0.27076923076923076,
      "grad_norm": 0.3734692274716112,
      "learning_rate": 1.91189984599209e-05,
      "loss": 0.3617,
      "step": 176
    },
    {
      "epoch": 0.2723076923076923,
      "grad_norm": 0.37325469734795563,
      "learning_rate": 1.910905376629585e-05,
      "loss": 0.3521,
      "step": 177
    },
    {
      "epoch": 0.27384615384615385,
      "grad_norm": 0.3856667934315571,
      "learning_rate": 1.9099055875798974e-05,
      "loss": 0.3531,
      "step": 178
    },
    {
      "epoch": 0.2753846153846154,
      "grad_norm": 0.3808888210552385,
      "learning_rate": 1.9089004846817947e-05,
      "loss": 0.3595,
      "step": 179
    },
    {
      "epoch": 0.27692307692307694,
      "grad_norm": 0.3745173965711979,
      "learning_rate": 1.9078900738050776e-05,
      "loss": 0.3564,
      "step": 180
    },
    {
      "epoch": 0.2784615384615385,
      "grad_norm": 0.34898266841145875,
      "learning_rate": 1.9068743608505454e-05,
      "loss": 0.3753,
      "step": 181
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.3644769885231561,
      "learning_rate": 1.905853351749962e-05,
      "loss": 0.3512,
      "step": 182
    },
    {
      "epoch": 0.2815384615384615,
      "grad_norm": 0.3858389082782684,
      "learning_rate": 1.9048270524660197e-05,
      "loss": 0.3876,
      "step": 183
    },
    {
      "epoch": 0.28307692307692306,
      "grad_norm": 0.38818683800074927,
      "learning_rate": 1.903795468992306e-05,
      "loss": 0.361,
      "step": 184
    },
    {
      "epoch": 0.2846153846153846,
      "grad_norm": 0.36695466265129156,
      "learning_rate": 1.902758607353269e-05,
      "loss": 0.3654,
      "step": 185
    },
    {
      "epoch": 0.28615384615384615,
      "grad_norm": 0.39570005961028404,
      "learning_rate": 1.9017164736041795e-05,
      "loss": 0.364,
      "step": 186
    },
    {
      "epoch": 0.2876923076923077,
      "grad_norm": 0.3771366863278202,
      "learning_rate": 1.9006690738310988e-05,
      "loss": 0.3603,
      "step": 187
    },
    {
      "epoch": 0.28923076923076924,
      "grad_norm": 0.38878503899522604,
      "learning_rate": 1.8996164141508412e-05,
      "loss": 0.3507,
      "step": 188
    },
    {
      "epoch": 0.2907692307692308,
      "grad_norm": 0.3956545304159193,
      "learning_rate": 1.898558500710939e-05,
      "loss": 0.3667,
      "step": 189
    },
    {
      "epoch": 0.2923076923076923,
      "grad_norm": 0.35381083702922,
      "learning_rate": 1.8974953396896066e-05,
      "loss": 0.3376,
      "step": 190
    },
    {
      "epoch": 0.29384615384615387,
      "grad_norm": 0.4170829328048133,
      "learning_rate": 1.896426937295704e-05,
      "loss": 0.3572,
      "step": 191
    },
    {
      "epoch": 0.2953846153846154,
      "grad_norm": 0.3559172538176691,
      "learning_rate": 1.8953532997687008e-05,
      "loss": 0.3541,
      "step": 192
    },
    {
      "epoch": 0.2969230769230769,
      "grad_norm": 0.3869731893258225,
      "learning_rate": 1.89427443337864e-05,
      "loss": 0.3678,
      "step": 193
    },
    {
      "epoch": 0.29846153846153844,
      "grad_norm": 0.3594161161799857,
      "learning_rate": 1.8931903444261007e-05,
      "loss": 0.3649,
      "step": 194
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.3613700145720469,
      "learning_rate": 1.8921010392421628e-05,
      "loss": 0.3622,
      "step": 195
    },
    {
      "epoch": 0.30153846153846153,
      "grad_norm": 0.37149126110892394,
      "learning_rate": 1.891006524188368e-05,
      "loss": 0.37,
      "step": 196
    },
    {
      "epoch": 0.3030769230769231,
      "grad_norm": 0.40590505685938155,
      "learning_rate": 1.889906805656684e-05,
      "loss": 0.3625,
      "step": 197
    },
    {
      "epoch": 0.3046153846153846,
      "grad_norm": 0.36776799202272653,
      "learning_rate": 1.888801890069467e-05,
      "loss": 0.3348,
      "step": 198
    },
    {
      "epoch": 0.30615384615384617,
      "grad_norm": 0.38714355877029916,
      "learning_rate": 1.8876917838794226e-05,
      "loss": 0.3518,
      "step": 199
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.39132293890378467,
      "learning_rate": 1.886576493569572e-05,
      "loss": 0.3567,
      "step": 200
    },
    {
      "epoch": 0.30923076923076925,
      "grad_norm": 0.37037393157233806,
      "learning_rate": 1.8854560256532098e-05,
      "loss": 0.3433,
      "step": 201
    },
    {
      "epoch": 0.31076923076923074,
      "grad_norm": 0.3742182134623614,
      "learning_rate": 1.884330386673869e-05,
      "loss": 0.3602,
      "step": 202
    },
    {
      "epoch": 0.3123076923076923,
      "grad_norm": 0.3745982088926344,
      "learning_rate": 1.8831995832052802e-05,
      "loss": 0.3598,
      "step": 203
    },
    {
      "epoch": 0.31384615384615383,
      "grad_norm": 0.3148505000859751,
      "learning_rate": 1.8820636218513354e-05,
      "loss": 0.3514,
      "step": 204
    },
    {
      "epoch": 0.3153846153846154,
      "grad_norm": 0.3777147146126031,
      "learning_rate": 1.8809225092460488e-05,
      "loss": 0.3624,
      "step": 205
    },
    {
      "epoch": 0.3169230769230769,
      "grad_norm": 0.3620744024651921,
      "learning_rate": 1.8797762520535178e-05,
      "loss": 0.3545,
      "step": 206
    },
    {
      "epoch": 0.31846153846153846,
      "grad_norm": 0.3275337617591835,
      "learning_rate": 1.8786248569678847e-05,
      "loss": 0.3549,
      "step": 207
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.37980267084252867,
      "learning_rate": 1.8774683307132956e-05,
      "loss": 0.3632,
      "step": 208
    },
    {
      "epoch": 0.32153846153846155,
      "grad_norm": 0.34585621541822953,
      "learning_rate": 1.8763066800438638e-05,
      "loss": 0.36,
      "step": 209
    },
    {
      "epoch": 0.3230769230769231,
      "grad_norm": 0.3480579524545715,
      "learning_rate": 1.8751399117436292e-05,
      "loss": 0.3531,
      "step": 210
    },
    {
      "epoch": 0.32461538461538464,
      "grad_norm": 0.3496509737199204,
      "learning_rate": 1.873968032626518e-05,
      "loss": 0.3605,
      "step": 211
    },
    {
      "epoch": 0.3261538461538461,
      "grad_norm": 0.40524163043153,
      "learning_rate": 1.8727910495363043e-05,
      "loss": 0.3583,
      "step": 212
    },
    {
      "epoch": 0.32769230769230767,
      "grad_norm": 0.34877588777050383,
      "learning_rate": 1.8716089693465696e-05,
      "loss": 0.347,
      "step": 213
    },
    {
      "epoch": 0.3292307692307692,
      "grad_norm": 0.4592738818592951,
      "learning_rate": 1.8704217989606606e-05,
      "loss": 0.3598,
      "step": 214
    },
    {
      "epoch": 0.33076923076923076,
      "grad_norm": 0.3783649551999555,
      "learning_rate": 1.869229545311653e-05,
      "loss": 0.3583,
      "step": 215
    },
    {
      "epoch": 0.3323076923076923,
      "grad_norm": 0.3536547509407451,
      "learning_rate": 1.8680322153623077e-05,
      "loss": 0.3498,
      "step": 216
    },
    {
      "epoch": 0.33384615384615385,
      "grad_norm": 0.4079948196470235,
      "learning_rate": 1.8668298161050308e-05,
      "loss": 0.3622,
      "step": 217
    },
    {
      "epoch": 0.3353846153846154,
      "grad_norm": 0.36942640741898625,
      "learning_rate": 1.8656223545618345e-05,
      "loss": 0.353,
      "step": 218
    },
    {
      "epoch": 0.33692307692307694,
      "grad_norm": 0.39196579363565714,
      "learning_rate": 1.8644098377842934e-05,
      "loss": 0.3532,
      "step": 219
    },
    {
      "epoch": 0.3384615384615385,
      "grad_norm": 0.3564332628975117,
      "learning_rate": 1.8631922728535054e-05,
      "loss": 0.3551,
      "step": 220
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.34786080023934873,
      "learning_rate": 1.8619696668800494e-05,
      "loss": 0.3531,
      "step": 221
    },
    {
      "epoch": 0.3415384615384615,
      "grad_norm": 0.37096480013302563,
      "learning_rate": 1.860742027003944e-05,
      "loss": 0.3472,
      "step": 222
    },
    {
      "epoch": 0.34307692307692306,
      "grad_norm": 0.33695890031849024,
      "learning_rate": 1.8595093603946053e-05,
      "loss": 0.3489,
      "step": 223
    },
    {
      "epoch": 0.3446153846153846,
      "grad_norm": 0.36025354068440185,
      "learning_rate": 1.8582716742508066e-05,
      "loss": 0.3539,
      "step": 224
    },
    {
      "epoch": 0.34615384615384615,
      "grad_norm": 0.3410672872818961,
      "learning_rate": 1.8570289758006346e-05,
      "loss": 0.352,
      "step": 225
    },
    {
      "epoch": 0.3476923076923077,
      "grad_norm": 0.3598105304326528,
      "learning_rate": 1.8557812723014476e-05,
      "loss": 0.3538,
      "step": 226
    },
    {
      "epoch": 0.34923076923076923,
      "grad_norm": 0.36804240330611765,
      "learning_rate": 1.8545285710398343e-05,
      "loss": 0.3519,
      "step": 227
    },
    {
      "epoch": 0.3507692307692308,
      "grad_norm": 0.32813391778641043,
      "learning_rate": 1.853270879331569e-05,
      "loss": 0.3539,
      "step": 228
    },
    {
      "epoch": 0.3523076923076923,
      "grad_norm": 0.3846644370625242,
      "learning_rate": 1.852008204521572e-05,
      "loss": 0.3429,
      "step": 229
    },
    {
      "epoch": 0.35384615384615387,
      "grad_norm": 0.36227501037133736,
      "learning_rate": 1.850740553983863e-05,
      "loss": 0.3524,
      "step": 230
    },
    {
      "epoch": 0.3553846153846154,
      "grad_norm": 0.35547652422353576,
      "learning_rate": 1.8494679351215212e-05,
      "loss": 0.3459,
      "step": 231
    },
    {
      "epoch": 0.3569230769230769,
      "grad_norm": 0.3439467119964199,
      "learning_rate": 1.8481903553666405e-05,
      "loss": 0.3635,
      "step": 232
    },
    {
      "epoch": 0.35846153846153844,
      "grad_norm": 0.38495422393809375,
      "learning_rate": 1.846907822180286e-05,
      "loss": 0.3583,
      "step": 233
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.3468097978390537,
      "learning_rate": 1.845620343052452e-05,
      "loss": 0.3519,
      "step": 234
    },
    {
      "epoch": 0.36153846153846153,
      "grad_norm": 0.3939392362586235,
      "learning_rate": 1.8443279255020153e-05,
      "loss": 0.3389,
      "step": 235
    },
    {
      "epoch": 0.3630769230769231,
      "grad_norm": 0.3557187005044313,
      "learning_rate": 1.8430305770766947e-05,
      "loss": 0.3538,
      "step": 236
    },
    {
      "epoch": 0.3646153846153846,
      "grad_norm": 0.3613084285594687,
      "learning_rate": 1.8417283053530047e-05,
      "loss": 0.3544,
      "step": 237
    },
    {
      "epoch": 0.36615384615384616,
      "grad_norm": 0.37263843055238405,
      "learning_rate": 1.8404211179362116e-05,
      "loss": 0.3467,
      "step": 238
    },
    {
      "epoch": 0.3676923076923077,
      "grad_norm": 0.3562469357331947,
      "learning_rate": 1.8391090224602895e-05,
      "loss": 0.3557,
      "step": 239
    },
    {
      "epoch": 0.36923076923076925,
      "grad_norm": 0.3409376089014781,
      "learning_rate": 1.837792026587876e-05,
      "loss": 0.3423,
      "step": 240
    },
    {
      "epoch": 0.3707692307692308,
      "grad_norm": 0.42464049754075456,
      "learning_rate": 1.8364701380102267e-05,
      "loss": 0.3463,
      "step": 241
    },
    {
      "epoch": 0.3723076923076923,
      "grad_norm": 0.37801548493364734,
      "learning_rate": 1.8351433644471708e-05,
      "loss": 0.3409,
      "step": 242
    },
    {
      "epoch": 0.37384615384615383,
      "grad_norm": 0.408672340900871,
      "learning_rate": 1.8338117136470648e-05,
      "loss": 0.3759,
      "step": 243
    },
    {
      "epoch": 0.37538461538461537,
      "grad_norm": 0.37546504595179914,
      "learning_rate": 1.8324751933867496e-05,
      "loss": 0.3563,
      "step": 244
    },
    {
      "epoch": 0.3769230769230769,
      "grad_norm": 0.36631809004670857,
      "learning_rate": 1.831133811471503e-05,
      "loss": 0.3578,
      "step": 245
    },
    {
      "epoch": 0.37846153846153846,
      "grad_norm": 0.3736994550757926,
      "learning_rate": 1.829787575734995e-05,
      "loss": 0.3538,
      "step": 246
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.37463686730232026,
      "learning_rate": 1.8284364940392426e-05,
      "loss": 0.3544,
      "step": 247
    },
    {
      "epoch": 0.38153846153846155,
      "grad_norm": 0.3725028314834782,
      "learning_rate": 1.827080574274562e-05,
      "loss": 0.3553,
      "step": 248
    },
    {
      "epoch": 0.3830769230769231,
      "grad_norm": 0.39321544749534515,
      "learning_rate": 1.825719824359524e-05,
      "loss": 0.3572,
      "step": 249
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.39648989139486013,
      "learning_rate": 1.824354252240909e-05,
      "loss": 0.356,
      "step": 250
    },
    {
      "epoch": 0.3861538461538462,
      "grad_norm": 0.34380776676973107,
      "learning_rate": 1.8229838658936566e-05,
      "loss": 0.3452,
      "step": 251
    },
    {
      "epoch": 0.38769230769230767,
      "grad_norm": 0.36601585597171354,
      "learning_rate": 1.821608673320823e-05,
      "loss": 0.3396,
      "step": 252
    },
    {
      "epoch": 0.3892307692307692,
      "grad_norm": 0.3616253511776755,
      "learning_rate": 1.820228682553533e-05,
      "loss": 0.3396,
      "step": 253
    },
    {
      "epoch": 0.39076923076923076,
      "grad_norm": 0.3516180682215301,
      "learning_rate": 1.818843901650932e-05,
      "loss": 0.3533,
      "step": 254
    },
    {
      "epoch": 0.3923076923076923,
      "grad_norm": 0.35992255609326845,
      "learning_rate": 1.8174543387001403e-05,
      "loss": 0.353,
      "step": 255
    },
    {
      "epoch": 0.39384615384615385,
      "grad_norm": 0.359104051539393,
      "learning_rate": 1.816060001816205e-05,
      "loss": 0.3528,
      "step": 256
    },
    {
      "epoch": 0.3953846153846154,
      "grad_norm": 0.3839196503752293,
      "learning_rate": 1.8146608991420533e-05,
      "loss": 0.3528,
      "step": 257
    },
    {
      "epoch": 0.39692307692307693,
      "grad_norm": 0.3945898720253854,
      "learning_rate": 1.8132570388484442e-05,
      "loss": 0.3591,
      "step": 258
    },
    {
      "epoch": 0.3984615384615385,
      "grad_norm": 0.35326375322997333,
      "learning_rate": 1.811848429133922e-05,
      "loss": 0.3476,
      "step": 259
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.3987685904984489,
      "learning_rate": 1.810435078224767e-05,
      "loss": 0.349,
      "step": 260
    },
    {
      "epoch": 0.4015384615384615,
      "grad_norm": 0.36852779510455175,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.3651,
      "step": 261
    },
    {
      "epoch": 0.40307692307692305,
      "grad_norm": 0.4086515779440857,
      "learning_rate": 1.8075941858660737e-05,
      "loss": 0.3461,
      "step": 262
    },
    {
      "epoch": 0.4046153846153846,
      "grad_norm": 0.3793557979593047,
      "learning_rate": 1.8061666610073465e-05,
      "loss": 0.3522,
      "step": 263
    },
    {
      "epoch": 0.40615384615384614,
      "grad_norm": 0.4119306737647016,
      "learning_rate": 1.8047344281355112e-05,
      "loss": 0.3541,
      "step": 264
    },
    {
      "epoch": 0.4076923076923077,
      "grad_norm": 0.3808370864185399,
      "learning_rate": 1.8032974956148064e-05,
      "loss": 0.3501,
      "step": 265
    },
    {
      "epoch": 0.40923076923076923,
      "grad_norm": 0.3827615753077048,
      "learning_rate": 1.8018558718369187e-05,
      "loss": 0.3507,
      "step": 266
    },
    {
      "epoch": 0.4107692307692308,
      "grad_norm": 0.3876568030885218,
      "learning_rate": 1.8004095652209304e-05,
      "loss": 0.3386,
      "step": 267
    },
    {
      "epoch": 0.4123076923076923,
      "grad_norm": 0.3738590685818937,
      "learning_rate": 1.7989585842132713e-05,
      "loss": 0.3547,
      "step": 268
    },
    {
      "epoch": 0.41384615384615386,
      "grad_norm": 0.34897180553671925,
      "learning_rate": 1.7975029372876706e-05,
      "loss": 0.3427,
      "step": 269
    },
    {
      "epoch": 0.4153846153846154,
      "grad_norm": 0.3792529674837759,
      "learning_rate": 1.7960426329451062e-05,
      "loss": 0.347,
      "step": 270
    },
    {
      "epoch": 0.4169230769230769,
      "grad_norm": 0.3699278879810063,
      "learning_rate": 1.7945776797137544e-05,
      "loss": 0.36,
      "step": 271
    },
    {
      "epoch": 0.41846153846153844,
      "grad_norm": 0.37408068508210207,
      "learning_rate": 1.7931080861489425e-05,
      "loss": 0.353,
      "step": 272
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.3470876802937946,
      "learning_rate": 1.791633860833096e-05,
      "loss": 0.3452,
      "step": 273
    },
    {
      "epoch": 0.42153846153846153,
      "grad_norm": 0.32277614204114535,
      "learning_rate": 1.7901550123756906e-05,
      "loss": 0.3597,
      "step": 274
    },
    {
      "epoch": 0.4230769230769231,
      "grad_norm": 0.3122099562281799,
      "learning_rate": 1.7886715494132008e-05,
      "loss": 0.343,
      "step": 275
    },
    {
      "epoch": 0.4246153846153846,
      "grad_norm": 0.35093054267366997,
      "learning_rate": 1.7871834806090502e-05,
      "loss": 0.3402,
      "step": 276
    },
    {
      "epoch": 0.42615384615384616,
      "grad_norm": 0.3526844675215035,
      "learning_rate": 1.7856908146535602e-05,
      "loss": 0.3471,
      "step": 277
    },
    {
      "epoch": 0.4276923076923077,
      "grad_norm": 0.36989716444938414,
      "learning_rate": 1.7841935602638997e-05,
      "loss": 0.3493,
      "step": 278
    },
    {
      "epoch": 0.42923076923076925,
      "grad_norm": 0.34659497432637826,
      "learning_rate": 1.7826917261840337e-05,
      "loss": 0.3476,
      "step": 279
    },
    {
      "epoch": 0.4307692307692308,
      "grad_norm": 0.36942940048352685,
      "learning_rate": 1.781185321184673e-05,
      "loss": 0.3465,
      "step": 280
    },
    {
      "epoch": 0.4323076923076923,
      "grad_norm": 0.33930299087150695,
      "learning_rate": 1.7796743540632226e-05,
      "loss": 0.3488,
      "step": 281
    },
    {
      "epoch": 0.4338461538461538,
      "grad_norm": 0.3532264238998626,
      "learning_rate": 1.77815883364373e-05,
      "loss": 0.3412,
      "step": 282
    },
    {
      "epoch": 0.43538461538461537,
      "grad_norm": 0.34563607097263,
      "learning_rate": 1.7766387687768338e-05,
      "loss": 0.3492,
      "step": 283
    },
    {
      "epoch": 0.4369230769230769,
      "grad_norm": 0.3359455576897893,
      "learning_rate": 1.7751141683397128e-05,
      "loss": 0.3453,
      "step": 284
    },
    {
      "epoch": 0.43846153846153846,
      "grad_norm": 0.33525285705398855,
      "learning_rate": 1.7735850412360332e-05,
      "loss": 0.3493,
      "step": 285
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.3349856346616677,
      "learning_rate": 1.772051396395897e-05,
      "loss": 0.3534,
      "step": 286
    },
    {
      "epoch": 0.44153846153846155,
      "grad_norm": 0.3392621630571748,
      "learning_rate": 1.7705132427757895e-05,
      "loss": 0.3487,
      "step": 287
    },
    {
      "epoch": 0.4430769230769231,
      "grad_norm": 0.3367417477753544,
      "learning_rate": 1.7689705893585273e-05,
      "loss": 0.3517,
      "step": 288
    },
    {
      "epoch": 0.44461538461538463,
      "grad_norm": 0.3451957771468646,
      "learning_rate": 1.7674234451532065e-05,
      "loss": 0.3452,
      "step": 289
    },
    {
      "epoch": 0.4461538461538462,
      "grad_norm": 0.3685834045166528,
      "learning_rate": 1.7658718191951483e-05,
      "loss": 0.3541,
      "step": 290
    },
    {
      "epoch": 0.44769230769230767,
      "grad_norm": 0.35075879867972815,
      "learning_rate": 1.7643157205458483e-05,
      "loss": 0.3517,
      "step": 291
    },
    {
      "epoch": 0.4492307692307692,
      "grad_norm": 0.363045183643031,
      "learning_rate": 1.7627551582929223e-05,
      "loss": 0.3637,
      "step": 292
    },
    {
      "epoch": 0.45076923076923076,
      "grad_norm": 0.3685236151251217,
      "learning_rate": 1.7611901415500536e-05,
      "loss": 0.3257,
      "step": 293
    },
    {
      "epoch": 0.4523076923076923,
      "grad_norm": 0.3538775664927815,
      "learning_rate": 1.759620679456939e-05,
      "loss": 0.3466,
      "step": 294
    },
    {
      "epoch": 0.45384615384615384,
      "grad_norm": 0.37169949722464124,
      "learning_rate": 1.7580467811792374e-05,
      "loss": 0.3522,
      "step": 295
    },
    {
      "epoch": 0.4553846153846154,
      "grad_norm": 0.40012856967527305,
      "learning_rate": 1.7564684559085138e-05,
      "loss": 0.3408,
      "step": 296
    },
    {
      "epoch": 0.45692307692307693,
      "grad_norm": 0.3666356782056923,
      "learning_rate": 1.7548857128621878e-05,
      "loss": 0.3452,
      "step": 297
    },
    {
      "epoch": 0.4584615384615385,
      "grad_norm": 0.3272969997669924,
      "learning_rate": 1.753298561283478e-05,
      "loss": 0.3434,
      "step": 298
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.38831742649553147,
      "learning_rate": 1.7517070104413497e-05,
      "loss": 0.3452,
      "step": 299
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.3602246989660254,
      "learning_rate": 1.7501110696304598e-05,
      "loss": 0.3561,
      "step": 300
    },
    {
      "epoch": 0.46307692307692305,
      "grad_norm": 0.32751426562964475,
      "learning_rate": 1.7485107481711014e-05,
      "loss": 0.3422,
      "step": 301
    },
    {
      "epoch": 0.4646153846153846,
      "grad_norm": 0.3775562040944554,
      "learning_rate": 1.7469060554091518e-05,
      "loss": 0.3567,
      "step": 302
    },
    {
      "epoch": 0.46615384615384614,
      "grad_norm": 0.3746811301380941,
      "learning_rate": 1.745297000716016e-05,
      "loss": 0.3417,
      "step": 303
    },
    {
      "epoch": 0.4676923076923077,
      "grad_norm": 0.32919868953567194,
      "learning_rate": 1.7436835934885735e-05,
      "loss": 0.3442,
      "step": 304
    },
    {
      "epoch": 0.46923076923076923,
      "grad_norm": 0.3862876725431514,
      "learning_rate": 1.7420658431491224e-05,
      "loss": 0.3361,
      "step": 305
    },
    {
      "epoch": 0.4707692307692308,
      "grad_norm": 0.34901646771529365,
      "learning_rate": 1.7404437591453237e-05,
      "loss": 0.3508,
      "step": 306
    },
    {
      "epoch": 0.4723076923076923,
      "grad_norm": 0.3675088015884468,
      "learning_rate": 1.7388173509501475e-05,
      "loss": 0.3457,
      "step": 307
    },
    {
      "epoch": 0.47384615384615386,
      "grad_norm": 0.3515874155159066,
      "learning_rate": 1.7371866280618176e-05,
      "loss": 0.3643,
      "step": 308
    },
    {
      "epoch": 0.4753846153846154,
      "grad_norm": 0.36516295839622703,
      "learning_rate": 1.7355516000037555e-05,
      "loss": 0.3434,
      "step": 309
    },
    {
      "epoch": 0.47692307692307695,
      "grad_norm": 0.36215517755761173,
      "learning_rate": 1.733912276324524e-05,
      "loss": 0.3472,
      "step": 310
    },
    {
      "epoch": 0.47846153846153844,
      "grad_norm": 0.36430798311353485,
      "learning_rate": 1.7322686665977738e-05,
      "loss": 0.35,
      "step": 311
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.3363708591416349,
      "learning_rate": 1.7306207804221845e-05,
      "loss": 0.3327,
      "step": 312
    },
    {
      "epoch": 0.4815384615384615,
      "grad_norm": 0.3464405667250271,
      "learning_rate": 1.7289686274214116e-05,
      "loss": 0.3309,
      "step": 313
    },
    {
      "epoch": 0.48307692307692307,
      "grad_norm": 0.41057799162040165,
      "learning_rate": 1.727312217244028e-05,
      "loss": 0.3362,
      "step": 314
    },
    {
      "epoch": 0.4846153846153846,
      "grad_norm": 0.3508212343362318,
      "learning_rate": 1.7256515595634688e-05,
      "loss": 0.3445,
      "step": 315
    },
    {
      "epoch": 0.48615384615384616,
      "grad_norm": 0.36827996807255337,
      "learning_rate": 1.7239866640779745e-05,
      "loss": 0.3463,
      "step": 316
    },
    {
      "epoch": 0.4876923076923077,
      "grad_norm": 0.34115993415767315,
      "learning_rate": 1.722317540510534e-05,
      "loss": 0.3426,
      "step": 317
    },
    {
      "epoch": 0.48923076923076925,
      "grad_norm": 0.3312349293414821,
      "learning_rate": 1.720644198608829e-05,
      "loss": 0.347,
      "step": 318
    },
    {
      "epoch": 0.4907692307692308,
      "grad_norm": 0.349340239732704,
      "learning_rate": 1.7189666481451755e-05,
      "loss": 0.3615,
      "step": 319
    },
    {
      "epoch": 0.49230769230769234,
      "grad_norm": 0.3330797470660715,
      "learning_rate": 1.717284898916468e-05,
      "loss": 0.3421,
      "step": 320
    },
    {
      "epoch": 0.4938461538461538,
      "grad_norm": 0.32038359438240244,
      "learning_rate": 1.715598960744121e-05,
      "loss": 0.356,
      "step": 321
    },
    {
      "epoch": 0.49538461538461537,
      "grad_norm": 0.3429307183460945,
      "learning_rate": 1.7139088434740142e-05,
      "loss": 0.3315,
      "step": 322
    },
    {
      "epoch": 0.4969230769230769,
      "grad_norm": 0.3329575955338515,
      "learning_rate": 1.712214556976431e-05,
      "loss": 0.3374,
      "step": 323
    },
    {
      "epoch": 0.49846153846153846,
      "grad_norm": 0.33173791143793213,
      "learning_rate": 1.7105161111460046e-05,
      "loss": 0.3416,
      "step": 324
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.35973140787573293,
      "learning_rate": 1.7088135159016584e-05,
      "loss": 0.3557,
      "step": 325
    },
    {
      "epoch": 0.5015384615384615,
      "grad_norm": 0.32789280511095725,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.353,
      "step": 326
    },
    {
      "epoch": 0.5030769230769231,
      "grad_norm": 0.40991419155641984,
      "learning_rate": 1.7053959169680033e-05,
      "loss": 0.3488,
      "step": 327
    },
    {
      "epoch": 0.5046153846153846,
      "grad_norm": 0.31432251880938783,
      "learning_rate": 1.7036809332374713e-05,
      "loss": 0.3377,
      "step": 328
    },
    {
      "epoch": 0.5061538461538462,
      "grad_norm": 0.3665523546008641,
      "learning_rate": 1.7019618400104572e-05,
      "loss": 0.3378,
      "step": 329
    },
    {
      "epoch": 0.5076923076923077,
      "grad_norm": 0.3320078438985165,
      "learning_rate": 1.700238647326464e-05,
      "loss": 0.3361,
      "step": 330
    },
    {
      "epoch": 0.5092307692307693,
      "grad_norm": 0.35680941172132136,
      "learning_rate": 1.6985113652489374e-05,
      "loss": 0.3453,
      "step": 331
    },
    {
      "epoch": 0.5107692307692308,
      "grad_norm": 0.3654902422894205,
      "learning_rate": 1.6967800038652035e-05,
      "loss": 0.3467,
      "step": 332
    },
    {
      "epoch": 0.5123076923076924,
      "grad_norm": 0.3332106421189731,
      "learning_rate": 1.695044573286413e-05,
      "loss": 0.3395,
      "step": 333
    },
    {
      "epoch": 0.5138461538461538,
      "grad_norm": 0.3576528257798259,
      "learning_rate": 1.69330508364748e-05,
      "loss": 0.3454,
      "step": 334
    },
    {
      "epoch": 0.5153846153846153,
      "grad_norm": 0.3154421189547101,
      "learning_rate": 1.6915615451070234e-05,
      "loss": 0.3627,
      "step": 335
    },
    {
      "epoch": 0.5169230769230769,
      "grad_norm": 0.33079254546716974,
      "learning_rate": 1.689813967847308e-05,
      "loss": 0.3385,
      "step": 336
    },
    {
      "epoch": 0.5184615384615384,
      "grad_norm": 0.3718374242713586,
      "learning_rate": 1.6880623620741843e-05,
      "loss": 0.3386,
      "step": 337
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.35919342674204147,
      "learning_rate": 1.68630673801703e-05,
      "loss": 0.3347,
      "step": 338
    },
    {
      "epoch": 0.5215384615384615,
      "grad_norm": 0.4145660600161487,
      "learning_rate": 1.684547105928689e-05,
      "loss": 0.3326,
      "step": 339
    },
    {
      "epoch": 0.5230769230769231,
      "grad_norm": 0.35262102463458106,
      "learning_rate": 1.682783476085412e-05,
      "loss": 0.3468,
      "step": 340
| }, | |
| { | |
| "epoch": 0.5246153846153846, | |
| "grad_norm": 0.3969110582273147, | |
| "learning_rate": 1.6810158587867973e-05, | |
| "loss": 0.345, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.5261538461538462, | |
| "grad_norm": 0.36729152355342054, | |
| "learning_rate": 1.679244264355729e-05, | |
| "loss": 0.3494, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.5276923076923077, | |
| "grad_norm": 0.34584656489173654, | |
| "learning_rate": 1.677468703138319e-05, | |
| "loss": 0.3598, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.5292307692307693, | |
| "grad_norm": 0.3769779959966056, | |
| "learning_rate": 1.6756891855038436e-05, | |
| "loss": 0.3428, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.5307692307692308, | |
| "grad_norm": 0.3870397539579194, | |
| "learning_rate": 1.673905721844686e-05, | |
| "loss": 0.3399, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.5323076923076923, | |
| "grad_norm": 0.3803105690557983, | |
| "learning_rate": 1.6721183225762726e-05, | |
| "loss": 0.3407, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.5338461538461539, | |
| "grad_norm": 0.3811961355506509, | |
| "learning_rate": 1.670326998137016e-05, | |
| "loss": 0.3435, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.5353846153846153, | |
| "grad_norm": 0.355424944231993, | |
| "learning_rate": 1.668531758988249e-05, | |
| "loss": 0.3419, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.536923076923077, | |
| "grad_norm": 0.3697051964035359, | |
| "learning_rate": 1.666732615614169e-05, | |
| "loss": 0.3558, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.5384615384615384, | |
| "grad_norm": 0.3724554786238689, | |
| "learning_rate": 1.6649295785217722e-05, | |
| "loss": 0.3465, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.3713258592311073, | |
| "learning_rate": 1.6631226582407954e-05, | |
| "loss": 0.3472, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.5415384615384615, | |
| "grad_norm": 0.37379815805633365, | |
| "learning_rate": 1.661311865323652e-05, | |
| "loss": 0.3476, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.5430769230769231, | |
| "grad_norm": 0.3574918037766584, | |
| "learning_rate": 1.6594972103453727e-05, | |
| "loss": 0.3505, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.5446153846153846, | |
| "grad_norm": 0.36735856136671163, | |
| "learning_rate": 1.6576787039035417e-05, | |
| "loss": 0.3437, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.5461538461538461, | |
| "grad_norm": 0.39795420244333224, | |
| "learning_rate": 1.6558563566182365e-05, | |
| "loss": 0.3387, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.5476923076923077, | |
| "grad_norm": 0.35332927485568466, | |
| "learning_rate": 1.6540301791319647e-05, | |
| "loss": 0.3351, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.5492307692307692, | |
| "grad_norm": 0.3405051238522205, | |
| "learning_rate": 1.652200182109602e-05, | |
| "loss": 0.3388, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.5507692307692308, | |
| "grad_norm": 0.3833225434413435, | |
| "learning_rate": 1.6503663762383312e-05, | |
| "loss": 0.3419, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.5523076923076923, | |
| "grad_norm": 0.3462506998511973, | |
| "learning_rate": 1.6485287722275783e-05, | |
| "loss": 0.3475, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.5538461538461539, | |
| "grad_norm": 0.391689506628609, | |
| "learning_rate": 1.6466873808089496e-05, | |
| "loss": 0.357, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.5553846153846154, | |
| "grad_norm": 0.3270791225847661, | |
| "learning_rate": 1.6448422127361707e-05, | |
| "loss": 0.3443, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.556923076923077, | |
| "grad_norm": 0.37696990145987513, | |
| "learning_rate": 1.642993278785023e-05, | |
| "loss": 0.3461, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.5584615384615385, | |
| "grad_norm": 0.3489276120822278, | |
| "learning_rate": 1.64114058975328e-05, | |
| "loss": 0.3416, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.35545398572334047, | |
| "learning_rate": 1.639284156460646e-05, | |
| "loss": 0.3451, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.5615384615384615, | |
| "grad_norm": 0.3496864708346177, | |
| "learning_rate": 1.63742398974869e-05, | |
| "loss": 0.3582, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.563076923076923, | |
| "grad_norm": 0.3433728900258887, | |
| "learning_rate": 1.6355601004807856e-05, | |
| "loss": 0.35, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.5646153846153846, | |
| "grad_norm": 0.363448160242182, | |
| "learning_rate": 1.6336924995420453e-05, | |
| "loss": 0.3412, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.5661538461538461, | |
| "grad_norm": 0.3358514480354539, | |
| "learning_rate": 1.6318211978392588e-05, | |
| "loss": 0.3462, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.5676923076923077, | |
| "grad_norm": 0.3529963835277034, | |
| "learning_rate": 1.6299462063008272e-05, | |
| "loss": 0.3454, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.5692307692307692, | |
| "grad_norm": 0.35264544772660966, | |
| "learning_rate": 1.6280675358767005e-05, | |
| "loss": 0.3335, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.5707692307692308, | |
| "grad_norm": 0.34118145221992047, | |
| "learning_rate": 1.626185197538314e-05, | |
| "loss": 0.3506, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.5723076923076923, | |
| "grad_norm": 0.33861364933452004, | |
| "learning_rate": 1.6242992022785225e-05, | |
| "loss": 0.3394, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.5738461538461539, | |
| "grad_norm": 0.34114296179605763, | |
| "learning_rate": 1.6224095611115385e-05, | |
| "loss": 0.342, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.5753846153846154, | |
| "grad_norm": 0.35031645242644793, | |
| "learning_rate": 1.620516285072866e-05, | |
| "loss": 0.3446, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.5769230769230769, | |
| "grad_norm": 0.33369847070376424, | |
| "learning_rate": 1.6186193852192356e-05, | |
| "loss": 0.3283, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.5784615384615385, | |
| "grad_norm": 0.35653923352092576, | |
| "learning_rate": 1.6167188726285433e-05, | |
| "loss": 0.3413, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.33404971938811645, | |
| "learning_rate": 1.6148147583997813e-05, | |
| "loss": 0.3501, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.5815384615384616, | |
| "grad_norm": 0.37355957953604907, | |
| "learning_rate": 1.6129070536529767e-05, | |
| "loss": 0.3381, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.583076923076923, | |
| "grad_norm": 0.3503877045317371, | |
| "learning_rate": 1.6109957695291246e-05, | |
| "loss": 0.3394, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.5846153846153846, | |
| "grad_norm": 0.3458427753639511, | |
| "learning_rate": 1.6090809171901237e-05, | |
| "loss": 0.3216, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.5861538461538461, | |
| "grad_norm": 0.3602325778928683, | |
| "learning_rate": 1.6071625078187113e-05, | |
| "loss": 0.3453, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.5876923076923077, | |
| "grad_norm": 0.3193438829573852, | |
| "learning_rate": 1.605240552618398e-05, | |
| "loss": 0.3495, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.5892307692307692, | |
| "grad_norm": 0.33548527878831463, | |
| "learning_rate": 1.603315062813401e-05, | |
| "loss": 0.3417, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.5907692307692308, | |
| "grad_norm": 0.32840411596842495, | |
| "learning_rate": 1.601386049648581e-05, | |
| "loss": 0.347, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.5923076923076923, | |
| "grad_norm": 0.3298738925919233, | |
| "learning_rate": 1.5994535243893742e-05, | |
| "loss": 0.329, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.5938461538461538, | |
| "grad_norm": 0.3316001678941656, | |
| "learning_rate": 1.5975174983217273e-05, | |
| "loss": 0.3374, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.5953846153846154, | |
| "grad_norm": 0.31506047903749107, | |
| "learning_rate": 1.5955779827520327e-05, | |
| "loss": 0.3286, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.5969230769230769, | |
| "grad_norm": 0.3205561551558476, | |
| "learning_rate": 1.5936349890070602e-05, | |
| "loss": 0.3439, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.5984615384615385, | |
| "grad_norm": 0.34332344122828284, | |
| "learning_rate": 1.5916885284338937e-05, | |
| "loss": 0.3498, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.3378431634660829, | |
| "learning_rate": 1.5897386123998613e-05, | |
| "loss": 0.3411, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.6015384615384616, | |
| "grad_norm": 0.3308198558930134, | |
| "learning_rate": 1.5877852522924733e-05, | |
| "loss": 0.3401, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.6030769230769231, | |
| "grad_norm": 0.3382324720181038, | |
| "learning_rate": 1.5858284595193514e-05, | |
| "loss": 0.3313, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.6046153846153847, | |
| "grad_norm": 0.3115119958797461, | |
| "learning_rate": 1.5838682455081657e-05, | |
| "loss": 0.3473, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.6061538461538462, | |
| "grad_norm": 0.345775349619465, | |
| "learning_rate": 1.581904621706565e-05, | |
| "loss": 0.3416, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.6076923076923076, | |
| "grad_norm": 0.3278035432263032, | |
| "learning_rate": 1.5799375995821116e-05, | |
| "loss": 0.3402, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.6092307692307692, | |
| "grad_norm": 0.3091426767370126, | |
| "learning_rate": 1.577967190622215e-05, | |
| "loss": 0.3419, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.6107692307692307, | |
| "grad_norm": 0.32700056228623475, | |
| "learning_rate": 1.5759934063340627e-05, | |
| "loss": 0.3286, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.6123076923076923, | |
| "grad_norm": 0.31000502760088167, | |
| "learning_rate": 1.5740162582445545e-05, | |
| "loss": 0.3359, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.6138461538461538, | |
| "grad_norm": 0.315042243493686, | |
| "learning_rate": 1.5720357579002346e-05, | |
| "loss": 0.3302, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.6153846153846154, | |
| "grad_norm": 0.3562010310358661, | |
| "learning_rate": 1.5700519168672248e-05, | |
| "loss": 0.3241, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.6169230769230769, | |
| "grad_norm": 0.32548257408496123, | |
| "learning_rate": 1.568064746731156e-05, | |
| "loss": 0.3396, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.6184615384615385, | |
| "grad_norm": 0.34393891513916103, | |
| "learning_rate": 1.5660742590971014e-05, | |
| "loss": 0.3344, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.3504882415860048, | |
| "learning_rate": 1.5640804655895086e-05, | |
| "loss": 0.3435, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.6215384615384615, | |
| "grad_norm": 0.2977739667457439, | |
| "learning_rate": 1.5620833778521306e-05, | |
| "loss": 0.3406, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.6230769230769231, | |
| "grad_norm": 0.3465963236491581, | |
| "learning_rate": 1.5600830075479604e-05, | |
| "loss": 0.3541, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.6246153846153846, | |
| "grad_norm": 0.3278292046990623, | |
| "learning_rate": 1.5580793663591583e-05, | |
| "loss": 0.3399, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.6261538461538462, | |
| "grad_norm": 0.3178133040140898, | |
| "learning_rate": 1.5560724659869905e-05, | |
| "loss": 0.3487, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.6276923076923077, | |
| "grad_norm": 0.3189251567444249, | |
| "learning_rate": 1.5540623181517532e-05, | |
| "loss": 0.3316, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.6292307692307693, | |
| "grad_norm": 0.3082890284907204, | |
| "learning_rate": 1.5520489345927095e-05, | |
| "loss": 0.327, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.6307692307692307, | |
| "grad_norm": 0.32991631832198975, | |
| "learning_rate": 1.5500323270680194e-05, | |
| "loss": 0.3363, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.6323076923076923, | |
| "grad_norm": 0.2992131839708694, | |
| "learning_rate": 1.5480125073546705e-05, | |
| "loss": 0.338, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.6338461538461538, | |
| "grad_norm": 0.3325641611293239, | |
| "learning_rate": 1.5459894872484083e-05, | |
| "loss": 0.3227, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.6353846153846154, | |
| "grad_norm": 0.32566576056931756, | |
| "learning_rate": 1.5439632785636707e-05, | |
| "loss": 0.3455, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.6369230769230769, | |
| "grad_norm": 0.3523675990715312, | |
| "learning_rate": 1.5419338931335155e-05, | |
| "loss": 0.3427, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.6384615384615384, | |
| "grad_norm": 0.35998620466162407, | |
| "learning_rate": 1.539901342809554e-05, | |
| "loss": 0.3372, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.3446553956298386, | |
| "learning_rate": 1.5378656394618788e-05, | |
| "loss": 0.3394, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.6415384615384615, | |
| "grad_norm": 0.3616445233684353, | |
| "learning_rate": 1.5358267949789968e-05, | |
| "loss": 0.3322, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.6430769230769231, | |
| "grad_norm": 0.3639906664590491, | |
| "learning_rate": 1.53378482126776e-05, | |
| "loss": 0.3468, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.6446153846153846, | |
| "grad_norm": 0.3217847263901737, | |
| "learning_rate": 1.5317397302532933e-05, | |
| "loss": 0.3386, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.6461538461538462, | |
| "grad_norm": 0.3523419339077219, | |
| "learning_rate": 1.529691533878929e-05, | |
| "loss": 0.3345, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.6476923076923077, | |
| "grad_norm": 0.29498589810539044, | |
| "learning_rate": 1.527640244106133e-05, | |
| "loss": 0.3282, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.6492307692307693, | |
| "grad_norm": 0.35484962462726277, | |
| "learning_rate": 1.5255858729144368e-05, | |
| "loss": 0.3318, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.6507692307692308, | |
| "grad_norm": 0.32079171813409035, | |
| "learning_rate": 1.5235284323013674e-05, | |
| "loss": 0.3405, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.6523076923076923, | |
| "grad_norm": 0.3160074548522989, | |
| "learning_rate": 1.5214679342823786e-05, | |
| "loss": 0.347, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.6538461538461539, | |
| "grad_norm": 0.3539240389229003, | |
| "learning_rate": 1.5194043908907774e-05, | |
| "loss": 0.3323, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.6553846153846153, | |
| "grad_norm": 0.330852711681434, | |
| "learning_rate": 1.5173378141776569e-05, | |
| "loss": 0.3327, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.6569230769230769, | |
| "grad_norm": 0.322980956210938, | |
| "learning_rate": 1.515268216211825e-05, | |
| "loss": 0.3382, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.6584615384615384, | |
| "grad_norm": 0.3529073490405514, | |
| "learning_rate": 1.5131956090797326e-05, | |
| "loss": 0.3277, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.3535741007698121, | |
| "learning_rate": 1.5111200048854055e-05, | |
| "loss": 0.3378, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.6615384615384615, | |
| "grad_norm": 0.3443986070735677, | |
| "learning_rate": 1.5090414157503715e-05, | |
| "loss": 0.3434, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.6630769230769231, | |
| "grad_norm": 0.3190609653993291, | |
| "learning_rate": 1.5069598538135905e-05, | |
| "loss": 0.3368, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.6646153846153846, | |
| "grad_norm": 0.3208785969499383, | |
| "learning_rate": 1.504875331231384e-05, | |
| "loss": 0.3301, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.6661538461538462, | |
| "grad_norm": 0.3405536133260573, | |
| "learning_rate": 1.5027878601773633e-05, | |
| "loss": 0.3381, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.6676923076923077, | |
| "grad_norm": 0.3235610875812187, | |
| "learning_rate": 1.5006974528423585e-05, | |
| "loss": 0.348, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.6692307692307692, | |
| "grad_norm": 0.33286382213223575, | |
| "learning_rate": 1.4986041214343487e-05, | |
| "loss": 0.3298, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.6707692307692308, | |
| "grad_norm": 0.31311886460294547, | |
| "learning_rate": 1.4965078781783882e-05, | |
| "loss": 0.3347, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.6723076923076923, | |
| "grad_norm": 0.34048661944444236, | |
| "learning_rate": 1.494408735316537e-05, | |
| "loss": 0.3179, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.6738461538461539, | |
| "grad_norm": 0.30741869868907906, | |
| "learning_rate": 1.4923067051077893e-05, | |
| "loss": 0.3342, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.6753846153846154, | |
| "grad_norm": 0.384913969400681, | |
| "learning_rate": 1.490201799828001e-05, | |
| "loss": 0.328, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.676923076923077, | |
| "grad_norm": 0.3252037127931481, | |
| "learning_rate": 1.4880940317698182e-05, | |
| "loss": 0.3325, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.6784615384615384, | |
| "grad_norm": 0.3381708194506858, | |
| "learning_rate": 1.485983413242606e-05, | |
| "loss": 0.3347, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.3138690227925255, | |
| "learning_rate": 1.4838699565723764e-05, | |
| "loss": 0.3457, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.6815384615384615, | |
| "grad_norm": 0.31660415974825973, | |
| "learning_rate": 1.4817536741017153e-05, | |
| "loss": 0.341, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.683076923076923, | |
| "grad_norm": 0.32414107649278284, | |
| "learning_rate": 1.479634578189712e-05, | |
| "loss": 0.3318, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.6846153846153846, | |
| "grad_norm": 0.33198663699808667, | |
| "learning_rate": 1.4775126812118865e-05, | |
| "loss": 0.342, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.6861538461538461, | |
| "grad_norm": 0.3574753350792076, | |
| "learning_rate": 1.4753879955601162e-05, | |
| "loss": 0.3293, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.6876923076923077, | |
| "grad_norm": 0.3039010914154921, | |
| "learning_rate": 1.4732605336425651e-05, | |
| "loss": 0.3377, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.6892307692307692, | |
| "grad_norm": 0.331147958191396, | |
| "learning_rate": 1.4711303078836098e-05, | |
| "loss": 0.3341, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.6907692307692308, | |
| "grad_norm": 0.33253847273949044, | |
| "learning_rate": 1.4689973307237687e-05, | |
| "loss": 0.3246, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.6923076923076923, | |
| "grad_norm": 0.34714104465860907, | |
| "learning_rate": 1.466861614619628e-05, | |
| "loss": 0.3376, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.6938461538461539, | |
| "grad_norm": 0.3185908753912035, | |
| "learning_rate": 1.4647231720437687e-05, | |
| "loss": 0.3272, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.6953846153846154, | |
| "grad_norm": 0.3528975380407178, | |
| "learning_rate": 1.4625820154846953e-05, | |
| "loss": 0.336, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.696923076923077, | |
| "grad_norm": 0.3277896571363917, | |
| "learning_rate": 1.4604381574467616e-05, | |
| "loss": 0.3306, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.6984615384615385, | |
| "grad_norm": 0.3243374356110855, | |
| "learning_rate": 1.4582916104500977e-05, | |
| "loss": 0.3412, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.34889136163694917, | |
| "learning_rate": 1.4561423870305383e-05, | |
| "loss": 0.3471, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.7015384615384616, | |
| "grad_norm": 0.32513754947818535, | |
| "learning_rate": 1.4539904997395468e-05, | |
| "loss": 0.3432, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.703076923076923, | |
| "grad_norm": 0.3067256440653582, | |
| "learning_rate": 1.4518359611441452e-05, | |
| "loss": 0.3254, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.7046153846153846, | |
| "grad_norm": 0.32317304945228514, | |
| "learning_rate": 1.4496787838268378e-05, | |
| "loss": 0.3471, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.7061538461538461, | |
| "grad_norm": 0.3201968707132547, | |
| "learning_rate": 1.4475189803855399e-05, | |
| "loss": 0.33, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.7076923076923077, | |
| "grad_norm": 0.31117563005728266, | |
| "learning_rate": 1.445356563433503e-05, | |
| "loss": 0.3241, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.7092307692307692, | |
| "grad_norm": 0.3231141370386228, | |
| "learning_rate": 1.4431915455992416e-05, | |
| "loss": 0.325, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.7107692307692308, | |
| "grad_norm": 0.3505400578573498, | |
| "learning_rate": 1.4410239395264594e-05, | |
| "loss": 0.3504, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.7123076923076923, | |
| "grad_norm": 0.3641284162614635, | |
| "learning_rate": 1.438853757873975e-05, | |
| "loss": 0.3451, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.7138461538461538, | |
| "grad_norm": 0.341362150690221, | |
| "learning_rate": 1.4366810133156495e-05, | |
| "loss": 0.334, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.7153846153846154, | |
| "grad_norm": 0.3366781323480242, | |
| "learning_rate": 1.43450571854031e-05, | |
| "loss": 0.3317, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.7169230769230769, | |
| "grad_norm": 0.33305216446331765, | |
| "learning_rate": 1.4323278862516774e-05, | |
| "loss": 0.3275, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.7184615384615385, | |
| "grad_norm": 0.3463239764810698, | |
| "learning_rate": 1.430147529168292e-05, | |
| "loss": 0.3275, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.32846628027216124, | |
| "learning_rate": 1.4279646600234388e-05, | |
| "loss": 0.3319, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.7215384615384616, | |
| "grad_norm": 0.34238219227879857, | |
| "learning_rate": 1.4257792915650728e-05, | |
| "loss": 0.3293, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.7230769230769231, | |
| "grad_norm": 0.32976197302001725, | |
| "learning_rate": 1.4235914365557455e-05, | |
| "loss": 0.3237, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.7246153846153847, | |
| "grad_norm": 0.34893869828996976, | |
| "learning_rate": 1.4214011077725293e-05, | |
| "loss": 0.3419, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.7261538461538461, | |
| "grad_norm": 0.31966372573316476, | |
| "learning_rate": 1.4192083180069441e-05, | |
| "loss": 0.3427, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.7276923076923076, | |
| "grad_norm": 0.35132035791751987, | |
| "learning_rate": 1.4170130800648814e-05, | |
| "loss": 0.3395, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.7292307692307692, | |
| "grad_norm": 0.3664547092525681, | |
| "learning_rate": 1.4148154067665305e-05, | |
| "loss": 0.3426, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.7307692307692307, | |
| "grad_norm": 0.3368990568479692, | |
| "learning_rate": 1.4126153109463025e-05, | |
| "loss": 0.3276, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.7323076923076923, | |
| "grad_norm": 0.3651280080968852, | |
| "learning_rate": 1.410412805452757e-05, | |
| "loss": 0.3227, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.7338461538461538, | |
| "grad_norm": 0.34742282148815545, | |
| "learning_rate": 1.4082079031485253e-05, | |
| "loss": 0.3253, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.7353846153846154, | |
| "grad_norm": 0.32137945130339735, | |
| "learning_rate": 1.4060006169102363e-05, | |
| "loss": 0.3423, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.7369230769230769, | |
| "grad_norm": 0.34666033781216216, | |
| "learning_rate": 1.403790959628441e-05, | |
| "loss": 0.3475, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.7384615384615385, | |
| "grad_norm": 0.35142123558854127, | |
| "learning_rate": 1.4015789442075376e-05, | |
| "loss": 0.3385, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.3341424816484901, | |
| "learning_rate": 1.3993645835656955e-05, | |
| "loss": 0.3389, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.7415384615384616, | |
| "grad_norm": 0.32207944514331105, | |
| "learning_rate": 1.3971478906347806e-05, | |
| "loss": 0.3259, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.7430769230769231, | |
| "grad_norm": 0.3179744347055682, | |
| "learning_rate": 1.394928878360279e-05, | |
| "loss": 0.3306, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.7446153846153846, | |
| "grad_norm": 0.3299785758953266, | |
| "learning_rate": 1.3927075597012215e-05, | |
| "loss": 0.3403, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.7461538461538462, | |
| "grad_norm": 0.3479339850990576, | |
| "learning_rate": 1.3904839476301091e-05, | |
| "loss": 0.3363, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.7476923076923077, | |
| "grad_norm": 0.3160433072118054, | |
| "learning_rate": 1.388258055132835e-05, | |
| "loss": 0.3339, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.7492307692307693, | |
| "grad_norm": 0.3168391369647155, | |
| "learning_rate": 1.3860298952086118e-05, | |
| "loss": 0.3303, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.7507692307692307, | |
| "grad_norm": 0.3255877966163271, | |
| "learning_rate": 1.383799480869892e-05, | |
| "loss": 0.329, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.7523076923076923, | |
| "grad_norm": 0.33535362647277184, | |
| "learning_rate": 1.3815668251422953e-05, | |
| "loss": 0.3353, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.7538461538461538, | |
| "grad_norm": 0.3271690638955483, | |
| "learning_rate": 1.3793319410645307e-05, | |
| "loss": 0.327, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.7553846153846154, | |
| "grad_norm": 0.29308835135249056, | |
| "learning_rate": 1.3770948416883205e-05, | |
| "loss": 0.3301, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.7569230769230769, | |
| "grad_norm": 0.33463441859903204, | |
| "learning_rate": 1.3748555400783245e-05, | |
| "loss": 0.3372, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.7584615384615384, | |
| "grad_norm": 0.30324390052773076, | |
| "learning_rate": 1.3726140493120639e-05, | |
| "loss": 0.3284, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.3233241376039668, | |
| "learning_rate": 1.3703703824798438e-05, | |
| "loss": 0.3263, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.7615384615384615, | |
| "grad_norm": 0.31728343223649114, | |
| "learning_rate": 1.3681245526846782e-05, | |
| "loss": 0.3352, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.7630769230769231, | |
| "grad_norm": 0.3214067215862394, | |
| "learning_rate": 1.3658765730422126e-05, | |
| "loss": 0.3425, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.7646153846153846, | |
| "grad_norm": 0.31943063401432914, | |
| "learning_rate": 1.3636264566806473e-05, | |
| "loss": 0.3401, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.7661538461538462, | |
| "grad_norm": 0.30418243324485683, | |
| "learning_rate": 1.3613742167406614e-05, | |
| "loss": 0.3372, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.7676923076923077, | |
| "grad_norm": 0.34821005797159027, | |
| "learning_rate": 1.3591198663753358e-05, | |
| "loss": 0.3217, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.7692307692307693, | |
| "grad_norm": 0.3087089373776307, | |
| "learning_rate": 1.3568634187500762e-05, | |
| "loss": 0.3283, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.7707692307692308, | |
| "grad_norm": 0.30635526513225797, | |
| "learning_rate": 1.3546048870425356e-05, | |
| "loss": 0.3233, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.7723076923076924, | |
| "grad_norm": 0.3276774808261006, | |
| "learning_rate": 1.3523442844425393e-05, | |
| "loss": 0.3413, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.7738461538461539, | |
| "grad_norm": 0.3148280444491086, | |
| "learning_rate": 1.3500816241520059e-05, | |
| "loss": 0.3315, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.7753846153846153, | |
| "grad_norm": 0.32807106888422827, | |
| "learning_rate": 1.3478169193848705e-05, | |
| "loss": 0.327, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.7769230769230769, | |
| "grad_norm": 0.3292383482401427, | |
| "learning_rate": 1.3455501833670089e-05, | |
| "loss": 0.3336, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.7784615384615384, | |
| "grad_norm": 0.3121711347510858, | |
| "learning_rate": 1.3432814293361585e-05, | |
| "loss": 0.3343, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.31726922745700675, | |
| "learning_rate": 1.3410106705418424e-05, | |
| "loss": 0.3268, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.7815384615384615, | |
| "grad_norm": 0.3369144300435686, | |
| "learning_rate": 1.3387379202452917e-05, | |
| "loss": 0.3379, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.7830769230769231, | |
| "grad_norm": 0.3420593507280525, | |
| "learning_rate": 1.3364631917193671e-05, | |
| "loss": 0.3312, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.7846153846153846, | |
| "grad_norm": 0.3088403155613573, | |
| "learning_rate": 1.3341864982484828e-05, | |
| "loss": 0.3376, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.7861538461538462, | |
| "grad_norm": 0.3372304602489395, | |
| "learning_rate": 1.3319078531285286e-05, | |
| "loss": 0.3396, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.7876923076923077, | |
| "grad_norm": 0.3556448458865266, | |
| "learning_rate": 1.329627269666791e-05, | |
| "loss": 0.3259, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.7892307692307692, | |
| "grad_norm": 0.32239867500588243, | |
| "learning_rate": 1.3273447611818768e-05, | |
| "loss": 0.3334, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.7907692307692308, | |
| "grad_norm": 0.34313378935169225, | |
| "learning_rate": 1.3250603410036356e-05, | |
| "loss": 0.3278, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.7923076923076923, | |
| "grad_norm": 0.3130455306524999, | |
| "learning_rate": 1.3227740224730799e-05, | |
| "loss": 0.3227, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.7938461538461539, | |
| "grad_norm": 0.35119933240152384, | |
| "learning_rate": 1.3204858189423097e-05, | |
| "loss": 0.3328, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.7953846153846154, | |
| "grad_norm": 0.31600394128037407, | |
| "learning_rate": 1.3181957437744333e-05, | |
| "loss": 0.3395, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.796923076923077, | |
| "grad_norm": 0.3479657346532963, | |
| "learning_rate": 1.3159038103434889e-05, | |
| "loss": 0.3483, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.7984615384615384, | |
| "grad_norm": 0.3206870512019153, | |
| "learning_rate": 1.3136100320343674e-05, | |
| "loss": 0.3402, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.31675024233834703, | |
| "learning_rate": 1.3113144222427334e-05, | |
| "loss": 0.3163, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.8015384615384615, | |
| "grad_norm": 0.34122270739362187, | |
| "learning_rate": 1.3090169943749475e-05, | |
| "loss": 0.3352, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.803076923076923, | |
| "grad_norm": 0.28611701873915973, | |
| "learning_rate": 1.3067177618479883e-05, | |
| "loss": 0.3405, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.8046153846153846, | |
| "grad_norm": 0.327896071980244, | |
| "learning_rate": 1.3044167380893726e-05, | |
| "loss": 0.3276, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.8061538461538461, | |
| "grad_norm": 0.31845280215584937, | |
| "learning_rate": 1.3021139365370787e-05, | |
| "loss": 0.348, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.8076923076923077, | |
| "grad_norm": 0.32071766074021624, | |
| "learning_rate": 1.2998093706394676e-05, | |
| "loss": 0.3491, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.8092307692307692, | |
| "grad_norm": 0.3233443240330192, | |
| "learning_rate": 1.297503053855203e-05, | |
| "loss": 0.336, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.8107692307692308, | |
| "grad_norm": 0.35467566330457945, | |
| "learning_rate": 1.295194999653175e-05, | |
| "loss": 0.3409, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.8123076923076923, | |
| "grad_norm": 0.316307679891141, | |
| "learning_rate": 1.292885221512419e-05, | |
| "loss": 0.326, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.8138461538461539, | |
| "grad_norm": 0.314005066699375, | |
| "learning_rate": 1.2905737329220394e-05, | |
| "loss": 0.3151, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.8153846153846154, | |
| "grad_norm": 0.3077830345752374, | |
| "learning_rate": 1.2882605473811282e-05, | |
| "loss": 0.3329, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.816923076923077, | |
| "grad_norm": 0.32958916574455754, | |
| "learning_rate": 1.2859456783986892e-05, | |
| "loss": 0.3287, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.8184615384615385, | |
| "grad_norm": 0.3067485099829722, | |
| "learning_rate": 1.2836291394935568e-05, | |
| "loss": 0.3309, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.3104385024448171, | |
| "learning_rate": 1.2813109441943166e-05, | |
| "loss": 0.334, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.8215384615384616, | |
| "grad_norm": 0.31618015424262574, | |
| "learning_rate": 1.2789911060392295e-05, | |
| "loss": 0.3337, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.823076923076923, | |
| "grad_norm": 0.32038146009037033, | |
| "learning_rate": 1.2766696385761494e-05, | |
| "loss": 0.3304, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.8246153846153846, | |
| "grad_norm": 0.30932611044623504, | |
| "learning_rate": 1.274346555362446e-05, | |
| "loss": 0.3314, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.8261538461538461, | |
| "grad_norm": 0.3153016831527301, | |
| "learning_rate": 1.2720218699649243e-05, | |
| "loss": 0.3439, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.8276923076923077, | |
| "grad_norm": 0.3019690928384475, | |
| "learning_rate": 1.269695595959747e-05, | |
| "loss": 0.3338, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.8292307692307692, | |
| "grad_norm": 0.29367927565138535, | |
| "learning_rate": 1.2673677469323532e-05, | |
| "loss": 0.3287, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.8307692307692308, | |
| "grad_norm": 0.3216958588833604, | |
| "learning_rate": 1.2650383364773812e-05, | |
| "loss": 0.32, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.8323076923076923, | |
| "grad_norm": 0.33236242408977856, | |
| "learning_rate": 1.262707378198587e-05, | |
| "loss": 0.3176, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.8338461538461538, | |
| "grad_norm": 0.29093819677752464, | |
| "learning_rate": 1.2603748857087668e-05, | |
| "loss": 0.337, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.8353846153846154, | |
| "grad_norm": 0.30218052941466567, | |
| "learning_rate": 1.258040872629676e-05, | |
| "loss": 0.3268, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.8369230769230769, | |
| "grad_norm": 0.31560988459280653, | |
| "learning_rate": 1.2557053525919503e-05, | |
| "loss": 0.3177, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.8384615384615385, | |
| "grad_norm": 0.3396123824016913, | |
| "learning_rate": 1.2533683392350264e-05, | |
| "loss": 0.3337, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.30587015627634234, | |
| "learning_rate": 1.2510298462070619e-05, | |
| "loss": 0.3357, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.8415384615384616, | |
| "grad_norm": 0.314387767986292, | |
| "learning_rate": 1.2486898871648552e-05, | |
| "loss": 0.3365, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.8430769230769231, | |
| "grad_norm": 0.31389305994558653, | |
| "learning_rate": 1.2463484757737663e-05, | |
| "loss": 0.3412, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.8446153846153847, | |
| "grad_norm": 0.3060685974378655, | |
| "learning_rate": 1.2440056257076376e-05, | |
| "loss": 0.3272, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.8461538461538461, | |
| "grad_norm": 0.30434307224961593, | |
| "learning_rate": 1.241661350648713e-05, | |
| "loss": 0.3284, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.8476923076923077, | |
| "grad_norm": 0.3132509005169485, | |
| "learning_rate": 1.2393156642875579e-05, | |
| "loss": 0.3304, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.8492307692307692, | |
| "grad_norm": 0.31509089282351654, | |
| "learning_rate": 1.2369685803229802e-05, | |
| "loss": 0.3352, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.8507692307692307, | |
| "grad_norm": 0.3285296432652221, | |
| "learning_rate": 1.2346201124619502e-05, | |
| "loss": 0.3278, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.8523076923076923, | |
| "grad_norm": 0.31718979342496284, | |
| "learning_rate": 1.2322702744195192e-05, | |
| "loss": 0.3335, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.8538461538461538, | |
| "grad_norm": 0.31312636222764384, | |
| "learning_rate": 1.2299190799187405e-05, | |
| "loss": 0.3268, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.8553846153846154, | |
| "grad_norm": 0.3112656909831258, | |
| "learning_rate": 1.22756654269059e-05, | |
| "loss": 0.3245, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.8569230769230769, | |
| "grad_norm": 0.32161444094635033, | |
| "learning_rate": 1.2252126764738845e-05, | |
| "loss": 0.3244, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.8584615384615385, | |
| "grad_norm": 0.3218845643109017, | |
| "learning_rate": 1.222857495015202e-05, | |
| "loss": 0.3342, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.31065573649293504, | |
| "learning_rate": 1.2205010120688012e-05, | |
| "loss": 0.3302, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.8615384615384616, | |
| "grad_norm": 0.324391159225024, | |
| "learning_rate": 1.2181432413965428e-05, | |
| "loss": 0.3497, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.8630769230769231, | |
| "grad_norm": 0.3043829743503028, | |
| "learning_rate": 1.2157841967678064e-05, | |
| "loss": 0.3305, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.8646153846153846, | |
| "grad_norm": 0.31249008455235144, | |
| "learning_rate": 1.2134238919594122e-05, | |
| "loss": 0.3249, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.8661538461538462, | |
| "grad_norm": 0.3196330879564442, | |
| "learning_rate": 1.2110623407555398e-05, | |
| "loss": 0.3369, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.8676923076923077, | |
| "grad_norm": 0.3185290427871534, | |
| "learning_rate": 1.2086995569476474e-05, | |
| "loss": 0.3288, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.8692307692307693, | |
| "grad_norm": 0.2870092449390502, | |
| "learning_rate": 1.2063355543343925e-05, | |
| "loss": 0.3346, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.8707692307692307, | |
| "grad_norm": 0.3257019255068011, | |
| "learning_rate": 1.2039703467215489e-05, | |
| "loss": 0.3262, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.8723076923076923, | |
| "grad_norm": 0.30786304302506623, | |
| "learning_rate": 1.2016039479219293e-05, | |
| "loss": 0.3241, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.8738461538461538, | |
| "grad_norm": 0.2871242511667203, | |
| "learning_rate": 1.1992363717553015e-05, | |
| "loss": 0.3338, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.8753846153846154, | |
| "grad_norm": 0.31399555825061976, | |
| "learning_rate": 1.1968676320483103e-05, | |
| "loss": 0.3214, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.8769230769230769, | |
| "grad_norm": 0.3206608664636182, | |
| "learning_rate": 1.194497742634395e-05, | |
| "loss": 0.327, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.8784615384615385, | |
| "grad_norm": 0.29015010647347106, | |
| "learning_rate": 1.1921267173537085e-05, | |
| "loss": 0.3236, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.3063448933173235, | |
| "learning_rate": 1.1897545700530387e-05, | |
| "loss": 0.3397, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.8815384615384615, | |
| "grad_norm": 0.3110184425830958, | |
| "learning_rate": 1.187381314585725e-05, | |
| "loss": 0.3213, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.8830769230769231, | |
| "grad_norm": 0.3457162836672877, | |
| "learning_rate": 1.1850069648115785e-05, | |
| "loss": 0.3397, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.8846153846153846, | |
| "grad_norm": 0.331003030927215, | |
| "learning_rate": 1.1826315345968014e-05, | |
| "loss": 0.3206, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.8861538461538462, | |
| "grad_norm": 0.315149509773091, | |
| "learning_rate": 1.180255037813906e-05, | |
| "loss": 0.33, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.8876923076923077, | |
| "grad_norm": 0.340006308605447, | |
| "learning_rate": 1.1778774883416325e-05, | |
| "loss": 0.3231, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.8892307692307693, | |
| "grad_norm": 0.3169393379879127, | |
| "learning_rate": 1.1754989000648693e-05, | |
| "loss": 0.3312, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.8907692307692308, | |
| "grad_norm": 0.30576326286204264, | |
| "learning_rate": 1.1731192868745716e-05, | |
| "loss": 0.3292, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.8923076923076924, | |
| "grad_norm": 0.29835580565844005, | |
| "learning_rate": 1.1707386626676798e-05, | |
| "loss": 0.3341, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.8938461538461538, | |
| "grad_norm": 0.30635228645222695, | |
| "learning_rate": 1.1683570413470384e-05, | |
| "loss": 0.3354, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.8953846153846153, | |
| "grad_norm": 0.3122003041641902, | |
| "learning_rate": 1.1659744368213159e-05, | |
| "loss": 0.3355, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.8969230769230769, | |
| "grad_norm": 0.29808807782765917, | |
| "learning_rate": 1.163590863004922e-05, | |
| "loss": 0.309, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.8984615384615384, | |
| "grad_norm": 0.31722094162289693, | |
| "learning_rate": 1.1612063338179269e-05, | |
| "loss": 0.3345, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.34343706386507955, | |
| "learning_rate": 1.1588208631859808e-05, | |
| "loss": 0.3264, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.9015384615384615, | |
| "grad_norm": 0.3561421859445545, | |
| "learning_rate": 1.156434465040231e-05, | |
| "loss": 0.3221, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.9030769230769231, | |
| "grad_norm": 0.3122333077895763, | |
| "learning_rate": 1.154047153317243e-05, | |
| "loss": 0.334, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.9046153846153846, | |
| "grad_norm": 0.36462307583103276, | |
| "learning_rate": 1.1516589419589159e-05, | |
| "loss": 0.3383, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.9061538461538462, | |
| "grad_norm": 0.3787798255981821, | |
| "learning_rate": 1.1492698449124042e-05, | |
| "loss": 0.3236, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.9076923076923077, | |
| "grad_norm": 0.31052416517739034, | |
| "learning_rate": 1.1468798761300335e-05, | |
| "loss": 0.328, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.9092307692307692, | |
| "grad_norm": 0.34670105196219847, | |
| "learning_rate": 1.1444890495692214e-05, | |
| "loss": 0.32, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.9107692307692308, | |
| "grad_norm": 0.3435408690029312, | |
| "learning_rate": 1.1420973791923941e-05, | |
| "loss": 0.3179, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.9123076923076923, | |
| "grad_norm": 0.34535921979706546, | |
| "learning_rate": 1.1397048789669061e-05, | |
| "loss": 0.3202, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.9138461538461539, | |
| "grad_norm": 0.34135586974992327, | |
| "learning_rate": 1.1373115628649582e-05, | |
| "loss": 0.3377, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.9153846153846154, | |
| "grad_norm": 0.3340598746124593, | |
| "learning_rate": 1.1349174448635158e-05, | |
| "loss": 0.3301, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.916923076923077, | |
| "grad_norm": 0.31048990721325737, | |
| "learning_rate": 1.1325225389442278e-05, | |
| "loss": 0.3409, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.9184615384615384, | |
| "grad_norm": 0.32319816702516574, | |
| "learning_rate": 1.1301268590933434e-05, | |
| "loss": 0.3334, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.32377592283674955, | |
| "learning_rate": 1.1277304193016332e-05, | |
| "loss": 0.3223, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.9215384615384615, | |
| "grad_norm": 0.32788798640022937, | |
| "learning_rate": 1.1253332335643043e-05, | |
| "loss": 0.3277, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.9230769230769231, | |
| "grad_norm": 0.3645233665166125, | |
| "learning_rate": 1.1229353158809216e-05, | |
| "loss": 0.3224, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.9246153846153846, | |
| "grad_norm": 0.31503479960191505, | |
| "learning_rate": 1.1205366802553231e-05, | |
| "loss": 0.3339, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.9261538461538461, | |
| "grad_norm": 0.331938769447499, | |
| "learning_rate": 1.118137340695541e-05, | |
| "loss": 0.3365, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.9276923076923077, | |
| "grad_norm": 0.36305216542449914, | |
| "learning_rate": 1.1157373112137171e-05, | |
| "loss": 0.3135, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.9292307692307692, | |
| "grad_norm": 0.29524307547847034, | |
| "learning_rate": 1.1133366058260232e-05, | |
| "loss": 0.3342, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.9307692307692308, | |
| "grad_norm": 0.3349025908800711, | |
| "learning_rate": 1.1109352385525782e-05, | |
| "loss": 0.3406, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.9323076923076923, | |
| "grad_norm": 0.3016000039609564, | |
| "learning_rate": 1.1085332234173664e-05, | |
| "loss": 0.3224, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.9338461538461539, | |
| "grad_norm": 0.334743615461434, | |
| "learning_rate": 1.106130574448156e-05, | |
| "loss": 0.3147, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.9353846153846154, | |
| "grad_norm": 0.3102917595170546, | |
| "learning_rate": 1.1037273056764157e-05, | |
| "loss": 0.3309, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.936923076923077, | |
| "grad_norm": 0.3102194233828966, | |
| "learning_rate": 1.1013234311372353e-05, | |
| "loss": 0.3217, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.9384615384615385, | |
| "grad_norm": 0.3250678456241659, | |
| "learning_rate": 1.0989189648692408e-05, | |
| "loss": 0.3279, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.2966650578987603, | |
| "learning_rate": 1.0965139209145153e-05, | |
| "loss": 0.3183, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.9415384615384615, | |
| "grad_norm": 0.3251521973725705, | |
| "learning_rate": 1.0941083133185146e-05, | |
| "loss": 0.3186, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.943076923076923, | |
| "grad_norm": 0.31455203366608475, | |
| "learning_rate": 1.0917021561299864e-05, | |
| "loss": 0.3278, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.9446153846153846, | |
| "grad_norm": 0.296941621015581, | |
| "learning_rate": 1.089295463400888e-05, | |
| "loss": 0.3298, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.9461538461538461, | |
| "grad_norm": 0.3103348965824328, | |
| "learning_rate": 1.0868882491863048e-05, | |
| "loss": 0.327, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.9476923076923077, | |
| "grad_norm": 0.3046972829370567, | |
| "learning_rate": 1.0844805275443673e-05, | |
| "loss": 0.3314, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.9492307692307692, | |
| "grad_norm": 0.3098146820614246, | |
| "learning_rate": 1.0820723125361685e-05, | |
| "loss": 0.3278, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.9507692307692308, | |
| "grad_norm": 0.30699825622994825, | |
| "learning_rate": 1.0796636182256846e-05, | |
| "loss": 0.3263, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.9523076923076923, | |
| "grad_norm": 0.31333681919188466, | |
| "learning_rate": 1.077254458679689e-05, | |
| "loss": 0.3219, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.9538461538461539, | |
| "grad_norm": 0.32070135634319324, | |
| "learning_rate": 1.074844847967673e-05, | |
| "loss": 0.3363, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.9553846153846154, | |
| "grad_norm": 0.28389243504267636, | |
| "learning_rate": 1.0724348001617626e-05, | |
| "loss": 0.3235, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.9569230769230769, | |
| "grad_norm": 0.31963587873691246, | |
| "learning_rate": 1.0700243293366365e-05, | |
| "loss": 0.3415, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.9584615384615385, | |
| "grad_norm": 0.32359664500198776, | |
| "learning_rate": 1.0676134495694439e-05, | |
| "loss": 0.3139, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.3031597251286149, | |
| "learning_rate": 1.0652021749397216e-05, | |
| "loss": 0.3152, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.9615384615384616, | |
| "grad_norm": 0.29998909574751775, | |
| "learning_rate": 1.0627905195293135e-05, | |
| "loss": 0.3201, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.963076923076923, | |
| "grad_norm": 0.3030414628868554, | |
| "learning_rate": 1.0603784974222862e-05, | |
| "loss": 0.3309, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.9646153846153847, | |
| "grad_norm": 0.31264131381498866, | |
| "learning_rate": 1.0579661227048484e-05, | |
| "loss": 0.3313, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.9661538461538461, | |
| "grad_norm": 0.3013874384696025, | |
| "learning_rate": 1.0555534094652675e-05, | |
| "loss": 0.3178, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.9676923076923077, | |
| "grad_norm": 0.3248209351600733, | |
| "learning_rate": 1.0531403717937888e-05, | |
| "loss": 0.3285, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.9692307692307692, | |
| "grad_norm": 0.29044342641824206, | |
| "learning_rate": 1.0507270237825513e-05, | |
| "loss": 0.3359, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.9707692307692307, | |
| "grad_norm": 0.32732965806122305, | |
| "learning_rate": 1.0483133795255072e-05, | |
| "loss": 0.3128, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.9723076923076923, | |
| "grad_norm": 0.29247913151654736, | |
| "learning_rate": 1.045899453118338e-05, | |
| "loss": 0.3268, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.9738461538461538, | |
| "grad_norm": 0.3127738976329768, | |
| "learning_rate": 1.0434852586583737e-05, | |
| "loss": 0.3311, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.9753846153846154, | |
| "grad_norm": 0.32245069255148034, | |
| "learning_rate": 1.0410708102445091e-05, | |
| "loss": 0.3329, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.9769230769230769, | |
| "grad_norm": 0.2961723521203939, | |
| "learning_rate": 1.0386561219771222e-05, | |
| "loss": 0.3313, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.9784615384615385, | |
| "grad_norm": 0.3164765116267267, | |
| "learning_rate": 1.0362412079579925e-05, | |
| "loss": 0.3271, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.30693337679851157, | |
| "learning_rate": 1.0338260822902166e-05, | |
| "loss": 0.3068, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.9815384615384616, | |
| "grad_norm": 0.309847879496408, | |
| "learning_rate": 1.0314107590781284e-05, | |
| "loss": 0.3146, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.9830769230769231, | |
| "grad_norm": 0.3441561954467763, | |
| "learning_rate": 1.0289952524272147e-05, | |
| "loss": 0.3275, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.9846153846153847, | |
| "grad_norm": 0.32734844589039386, | |
| "learning_rate": 1.0265795764440335e-05, | |
| "loss": 0.3307, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.9861538461538462, | |
| "grad_norm": 0.29131479550823436, | |
| "learning_rate": 1.0241637452361323e-05, | |
| "loss": 0.3153, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.9876923076923076, | |
| "grad_norm": 0.2927444977942452, | |
| "learning_rate": 1.0217477729119648e-05, | |
| "loss": 0.308, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.9892307692307692, | |
| "grad_norm": 0.31859817571165966, | |
| "learning_rate": 1.0193316735808085e-05, | |
| "loss": 0.3237, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.9907692307692307, | |
| "grad_norm": 0.3143106412699962, | |
| "learning_rate": 1.0169154613526831e-05, | |
| "loss": 0.3211, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.9923076923076923, | |
| "grad_norm": 0.30834730776946945, | |
| "learning_rate": 1.0144991503382676e-05, | |
| "loss": 0.3246, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.9938461538461538, | |
| "grad_norm": 0.2967696610141374, | |
| "learning_rate": 1.0120827546488175e-05, | |
| "loss": 0.3279, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.9953846153846154, | |
| "grad_norm": 0.3197694465778131, | |
| "learning_rate": 1.0096662883960833e-05, | |
| "loss": 0.3188, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.9969230769230769, | |
| "grad_norm": 0.3033818619276967, | |
| "learning_rate": 1.0072497656922266e-05, | |
| "loss": 0.3303, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.9984615384615385, | |
| "grad_norm": 0.3239836485410687, | |
| "learning_rate": 1.0048332006497406e-05, | |
| "loss": 0.3165, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.3016364748263467, | |
| "learning_rate": 1.0024166073813634e-05, | |
| "loss": 0.3191, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.0015384615384615, | |
| "grad_norm": 0.34732331187804666, | |
| "learning_rate": 1e-05, | |
| "loss": 0.28, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.003076923076923, | |
| "grad_norm": 0.32444904819206183, | |
| "learning_rate": 9.975833926186367e-06, | |
| "loss": 0.2838, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.0046153846153847, | |
| "grad_norm": 0.3422552839442046, | |
| "learning_rate": 9.951667993502599e-06, | |
| "loss": 0.2926, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.0061538461538462, | |
| "grad_norm": 0.3320499383753382, | |
| "learning_rate": 9.927502343077732e-06, | |
| "loss": 0.2734, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.0076923076923077, | |
| "grad_norm": 0.3604432577698012, | |
| "learning_rate": 9.903337116039172e-06, | |
| "loss": 0.2867, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.0092307692307692, | |
| "grad_norm": 0.34312310604563834, | |
| "learning_rate": 9.879172453511827e-06, | |
| "loss": 0.2872, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.0107692307692309, | |
| "grad_norm": 0.33513487457394814, | |
| "learning_rate": 9.855008496617326e-06, | |
| "loss": 0.2784, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.0123076923076924, | |
| "grad_norm": 0.33806408587135317, | |
| "learning_rate": 9.830845386473169e-06, | |
| "loss": 0.2873, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.0138461538461538, | |
| "grad_norm": 0.3377539416935379, | |
| "learning_rate": 9.806683264191916e-06, | |
| "loss": 0.2759, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.0153846153846153, | |
| "grad_norm": 0.33051105629103283, | |
| "learning_rate": 9.782522270880354e-06, | |
| "loss": 0.285, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.0169230769230768, | |
| "grad_norm": 0.33144527590601386, | |
| "learning_rate": 9.75836254763868e-06, | |
| "loss": 0.2845, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.0184615384615385, | |
| "grad_norm": 0.3401441522675921, | |
| "learning_rate": 9.73420423555967e-06, | |
| "loss": 0.2825, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "grad_norm": 0.302990505339083, | |
| "learning_rate": 9.710047475727854e-06, | |
| "loss": 0.2817, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.0215384615384615, | |
| "grad_norm": 0.3287454011663842, | |
| "learning_rate": 9.685892409218718e-06, | |
| "loss": 0.2913, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.023076923076923, | |
| "grad_norm": 0.35566618631853003, | |
| "learning_rate": 9.661739177097836e-06, | |
| "loss": 0.2791, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.0246153846153847, | |
| "grad_norm": 0.3228421290063285, | |
| "learning_rate": 9.63758792042008e-06, | |
| "loss": 0.2845, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.0261538461538462, | |
| "grad_norm": 0.3437761267951725, | |
| "learning_rate": 9.613438780228777e-06, | |
| "loss": 0.281, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.0276923076923077, | |
| "grad_norm": 0.3453200532176201, | |
| "learning_rate": 9.589291897554912e-06, | |
| "loss": 0.2758, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.0292307692307692, | |
| "grad_norm": 0.3074205348443002, | |
| "learning_rate": 9.565147413416266e-06, | |
| "loss": 0.2927, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.0307692307692307, | |
| "grad_norm": 0.33000167975061806, | |
| "learning_rate": 9.541005468816622e-06, | |
| "loss": 0.2844, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.0323076923076924, | |
| "grad_norm": 0.3157400495137632, | |
| "learning_rate": 9.516866204744932e-06, | |
| "loss": 0.2827, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.0338461538461539, | |
| "grad_norm": 0.2994542642176147, | |
| "learning_rate": 9.492729762174489e-06, | |
| "loss": 0.2906, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.0353846153846153, | |
| "grad_norm": 0.31402805230733577, | |
| "learning_rate": 9.468596282062114e-06, | |
| "loss": 0.2819, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.0369230769230768, | |
| "grad_norm": 0.3100325537805443, | |
| "learning_rate": 9.444465905347327e-06, | |
| "loss": 0.2811, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.0384615384615385, | |
| "grad_norm": 0.3059953095706399, | |
| "learning_rate": 9.420338772951521e-06, | |
| "loss": 0.2828, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 0.3133760427383818, | |
| "learning_rate": 9.39621502577714e-06, | |
| "loss": 0.2864, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.0415384615384615, | |
| "grad_norm": 0.3024464286164455, | |
| "learning_rate": 9.372094804706867e-06, | |
| "loss": 0.2818, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.043076923076923, | |
| "grad_norm": 0.30541483235210354, | |
| "learning_rate": 9.347978250602786e-06, | |
| "loss": 0.2899, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.0446153846153847, | |
| "grad_norm": 0.3248165537366672, | |
| "learning_rate": 9.323865504305566e-06, | |
| "loss": 0.2853, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.0461538461538462, | |
| "grad_norm": 0.3043409466949707, | |
| "learning_rate": 9.299756706633636e-06, | |
| "loss": 0.2836, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.0476923076923077, | |
| "grad_norm": 0.30670835427245857, | |
| "learning_rate": 9.275651998382377e-06, | |
| "loss": 0.2835, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.0492307692307692, | |
| "grad_norm": 0.3311046500288093, | |
| "learning_rate": 9.251551520323273e-06, | |
| "loss": 0.2882, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.0507692307692307, | |
| "grad_norm": 0.2988763207191971, | |
| "learning_rate": 9.227455413203115e-06, | |
| "loss": 0.2743, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.0523076923076924, | |
| "grad_norm": 0.3373397840286235, | |
| "learning_rate": 9.203363817743159e-06, | |
| "loss": 0.2792, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.0538461538461539, | |
| "grad_norm": 0.3009723950259022, | |
| "learning_rate": 9.179276874638315e-06, | |
| "loss": 0.2777, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.0553846153846154, | |
| "grad_norm": 0.27068925158047413, | |
| "learning_rate": 9.15519472455633e-06, | |
| "loss": 0.2951, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.0569230769230769, | |
| "grad_norm": 0.3317453704078991, | |
| "learning_rate": 9.131117508136953e-06, | |
| "loss": 0.2883, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.0584615384615386, | |
| "grad_norm": 0.32194852853215894, | |
| "learning_rate": 9.107045365991123e-06, | |
| "loss": 0.2705, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "grad_norm": 0.29148481095050166, | |
| "learning_rate": 9.082978438700138e-06, | |
| "loss": 0.2811, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.0615384615384615, | |
| "grad_norm": 0.3142038270601785, | |
| "learning_rate": 9.058916866814857e-06, | |
| "loss": 0.2736, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.063076923076923, | |
| "grad_norm": 0.31913692752332856, | |
| "learning_rate": 9.034860790854848e-06, | |
| "loss": 0.2825, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.0646153846153845, | |
| "grad_norm": 0.30108394301414343, | |
| "learning_rate": 9.010810351307593e-06, | |
| "loss": 0.2885, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.0661538461538462, | |
| "grad_norm": 0.2868490120035731, | |
| "learning_rate": 8.986765688627652e-06, | |
| "loss": 0.2772, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.0676923076923077, | |
| "grad_norm": 0.31221108777213336, | |
| "learning_rate": 8.962726943235845e-06, | |
| "loss": 0.2821, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.0692307692307692, | |
| "grad_norm": 0.29716262144245537, | |
| "learning_rate": 8.938694255518444e-06, | |
| "loss": 0.2881, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.0707692307692307, | |
| "grad_norm": 0.30277698505911427, | |
| "learning_rate": 8.91466776582634e-06, | |
| "loss": 0.2898, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.0723076923076924, | |
| "grad_norm": 0.29732736704215773, | |
| "learning_rate": 8.890647614474223e-06, | |
| "loss": 0.2836, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.073846153846154, | |
| "grad_norm": 0.3020279299659443, | |
| "learning_rate": 8.86663394173977e-06, | |
| "loss": 0.2755, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.0753846153846154, | |
| "grad_norm": 0.3037592281364952, | |
| "learning_rate": 8.842626887862832e-06, | |
| "loss": 0.2862, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.0769230769230769, | |
| "grad_norm": 0.31444223489573714, | |
| "learning_rate": 8.818626593044595e-06, | |
| "loss": 0.2889, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.0784615384615384, | |
| "grad_norm": 0.28879372525106306, | |
| "learning_rate": 8.79463319744677e-06, | |
| "loss": 0.2842, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "grad_norm": 0.31140195110522306, | |
| "learning_rate": 8.77064684119079e-06, | |
| "loss": 0.2832, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.0815384615384616, | |
| "grad_norm": 0.28956637297734567, | |
| "learning_rate": 8.746667664356957e-06, | |
| "loss": 0.2916, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.083076923076923, | |
| "grad_norm": 0.3143889238176807, | |
| "learning_rate": 8.722695806983673e-06, | |
| "loss": 0.2792, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.0846153846153845, | |
| "grad_norm": 0.31601279314706854, | |
| "learning_rate": 8.69873140906657e-06, | |
| "loss": 0.2779, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.0861538461538462, | |
| "grad_norm": 0.30658503317889174, | |
| "learning_rate": 8.674774610557728e-06, | |
| "loss": 0.2766, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.0876923076923077, | |
| "grad_norm": 0.34147915924162614, | |
| "learning_rate": 8.650825551364844e-06, | |
| "loss": 0.2849, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.0892307692307692, | |
| "grad_norm": 0.30821824160641453, | |
| "learning_rate": 8.626884371350421e-06, | |
| "loss": 0.2877, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.0907692307692307, | |
| "grad_norm": 0.31875400232628437, | |
| "learning_rate": 8.602951210330942e-06, | |
| "loss": 0.2861, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.0923076923076924, | |
| "grad_norm": 0.3192051063313104, | |
| "learning_rate": 8.579026208076064e-06, | |
| "loss": 0.2821, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.093846153846154, | |
| "grad_norm": 0.33251206019330676, | |
| "learning_rate": 8.55510950430779e-06, | |
| "loss": 0.2809, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.0953846153846154, | |
| "grad_norm": 0.32505900793848264, | |
| "learning_rate": 8.531201238699665e-06, | |
| "loss": 0.2752, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.096923076923077, | |
| "grad_norm": 0.3223453889854819, | |
| "learning_rate": 8.50730155087596e-06, | |
| "loss": 0.2757, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.0984615384615384, | |
| "grad_norm": 0.3336165182216915, | |
| "learning_rate": 8.483410580410843e-06, | |
| "loss": 0.2922, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "grad_norm": 0.3296284161675168, | |
| "learning_rate": 8.459528466827576e-06, | |
| "loss": 0.2903, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.1015384615384616, | |
| "grad_norm": 0.3187313302623022, | |
| "learning_rate": 8.43565534959769e-06, | |
| "loss": 0.2822, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.103076923076923, | |
| "grad_norm": 0.30974612871731544, | |
| "learning_rate": 8.411791368140197e-06, | |
| "loss": 0.2762, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.1046153846153846, | |
| "grad_norm": 0.3175016265965753, | |
| "learning_rate": 8.387936661820733e-06, | |
| "loss": 0.2872, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.106153846153846, | |
| "grad_norm": 0.30779535668335356, | |
| "learning_rate": 8.364091369950783e-06, | |
| "loss": 0.2804, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.1076923076923078, | |
| "grad_norm": 0.2920330333132833, | |
| "learning_rate": 8.340255631786843e-06, | |
| "loss": 0.28, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.1092307692307692, | |
| "grad_norm": 0.3111369113787312, | |
| "learning_rate": 8.316429586529616e-06, | |
| "loss": 0.2763, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.1107692307692307, | |
| "grad_norm": 0.32761302966957634, | |
| "learning_rate": 8.292613373323203e-06, | |
| "loss": 0.2826, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.1123076923076922, | |
| "grad_norm": 0.35928711060393653, | |
| "learning_rate": 8.268807131254288e-06, | |
| "loss": 0.2765, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.113846153846154, | |
| "grad_norm": 0.3086406308767895, | |
| "learning_rate": 8.24501099935131e-06, | |
| "loss": 0.2917, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.1153846153846154, | |
| "grad_norm": 0.3109545610843942, | |
| "learning_rate": 8.221225116583677e-06, | |
| "loss": 0.2847, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.116923076923077, | |
| "grad_norm": 0.30456642359778996, | |
| "learning_rate": 8.197449621860944e-06, | |
| "loss": 0.2801, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.1184615384615384, | |
| "grad_norm": 0.3003688527787175, | |
| "learning_rate": 8.17368465403199e-06, | |
| "loss": 0.2837, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "grad_norm": 0.3100907093038021, | |
| "learning_rate": 8.14993035188422e-06, | |
| "loss": 0.2861, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.1215384615384616, | |
| "grad_norm": 0.3132232125379502, | |
| "learning_rate": 8.126186854142752e-06, | |
| "loss": 0.284, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.123076923076923, | |
| "grad_norm": 0.31065863337930283, | |
| "learning_rate": 8.102454299469615e-06, | |
| "loss": 0.2826, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.1246153846153846, | |
| "grad_norm": 0.323662181174654, | |
| "learning_rate": 8.078732826462917e-06, | |
| "loss": 0.2926, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.126153846153846, | |
| "grad_norm": 0.3134358033047369, | |
| "learning_rate": 8.055022573656055e-06, | |
| "loss": 0.2731, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.1276923076923078, | |
| "grad_norm": 0.32361693876034364, | |
| "learning_rate": 8.0313236795169e-06, | |
| "loss": 0.2741, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.1292307692307693, | |
| "grad_norm": 0.29101903327880213, | |
| "learning_rate": 8.007636282446986e-06, | |
| "loss": 0.2721, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.1307692307692307, | |
| "grad_norm": 0.3018397285643358, | |
| "learning_rate": 7.983960520780712e-06, | |
| "loss": 0.2868, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.1323076923076922, | |
| "grad_norm": 0.3105308000272363, | |
| "learning_rate": 7.960296532784515e-06, | |
| "loss": 0.2842, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.1338461538461537, | |
| "grad_norm": 0.30612415691801403, | |
| "learning_rate": 7.936644456656082e-06, | |
| "loss": 0.2914, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.1353846153846154, | |
| "grad_norm": 0.313629256201062, | |
| "learning_rate": 7.913004430523526e-06, | |
| "loss": 0.2771, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.136923076923077, | |
| "grad_norm": 0.3240648431845592, | |
| "learning_rate": 7.889376592444605e-06, | |
| "loss": 0.2855, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.1384615384615384, | |
| "grad_norm": 0.3044134814557973, | |
| "learning_rate": 7.865761080405882e-06, | |
| "loss": 0.291, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.1400000000000001, | |
| "grad_norm": 0.27811641502889545, | |
| "learning_rate": 7.84215803232194e-06, | |
| "loss": 0.2794, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.1415384615384616, | |
| "grad_norm": 0.32218417728467347, | |
| "learning_rate": 7.818567586034578e-06, | |
| "loss": 0.2844, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.143076923076923, | |
| "grad_norm": 0.3067532071100499, | |
| "learning_rate": 7.794989879311991e-06, | |
| "loss": 0.285, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.1446153846153846, | |
| "grad_norm": 0.2991862411303418, | |
| "learning_rate": 7.771425049847984e-06, | |
| "loss": 0.2841, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.146153846153846, | |
| "grad_norm": 0.32247577406668865, | |
| "learning_rate": 7.747873235261157e-06, | |
| "loss": 0.2909, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.1476923076923078, | |
| "grad_norm": 0.31954229399729445, | |
| "learning_rate": 7.724334573094101e-06, | |
| "loss": 0.2878, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.1492307692307693, | |
| "grad_norm": 0.30083806921436773, | |
| "learning_rate": 7.700809200812596e-06, | |
| "loss": 0.2891, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.1507692307692308, | |
| "grad_norm": 0.3142510325234358, | |
| "learning_rate": 7.677297255804811e-06, | |
| "loss": 0.2856, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.1523076923076923, | |
| "grad_norm": 0.2870832473008288, | |
| "learning_rate": 7.6537988753805e-06, | |
| "loss": 0.2885, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.1538461538461537, | |
| "grad_norm": 0.3007062802890109, | |
| "learning_rate": 7.6303141967702e-06, | |
| "loss": 0.2934, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.1553846153846155, | |
| "grad_norm": 0.31805936290046793, | |
| "learning_rate": 7.606843357124426e-06, | |
| "loss": 0.2859, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.156923076923077, | |
| "grad_norm": 0.2888594426489485, | |
| "learning_rate": 7.583386493512872e-06, | |
| "loss": 0.2823, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.1584615384615384, | |
| "grad_norm": 0.2918277431569358, | |
| "learning_rate": 7.559943742923626e-06, | |
| "loss": 0.2806, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 0.27255929440743293, | |
| "learning_rate": 7.536515242262341e-06, | |
| "loss": 0.2739, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 1.1615384615384616, | |
| "grad_norm": 0.33173787979163183, | |
| "learning_rate": 7.513101128351454e-06, | |
| "loss": 0.2834, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 1.1630769230769231, | |
| "grad_norm": 0.3238320694495885, | |
| "learning_rate": 7.489701537929384e-06, | |
| "loss": 0.289, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 1.1646153846153846, | |
| "grad_norm": 0.2809829073539621, | |
| "learning_rate": 7.4663166076497376e-06, | |
| "loss": 0.2838, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 1.166153846153846, | |
| "grad_norm": 0.2947202247452326, | |
| "learning_rate": 7.442946474080499e-06, | |
| "loss": 0.2787, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 1.1676923076923078, | |
| "grad_norm": 0.319228719886482, | |
| "learning_rate": 7.419591273703245e-06, | |
| "loss": 0.2828, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 1.1692307692307693, | |
| "grad_norm": 0.2847995636733814, | |
| "learning_rate": 7.396251142912337e-06, | |
| "loss": 0.2857, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.1707692307692308, | |
| "grad_norm": 0.29399040893561257, | |
| "learning_rate": 7.372926218014131e-06, | |
| "loss": 0.2776, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 1.1723076923076923, | |
| "grad_norm": 0.2955780863612147, | |
| "learning_rate": 7.349616635226191e-06, | |
| "loss": 0.2782, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 1.1738461538461538, | |
| "grad_norm": 0.28775904936662283, | |
| "learning_rate": 7.326322530676471e-06, | |
| "loss": 0.2873, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 1.1753846153846155, | |
| "grad_norm": 0.30879780507027793, | |
| "learning_rate": 7.303044040402536e-06, | |
| "loss": 0.2874, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 1.176923076923077, | |
| "grad_norm": 0.27411031230591126, | |
| "learning_rate": 7.279781300350758e-06, | |
| "loss": 0.2746, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 1.1784615384615384, | |
| "grad_norm": 0.3025701791637634, | |
| "learning_rate": 7.256534446375543e-06, | |
| "loss": 0.2798, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "grad_norm": 0.3047469460566567, | |
| "learning_rate": 7.23330361423851e-06, | |
| "loss": 0.2892, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 1.1815384615384614, | |
| "grad_norm": 0.2957662470836879, | |
| "learning_rate": 7.210088939607709e-06, | |
| "loss": 0.2839, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 1.1830769230769231, | |
| "grad_norm": 0.2849127131802998, | |
| "learning_rate": 7.186890558056836e-06, | |
| "loss": 0.2797, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 1.1846153846153846, | |
| "grad_norm": 0.29518177363624887, | |
| "learning_rate": 7.163708605064437e-06, | |
| "loss": 0.289, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.1861538461538461, | |
| "grad_norm": 0.28630707314852843, | |
| "learning_rate": 7.140543216013109e-06, | |
| "loss": 0.278, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 1.1876923076923076, | |
| "grad_norm": 0.2947200274521145, | |
| "learning_rate": 7.117394526188719e-06, | |
| "loss": 0.2762, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 1.1892307692307693, | |
| "grad_norm": 0.2871611641306348, | |
| "learning_rate": 7.094262670779611e-06, | |
| "loss": 0.2864, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 1.1907692307692308, | |
| "grad_norm": 0.2905509938001612, | |
| "learning_rate": 7.071147784875809e-06, | |
| "loss": 0.2811, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 1.1923076923076923, | |
| "grad_norm": 0.28852886723854776, | |
| "learning_rate": 7.048050003468252e-06, | |
| "loss": 0.2788, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 1.1938461538461538, | |
| "grad_norm": 0.2862816806354988, | |
| "learning_rate": 7.024969461447973e-06, | |
| "loss": 0.2921, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 1.1953846153846155, | |
| "grad_norm": 0.28035417832956877, | |
| "learning_rate": 7.001906293605329e-06, | |
| "loss": 0.27, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 1.196923076923077, | |
| "grad_norm": 0.3081549365320152, | |
| "learning_rate": 6.978860634629213e-06, | |
| "loss": 0.2628, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 1.1984615384615385, | |
| "grad_norm": 0.29646626529473624, | |
| "learning_rate": 6.9558326191062775e-06, | |
| "loss": 0.2739, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 0.30203294671651026, | |
| "learning_rate": 6.932822381520121e-06, | |
| "loss": 0.268, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.2015384615384614, | |
| "grad_norm": 0.3093392278102877, | |
| "learning_rate": 6.909830056250527e-06, | |
| "loss": 0.2838, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 1.2030769230769232, | |
| "grad_norm": 0.29792246798832145, | |
| "learning_rate": 6.88685577757267e-06, | |
| "loss": 0.2803, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 1.2046153846153846, | |
| "grad_norm": 0.2977596725066236, | |
| "learning_rate": 6.8638996796563275e-06, | |
| "loss": 0.2875, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 1.2061538461538461, | |
| "grad_norm": 0.3043730929932303, | |
| "learning_rate": 6.8409618965651125e-06, | |
| "loss": 0.275, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 1.2076923076923076, | |
| "grad_norm": 0.2813402340518155, | |
| "learning_rate": 6.81804256225567e-06, | |
| "loss": 0.2862, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 1.209230769230769, | |
| "grad_norm": 0.32350615987131526, | |
| "learning_rate": 6.795141810576906e-06, | |
| "loss": 0.2846, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 1.2107692307692308, | |
| "grad_norm": 0.2861412130641737, | |
| "learning_rate": 6.772259775269203e-06, | |
| "loss": 0.2777, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 1.2123076923076923, | |
| "grad_norm": 0.2948577774888858, | |
| "learning_rate": 6.7493965899636486e-06, | |
| "loss": 0.2815, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 1.2138461538461538, | |
| "grad_norm": 0.28655075068199576, | |
| "learning_rate": 6.7265523881812335e-06, | |
| "loss": 0.2809, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 1.2153846153846155, | |
| "grad_norm": 0.2954405673613976, | |
| "learning_rate": 6.703727303332094e-06, | |
| "loss": 0.2793, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.216923076923077, | |
| "grad_norm": 0.285777124696368, | |
| "learning_rate": 6.680921468714718e-06, | |
| "loss": 0.277, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 1.2184615384615385, | |
| "grad_norm": 0.28801215245271594, | |
| "learning_rate": 6.6581350175151715e-06, | |
| "loss": 0.2817, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "grad_norm": 0.29358260711212547, | |
| "learning_rate": 6.6353680828063306e-06, | |
| "loss": 0.2761, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 1.2215384615384615, | |
| "grad_norm": 0.2732920910463464, | |
| "learning_rate": 6.612620797547087e-06, | |
| "loss": 0.2688, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 1.2230769230769232, | |
| "grad_norm": 0.2823232953579946, | |
| "learning_rate": 6.589893294581579e-06, | |
| "loss": 0.283, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 1.2246153846153847, | |
| "grad_norm": 0.27802707917977293, | |
| "learning_rate": 6.567185706638417e-06, | |
| "loss": 0.2812, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.2261538461538461, | |
| "grad_norm": 0.29195760944835975, | |
| "learning_rate": 6.5444981663299135e-06, | |
| "loss": 0.272, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.2276923076923076, | |
| "grad_norm": 0.271685328313952, | |
| "learning_rate": 6.521830806151297e-06, | |
| "loss": 0.2902, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.2292307692307691, | |
| "grad_norm": 0.28460765234355695, | |
| "learning_rate": 6.499183758479944e-06, | |
| "loss": 0.2882, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.2307692307692308, | |
| "grad_norm": 0.2863072902666916, | |
| "learning_rate": 6.47655715557461e-06, | |
| "loss": 0.2812, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.2323076923076923, | |
| "grad_norm": 0.28377583009326257, | |
| "learning_rate": 6.453951129574644e-06, | |
| "loss": 0.2795, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.2338461538461538, | |
| "grad_norm": 0.29333544616487517, | |
| "learning_rate": 6.431365812499242e-06, | |
| "loss": 0.2936, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 1.2353846153846153, | |
| "grad_norm": 0.29953315694101823, | |
| "learning_rate": 6.408801336246645e-06, | |
| "loss": 0.2867, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 1.236923076923077, | |
| "grad_norm": 0.27386702982109357, | |
| "learning_rate": 6.38625783259339e-06, | |
| "loss": 0.2837, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 1.2384615384615385, | |
| "grad_norm": 0.310898071421186, | |
| "learning_rate": 6.36373543319353e-06, | |
| "loss": 0.2826, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "grad_norm": 0.2944810008817099, | |
| "learning_rate": 6.341234269577878e-06, | |
| "loss": 0.2685, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 1.2415384615384615, | |
| "grad_norm": 0.2791213720495511, | |
| "learning_rate": 6.318754473153221e-06, | |
| "loss": 0.2824, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 1.2430769230769232, | |
| "grad_norm": 0.29365028663555504, | |
| "learning_rate": 6.296296175201565e-06, | |
| "loss": 0.2799, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 1.2446153846153847, | |
| "grad_norm": 0.3150248889907267, | |
| "learning_rate": 6.273859506879365e-06, | |
| "loss": 0.2744, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 1.2461538461538462, | |
| "grad_norm": 0.32842309193065544, | |
| "learning_rate": 6.251444599216756e-06, | |
| "loss": 0.2769, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.2476923076923077, | |
| "grad_norm": 0.29180890811914484, | |
| "learning_rate": 6.229051583116796e-06, | |
| "loss": 0.2863, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 1.2492307692307691, | |
| "grad_norm": 0.2887378998610124, | |
| "learning_rate": 6.206680589354696e-06, | |
| "loss": 0.2748, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 1.2507692307692309, | |
| "grad_norm": 0.2777487183276391, | |
| "learning_rate": 6.18433174857705e-06, | |
| "loss": 0.288, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 1.2523076923076923, | |
| "grad_norm": 0.30945368617265023, | |
| "learning_rate": 6.162005191301082e-06, | |
| "loss": 0.2788, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 1.2538461538461538, | |
| "grad_norm": 0.27052743875684937, | |
| "learning_rate": 6.139701047913885e-06, | |
| "loss": 0.2653, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 1.2553846153846153, | |
| "grad_norm": 0.30934036148283867, | |
| "learning_rate": 6.117419448671651e-06, | |
| "loss": 0.2892, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.2569230769230768, | |
| "grad_norm": 0.31001399577452204, | |
| "learning_rate": 6.095160523698913e-06, | |
| "loss": 0.2795, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 1.2584615384615385, | |
| "grad_norm": 0.306823491455034, | |
| "learning_rate": 6.072924402987785e-06, | |
| "loss": 0.2761, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "grad_norm": 0.2821450940838836, | |
| "learning_rate": 6.050711216397212e-06, | |
| "loss": 0.277, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 1.2615384615384615, | |
| "grad_norm": 0.29778869798743096, | |
| "learning_rate": 6.028521093652195e-06, | |
| "loss": 0.2938, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.2630769230769232, | |
| "grad_norm": 0.3355217690858126, | |
| "learning_rate": 6.006354164343047e-06, | |
| "loss": 0.2728, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 1.2646153846153847, | |
| "grad_norm": 0.289782832193456, | |
| "learning_rate": 5.984210557924628e-06, | |
| "loss": 0.2814, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 1.2661538461538462, | |
| "grad_norm": 0.2878090579578816, | |
| "learning_rate": 5.962090403715592e-06, | |
| "loss": 0.2866, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 1.2676923076923077, | |
| "grad_norm": 0.3019556593740292, | |
| "learning_rate": 5.939993830897641e-06, | |
| "loss": 0.2807, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 1.2692307692307692, | |
| "grad_norm": 0.3158326263040181, | |
| "learning_rate": 5.9179209685147525e-06, | |
| "loss": 0.2794, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 1.2707692307692309, | |
| "grad_norm": 0.28707432706520675, | |
| "learning_rate": 5.895871945472434e-06, | |
| "loss": 0.2845, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 1.2723076923076924, | |
| "grad_norm": 0.2963834535647672, | |
| "learning_rate": 5.873846890536976e-06, | |
| "loss": 0.2816, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 1.2738461538461539, | |
| "grad_norm": 0.31596386394483994, | |
| "learning_rate": 5.851845932334698e-06, | |
| "loss": 0.2855, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 1.2753846153846153, | |
| "grad_norm": 0.2828167486772763, | |
| "learning_rate": 5.829869199351188e-06, | |
| "loss": 0.2869, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 1.2769230769230768, | |
| "grad_norm": 0.3016058478748179, | |
| "learning_rate": 5.807916819930563e-06, | |
| "loss": 0.2812, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.2784615384615385, | |
| "grad_norm": 0.2955037144013558, | |
| "learning_rate": 5.785988922274711e-06, | |
| "loss": 0.2761, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 0.28474944236281235, | |
| "learning_rate": 5.7640856344425465e-06, | |
| "loss": 0.2754, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 1.2815384615384615, | |
| "grad_norm": 0.2949274618459585, | |
| "learning_rate": 5.742207084349274e-06, | |
| "loss": 0.2829, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 1.283076923076923, | |
| "grad_norm": 0.3097048774109836, | |
| "learning_rate": 5.720353399765615e-06, | |
| "loss": 0.285, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 1.2846153846153845, | |
| "grad_norm": 0.3097721464332409, | |
| "learning_rate": 5.698524708317082e-06, | |
| "loss": 0.2894, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 1.2861538461538462, | |
| "grad_norm": 0.2766367492199567, | |
| "learning_rate": 5.676721137483226e-06, | |
| "loss": 0.2801, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 1.2876923076923077, | |
| "grad_norm": 0.29568337118433724, | |
| "learning_rate": 5.654942814596902e-06, | |
| "loss": 0.2955, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 1.2892307692307692, | |
| "grad_norm": 0.31699922367774136, | |
| "learning_rate": 5.633189866843507e-06, | |
| "loss": 0.288, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 1.290769230769231, | |
| "grad_norm": 0.28639959989906244, | |
| "learning_rate": 5.611462421260251e-06, | |
| "loss": 0.2843, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 1.2923076923076924, | |
| "grad_norm": 0.2678488804524567, | |
| "learning_rate": 5.58976060473541e-06, | |
| "loss": 0.2873, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.2938461538461539, | |
| "grad_norm": 0.2776902486642549, | |
| "learning_rate": 5.5680845440075885e-06, | |
| "loss": 0.2896, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 1.2953846153846154, | |
| "grad_norm": 0.3138059651765253, | |
| "learning_rate": 5.546434365664974e-06, | |
| "loss": 0.2701, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 1.2969230769230768, | |
| "grad_norm": 0.3146892839180133, | |
| "learning_rate": 5.5248101961446065e-06, | |
| "loss": 0.2817, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 1.2984615384615386, | |
| "grad_norm": 0.2939281513518946, | |
| "learning_rate": 5.503212161731628e-06, | |
| "loss": 0.2836, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "grad_norm": 0.3025062620250353, | |
| "learning_rate": 5.481640388558551e-06, | |
| "loss": 0.2902, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 1.3015384615384615, | |
| "grad_norm": 0.3162590193256863, | |
| "learning_rate": 5.460095002604533e-06, | |
| "loss": 0.2736, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 1.303076923076923, | |
| "grad_norm": 0.2844927223440658, | |
| "learning_rate": 5.43857612969462e-06, | |
| "loss": 0.2849, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 1.3046153846153845, | |
| "grad_norm": 0.29370463036029804, | |
| "learning_rate": 5.417083895499024e-06, | |
| "loss": 0.2842, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 1.3061538461538462, | |
| "grad_norm": 0.30408719752218816, | |
| "learning_rate": 5.39561842553239e-06, | |
| "loss": 0.2752, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 1.3076923076923077, | |
| "grad_norm": 0.29814878432273506, | |
| "learning_rate": 5.374179845153048e-06, | |
| "loss": 0.2725, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.3092307692307692, | |
| "grad_norm": 0.3090317868858365, | |
| "learning_rate": 5.352768279562315e-06, | |
| "loss": 0.2795, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 1.3107692307692307, | |
| "grad_norm": 0.3149051067033696, | |
| "learning_rate": 5.331383853803724e-06, | |
| "loss": 0.279, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 1.3123076923076922, | |
| "grad_norm": 0.29026031338987546, | |
| "learning_rate": 5.310026692762316e-06, | |
| "loss": 0.269, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 1.3138461538461539, | |
| "grad_norm": 0.3061058627098662, | |
| "learning_rate": 5.288696921163902e-06, | |
| "loss": 0.2755, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 1.3153846153846154, | |
| "grad_norm": 0.3067580088583177, | |
| "learning_rate": 5.267394663574351e-06, | |
| "loss": 0.2616, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 1.3169230769230769, | |
| "grad_norm": 0.28661946051764187, | |
| "learning_rate": 5.246120044398839e-06, | |
| "loss": 0.2949, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 1.3184615384615386, | |
| "grad_norm": 0.2862892658665391, | |
| "learning_rate": 5.224873187881136e-06, | |
| "loss": 0.275, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "grad_norm": 0.28534062120425885, | |
| "learning_rate": 5.20365421810288e-06, | |
| "loss": 0.2879, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 1.3215384615384616, | |
| "grad_norm": 0.27279911218675196, | |
| "learning_rate": 5.1824632589828465e-06, | |
| "loss": 0.2685, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 1.323076923076923, | |
| "grad_norm": 0.30170979115869934, | |
| "learning_rate": 5.161300434276237e-06, | |
| "loss": 0.2728, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.3246153846153845, | |
| "grad_norm": 0.31083997998278023, | |
| "learning_rate": 5.14016586757394e-06, | |
| "loss": 0.2765, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 1.3261538461538462, | |
| "grad_norm": 0.27275151442143564, | |
| "learning_rate": 5.119059682301819e-06, | |
| "loss": 0.2796, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 1.3276923076923077, | |
| "grad_norm": 0.28369026644045975, | |
| "learning_rate": 5.097982001719994e-06, | |
| "loss": 0.28, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 1.3292307692307692, | |
| "grad_norm": 0.304381409197318, | |
| "learning_rate": 5.076932948922111e-06, | |
| "loss": 0.2816, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 1.3307692307692307, | |
| "grad_norm": 0.2844124937713117, | |
| "learning_rate": 5.0559126468346354e-06, | |
| "loss": 0.2797, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 1.3323076923076922, | |
| "grad_norm": 0.2715699243242887, | |
| "learning_rate": 5.034921218216126e-06, | |
| "loss": 0.2767, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 1.333846153846154, | |
| "grad_norm": 0.2891583855016688, | |
| "learning_rate": 5.013958785656516e-06, | |
| "loss": 0.2722, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 1.3353846153846154, | |
| "grad_norm": 0.3086744282279073, | |
| "learning_rate": 4.993025471576417e-06, | |
| "loss": 0.2727, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 1.3369230769230769, | |
| "grad_norm": 0.2809894462210739, | |
| "learning_rate": 4.972121398226371e-06, | |
| "loss": 0.2843, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 1.3384615384615386, | |
| "grad_norm": 0.27436121104774197, | |
| "learning_rate": 4.951246687686164e-06, | |
| "loss": 0.2833, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "grad_norm": 0.283858740765341, | |
| "learning_rate": 4.930401461864099e-06, | |
| "loss": 0.2826, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 1.3415384615384616, | |
| "grad_norm": 0.2818450715432375, | |
| "learning_rate": 4.909585842496287e-06, | |
| "loss": 0.2887, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 1.343076923076923, | |
| "grad_norm": 0.28854298555763697, | |
| "learning_rate": 4.888799951145948e-06, | |
| "loss": 0.2768, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 1.3446153846153845, | |
| "grad_norm": 0.30065363915653354, | |
| "learning_rate": 4.868043909202678e-06, | |
| "loss": 0.278, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 1.3461538461538463, | |
| "grad_norm": 0.27408495965643176, | |
| "learning_rate": 4.847317837881757e-06, | |
| "loss": 0.28, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 1.3476923076923077, | |
| "grad_norm": 0.2873579092416445, | |
| "learning_rate": 4.826621858223431e-06, | |
| "loss": 0.2777, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 1.3492307692307692, | |
| "grad_norm": 0.2913491598278634, | |
| "learning_rate": 4.805956091092228e-06, | |
| "loss": 0.2829, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 1.3507692307692307, | |
| "grad_norm": 0.3000633859084866, | |
| "learning_rate": 4.785320657176216e-06, | |
| "loss": 0.2738, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 1.3523076923076922, | |
| "grad_norm": 0.2992709856059088, | |
| "learning_rate": 4.764715676986327e-06, | |
| "loss": 0.2797, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 1.353846153846154, | |
| "grad_norm": 0.2764988951323593, | |
| "learning_rate": 4.744141270855638e-06, | |
| "loss": 0.2696, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.3553846153846154, | |
| "grad_norm": 0.27731299592358694, | |
| "learning_rate": 4.7235975589386715e-06, | |
| "loss": 0.2838, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 1.356923076923077, | |
| "grad_norm": 0.27762040579303454, | |
| "learning_rate": 4.7030846612107105e-06, | |
| "loss": 0.2678, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 1.3584615384615384, | |
| "grad_norm": 0.3032540883629789, | |
| "learning_rate": 4.6826026974670665e-06, | |
| "loss": 0.2873, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 1.3599999999999999, | |
| "grad_norm": 0.28474295761604757, | |
| "learning_rate": 4.662151787322405e-06, | |
| "loss": 0.2931, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 1.3615384615384616, | |
| "grad_norm": 0.2767424780339405, | |
| "learning_rate": 4.641732050210032e-06, | |
| "loss": 0.2754, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 1.363076923076923, | |
| "grad_norm": 0.2661749973204445, | |
| "learning_rate": 4.621343605381215e-06, | |
| "loss": 0.2736, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 1.3646153846153846, | |
| "grad_norm": 0.2855111722858186, | |
| "learning_rate": 4.600986571904461e-06, | |
| "loss": 0.2724, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 1.3661538461538463, | |
| "grad_norm": 0.2670827052296132, | |
| "learning_rate": 4.580661068664844e-06, | |
| "loss": 0.2808, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 1.3676923076923078, | |
| "grad_norm": 0.26642196866945184, | |
| "learning_rate": 4.560367214363295e-06, | |
| "loss": 0.2832, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 1.3692307692307693, | |
| "grad_norm": 0.26447372975548733, | |
| "learning_rate": 4.540105127515921e-06, | |
| "loss": 0.2886, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.3707692307692307, | |
| "grad_norm": 0.27104430863461343, | |
| "learning_rate": 4.519874926453303e-06, | |
| "loss": 0.2732, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 1.3723076923076922, | |
| "grad_norm": 0.2639620175059038, | |
| "learning_rate": 4.499676729319809e-06, | |
| "loss": 0.2772, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 1.373846153846154, | |
| "grad_norm": 0.27441667168958817, | |
| "learning_rate": 4.479510654072909e-06, | |
| "loss": 0.2812, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 1.3753846153846154, | |
| "grad_norm": 0.29082870304085784, | |
| "learning_rate": 4.459376818482471e-06, | |
| "loss": 0.2682, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 1.376923076923077, | |
| "grad_norm": 0.2877630922254318, | |
| "learning_rate": 4.439275340130099e-06, | |
| "loss": 0.2681, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 1.3784615384615384, | |
| "grad_norm": 0.26989551556496555, | |
| "learning_rate": 4.419206336408418e-06, | |
| "loss": 0.279, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "grad_norm": 0.2869441676477556, | |
| "learning_rate": 4.399169924520403e-06, | |
| "loss": 0.2842, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 1.3815384615384616, | |
| "grad_norm": 0.30094163030214116, | |
| "learning_rate": 4.379166221478697e-06, | |
| "loss": 0.2747, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 1.383076923076923, | |
| "grad_norm": 0.2771588125935493, | |
| "learning_rate": 4.359195344104916e-06, | |
| "loss": 0.2685, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 1.3846153846153846, | |
| "grad_norm": 0.29507488075814775, | |
| "learning_rate": 4.339257409028987e-06, | |
| "loss": 0.2806, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 1.3861538461538463, | |
| "grad_norm": 0.26093386279230574, | |
| "learning_rate": 4.319352532688444e-06, | |
| "loss": 0.2888, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 1.3876923076923076, | |
| "grad_norm": 0.29971147233177703, | |
| "learning_rate": 4.2994808313277565e-06, | |
| "loss": 0.2806, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 1.3892307692307693, | |
| "grad_norm": 0.2925280942409716, | |
| "learning_rate": 4.279642420997655e-06, | |
| "loss": 0.2766, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 1.3907692307692308, | |
| "grad_norm": 0.310206982896478, | |
| "learning_rate": 4.259837417554457e-06, | |
| "loss": 0.274, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 1.3923076923076922, | |
| "grad_norm": 0.27515905727135725, | |
| "learning_rate": 4.240065936659374e-06, | |
| "loss": 0.2872, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 1.393846153846154, | |
| "grad_norm": 0.30847037362181456, | |
| "learning_rate": 4.220328093777851e-06, | |
| "loss": 0.2732, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 1.3953846153846154, | |
| "grad_norm": 0.29152540720862175, | |
| "learning_rate": 4.200624004178883e-06, | |
| "loss": 0.2815, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 1.396923076923077, | |
| "grad_norm": 0.2796021392011909, | |
| "learning_rate": 4.180953782934352e-06, | |
| "loss": 0.28, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 1.3984615384615384, | |
| "grad_norm": 0.2807257095406004, | |
| "learning_rate": 4.161317544918345e-06, | |
| "loss": 0.2733, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "grad_norm": 0.2772487280421376, | |
| "learning_rate": 4.141715404806486e-06, | |
| "loss": 0.2699, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 1.4015384615384616, | |
| "grad_norm": 0.2884138212191608, | |
| "learning_rate": 4.12214747707527e-06, | |
| "loss": 0.279, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 1.403076923076923, | |
| "grad_norm": 0.2680045744149855, | |
| "learning_rate": 4.1026138760013886e-06, | |
| "loss": 0.2798, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 1.4046153846153846, | |
| "grad_norm": 0.2692589868706836, | |
| "learning_rate": 4.083114715661069e-06, | |
| "loss": 0.2829, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 1.406153846153846, | |
| "grad_norm": 0.297498404277643, | |
| "learning_rate": 4.0636501099294e-06, | |
| "loss": 0.2797, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 1.4076923076923076, | |
| "grad_norm": 0.287113462303936, | |
| "learning_rate": 4.044220172479675e-06, | |
| "loss": 0.2787, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 1.4092307692307693, | |
| "grad_norm": 0.30033725563998004, | |
| "learning_rate": 4.024825016782727e-06, | |
| "loss": 0.2793, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 1.4107692307692308, | |
| "grad_norm": 0.2887459684418934, | |
| "learning_rate": 4.0054647561062625e-06, | |
| "loss": 0.2676, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 1.4123076923076923, | |
| "grad_norm": 0.28514681381668844, | |
| "learning_rate": 3.9861395035141936e-06, | |
| "loss": 0.2867, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 1.413846153846154, | |
| "grad_norm": 0.27472517359961973, | |
| "learning_rate": 3.9668493718659924e-06, | |
| "loss": 0.2636, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 1.4153846153846155, | |
| "grad_norm": 0.29407748344726986, | |
| "learning_rate": 3.947594473816026e-06, | |
| "loss": 0.2821, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 1.416923076923077, | |
| "grad_norm": 0.2704272597057848, | |
| "learning_rate": 3.9283749218128885e-06, | |
| "loss": 0.2749, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 1.4184615384615384, | |
| "grad_norm": 0.27420846427149403, | |
| "learning_rate": 3.909190828098766e-06, | |
| "loss": 0.2811, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "grad_norm": 0.2797656797273435, | |
| "learning_rate": 3.890042304708758e-06, | |
| "loss": 0.2773, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 1.4215384615384616, | |
| "grad_norm": 0.3078131735166291, | |
| "learning_rate": 3.8709294634702374e-06, | |
| "loss": 0.2727, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 1.4230769230769231, | |
| "grad_norm": 0.28470564287868144, | |
| "learning_rate": 3.8518524160021876e-06, | |
| "loss": 0.2739, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 1.4246153846153846, | |
| "grad_norm": 0.3020599963499438, | |
| "learning_rate": 3.832811273714569e-06, | |
| "loss": 0.2793, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 1.426153846153846, | |
| "grad_norm": 0.2849356503004256, | |
| "learning_rate": 3.813806147807645e-06, | |
| "loss": 0.2743, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 1.4276923076923076, | |
| "grad_norm": 0.2933219685932077, | |
| "learning_rate": 3.7948371492713454e-06, | |
| "loss": 0.2699, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 1.4292307692307693, | |
| "grad_norm": 0.26565134002528773, | |
| "learning_rate": 3.775904388884618e-06, | |
| "loss": 0.2733, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 1.4307692307692308, | |
| "grad_norm": 0.28025463474691675, | |
| "learning_rate": 3.7570079772147748e-06, | |
| "loss": 0.2706, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 1.4323076923076923, | |
| "grad_norm": 0.2977520733153107, | |
| "learning_rate": 3.738148024616863e-06, | |
| "loss": 0.2881, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 1.4338461538461538, | |
| "grad_norm": 0.28252543847339456, | |
| "learning_rate": 3.7193246412329976e-06, | |
| "loss": 0.2812, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 1.4353846153846153, | |
| "grad_norm": 0.2870050233029735, | |
| "learning_rate": 3.700537936991733e-06, | |
| "loss": 0.265, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 1.436923076923077, | |
| "grad_norm": 0.2652244026722324, | |
| "learning_rate": 3.681788021607413e-06, | |
| "loss": 0.2817, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 1.4384615384615385, | |
| "grad_norm": 0.31013959013850706, | |
| "learning_rate": 3.6630750045795472e-06, | |
| "loss": 0.2798, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 0.2743987811436827, | |
| "learning_rate": 3.6443989951921478e-06, | |
| "loss": 0.2696, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 1.4415384615384617, | |
| "grad_norm": 0.27339074035174915, | |
| "learning_rate": 3.625760102513103e-06, | |
| "loss": 0.2733, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 1.4430769230769231, | |
| "grad_norm": 0.2839064463945384, | |
| "learning_rate": 3.607158435393544e-06, | |
| "loss": 0.2796, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 1.4446153846153846, | |
| "grad_norm": 0.295811608192796, | |
| "learning_rate": 3.5885941024672e-06, | |
| "loss": 0.2845, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 1.4461538461538461, | |
| "grad_norm": 0.29015867105530985, | |
| "learning_rate": 3.5700672121497728e-06, | |
| "loss": 0.2701, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 1.4476923076923076, | |
| "grad_norm": 0.2693689280307087, | |
| "learning_rate": 3.5515778726382967e-06, | |
| "loss": 0.2797, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 1.4492307692307693, | |
| "grad_norm": 0.2945264603333907, | |
| "learning_rate": 3.53312619191051e-06, | |
| "loss": 0.278, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 1.4507692307692308, | |
| "grad_norm": 0.2705312345116653, | |
| "learning_rate": 3.5147122777242203e-06, | |
| "loss": 0.28, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 1.4523076923076923, | |
| "grad_norm": 0.2847915292490043, | |
| "learning_rate": 3.4963362376166886e-06, | |
| "loss": 0.2777, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 1.4538461538461538, | |
| "grad_norm": 0.2753826997127367, | |
| "learning_rate": 3.477998178903982e-06, | |
| "loss": 0.2787, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 1.4553846153846153, | |
| "grad_norm": 0.27318410243587926, | |
| "learning_rate": 3.459698208680359e-06, | |
| "loss": 0.2765, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 1.456923076923077, | |
| "grad_norm": 0.28833597885945284, | |
| "learning_rate": 3.441436433817641e-06, | |
| "loss": 0.2708, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 1.4584615384615385, | |
| "grad_norm": 0.27620830508227745, | |
| "learning_rate": 3.423212960964586e-06, | |
| "loss": 0.2758, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "grad_norm": 0.271564199389485, | |
| "learning_rate": 3.405027896546277e-06, | |
| "loss": 0.2805, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 1.4615384615384617, | |
| "grad_norm": 0.26988340339307304, | |
| "learning_rate": 3.3868813467634833e-06, | |
| "loss": 0.2791, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 1.463076923076923, | |
| "grad_norm": 0.2838014425158747, | |
| "learning_rate": 3.3687734175920505e-06, | |
| "loss": 0.2716, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 1.4646153846153847, | |
| "grad_norm": 0.27584807857810434, | |
| "learning_rate": 3.350704214782278e-06, | |
| "loss": 0.2627, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 1.4661538461538461, | |
| "grad_norm": 0.2750476670562843, | |
| "learning_rate": 3.3326738438583116e-06, | |
| "loss": 0.2671, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 1.4676923076923076, | |
| "grad_norm": 0.27704107334718375, | |
| "learning_rate": 3.314682410117511e-06, | |
| "loss": 0.2802, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 1.4692307692307693, | |
| "grad_norm": 0.2649151011885699, | |
| "learning_rate": 3.2967300186298456e-06, | |
| "loss": 0.2817, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 1.4707692307692308, | |
| "grad_norm": 0.2815722650702043, | |
| "learning_rate": 3.2788167742372725e-06, | |
| "loss": 0.2706, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 1.4723076923076923, | |
| "grad_norm": 0.287669633361647, | |
| "learning_rate": 3.2609427815531426e-06, | |
| "loss": 0.2816, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 1.4738461538461538, | |
| "grad_norm": 0.2759316296845802, | |
| "learning_rate": 3.243108144961563e-06, | |
| "loss": 0.2745, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 1.4753846153846153, | |
| "grad_norm": 0.2928929578884538, | |
| "learning_rate": 3.2253129686168105e-06, | |
| "loss": 0.2781, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 1.476923076923077, | |
| "grad_norm": 0.28769583350600747, | |
| "learning_rate": 3.2075573564427097e-06, | |
| "loss": 0.275, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 1.4784615384615385, | |
| "grad_norm": 0.2950526179921246, | |
| "learning_rate": 3.1898414121320277e-06, | |
| "loss": 0.2746, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 0.28082302779472623, | |
| "learning_rate": 3.1721652391458804e-06, | |
| "loss": 0.2731, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 1.4815384615384615, | |
| "grad_norm": 0.3037822234665378, | |
| "learning_rate": 3.1545289407131128e-06, | |
| "loss": 0.2846, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 1.483076923076923, | |
| "grad_norm": 0.26541552279778613, | |
| "learning_rate": 3.1369326198297025e-06, | |
| "loss": 0.2707, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 1.4846153846153847, | |
| "grad_norm": 0.27228670305719155, | |
| "learning_rate": 3.11937637925816e-06, | |
| "loss": 0.2907, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 1.4861538461538462, | |
| "grad_norm": 0.2898752439173941, | |
| "learning_rate": 3.101860321526924e-06, | |
| "loss": 0.2787, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 1.4876923076923076, | |
| "grad_norm": 0.28760668304946113, | |
| "learning_rate": 3.0843845489297698e-06, | |
| "loss": 0.2771, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 1.4892307692307694, | |
| "grad_norm": 0.26927083755746745, | |
| "learning_rate": 3.066949163525205e-06, | |
| "loss": 0.2884, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 1.4907692307692308, | |
| "grad_norm": 0.2722526618027186, | |
| "learning_rate": 3.0495542671358745e-06, | |
| "loss": 0.2776, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 1.4923076923076923, | |
| "grad_norm": 0.2628110690196813, | |
| "learning_rate": 3.0321999613479668e-06, | |
| "loss": 0.2756, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 1.4938461538461538, | |
| "grad_norm": 0.27947564803570074, | |
| "learning_rate": 3.0148863475106315e-06, | |
| "loss": 0.2716, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 1.4953846153846153, | |
| "grad_norm": 0.28847862238255323, | |
| "learning_rate": 2.9976135267353636e-06, | |
| "loss": 0.2705, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 1.496923076923077, | |
| "grad_norm": 0.2830722792497127, | |
| "learning_rate": 2.9803815998954334e-06, | |
| "loss": 0.2694, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 1.4984615384615385, | |
| "grad_norm": 0.2825543118881783, | |
| "learning_rate": 2.9631906676252865e-06, | |
| "loss": 0.2688, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.2794572077398108, | |
| "learning_rate": 2.9460408303199696e-06, | |
| "loss": 0.2772, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 1.5015384615384615, | |
| "grad_norm": 0.26354437004415276, | |
| "learning_rate": 2.9289321881345257e-06, | |
| "loss": 0.289, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 1.503076923076923, | |
| "grad_norm": 0.2999748853854813, | |
| "learning_rate": 2.9118648409834205e-06, | |
| "loss": 0.2797, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 1.5046153846153847, | |
| "grad_norm": 0.26897393008418174, | |
| "learning_rate": 2.894838888539957e-06, | |
| "loss": 0.2799, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 1.5061538461538462, | |
| "grad_norm": 0.2756670966010222, | |
| "learning_rate": 2.8778544302356904e-06, | |
| "loss": 0.2669, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 1.5076923076923077, | |
| "grad_norm": 0.28448485978171967, | |
| "learning_rate": 2.8609115652598595e-06, | |
| "loss": 0.2748, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 1.5092307692307694, | |
| "grad_norm": 0.2750366525351591, | |
| "learning_rate": 2.8440103925587904e-06, | |
| "loss": 0.2719, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 1.5107692307692306, | |
| "grad_norm": 0.2759733005813308, | |
| "learning_rate": 2.8271510108353237e-06, | |
| "loss": 0.2801, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 1.5123076923076924, | |
| "grad_norm": 0.2723203530361154, | |
| "learning_rate": 2.810333518548246e-06, | |
| "loss": 0.2772, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 1.5138461538461538, | |
| "grad_norm": 0.27696349217523963, | |
| "learning_rate": 2.7935580139117114e-06, | |
| "loss": 0.2807, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 1.5153846153846153, | |
| "grad_norm": 0.27417573211998586, | |
| "learning_rate": 2.7768245948946615e-06, | |
| "loss": 0.2768, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 1.516923076923077, | |
| "grad_norm": 0.2857478167106004, | |
| "learning_rate": 2.7601333592202583e-06, | |
| "loss": 0.2789, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 1.5184615384615383, | |
| "grad_norm": 0.27385986851994265, | |
| "learning_rate": 2.743484404365314e-06, | |
| "loss": 0.2829, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 0.2719273680384911, | |
| "learning_rate": 2.7268778275597217e-06, | |
| "loss": 0.2735, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 1.5215384615384615, | |
| "grad_norm": 0.277913548440064, | |
| "learning_rate": 2.7103137257858867e-06, | |
| "loss": 0.2742, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 1.523076923076923, | |
| "grad_norm": 0.2655837420363689, | |
| "learning_rate": 2.6937921957781587e-06, | |
| "loss": 0.2737, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 1.5246153846153847, | |
| "grad_norm": 0.25939819853408147, | |
| "learning_rate": 2.6773133340222677e-06, | |
| "loss": 0.2753, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 1.5261538461538462, | |
| "grad_norm": 0.299768904794166, | |
| "learning_rate": 2.660877236754762e-06, | |
| "loss": 0.2774, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 1.5276923076923077, | |
| "grad_norm": 0.2918871574747949, | |
| "learning_rate": 2.6444839999624496e-06, | |
| "loss": 0.2698, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 1.5292307692307694, | |
| "grad_norm": 0.27965606944159893, | |
| "learning_rate": 2.6281337193818267e-06, | |
| "loss": 0.2813, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 1.5307692307692307, | |
| "grad_norm": 0.2699316433426434, | |
| "learning_rate": 2.61182649049853e-06, | |
| "loss": 0.264, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 1.5323076923076924, | |
| "grad_norm": 0.2674235361378631, | |
| "learning_rate": 2.59556240854677e-06, | |
| "loss": 0.2709, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 1.5338461538461539, | |
| "grad_norm": 0.2737319487789186, | |
| "learning_rate": 2.5793415685087797e-06, | |
| "loss": 0.2776, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 1.5353846153846153, | |
| "grad_norm": 0.2554653387177025, | |
| "learning_rate": 2.5631640651142654e-06, | |
| "loss": 0.2775, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 1.536923076923077, | |
| "grad_norm": 0.2805688350077011, | |
| "learning_rate": 2.5470299928398424e-06, | |
| "loss": 0.2777, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 1.5384615384615383, | |
| "grad_norm": 0.25830358096575184, | |
| "learning_rate": 2.5309394459084878e-06, | |
| "loss": 0.2661, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "grad_norm": 0.26874499745479663, | |
| "learning_rate": 2.514892518288988e-06, | |
| "loss": 0.2671, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 1.5415384615384615, | |
| "grad_norm": 0.25765810204174705, | |
| "learning_rate": 2.4988893036954045e-06, | |
| "loss": 0.2688, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 1.543076923076923, | |
| "grad_norm": 0.2716791072454054, | |
| "learning_rate": 2.4829298955865022e-06, | |
| "loss": 0.2714, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 1.5446153846153847, | |
| "grad_norm": 0.2765676929624752, | |
| "learning_rate": 2.467014387165222e-06, | |
| "loss": 0.2815, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 1.546153846153846, | |
| "grad_norm": 0.2892228732777564, | |
| "learning_rate": 2.451142871378124e-06, | |
| "loss": 0.2854, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 1.5476923076923077, | |
| "grad_norm": 0.279076193937244, | |
| "learning_rate": 2.4353154409148637e-06, | |
| "loss": 0.2767, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 1.5492307692307692, | |
| "grad_norm": 0.25962663695514016, | |
| "learning_rate": 2.4195321882076295e-06, | |
| "loss": 0.2746, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 1.5507692307692307, | |
| "grad_norm": 0.2727392844670763, | |
| "learning_rate": 2.4037932054306125e-06, | |
| "loss": 0.2737, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 1.5523076923076924, | |
| "grad_norm": 0.26846928534735465, | |
| "learning_rate": 2.3880985844994674e-06, | |
| "loss": 0.2759, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 1.5538461538461539, | |
| "grad_norm": 0.26462438345050504, | |
| "learning_rate": 2.372448417070776e-06, | |
| "loss": 0.2716, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 1.5553846153846154, | |
| "grad_norm": 0.3037970471484453, | |
| "learning_rate": 2.3568427945415163e-06, | |
| "loss": 0.2744, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 1.556923076923077, | |
| "grad_norm": 0.2753805507186117, | |
| "learning_rate": 2.3412818080485176e-06, | |
| "loss": 0.2777, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 1.5584615384615383, | |
| "grad_norm": 0.26354274514511733, | |
| "learning_rate": 2.3257655484679376e-06, | |
| "loss": 0.2638, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "grad_norm": 0.2883351270138847, | |
| "learning_rate": 2.3102941064147287e-06, | |
| "loss": 0.2769, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 1.5615384615384615, | |
| "grad_norm": 0.2930116252923399, | |
| "learning_rate": 2.2948675722421086e-06, | |
| "loss": 0.2701, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 1.563076923076923, | |
| "grad_norm": 0.27465138940322426, | |
| "learning_rate": 2.279486036041034e-06, | |
| "loss": 0.274, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 1.5646153846153847, | |
| "grad_norm": 0.2785774804014197, | |
| "learning_rate": 2.2641495876396713e-06, | |
| "loss": 0.2909, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 1.566153846153846, | |
| "grad_norm": 0.2957628352417127, | |
| "learning_rate": 2.2488583166028754e-06, | |
| "loss": 0.2765, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 1.5676923076923077, | |
| "grad_norm": 0.2751446889954304, | |
| "learning_rate": 2.2336123122316642e-06, | |
| "loss": 0.2723, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 1.5692307692307692, | |
| "grad_norm": 0.2636974738762224, | |
| "learning_rate": 2.218411663562704e-06, | |
| "loss": 0.2783, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 1.5707692307692307, | |
| "grad_norm": 0.2830324109973893, | |
| "learning_rate": 2.2032564593677773e-06, | |
| "loss": 0.2781, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 1.5723076923076924, | |
| "grad_norm": 0.2727444671070684, | |
| "learning_rate": 2.1881467881532737e-06, | |
| "loss": 0.2695, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 1.573846153846154, | |
| "grad_norm": 0.27167201485030423, | |
| "learning_rate": 2.1730827381596643e-06, | |
| "loss": 0.2694, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 1.5753846153846154, | |
| "grad_norm": 0.2912522223958155, | |
| "learning_rate": 2.158064397361005e-06, | |
| "loss": 0.2725, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 1.5769230769230769, | |
| "grad_norm": 0.27334940402277946, | |
| "learning_rate": 2.1430918534643996e-06, | |
| "loss": 0.2778, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 1.5784615384615384, | |
| "grad_norm": 0.28204943693802564, | |
| "learning_rate": 2.1281651939094996e-06, | |
| "loss": 0.2773, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "grad_norm": 0.2675264102046118, | |
| "learning_rate": 2.1132845058679942e-06, | |
| "loss": 0.284, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 1.5815384615384616, | |
| "grad_norm": 0.26218450323544323, | |
| "learning_rate": 2.098449876243096e-06, | |
| "loss": 0.2757, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 1.583076923076923, | |
| "grad_norm": 0.27703152952301313, | |
| "learning_rate": 2.083661391669043e-06, | |
| "loss": 0.2834, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 1.5846153846153848, | |
| "grad_norm": 0.2662129547997518, | |
| "learning_rate": 2.0689191385105787e-06, | |
| "loss": 0.2676, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 1.586153846153846, | |
| "grad_norm": 0.26446456191661216, | |
| "learning_rate": 2.0542232028624585e-06, | |
| "loss": 0.2694, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 1.5876923076923077, | |
| "grad_norm": 0.2627648312541549, | |
| "learning_rate": 2.03957367054894e-06, | |
| "loss": 0.2711, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 1.5892307692307692, | |
| "grad_norm": 0.27395360055874696, | |
| "learning_rate": 2.024970627123295e-06, | |
| "loss": 0.2726, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 1.5907692307692307, | |
| "grad_norm": 0.2697412149903341, | |
| "learning_rate": 2.0104141578672887e-06, | |
| "loss": 0.2735, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 1.5923076923076924, | |
| "grad_norm": 0.2754963765655869, | |
| "learning_rate": 1.9959043477907e-06, | |
| "loss": 0.2691, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 1.5938461538461537, | |
| "grad_norm": 0.262332165634561, | |
| "learning_rate": 1.981441281630816e-06, | |
| "loss": 0.2679, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 1.5953846153846154, | |
| "grad_norm": 0.28477683389530106, | |
| "learning_rate": 1.967025043851939e-06, | |
| "loss": 0.2661, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 1.596923076923077, | |
| "grad_norm": 0.26484391672500696, | |
| "learning_rate": 1.9526557186448924e-06, | |
| "loss": 0.2843, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 1.5984615384615384, | |
| "grad_norm": 0.27334779504852813, | |
| "learning_rate": 1.9383333899265368e-06, | |
| "loss": 0.2818, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 0.2848366958654719, | |
| "learning_rate": 1.9240581413392647e-06, | |
| "loss": 0.2685, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 1.6015384615384616, | |
| "grad_norm": 0.2544014207312848, | |
| "learning_rate": 1.9098300562505266e-06, | |
| "loss": 0.2827, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 1.603076923076923, | |
| "grad_norm": 0.2810041947592908, | |
| "learning_rate": 1.8956492177523345e-06, | |
| "loss": 0.2774, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 1.6046153846153848, | |
| "grad_norm": 0.27727230328919683, | |
| "learning_rate": 1.8815157086607826e-06, | |
| "loss": 0.2801, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 1.606153846153846, | |
| "grad_norm": 0.27123994829514625, | |
| "learning_rate": 1.86742961151556e-06, | |
| "loss": 0.2694, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 1.6076923076923078, | |
| "grad_norm": 0.2754553311610714, | |
| "learning_rate": 1.8533910085794714e-06, | |
| "loss": 0.2719, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 1.6092307692307692, | |
| "grad_norm": 0.27863073636816, | |
| "learning_rate": 1.8393999818379527e-06, | |
| "loss": 0.2645, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 1.6107692307692307, | |
| "grad_norm": 0.2671126481268661, | |
| "learning_rate": 1.8254566129985996e-06, | |
| "loss": 0.2761, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 1.6123076923076924, | |
| "grad_norm": 0.26267406682118105, | |
| "learning_rate": 1.8115609834906821e-06, | |
| "loss": 0.2818, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 1.6138461538461537, | |
| "grad_norm": 0.2671493987923625, | |
| "learning_rate": 1.7977131744646724e-06, | |
| "loss": 0.2789, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 1.6153846153846154, | |
| "grad_norm": 0.2505733188596916, | |
| "learning_rate": 1.7839132667917692e-06, | |
| "loss": 0.2859, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 1.616923076923077, | |
| "grad_norm": 0.263775565972645, | |
| "learning_rate": 1.7701613410634367e-06, | |
| "loss": 0.2693, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 1.6184615384615384, | |
| "grad_norm": 0.2702206938718406, | |
| "learning_rate": 1.7564574775909127e-06, | |
| "loss": 0.2669, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "grad_norm": 0.26997570628252127, | |
| "learning_rate": 1.7428017564047594e-06, | |
| "loss": 0.2555, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 1.6215384615384614, | |
| "grad_norm": 0.2662750732164586, | |
| "learning_rate": 1.7291942572543806e-06, | |
| "loss": 0.289, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 1.623076923076923, | |
| "grad_norm": 0.2639379570596132, | |
| "learning_rate": 1.7156350596075743e-06, | |
| "loss": 0.2769, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 1.6246153846153846, | |
| "grad_norm": 0.27426927931869355, | |
| "learning_rate": 1.7021242426500495e-06, | |
| "loss": 0.2741, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 1.626153846153846, | |
| "grad_norm": 0.2662514430923517, | |
| "learning_rate": 1.6886618852849723e-06, | |
| "loss": 0.2705, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 1.6276923076923078, | |
| "grad_norm": 0.28223326673461585, | |
| "learning_rate": 1.6752480661325077e-06, | |
| "loss": 0.2763, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 1.6292307692307693, | |
| "grad_norm": 0.2729469232622983, | |
| "learning_rate": 1.6618828635293538e-06, | |
| "loss": 0.2622, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 1.6307692307692307, | |
| "grad_norm": 0.2709645746392681, | |
| "learning_rate": 1.6485663555282949e-06, | |
| "loss": 0.2725, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 1.6323076923076925, | |
| "grad_norm": 0.26714503175176485, | |
| "learning_rate": 1.6352986198977327e-06, | |
| "loss": 0.2732, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 1.6338461538461537, | |
| "grad_norm": 0.2772109598438791, | |
| "learning_rate": 1.6220797341212401e-06, | |
| "loss": 0.2662, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 1.6353846153846154, | |
| "grad_norm": 0.2753978577038448, | |
| "learning_rate": 1.6089097753971061e-06, | |
| "loss": 0.2687, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 1.636923076923077, | |
| "grad_norm": 0.27445806854845634, | |
| "learning_rate": 1.595788820637888e-06, | |
| "loss": 0.2647, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 1.6384615384615384, | |
| "grad_norm": 0.2546044446500755, | |
| "learning_rate": 1.5827169464699576e-06, | |
| "loss": 0.2795, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 1.6400000000000001, | |
| "grad_norm": 0.26446018640312374, | |
| "learning_rate": 1.5696942292330574e-06, | |
| "loss": 0.2855, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 1.6415384615384614, | |
| "grad_norm": 0.2703615121193336, | |
| "learning_rate": 1.5567207449798517e-06, | |
| "loss": 0.2754, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 1.643076923076923, | |
| "grad_norm": 0.25940079508718933, | |
| "learning_rate": 1.5437965694754842e-06, | |
| "loss": 0.2724, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 1.6446153846153846, | |
| "grad_norm": 0.2938743036524522, | |
| "learning_rate": 1.5309217781971419e-06, | |
| "loss": 0.2788, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 1.646153846153846, | |
| "grad_norm": 0.27539329200714313, | |
| "learning_rate": 1.518096446333599e-06, | |
| "loss": 0.279, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 1.6476923076923078, | |
| "grad_norm": 0.27399731752015927, | |
| "learning_rate": 1.5053206487847916e-06, | |
| "loss": 0.2707, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 1.6492307692307693, | |
| "grad_norm": 0.2932148175507058, | |
| "learning_rate": 1.4925944601613718e-06, | |
| "loss": 0.2742, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 1.6507692307692308, | |
| "grad_norm": 0.2460474178500085, | |
| "learning_rate": 1.4799179547842823e-06, | |
| "loss": 0.2737, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 1.6523076923076923, | |
| "grad_norm": 0.24752302212753835, | |
| "learning_rate": 1.4672912066843103e-06, | |
| "loss": 0.2684, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 1.6538461538461537, | |
| "grad_norm": 0.27662182502454175, | |
| "learning_rate": 1.454714289601661e-06, | |
| "loss": 0.2768, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 1.6553846153846155, | |
| "grad_norm": 0.25210921110835766, | |
| "learning_rate": 1.4421872769855262e-06, | |
| "loss": 0.2781, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 1.656923076923077, | |
| "grad_norm": 0.2645704559801182, | |
| "learning_rate": 1.4297102419936559e-06, | |
| "loss": 0.2754, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 1.6584615384615384, | |
| "grad_norm": 0.2583307882783813, | |
| "learning_rate": 1.4172832574919359e-06, | |
| "loss": 0.2716, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 1.6600000000000001, | |
| "grad_norm": 0.2714938620970922, | |
| "learning_rate": 1.4049063960539488e-06, | |
| "loss": 0.2794, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 1.6615384615384614, | |
| "grad_norm": 0.25378212618966794, | |
| "learning_rate": 1.3925797299605649e-06, | |
| "loss": 0.2748, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 1.6630769230769231, | |
| "grad_norm": 0.26790623929740864, | |
| "learning_rate": 1.3803033311995072e-06, | |
| "loss": 0.2677, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 1.6646153846153846, | |
| "grad_norm": 0.28686817516841456, | |
| "learning_rate": 1.368077271464946e-06, | |
| "loss": 0.2612, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 1.666153846153846, | |
| "grad_norm": 0.2608494931055921, | |
| "learning_rate": 1.3559016221570663e-06, | |
| "loss": 0.283, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 1.6676923076923078, | |
| "grad_norm": 0.2679051422710405, | |
| "learning_rate": 1.3437764543816556e-06, | |
| "loss": 0.2754, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 1.669230769230769, | |
| "grad_norm": 0.25609847300489263, | |
| "learning_rate": 1.3317018389496927e-06, | |
| "loss": 0.286, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 1.6707692307692308, | |
| "grad_norm": 0.26682787951436476, | |
| "learning_rate": 1.3196778463769256e-06, | |
| "loss": 0.2718, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 1.6723076923076923, | |
| "grad_norm": 0.28056506397402664, | |
| "learning_rate": 1.3077045468834714e-06, | |
| "loss": 0.2601, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 1.6738461538461538, | |
| "grad_norm": 0.24980681917796863, | |
| "learning_rate": 1.295782010393396e-06, | |
| "loss": 0.2785, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 1.6753846153846155, | |
| "grad_norm": 0.25805836310849545, | |
| "learning_rate": 1.2839103065343084e-06, | |
| "loss": 0.2783, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 1.676923076923077, | |
| "grad_norm": 0.274089796066655, | |
| "learning_rate": 1.2720895046369564e-06, | |
| "loss": 0.2695, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 1.6784615384615384, | |
| "grad_norm": 0.26254269044134937, | |
| "learning_rate": 1.2603196737348211e-06, | |
| "loss": 0.2743, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 1.6800000000000002, | |
| "grad_norm": 0.2617720717953636, | |
| "learning_rate": 1.2486008825637119e-06, | |
| "loss": 0.2612, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 1.6815384615384614, | |
| "grad_norm": 0.2759743866219765, | |
| "learning_rate": 1.2369331995613664e-06, | |
| "loss": 0.2727, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 1.6830769230769231, | |
| "grad_norm": 0.25653527827770223, | |
| "learning_rate": 1.2253166928670478e-06, | |
| "loss": 0.2747, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 1.6846153846153846, | |
| "grad_norm": 0.25821672413208674, | |
| "learning_rate": 1.213751430321156e-06, | |
| "loss": 0.2704, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 1.6861538461538461, | |
| "grad_norm": 0.2639461309608209, | |
| "learning_rate": 1.2022374794648229e-06, | |
| "loss": 0.2747, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 1.6876923076923078, | |
| "grad_norm": 0.2684576261799393, | |
| "learning_rate": 1.1907749075395147e-06, | |
| "loss": 0.2677, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 1.689230769230769, | |
| "grad_norm": 0.25985274031169525, | |
| "learning_rate": 1.179363781486651e-06, | |
| "loss": 0.2673, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 1.6907692307692308, | |
| "grad_norm": 0.2688011611759083, | |
| "learning_rate": 1.168004167947202e-06, | |
| "loss": 0.2777, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 1.6923076923076923, | |
| "grad_norm": 0.28355243170627664, | |
| "learning_rate": 1.1566961332613136e-06, | |
| "loss": 0.2846, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 1.6938461538461538, | |
| "grad_norm": 0.2603109920930245, | |
| "learning_rate": 1.1454397434679022e-06, | |
| "loss": 0.2752, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 1.6953846153846155, | |
| "grad_norm": 0.2895708885344929, | |
| "learning_rate": 1.1342350643042822e-06, | |
| "loss": 0.269, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 1.696923076923077, | |
| "grad_norm": 0.2773699105634792, | |
| "learning_rate": 1.123082161205775e-06, | |
| "loss": 0.2821, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 1.6984615384615385, | |
| "grad_norm": 0.2518089725672006, | |
| "learning_rate": 1.111981099305336e-06, | |
| "loss": 0.2708, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "grad_norm": 0.27872001467564333, | |
| "learning_rate": 1.1009319434331623e-06, | |
| "loss": 0.2693, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 1.7015384615384614, | |
| "grad_norm": 0.2605607686909919, | |
| "learning_rate": 1.0899347581163222e-06, | |
| "loss": 0.2837, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 1.7030769230769232, | |
| "grad_norm": 0.26986358015400225, | |
| "learning_rate": 1.0789896075783734e-06, | |
| "loss": 0.2655, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 1.7046153846153846, | |
| "grad_norm": 0.27609853188608596, | |
| "learning_rate": 1.0680965557389934e-06, | |
| "loss": 0.2793, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 1.7061538461538461, | |
| "grad_norm": 0.27846279018096237, | |
| "learning_rate": 1.0572556662136036e-06, | |
| "loss": 0.2755, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 1.7076923076923078, | |
| "grad_norm": 0.2573280880106174, | |
| "learning_rate": 1.0464670023129952e-06, | |
| "loss": 0.272, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 1.709230769230769, | |
| "grad_norm": 0.26407674773187606, | |
| "learning_rate": 1.0357306270429623e-06, | |
| "loss": 0.2692, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 1.7107692307692308, | |
| "grad_norm": 0.2637903458053623, | |
| "learning_rate": 1.0250466031039353e-06, | |
| "loss": 0.2729, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 1.7123076923076923, | |
| "grad_norm": 0.2624748117136115, | |
| "learning_rate": 1.014414992890611e-06, | |
| "loss": 0.273, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 1.7138461538461538, | |
| "grad_norm": 0.29472250276583384, | |
| "learning_rate": 1.0038358584915896e-06, | |
| "loss": 0.2704, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 1.7153846153846155, | |
| "grad_norm": 0.26606886810615454, | |
| "learning_rate": 9.93309261689015e-07, | |
| "loss": 0.2762, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 1.7169230769230768, | |
| "grad_norm": 0.2643526797569404, | |
| "learning_rate": 9.828352639582073e-07, | |
| "loss": 0.2762, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 1.7184615384615385, | |
| "grad_norm": 0.26253274898605866, | |
| "learning_rate": 9.724139264673116e-07, | |
| "loss": 0.2785, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "grad_norm": 0.28539607445052123, | |
| "learning_rate": 9.6204531007694e-07, | |
| "loss": 0.2723, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 1.7215384615384615, | |
| "grad_norm": 0.2754770798892866, | |
| "learning_rate": 9.517294753398066e-07, | |
| "loss": 0.2821, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 1.7230769230769232, | |
| "grad_norm": 0.27184406321376414, | |
| "learning_rate": 9.414664825003838e-07, | |
| "loss": 0.2768, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 1.7246153846153847, | |
| "grad_norm": 0.259292322429861, | |
| "learning_rate": 9.312563914945461e-07, | |
| "loss": 0.2636, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 1.7261538461538461, | |
| "grad_norm": 0.26805868801377686, | |
| "learning_rate": 9.210992619492254e-07, | |
| "loss": 0.2778, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 1.7276923076923076, | |
| "grad_norm": 0.25735702425768975, | |
| "learning_rate": 9.10995153182056e-07, | |
| "loss": 0.2821, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 1.7292307692307691, | |
| "grad_norm": 0.25299101144437586, | |
| "learning_rate": 9.009441242010287e-07, | |
| "loss": 0.2703, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 1.7307692307692308, | |
| "grad_norm": 0.2765481093679949, | |
| "learning_rate": 8.909462337041508e-07, | |
| "loss": 0.2731, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 1.7323076923076923, | |
| "grad_norm": 0.2755662572987114, | |
| "learning_rate": 8.810015400790994e-07, | |
| "loss": 0.2689, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 1.7338461538461538, | |
| "grad_norm": 0.2625023002565323, | |
| "learning_rate": 8.711101014028855e-07, | |
| "loss": 0.2728, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 1.7353846153846155, | |
| "grad_norm": 0.26985914746536965, | |
| "learning_rate": 8.612719754415078e-07, | |
| "loss": 0.2773, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 1.7369230769230768, | |
| "grad_norm": 0.27204356751705255, | |
| "learning_rate": 8.514872196496182e-07, | |
| "loss": 0.2718, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 1.7384615384615385, | |
| "grad_norm": 0.2589644443476737, | |
| "learning_rate": 8.417558911701884e-07, | |
| "loss": 0.2674, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "grad_norm": 0.2619031108337043, | |
| "learning_rate": 8.320780468341761e-07, | |
| "loss": 0.2769, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 1.7415384615384615, | |
| "grad_norm": 0.24663096862283984, | |
| "learning_rate": 8.224537431601886e-07, | |
| "loss": 0.2749, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 1.7430769230769232, | |
| "grad_norm": 0.25329093409597475, | |
| "learning_rate": 8.128830363541574e-07, | |
| "loss": 0.2674, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 1.7446153846153845, | |
| "grad_norm": 0.25855666495593144, | |
| "learning_rate": 8.03365982309009e-07, | |
| "loss": 0.2732, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 1.7461538461538462, | |
| "grad_norm": 0.2575342547274432, | |
| "learning_rate": 7.939026366043323e-07, | |
| "loss": 0.2734, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 1.7476923076923077, | |
| "grad_norm": 0.2572051505754092, | |
| "learning_rate": 7.844930545060703e-07, | |
| "loss": 0.2672, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 1.7492307692307691, | |
| "grad_norm": 0.25659868456171797, | |
| "learning_rate": 7.75137290966177e-07, | |
| "loss": 0.2694, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 1.7507692307692309, | |
| "grad_norm": 0.26701664877195225, | |
| "learning_rate": 7.65835400622309e-07, | |
| "loss": 0.2706, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 1.7523076923076923, | |
| "grad_norm": 0.28286665007577344, | |
| "learning_rate": 7.565874377975046e-07, | |
| "loss": 0.272, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 1.7538461538461538, | |
| "grad_norm": 0.27858205811414216, | |
| "learning_rate": 7.473934564998641e-07, | |
| "loss": 0.267, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 1.7553846153846155, | |
| "grad_norm": 0.26001654197560764, | |
| "learning_rate": 7.382535104222366e-07, | |
| "loss": 0.2741, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 1.7569230769230768, | |
| "grad_norm": 0.2649493247350367, | |
| "learning_rate": 7.291676529419034e-07, | |
| "loss": 0.2735, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 1.7584615384615385, | |
| "grad_norm": 0.2543629832367694, | |
| "learning_rate": 7.201359371202698e-07, | |
| "loss": 0.2692, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 0.25355471214350306, | |
| "learning_rate": 7.111584157025575e-07, | |
| "loss": 0.2768, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 1.7615384615384615, | |
| "grad_norm": 0.24548207312048848, | |
| "learning_rate": 7.022351411174866e-07, | |
| "loss": 0.2763, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 1.7630769230769232, | |
| "grad_norm": 0.24529106469337025, | |
| "learning_rate": 6.933661654769797e-07, | |
| "loss": 0.277, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 1.7646153846153845, | |
| "grad_norm": 0.2575076262302863, | |
| "learning_rate": 6.845515405758518e-07, | |
| "loss": 0.2743, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 1.7661538461538462, | |
| "grad_norm": 0.26604894439329496, | |
| "learning_rate": 6.757913178915087e-07, | |
| "loss": 0.2749, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 1.7676923076923077, | |
| "grad_norm": 0.28681485404621826, | |
| "learning_rate": 6.670855485836525e-07, | |
| "loss": 0.2686, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 1.7692307692307692, | |
| "grad_norm": 0.2589106809589985, | |
| "learning_rate": 6.584342834939717e-07, | |
| "loss": 0.2704, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 1.7707692307692309, | |
| "grad_norm": 0.28911580411212706, | |
| "learning_rate": 6.498375731458529e-07, | |
| "loss": 0.2652, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 1.7723076923076924, | |
| "grad_norm": 0.24524281594408773, | |
| "learning_rate": 6.412954677440797e-07, | |
| "loss": 0.2769, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 1.7738461538461539, | |
| "grad_norm": 0.2413224345249869, | |
| "learning_rate": 6.32808017174551e-07, | |
| "loss": 0.2824, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 1.7753846153846153, | |
| "grad_norm": 0.2488148241737766, | |
| "learning_rate": 6.243752710039719e-07, | |
| "loss": 0.2707, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 1.7769230769230768, | |
| "grad_norm": 0.2510817209486956, | |
| "learning_rate": 6.159972784795798e-07, | |
| "loss": 0.2669, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 1.7784615384615385, | |
| "grad_norm": 0.26078478335979977, | |
| "learning_rate": 6.076740885288479e-07, | |
| "loss": 0.2644, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "grad_norm": 0.24929356810208964, | |
| "learning_rate": 5.994057497592032e-07, | |
| "loss": 0.2688, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 1.7815384615384615, | |
| "grad_norm": 0.25711900466450466, | |
| "learning_rate": 5.911923104577455e-07, | |
| "loss": 0.2692, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 1.7830769230769232, | |
| "grad_norm": 0.26857644105403744, | |
| "learning_rate": 5.830338185909545e-07, | |
| "loss": 0.27, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 1.7846153846153845, | |
| "grad_norm": 0.26355737383923983, | |
| "learning_rate": 5.749303218044234e-07, | |
| "loss": 0.2685, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 1.7861538461538462, | |
| "grad_norm": 0.25433788202515617, | |
| "learning_rate": 5.668818674225684e-07, | |
| "loss": 0.2705, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 1.7876923076923077, | |
| "grad_norm": 0.2540172872801744, | |
| "learning_rate": 5.588885024483648e-07, | |
| "loss": 0.2683, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 1.7892307692307692, | |
| "grad_norm": 0.2557887226091531, | |
| "learning_rate": 5.509502735630601e-07, | |
| "loss": 0.2759, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 1.790769230769231, | |
| "grad_norm": 0.265619907163393, | |
| "learning_rate": 5.430672271259096e-07, | |
| "loss": 0.2648, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 1.7923076923076922, | |
| "grad_norm": 0.2623517115628094, | |
| "learning_rate": 5.352394091739022e-07, | |
| "loss": 0.2647, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 1.7938461538461539, | |
| "grad_norm": 0.27799113214084537, | |
| "learning_rate": 5.274668654214931e-07, | |
| "loss": 0.2671, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 1.7953846153846154, | |
| "grad_norm": 0.25386965217553553, | |
| "learning_rate": 5.197496412603365e-07, | |
| "loss": 0.2813, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 1.7969230769230768, | |
| "grad_norm": 0.26600539307745497, | |
| "learning_rate": 5.120877817590197e-07, | |
| "loss": 0.2749, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 1.7984615384615386, | |
| "grad_norm": 0.2598010369561241, | |
| "learning_rate": 5.044813316627994e-07, | |
| "loss": 0.2672, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "grad_norm": 0.28224438868540486, | |
| "learning_rate": 4.969303353933408e-07, | |
| "loss": 0.2745, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 1.8015384615384615, | |
| "grad_norm": 0.2805661553631755, | |
| "learning_rate": 4.894348370484648e-07, | |
| "loss": 0.2744, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 1.803076923076923, | |
| "grad_norm": 0.25794334710550715, | |
| "learning_rate": 4.819948804018771e-07, | |
| "loss": 0.2699, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 1.8046153846153845, | |
| "grad_norm": 0.25722518471927297, | |
| "learning_rate": 4.746105089029229e-07, | |
| "loss": 0.2774, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 1.8061538461538462, | |
| "grad_norm": 0.25812083560921784, | |
| "learning_rate": 4.6728176567633065e-07, | |
| "loss": 0.2727, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 1.8076923076923077, | |
| "grad_norm": 0.25551672554775057, | |
| "learning_rate": 4.6000869352195607e-07, | |
| "loss": 0.2814, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 1.8092307692307692, | |
| "grad_norm": 0.261306292310136, | |
| "learning_rate": 4.5279133491454406e-07, | |
| "loss": 0.2773, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 1.810769230769231, | |
| "grad_norm": 0.2612795334066712, | |
| "learning_rate": 4.4562973200346413e-07, | |
| "loss": 0.2844, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 1.8123076923076922, | |
| "grad_norm": 0.2557868514983067, | |
| "learning_rate": 4.385239266124752e-07, | |
| "loss": 0.2684, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 1.8138461538461539, | |
| "grad_norm": 0.2621733323408126, | |
| "learning_rate": 4.314739602394791e-07, | |
| "loss": 0.2752, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 1.8153846153846154, | |
| "grad_norm": 0.25941984414296076, | |
| "learning_rate": 4.2447987405628054e-07, | |
| "loss": 0.2789, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 1.8169230769230769, | |
| "grad_norm": 0.25937657312299867, | |
| "learning_rate": 4.1754170890833777e-07, | |
| "loss": 0.2644, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 1.8184615384615386, | |
| "grad_norm": 0.2826036343635729, | |
| "learning_rate": 4.106595053145357e-07, | |
| "loss": 0.2688, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 1.8199999999999998, | |
| "grad_norm": 0.25844365974175953, | |
| "learning_rate": 4.038333034669406e-07, | |
| "loss": 0.2642, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 1.8215384615384616, | |
| "grad_norm": 0.26940689160820397, | |
| "learning_rate": 3.9706314323056936e-07, | |
| "loss": 0.2734, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 1.823076923076923, | |
| "grad_norm": 0.25793667787420654, | |
| "learning_rate": 3.903490641431573e-07, | |
| "loss": 0.282, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 1.8246153846153845, | |
| "grad_norm": 0.26494408613001746, | |
| "learning_rate": 3.8369110541492396e-07, | |
| "loss": 0.2784, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 1.8261538461538462, | |
| "grad_norm": 0.26227052155564934, | |
| "learning_rate": 3.770893059283465e-07, | |
| "loss": 0.2819, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 1.8276923076923077, | |
| "grad_norm": 0.2602766507292804, | |
| "learning_rate": 3.705437042379334e-07, | |
| "loss": 0.2682, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 1.8292307692307692, | |
| "grad_norm": 0.2687663797936269, | |
| "learning_rate": 3.6405433856999684e-07, | |
| "loss": 0.2766, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 1.830769230769231, | |
| "grad_norm": 0.2638930778071268, | |
| "learning_rate": 3.5762124682242936e-07, | |
| "loss": 0.274, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 1.8323076923076922, | |
| "grad_norm": 0.26708725438168995, | |
| "learning_rate": 3.5124446656448654e-07, | |
| "loss": 0.2657, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 1.833846153846154, | |
| "grad_norm": 0.26563786184808197, | |
| "learning_rate": 3.4492403503656236e-07, | |
| "loss": 0.2736, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 1.8353846153846154, | |
| "grad_norm": 0.25267333891215393, | |
| "learning_rate": 3.3865998914997645e-07, | |
| "loss": 0.2669, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 1.8369230769230769, | |
| "grad_norm": 0.2670140474295601, | |
| "learning_rate": 3.324523654867551e-07, | |
| "loss": 0.2823, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 1.8384615384615386, | |
| "grad_norm": 0.25918910373856735, | |
| "learning_rate": 3.2630120029942034e-07, | |
| "loss": 0.2728, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 1.8399999999999999, | |
| "grad_norm": 0.2605937731790264, | |
| "learning_rate": 3.2020652951077256e-07, | |
| "loss": 0.2738, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 1.8415384615384616, | |
| "grad_norm": 0.2653165480130905, | |
| "learning_rate": 3.1416838871368925e-07, | |
| "loss": 0.2682, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 1.843076923076923, | |
| "grad_norm": 0.25866129288975076, | |
| "learning_rate": 3.081868131709109e-07, | |
| "loss": 0.2711, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 1.8446153846153845, | |
| "grad_norm": 0.2659815203267659, | |
| "learning_rate": 3.0226183781483897e-07, | |
| "loss": 0.2716, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 1.8461538461538463, | |
| "grad_norm": 0.26552289033198906, | |
| "learning_rate": 2.963934972473259e-07, | |
| "loss": 0.2804, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 1.8476923076923077, | |
| "grad_norm": 0.24900482766301524, | |
| "learning_rate": 2.905818257394799e-07, | |
| "loss": 0.2768, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 1.8492307692307692, | |
| "grad_norm": 0.24947347437293466, | |
| "learning_rate": 2.848268572314616e-07, | |
| "loss": 0.2783, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 1.8507692307692307, | |
| "grad_norm": 0.25930581415360415, | |
| "learning_rate": 2.791286253322856e-07, | |
| "loss": 0.2614, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 1.8523076923076922, | |
| "grad_norm": 0.26542775008735076, | |
| "learning_rate": 2.734871633196246e-07, | |
| "loss": 0.28, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 1.853846153846154, | |
| "grad_norm": 0.27067867703064946, | |
| "learning_rate": 2.679025041396155e-07, | |
| "loss": 0.2663, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 1.8553846153846154, | |
| "grad_norm": 0.25825385919738536, | |
| "learning_rate": 2.6237468040666515e-07, | |
| "loss": 0.2789, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 1.856923076923077, | |
| "grad_norm": 0.250639299715114, | |
| "learning_rate": 2.569037244032657e-07, | |
| "loss": 0.2642, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 1.8584615384615386, | |
| "grad_norm": 0.2627722533489089, | |
| "learning_rate": 2.5148966807979733e-07, | |
| "loss": 0.2725, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 1.8599999999999999, | |
| "grad_norm": 0.25698102394911715, | |
| "learning_rate": 2.461325430543482e-07, | |
| "loss": 0.2624, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 1.8615384615384616, | |
| "grad_norm": 0.27170513125540613, | |
| "learning_rate": 2.4083238061252565e-07, | |
| "loss": 0.266, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 1.863076923076923, | |
| "grad_norm": 0.26016401860293503, | |
| "learning_rate": 2.355892117072789e-07, | |
| "loss": 0.2754, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 1.8646153846153846, | |
| "grad_norm": 0.2573951151273573, | |
| "learning_rate": 2.3040306695871319e-07, | |
| "loss": 0.2771, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 1.8661538461538463, | |
| "grad_norm": 0.2578731976600654, | |
| "learning_rate": 2.2527397665391026e-07, | |
| "loss": 0.2766, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 1.8676923076923075, | |
| "grad_norm": 0.24145426645886497, | |
| "learning_rate": 2.2020197074675952e-07, | |
| "loss": 0.2555, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 1.8692307692307693, | |
| "grad_norm": 0.2651433209823767, | |
| "learning_rate": 2.1518707885777147e-07, | |
| "loss": 0.278, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 1.8707692307692307, | |
| "grad_norm": 0.24881430546225117, | |
| "learning_rate": 2.1022933027391555e-07, | |
| "loss": 0.2798, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 1.8723076923076922, | |
| "grad_norm": 0.25467344376243073, | |
| "learning_rate": 2.0532875394844053e-07, | |
| "loss": 0.2683, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 1.873846153846154, | |
| "grad_norm": 0.26531380563696, | |
| "learning_rate": 2.0048537850071326e-07, | |
| "loss": 0.2622, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 1.8753846153846154, | |
| "grad_norm": 0.2754942481670186, | |
| "learning_rate": 1.9569923221604224e-07, | |
| "loss": 0.2701, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 1.876923076923077, | |
| "grad_norm": 0.26202066271597907, | |
| "learning_rate": 1.909703430455223e-07, | |
| "loss": 0.2701, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 1.8784615384615386, | |
| "grad_norm": 0.26462179129135577, | |
| "learning_rate": 1.8629873860586567e-07, | |
| "loss": 0.2619, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 0.2599626125269344, | |
| "learning_rate": 1.8168444617924107e-07, | |
| "loss": 0.2714, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 1.8815384615384616, | |
| "grad_norm": 0.2717438370705554, | |
| "learning_rate": 1.7712749271311392e-07, | |
| "loss": 0.2695, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 1.883076923076923, | |
| "grad_norm": 0.2594326713284917, | |
| "learning_rate": 1.7262790482009184e-07, | |
| "loss": 0.2717, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 1.8846153846153846, | |
| "grad_norm": 0.2511033817455497, | |
| "learning_rate": 1.681857087777672e-07, | |
| "loss": 0.2678, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 1.8861538461538463, | |
| "grad_norm": 0.26691991336692045, | |
| "learning_rate": 1.6380093052856482e-07, | |
| "loss": 0.2691, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 1.8876923076923076, | |
| "grad_norm": 0.2631060513470928, | |
| "learning_rate": 1.5947359567958677e-07, | |
| "loss": 0.2752, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 1.8892307692307693, | |
| "grad_norm": 0.2551511842652523, | |
| "learning_rate": 1.5520372950246888e-07, | |
| "loss": 0.2703, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 1.8907692307692308, | |
| "grad_norm": 0.2649291705796171, | |
| "learning_rate": 1.5099135693322776e-07, | |
| "loss": 0.2737, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 1.8923076923076922, | |
| "grad_norm": 0.2641129353469266, | |
| "learning_rate": 1.4683650257211967e-07, | |
| "loss": 0.2696, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 1.893846153846154, | |
| "grad_norm": 0.25763156609824617, | |
| "learning_rate": 1.4273919068349184e-07, | |
| "loss": 0.2681, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 1.8953846153846152, | |
| "grad_norm": 0.2544835090162467, | |
| "learning_rate": 1.386994451956436e-07, | |
| "loss": 0.267, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 1.896923076923077, | |
| "grad_norm": 0.2658293069336348, | |
| "learning_rate": 1.3471728970068986e-07, | |
| "loss": 0.2708, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 1.8984615384615384, | |
| "grad_norm": 0.2667104526648449, | |
| "learning_rate": 1.3079274745441794e-07, | |
| "loss": 0.265, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "grad_norm": 0.24963969373244324, | |
| "learning_rate": 1.2692584137615205e-07, | |
| "loss": 0.2709, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 1.9015384615384616, | |
| "grad_norm": 0.26247731750631575, | |
| "learning_rate": 1.231165940486234e-07, | |
| "loss": 0.2701, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 1.903076923076923, | |
| "grad_norm": 0.2725078089641997, | |
| "learning_rate": 1.1936502771783488e-07, | |
| "loss": 0.2728, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 1.9046153846153846, | |
| "grad_norm": 0.25986490859938516, | |
| "learning_rate": 1.1567116429293424e-07, | |
| "loss": 0.2765, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 1.9061538461538463, | |
| "grad_norm": 0.2730232582178096, | |
| "learning_rate": 1.1203502534608113e-07, | |
| "loss": 0.2663, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 1.9076923076923076, | |
| "grad_norm": 0.25156013265415067, | |
| "learning_rate": 1.0845663211232704e-07, | |
| "loss": 0.2707, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 1.9092307692307693, | |
| "grad_norm": 0.2590488093159212, | |
| "learning_rate": 1.0493600548948879e-07, | |
| "loss": 0.2811, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 1.9107692307692308, | |
| "grad_norm": 0.24711167458443112, | |
| "learning_rate": 1.0147316603802415e-07, | |
| "loss": 0.27, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 1.9123076923076923, | |
| "grad_norm": 0.25768794048943483, | |
| "learning_rate": 9.806813398091419e-08, | |
| "loss": 0.2687, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 1.913846153846154, | |
| "grad_norm": 0.25199192932976877, | |
| "learning_rate": 9.47209292035467e-08, | |
| "loss": 0.2748, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 1.9153846153846152, | |
| "grad_norm": 0.2647710191663874, | |
| "learning_rate": 9.143157125359514e-08, | |
| "loss": 0.2727, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 1.916923076923077, | |
| "grad_norm": 0.24735239434412856, | |
| "learning_rate": 8.82000793409088e-08, | |
| "loss": 0.2768, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 1.9184615384615384, | |
| "grad_norm": 0.2582071850232192, | |
| "learning_rate": 8.502647233740169e-08, | |
| "loss": 0.2758, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 0.26608807486798247, | |
| "learning_rate": 8.191076877693605e-08, | |
| "loss": 0.2726, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 1.9215384615384616, | |
| "grad_norm": 0.2674053658937129, | |
| "learning_rate": 7.885298685522235e-08, | |
| "loss": 0.2719, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 1.9230769230769231, | |
| "grad_norm": 0.28414767606911856, | |
| "learning_rate": 7.585314442970282e-08, | |
| "loss": 0.2706, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 1.9246153846153846, | |
| "grad_norm": 0.26825434762357314, | |
| "learning_rate": 7.291125901946027e-08, | |
| "loss": 0.2694, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 1.926153846153846, | |
| "grad_norm": 0.2618931427864471, | |
| "learning_rate": 7.002734780510279e-08, | |
| "loss": 0.2708, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 1.9276923076923076, | |
| "grad_norm": 0.26165575374832156, | |
| "learning_rate": 6.720142762867032e-08, | |
| "loss": 0.2729, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 1.9292307692307693, | |
| "grad_norm": 0.2618441624570343, | |
| "learning_rate": 6.443351499353823e-08, | |
| "loss": 0.2719, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 1.9307692307692308, | |
| "grad_norm": 0.2587712650866823, | |
| "learning_rate": 6.172362606431281e-08, | |
| "loss": 0.2724, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 1.9323076923076923, | |
| "grad_norm": 0.2486902402553849, | |
| "learning_rate": 5.907177666674813e-08, | |
| "loss": 0.2844, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 1.933846153846154, | |
| "grad_norm": 0.26350210307540733, | |
| "learning_rate": 5.647798228764156e-08, | |
| "loss": 0.2701, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 1.9353846153846153, | |
| "grad_norm": 0.2635749257649734, | |
| "learning_rate": 5.394225807475284e-08, | |
| "loss": 0.2663, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 1.936923076923077, | |
| "grad_norm": 0.25076562478883774, | |
| "learning_rate": 5.146461883671072e-08, | |
| "loss": 0.2637, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 1.9384615384615385, | |
| "grad_norm": 0.25547826529213463, | |
| "learning_rate": 4.9045079042926434e-08, | |
| "loss": 0.2787, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "grad_norm": 0.2367090690776121, | |
| "learning_rate": 4.6683652823513725e-08, | |
| "loss": 0.2819, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 1.9415384615384617, | |
| "grad_norm": 0.2512139791982206, | |
| "learning_rate": 4.438035396920004e-08, | |
| "loss": 0.2746, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 1.943076923076923, | |
| "grad_norm": 0.2576870779313507, | |
| "learning_rate": 4.2135195931249925e-08, | |
| "loss": 0.2683, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 1.9446153846153846, | |
| "grad_norm": 0.2742765354640819, | |
| "learning_rate": 3.9948191821386205e-08, | |
| "loss": 0.2651, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 1.9461538461538461, | |
| "grad_norm": 0.2705148767586398, | |
| "learning_rate": 3.7819354411713364e-08, | |
| "loss": 0.2734, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 1.9476923076923076, | |
| "grad_norm": 0.24567267475694976, | |
| "learning_rate": 3.5748696134639825e-08, | |
| "loss": 0.2665, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 1.9492307692307693, | |
| "grad_norm": 0.24414183004306245, | |
| "learning_rate": 3.373622908280916e-08, | |
| "loss": 0.2681, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 1.9507692307692308, | |
| "grad_norm": 0.2569335080497009, | |
| "learning_rate": 3.178196500903008e-08, | |
| "loss": 0.2719, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 1.9523076923076923, | |
| "grad_norm": 0.27125049629442594, | |
| "learning_rate": 2.988591532620322e-08, | |
| "loss": 0.263, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 1.953846153846154, | |
| "grad_norm": 0.2540792040831073, | |
| "learning_rate": 2.8048091107258925e-08, | |
| "loss": 0.2775, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 1.9553846153846153, | |
| "grad_norm": 0.26516722010683846, | |
| "learning_rate": 2.6268503085089547e-08, | |
| "loss": 0.2749, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 1.956923076923077, | |
| "grad_norm": 0.2555234354524852, | |
| "learning_rate": 2.4547161652488382e-08, | |
| "loss": 0.2781, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 1.9584615384615385, | |
| "grad_norm": 0.25473215083860884, | |
| "learning_rate": 2.2884076862089712e-08, | |
| "loss": 0.2769, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "grad_norm": 0.2579144478345488, | |
| "learning_rate": 2.1279258426308848e-08, | |
| "loss": 0.2618, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 1.9615384615384617, | |
| "grad_norm": 0.26831411502080194, | |
| "learning_rate": 1.973271571728441e-08, | |
| "loss": 0.2683, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 1.963076923076923, | |
| "grad_norm": 0.2699221453086357, | |
| "learning_rate": 1.824445776682504e-08, | |
| "loss": 0.2638, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 1.9646153846153847, | |
| "grad_norm": 0.26636229804375944, | |
| "learning_rate": 1.6814493266357202e-08, | |
| "loss": 0.2719, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 1.9661538461538461, | |
| "grad_norm": 0.25754559387076065, | |
| "learning_rate": 1.5442830566874123e-08, | |
| "loss": 0.2723, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 1.9676923076923076, | |
| "grad_norm": 0.27746360300811806, | |
| "learning_rate": 1.4129477678884728e-08, | |
| "loss": 0.2748, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 1.9692307692307693, | |
| "grad_norm": 0.26116574851475305, | |
| "learning_rate": 1.2874442272369225e-08, | |
| "loss": 0.2758, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 1.9707692307692306, | |
| "grad_norm": 0.2738399417814919, | |
| "learning_rate": 1.1677731676733584e-08, | |
| "loss": 0.2693, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 1.9723076923076923, | |
| "grad_norm": 0.24827608250119376, | |
| "learning_rate": 1.053935288076957e-08, | |
| "loss": 0.2763, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 1.9738461538461538, | |
| "grad_norm": 0.264826230750588, | |
| "learning_rate": 9.459312532608122e-09, | |
| "loss": 0.2686, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 1.9753846153846153, | |
| "grad_norm": 0.25756693518005147, | |
| "learning_rate": 8.437616939683812e-09, | |
| "loss": 0.2705, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 1.976923076923077, | |
| "grad_norm": 0.26183486780931053, | |
| "learning_rate": 7.474272068698219e-09, | |
| "loss": 0.2765, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 1.9784615384615385, | |
| "grad_norm": 0.25258521345991264, | |
| "learning_rate": 6.569283545587724e-09, | |
| "loss": 0.2699, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "grad_norm": 0.2580148577432403, | |
| "learning_rate": 5.722656655482439e-09, | |
| "loss": 0.2581, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 1.9815384615384617, | |
| "grad_norm": 0.2556746838024097, | |
| "learning_rate": 4.9343963426840006e-09, | |
| "loss": 0.2744, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 1.983076923076923, | |
| "grad_norm": 0.25211453817832763, | |
| "learning_rate": 4.204507210633368e-09, | |
| "loss": 0.2794, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 1.9846153846153847, | |
| "grad_norm": 0.2616628377298744, | |
| "learning_rate": 3.5329935218819668e-09, | |
| "loss": 0.2795, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 1.9861538461538462, | |
| "grad_norm": 0.2569984403141055, | |
| "learning_rate": 2.9198591980705847e-09, | |
| "loss": 0.2806, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 1.9876923076923076, | |
| "grad_norm": 0.2572173040806931, | |
| "learning_rate": 2.3651078199016244e-09, | |
| "loss": 0.2704, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 1.9892307692307694, | |
| "grad_norm": 0.26173620976106304, | |
| "learning_rate": 1.8687426271246646e-09, | |
| "loss": 0.2719, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 1.9907692307692306, | |
| "grad_norm": 0.24970039466414837, | |
| "learning_rate": 1.430766518512039e-09, | |
| "loss": 0.2694, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 1.9923076923076923, | |
| "grad_norm": 0.28147304798836176, | |
| "learning_rate": 1.0511820518432915e-09, | |
| "loss": 0.2629, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 1.9938461538461538, | |
| "grad_norm": 0.26120052432789, | |
| "learning_rate": 7.299914438929634e-10, | |
| "loss": 0.2619, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 1.9953846153846153, | |
| "grad_norm": 0.26837836790650904, | |
| "learning_rate": 4.671965704128312e-10, | |
| "loss": 0.2773, | |
| "step": 1297 | |
| }, | |
| { | |
| "epoch": 1.996923076923077, | |
| "grad_norm": 0.2474473453712718, | |
| "learning_rate": 2.6279896612524393e-10, | |
| "loss": 0.2753, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 1.9984615384615385, | |
| "grad_norm": 0.2664258429360584, | |
| "learning_rate": 1.167998247131319e-10, | |
| "loss": 0.2671, | |
| "step": 1299 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.27100229636351086, | |
| "learning_rate": 2.919999881001445e-11, | |
| "loss": 0.2669, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "step": 1300, | |
| "total_flos": 1034035157860352.0, | |
| "train_loss": 0.31700336213295277, | |
| "train_runtime": 43306.5433, | |
| "train_samples_per_second": 3.842, | |
| "train_steps_per_second": 0.03 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1300, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 3000000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1034035157860352.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |