{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.7037037037037037,
  "eval_steps": 100,
  "global_step": 1100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003367003367003367,
      "grad_norm": 190.2997283935547,
      "learning_rate": 6.711409395973154e-07,
      "loss": 13.9272,
      "step": 1
    },
    {
      "epoch": 0.006734006734006734,
      "grad_norm": 196.5933074951172,
      "learning_rate": 1.3422818791946309e-06,
      "loss": 14.3753,
      "step": 2
    },
    {
      "epoch": 0.010101010101010102,
      "grad_norm": 198.02767944335938,
      "learning_rate": 2.013422818791946e-06,
      "loss": 14.4143,
      "step": 3
    },
    {
      "epoch": 0.013468013468013467,
      "grad_norm": 186.30801391601562,
      "learning_rate": 2.6845637583892617e-06,
      "loss": 13.7729,
      "step": 4
    },
    {
      "epoch": 0.016835016835016835,
      "grad_norm": 129.32237243652344,
      "learning_rate": 3.3557046979865773e-06,
      "loss": 11.4082,
      "step": 5
    },
    {
      "epoch": 0.020202020202020204,
      "grad_norm": 123.9930191040039,
      "learning_rate": 4.026845637583892e-06,
      "loss": 11.5581,
      "step": 6
    },
    {
      "epoch": 0.02356902356902357,
      "grad_norm": 102.4565658569336,
      "learning_rate": 4.697986577181209e-06,
      "loss": 9.8311,
      "step": 7
    },
    {
      "epoch": 0.026936026936026935,
      "grad_norm": 98.7117919921875,
      "learning_rate": 5.3691275167785235e-06,
      "loss": 9.825,
      "step": 8
    },
    {
      "epoch": 0.030303030303030304,
      "grad_norm": 121.9065170288086,
      "learning_rate": 6.04026845637584e-06,
      "loss": 8.5157,
      "step": 9
    },
    {
      "epoch": 0.03367003367003367,
      "grad_norm": 93.352294921875,
      "learning_rate": 6.7114093959731546e-06,
      "loss": 7.6328,
      "step": 10
    },
    {
      "epoch": 0.037037037037037035,
      "grad_norm": 108.89420318603516,
      "learning_rate": 7.382550335570471e-06,
      "loss": 7.1598,
      "step": 11
    },
    {
      "epoch": 0.04040404040404041,
      "grad_norm": 191.65274047851562,
      "learning_rate": 8.053691275167785e-06,
      "loss": 6.237,
      "step": 12
    },
    {
      "epoch": 0.04377104377104377,
      "grad_norm": 150.62646484375,
      "learning_rate": 8.724832214765101e-06,
      "loss": 5.7063,
      "step": 13
    },
    {
      "epoch": 0.04713804713804714,
      "grad_norm": 185.48080444335938,
      "learning_rate": 9.395973154362418e-06,
      "loss": 5.093,
      "step": 14
    },
    {
      "epoch": 0.050505050505050504,
      "grad_norm": 1576.556640625,
      "learning_rate": 1.006711409395973e-05,
      "loss": 8.3575,
      "step": 15
    },
    {
      "epoch": 0.05387205387205387,
      "grad_norm": 441.4505310058594,
      "learning_rate": 1.0738255033557047e-05,
      "loss": 4.679,
      "step": 16
    },
    {
      "epoch": 0.05723905723905724,
      "grad_norm": 499.8016357421875,
      "learning_rate": 1.1409395973154363e-05,
      "loss": 3.1432,
      "step": 17
    },
    {
      "epoch": 0.06060606060606061,
      "grad_norm": 472.59747314453125,
      "learning_rate": 1.208053691275168e-05,
      "loss": 2.9237,
      "step": 18
    },
    {
      "epoch": 0.06397306397306397,
      "grad_norm": 506.6687927246094,
      "learning_rate": 1.2751677852348994e-05,
      "loss": 2.6882,
      "step": 19
    },
    {
      "epoch": 0.06734006734006734,
      "grad_norm": 494.16949462890625,
      "learning_rate": 1.3422818791946309e-05,
      "loss": 2.4807,
      "step": 20
    },
    {
      "epoch": 0.0707070707070707,
      "grad_norm": 463.3478698730469,
      "learning_rate": 1.4093959731543624e-05,
      "loss": 2.2508,
      "step": 21
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 422.92401123046875,
      "learning_rate": 1.4765100671140942e-05,
      "loss": 1.9202,
      "step": 22
    },
    {
      "epoch": 0.07744107744107744,
      "grad_norm": 417.1321105957031,
      "learning_rate": 1.5436241610738255e-05,
      "loss": 1.6106,
      "step": 23
    },
    {
      "epoch": 0.08080808080808081,
      "grad_norm": 360.2781677246094,
      "learning_rate": 1.610738255033557e-05,
      "loss": 1.2741,
      "step": 24
    },
    {
      "epoch": 0.08417508417508418,
      "grad_norm": 297.3291015625,
      "learning_rate": 1.6778523489932888e-05,
      "loss": 1.0282,
      "step": 25
    },
    {
      "epoch": 0.08754208754208755,
      "grad_norm": 195.75958251953125,
      "learning_rate": 1.7449664429530202e-05,
      "loss": 0.799,
      "step": 26
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 116.36829376220703,
      "learning_rate": 1.8120805369127517e-05,
      "loss": 0.6593,
      "step": 27
    },
    {
      "epoch": 0.09427609427609428,
      "grad_norm": 70.56578063964844,
      "learning_rate": 1.8791946308724835e-05,
      "loss": 0.5787,
      "step": 28
    },
    {
      "epoch": 0.09764309764309764,
      "grad_norm": 45.22296905517578,
      "learning_rate": 1.946308724832215e-05,
      "loss": 0.5196,
      "step": 29
    },
    {
      "epoch": 0.10101010101010101,
      "grad_norm": 20.37734603881836,
      "learning_rate": 2.013422818791946e-05,
      "loss": 0.4681,
      "step": 30
    },
    {
      "epoch": 0.10437710437710437,
      "grad_norm": 7.735367298126221,
      "learning_rate": 2.080536912751678e-05,
      "loss": 0.4318,
      "step": 31
    },
    {
      "epoch": 0.10774410774410774,
      "grad_norm": 4.360243797302246,
      "learning_rate": 2.1476510067114094e-05,
      "loss": 0.4276,
      "step": 32
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 4.440345287322998,
      "learning_rate": 2.2147651006711412e-05,
      "loss": 0.4463,
      "step": 33
    },
    {
      "epoch": 0.11447811447811448,
      "grad_norm": 26.992700576782227,
      "learning_rate": 2.2818791946308727e-05,
      "loss": 0.4394,
      "step": 34
    },
    {
      "epoch": 0.11784511784511785,
      "grad_norm": 33.81399917602539,
      "learning_rate": 2.348993288590604e-05,
      "loss": 0.6005,
      "step": 35
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 7.8905029296875,
      "learning_rate": 2.416107382550336e-05,
      "loss": 0.4963,
      "step": 36
    },
    {
      "epoch": 0.12457912457912458,
      "grad_norm": 2.6311209201812744,
      "learning_rate": 2.4832214765100674e-05,
      "loss": 0.39,
      "step": 37
    },
    {
      "epoch": 0.12794612794612795,
      "grad_norm": 2.389883041381836,
      "learning_rate": 2.550335570469799e-05,
      "loss": 0.3782,
      "step": 38
    },
    {
      "epoch": 0.13131313131313133,
      "grad_norm": 2.070525646209717,
      "learning_rate": 2.6174496644295304e-05,
      "loss": 0.3592,
      "step": 39
    },
    {
      "epoch": 0.13468013468013468,
      "grad_norm": 5.955089569091797,
      "learning_rate": 2.6845637583892618e-05,
      "loss": 0.3777,
      "step": 40
    },
    {
      "epoch": 0.13804713804713806,
      "grad_norm": 6.50673770904541,
      "learning_rate": 2.7516778523489933e-05,
      "loss": 0.389,
      "step": 41
    },
    {
      "epoch": 0.1414141414141414,
      "grad_norm": 2.0794308185577393,
      "learning_rate": 2.8187919463087248e-05,
      "loss": 0.3618,
      "step": 42
    },
    {
      "epoch": 0.1447811447811448,
      "grad_norm": 1.5477614402770996,
      "learning_rate": 2.885906040268457e-05,
      "loss": 0.3593,
      "step": 43
    },
    {
      "epoch": 0.14814814814814814,
      "grad_norm": 10.740438461303711,
      "learning_rate": 2.9530201342281884e-05,
      "loss": 0.3805,
      "step": 44
    },
    {
      "epoch": 0.15151515151515152,
      "grad_norm": 2.993213176727295,
      "learning_rate": 3.02013422818792e-05,
      "loss": 0.3673,
      "step": 45
    },
    {
      "epoch": 0.15488215488215487,
      "grad_norm": 17.512208938598633,
      "learning_rate": 3.087248322147651e-05,
      "loss": 0.3922,
      "step": 46
    },
    {
      "epoch": 0.15824915824915825,
      "grad_norm": 2.5222012996673584,
      "learning_rate": 3.1543624161073825e-05,
      "loss": 0.3873,
      "step": 47
    },
    {
      "epoch": 0.16161616161616163,
      "grad_norm": 0.8730729222297668,
      "learning_rate": 3.221476510067114e-05,
      "loss": 0.3593,
      "step": 48
    },
    {
      "epoch": 0.16498316498316498,
      "grad_norm": 0.8050268292427063,
      "learning_rate": 3.288590604026846e-05,
      "loss": 0.3491,
      "step": 49
    },
    {
      "epoch": 0.16835016835016836,
      "grad_norm": 0.7536938190460205,
      "learning_rate": 3.3557046979865775e-05,
      "loss": 0.3469,
      "step": 50
    },
    {
      "epoch": 0.1717171717171717,
      "grad_norm": 0.9090268015861511,
      "learning_rate": 3.422818791946309e-05,
      "loss": 0.3663,
      "step": 51
    },
    {
      "epoch": 0.1750841750841751,
      "grad_norm": 0.8775368928909302,
      "learning_rate": 3.4899328859060405e-05,
      "loss": 0.3489,
      "step": 52
    },
    {
      "epoch": 0.17845117845117844,
      "grad_norm": 0.5326427221298218,
      "learning_rate": 3.557046979865772e-05,
      "loss": 0.3466,
      "step": 53
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 0.561137318611145,
      "learning_rate": 3.6241610738255034e-05,
      "loss": 0.3393,
      "step": 54
    },
    {
      "epoch": 0.18518518518518517,
      "grad_norm": 0.8053128123283386,
      "learning_rate": 3.6912751677852356e-05,
      "loss": 0.352,
      "step": 55
    },
    {
      "epoch": 0.18855218855218855,
      "grad_norm": 0.5964087843894958,
      "learning_rate": 3.758389261744967e-05,
      "loss": 0.3507,
      "step": 56
    },
    {
      "epoch": 0.1919191919191919,
      "grad_norm": 0.5998376607894897,
      "learning_rate": 3.8255033557046985e-05,
      "loss": 0.3504,
      "step": 57
    },
    {
      "epoch": 0.19528619528619529,
      "grad_norm": 1.2634875774383545,
      "learning_rate": 3.89261744966443e-05,
      "loss": 0.337,
      "step": 58
    },
    {
      "epoch": 0.19865319865319866,
      "grad_norm": 0.5703901648521423,
      "learning_rate": 3.959731543624161e-05,
      "loss": 0.3408,
      "step": 59
    },
    {
      "epoch": 0.20202020202020202,
      "grad_norm": 0.7656762003898621,
      "learning_rate": 4.026845637583892e-05,
      "loss": 0.3206,
      "step": 60
    },
    {
      "epoch": 0.2053872053872054,
      "grad_norm": 0.6210582852363586,
      "learning_rate": 4.0939597315436244e-05,
      "loss": 0.354,
      "step": 61
    },
    {
      "epoch": 0.20875420875420875,
      "grad_norm": 0.6622840166091919,
      "learning_rate": 4.161073825503356e-05,
      "loss": 0.3439,
      "step": 62
    },
    {
      "epoch": 0.21212121212121213,
      "grad_norm": 0.46426376700401306,
      "learning_rate": 4.228187919463087e-05,
      "loss": 0.3434,
      "step": 63
    },
    {
      "epoch": 0.21548821548821548,
      "grad_norm": 0.38662126660346985,
      "learning_rate": 4.295302013422819e-05,
      "loss": 0.3362,
      "step": 64
    },
    {
      "epoch": 0.21885521885521886,
      "grad_norm": 0.5812459588050842,
      "learning_rate": 4.36241610738255e-05,
      "loss": 0.323,
      "step": 65
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 0.626932680606842,
      "learning_rate": 4.4295302013422824e-05,
      "loss": 0.3427,
      "step": 66
    },
    {
      "epoch": 0.2255892255892256,
      "grad_norm": 0.5491658449172974,
      "learning_rate": 4.496644295302014e-05,
      "loss": 0.3406,
      "step": 67
    },
    {
      "epoch": 0.22895622895622897,
      "grad_norm": 0.4023520052433014,
      "learning_rate": 4.5637583892617453e-05,
      "loss": 0.3328,
      "step": 68
    },
    {
      "epoch": 0.23232323232323232,
      "grad_norm": 0.478535532951355,
      "learning_rate": 4.630872483221477e-05,
      "loss": 0.3402,
      "step": 69
    },
    {
      "epoch": 0.2356902356902357,
      "grad_norm": 0.44869011640548706,
      "learning_rate": 4.697986577181208e-05,
      "loss": 0.3516,
      "step": 70
    },
    {
      "epoch": 0.23905723905723905,
      "grad_norm": 0.4810108244419098,
      "learning_rate": 4.76510067114094e-05,
      "loss": 0.3411,
      "step": 71
    },
    {
      "epoch": 0.24242424242424243,
      "grad_norm": 0.3956281542778015,
      "learning_rate": 4.832214765100672e-05,
      "loss": 0.3395,
      "step": 72
    },
    {
      "epoch": 0.24579124579124578,
      "grad_norm": 0.40301939845085144,
      "learning_rate": 4.8993288590604034e-05,
      "loss": 0.3217,
      "step": 73
    },
    {
      "epoch": 0.24915824915824916,
      "grad_norm": 0.44550034403800964,
      "learning_rate": 4.966442953020135e-05,
      "loss": 0.3257,
      "step": 74
    },
    {
      "epoch": 0.25252525252525254,
      "grad_norm": 0.5890341997146606,
      "learning_rate": 5.033557046979866e-05,
      "loss": 0.3335,
      "step": 75
    },
    {
      "epoch": 0.2558922558922559,
      "grad_norm": 0.8096022009849548,
      "learning_rate": 5.100671140939598e-05,
      "loss": 0.3421,
      "step": 76
    },
    {
      "epoch": 0.25925925925925924,
      "grad_norm": 0.6044747829437256,
      "learning_rate": 5.167785234899329e-05,
      "loss": 0.3266,
      "step": 77
    },
    {
      "epoch": 0.26262626262626265,
      "grad_norm": 0.5191451907157898,
      "learning_rate": 5.234899328859061e-05,
      "loss": 0.331,
      "step": 78
    },
    {
      "epoch": 0.265993265993266,
      "grad_norm": 1.0799261331558228,
      "learning_rate": 5.302013422818792e-05,
      "loss": 0.3243,
      "step": 79
    },
    {
      "epoch": 0.26936026936026936,
      "grad_norm": 5.513405799865723,
      "learning_rate": 5.3691275167785237e-05,
      "loss": 0.379,
      "step": 80
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 0.673650860786438,
      "learning_rate": 5.436241610738255e-05,
      "loss": 0.3482,
      "step": 81
    },
    {
      "epoch": 0.2760942760942761,
      "grad_norm": 1.1485897302627563,
      "learning_rate": 5.5033557046979866e-05,
      "loss": 0.3351,
      "step": 82
    },
    {
      "epoch": 0.27946127946127947,
      "grad_norm": 0.5018780827522278,
      "learning_rate": 5.570469798657718e-05,
      "loss": 0.3077,
      "step": 83
    },
    {
      "epoch": 0.2828282828282828,
      "grad_norm": 4.367802619934082,
      "learning_rate": 5.6375838926174495e-05,
      "loss": 0.3284,
      "step": 84
    },
    {
      "epoch": 0.28619528619528617,
      "grad_norm": 33.46516036987305,
      "learning_rate": 5.704697986577181e-05,
      "loss": 1.0651,
      "step": 85
    },
    {
      "epoch": 0.2895622895622896,
      "grad_norm": 91.36512756347656,
      "learning_rate": 5.771812080536914e-05,
      "loss": 1.7174,
      "step": 86
    },
    {
      "epoch": 0.29292929292929293,
      "grad_norm": 9.666085243225098,
      "learning_rate": 5.838926174496645e-05,
      "loss": 0.5601,
      "step": 87
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 8.608613967895508,
      "learning_rate": 5.906040268456377e-05,
      "loss": 0.3865,
      "step": 88
    },
    {
      "epoch": 0.2996632996632997,
      "grad_norm": 3.025059223175049,
      "learning_rate": 5.973154362416108e-05,
      "loss": 0.358,
      "step": 89
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 9.862916946411133,
      "learning_rate": 6.04026845637584e-05,
      "loss": 0.4464,
      "step": 90
    },
    {
      "epoch": 0.3063973063973064,
      "grad_norm": 11.05635929107666,
      "learning_rate": 6.107382550335571e-05,
      "loss": 0.3977,
      "step": 91
    },
    {
      "epoch": 0.30976430976430974,
      "grad_norm": 1.0226973295211792,
      "learning_rate": 6.174496644295302e-05,
      "loss": 0.3206,
      "step": 92
    },
    {
      "epoch": 0.31313131313131315,
      "grad_norm": 1.007895827293396,
      "learning_rate": 6.241610738255034e-05,
      "loss": 0.3355,
      "step": 93
    },
    {
      "epoch": 0.3164983164983165,
      "grad_norm": 1.5956454277038574,
      "learning_rate": 6.308724832214765e-05,
      "loss": 0.3408,
      "step": 94
    },
    {
      "epoch": 0.31986531986531985,
      "grad_norm": 21.75948715209961,
      "learning_rate": 6.375838926174497e-05,
      "loss": 0.4627,
      "step": 95
    },
    {
      "epoch": 0.32323232323232326,
      "grad_norm": 5.754608154296875,
      "learning_rate": 6.442953020134228e-05,
      "loss": 0.3818,
      "step": 96
    },
    {
      "epoch": 0.3265993265993266,
      "grad_norm": 3.1888318061828613,
      "learning_rate": 6.51006711409396e-05,
      "loss": 0.3713,
      "step": 97
    },
    {
      "epoch": 0.32996632996632996,
      "grad_norm": 4.586446762084961,
      "learning_rate": 6.577181208053692e-05,
      "loss": 0.3394,
      "step": 98
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.9332061409950256,
      "learning_rate": 6.644295302013423e-05,
      "loss": 0.3267,
      "step": 99
    },
    {
      "epoch": 0.3367003367003367,
      "grad_norm": 4.119638442993164,
      "learning_rate": 6.711409395973155e-05,
      "loss": 0.3825,
      "step": 100
    },
    {
      "epoch": 0.3367003367003367,
      "eval_loss": 0.16432031989097595,
      "eval_runtime": 33.0116,
      "eval_samples_per_second": 30.292,
      "eval_steps_per_second": 1.908,
      "step": 100
    },
    {
      "epoch": 0.3400673400673401,
      "grad_norm": 1.244138240814209,
      "learning_rate": 6.778523489932886e-05,
      "loss": 0.3191,
      "step": 101
    },
    {
      "epoch": 0.3434343434343434,
      "grad_norm": 4.564449310302734,
      "learning_rate": 6.845637583892618e-05,
      "loss": 0.3792,
      "step": 102
    },
    {
      "epoch": 0.3468013468013468,
      "grad_norm": 71.92516326904297,
      "learning_rate": 6.912751677852349e-05,
      "loss": 0.9526,
      "step": 103
    },
    {
      "epoch": 0.3501683501683502,
      "grad_norm": 6.8141374588012695,
      "learning_rate": 6.979865771812081e-05,
      "loss": 0.4132,
      "step": 104
    },
    {
      "epoch": 0.35353535353535354,
      "grad_norm": 4.9158616065979,
      "learning_rate": 7.046979865771812e-05,
      "loss": 0.3847,
      "step": 105
    },
    {
      "epoch": 0.3569023569023569,
      "grad_norm": 0.9838681221008301,
      "learning_rate": 7.114093959731544e-05,
      "loss": 0.335,
      "step": 106
    },
    {
      "epoch": 0.3602693602693603,
      "grad_norm": 0.44024720788002014,
      "learning_rate": 7.181208053691275e-05,
      "loss": 0.3159,
      "step": 107
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.5798377394676208,
      "learning_rate": 7.248322147651007e-05,
      "loss": 0.3308,
      "step": 108
    },
    {
      "epoch": 0.367003367003367,
      "grad_norm": 0.5650081038475037,
      "learning_rate": 7.315436241610739e-05,
      "loss": 0.3161,
      "step": 109
    },
    {
      "epoch": 0.37037037037037035,
      "grad_norm": 0.5149471163749695,
      "learning_rate": 7.382550335570471e-05,
      "loss": 0.3291,
      "step": 110
    },
    {
      "epoch": 0.37373737373737376,
      "grad_norm": 0.4448802173137665,
      "learning_rate": 7.449664429530202e-05,
      "loss": 0.3145,
      "step": 111
    },
    {
      "epoch": 0.3771043771043771,
      "grad_norm": 0.5278413891792297,
      "learning_rate": 7.516778523489934e-05,
      "loss": 0.3296,
      "step": 112
    },
    {
      "epoch": 0.38047138047138046,
      "grad_norm": 0.455289363861084,
      "learning_rate": 7.583892617449665e-05,
      "loss": 0.318,
      "step": 113
    },
    {
      "epoch": 0.3838383838383838,
      "grad_norm": 0.5316647291183472,
      "learning_rate": 7.651006711409397e-05,
      "loss": 0.3117,
      "step": 114
    },
    {
      "epoch": 0.3872053872053872,
      "grad_norm": 0.43862929940223694,
      "learning_rate": 7.718120805369128e-05,
      "loss": 0.3062,
      "step": 115
    },
    {
      "epoch": 0.39057239057239057,
      "grad_norm": 12.535127639770508,
      "learning_rate": 7.78523489932886e-05,
      "loss": 0.3557,
      "step": 116
    },
    {
      "epoch": 0.3939393939393939,
      "grad_norm": 15.351152420043945,
      "learning_rate": 7.852348993288591e-05,
      "loss": 0.5181,
      "step": 117
    },
    {
      "epoch": 0.39730639730639733,
      "grad_norm": 11.918878555297852,
      "learning_rate": 7.919463087248322e-05,
      "loss": 0.4005,
      "step": 118
    },
    {
      "epoch": 0.4006734006734007,
      "grad_norm": 9.800668716430664,
      "learning_rate": 7.986577181208054e-05,
      "loss": 0.422,
      "step": 119
    },
    {
      "epoch": 0.40404040404040403,
      "grad_norm": 16.235355377197266,
      "learning_rate": 8.053691275167784e-05,
      "loss": 0.4051,
      "step": 120
    },
    {
      "epoch": 0.4074074074074074,
      "grad_norm": 1.8551958799362183,
      "learning_rate": 8.120805369127518e-05,
      "loss": 0.3506,
      "step": 121
    },
    {
      "epoch": 0.4107744107744108,
      "grad_norm": 3.990302562713623,
      "learning_rate": 8.187919463087249e-05,
      "loss": 0.3318,
      "step": 122
    },
    {
      "epoch": 0.41414141414141414,
      "grad_norm": 22.28190040588379,
      "learning_rate": 8.255033557046981e-05,
      "loss": 0.4316,
      "step": 123
    },
    {
      "epoch": 0.4175084175084175,
      "grad_norm": 1.9532949924468994,
      "learning_rate": 8.322147651006712e-05,
      "loss": 0.3596,
      "step": 124
    },
    {
      "epoch": 0.4208754208754209,
      "grad_norm": 0.8453232645988464,
      "learning_rate": 8.389261744966444e-05,
      "loss": 0.3473,
      "step": 125
    },
    {
      "epoch": 0.42424242424242425,
      "grad_norm": 3.7085459232330322,
      "learning_rate": 8.456375838926175e-05,
      "loss": 0.3527,
      "step": 126
    },
    {
      "epoch": 0.4276094276094276,
      "grad_norm": 1.9306743144989014,
      "learning_rate": 8.523489932885907e-05,
      "loss": 0.3415,
      "step": 127
    },
    {
      "epoch": 0.43097643097643096,
      "grad_norm": 5.023862361907959,
      "learning_rate": 8.590604026845638e-05,
      "loss": 0.3644,
      "step": 128
    },
    {
      "epoch": 0.43434343434343436,
      "grad_norm": 4.241243362426758,
      "learning_rate": 8.65771812080537e-05,
      "loss": 0.4321,
      "step": 129
    },
    {
      "epoch": 0.4377104377104377,
      "grad_norm": 1.7396281957626343,
      "learning_rate": 8.7248322147651e-05,
      "loss": 0.3334,
      "step": 130
    },
    {
      "epoch": 0.44107744107744107,
      "grad_norm": 8.367612838745117,
      "learning_rate": 8.791946308724833e-05,
      "loss": 0.3571,
      "step": 131
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 7.692532539367676,
      "learning_rate": 8.859060402684565e-05,
      "loss": 0.5196,
      "step": 132
    },
    {
      "epoch": 0.4478114478114478,
      "grad_norm": 12.191128730773926,
      "learning_rate": 8.926174496644296e-05,
      "loss": 0.6991,
      "step": 133
    },
    {
      "epoch": 0.4511784511784512,
      "grad_norm": 7.570639133453369,
      "learning_rate": 8.993288590604028e-05,
      "loss": 0.4818,
      "step": 134
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 1.7189193964004517,
      "learning_rate": 9.060402684563759e-05,
      "loss": 0.3728,
      "step": 135
    },
    {
      "epoch": 0.45791245791245794,
      "grad_norm": 9.100985527038574,
      "learning_rate": 9.127516778523491e-05,
      "loss": 0.3869,
      "step": 136
    },
    {
      "epoch": 0.4612794612794613,
      "grad_norm": 9.76489543914795,
      "learning_rate": 9.194630872483221e-05,
      "loss": 0.3861,
      "step": 137
    },
    {
      "epoch": 0.46464646464646464,
      "grad_norm": 3.834136962890625,
      "learning_rate": 9.261744966442954e-05,
      "loss": 0.4222,
      "step": 138
    },
    {
      "epoch": 0.468013468013468,
      "grad_norm": 22.2440242767334,
      "learning_rate": 9.328859060402684e-05,
      "loss": 0.7935,
      "step": 139
    },
    {
      "epoch": 0.4713804713804714,
      "grad_norm": 1.4633365869522095,
      "learning_rate": 9.395973154362417e-05,
      "loss": 0.347,
      "step": 140
    },
    {
      "epoch": 0.47474747474747475,
      "grad_norm": 1.9224159717559814,
      "learning_rate": 9.463087248322147e-05,
      "loss": 0.3299,
      "step": 141
    },
    {
      "epoch": 0.4781144781144781,
      "grad_norm": 3.4107277393341064,
      "learning_rate": 9.53020134228188e-05,
      "loss": 0.4199,
      "step": 142
    },
    {
      "epoch": 0.48148148148148145,
      "grad_norm": 1.4255735874176025,
      "learning_rate": 9.59731543624161e-05,
      "loss": 0.3559,
      "step": 143
    },
    {
      "epoch": 0.48484848484848486,
      "grad_norm": 1.4576934576034546,
      "learning_rate": 9.664429530201344e-05,
      "loss": 0.3274,
      "step": 144
    },
    {
      "epoch": 0.4882154882154882,
      "grad_norm": 1.3531242609024048,
      "learning_rate": 9.731543624161075e-05,
      "loss": 0.3417,
      "step": 145
    },
    {
      "epoch": 0.49158249158249157,
      "grad_norm": 13.97393798828125,
      "learning_rate": 9.798657718120807e-05,
      "loss": 0.3434,
      "step": 146
    },
    {
      "epoch": 0.494949494949495,
      "grad_norm": 0.8413457870483398,
      "learning_rate": 9.865771812080538e-05,
      "loss": 0.3224,
      "step": 147
    },
    {
      "epoch": 0.4983164983164983,
      "grad_norm": 0.41903650760650635,
      "learning_rate": 9.93288590604027e-05,
      "loss": 0.3197,
      "step": 148
    },
    {
      "epoch": 0.5016835016835017,
      "grad_norm": 1.3428220748901367,
      "learning_rate": 0.0001,
      "loss": 0.3184,
      "step": 149
    },
    {
      "epoch": 0.5050505050505051,
      "grad_norm": 0.497494637966156,
      "learning_rate": 9.9999861762256e-05,
      "loss": 0.3064,
      "step": 150
    },
    {
      "epoch": 0.5084175084175084,
      "grad_norm": 0.5110116600990295,
      "learning_rate": 9.999944704978836e-05,
      "loss": 0.3195,
      "step": 151
    },
    {
      "epoch": 0.5117845117845118,
      "grad_norm": 0.4883813261985779,
      "learning_rate": 9.999875586489024e-05,
      "loss": 0.292,
      "step": 152
    },
    {
      "epoch": 0.5151515151515151,
      "grad_norm": 0.44456565380096436,
      "learning_rate": 9.999778821138357e-05,
      "loss": 0.3084,
      "step": 153
    },
    {
      "epoch": 0.5185185185185185,
      "grad_norm": 0.5006658434867859,
      "learning_rate": 9.999654409461896e-05,
      "loss": 0.3031,
      "step": 154
    },
    {
      "epoch": 0.5218855218855218,
      "grad_norm": 0.4398713707923889,
      "learning_rate": 9.999502352147583e-05,
      "loss": 0.3178,
      "step": 155
    },
    {
      "epoch": 0.5252525252525253,
      "grad_norm": 0.4853643476963043,
      "learning_rate": 9.999322650036214e-05,
      "loss": 0.3195,
      "step": 156
    },
    {
      "epoch": 0.5286195286195287,
      "grad_norm": 0.4636339545249939,
      "learning_rate": 9.999115304121457e-05,
      "loss": 0.3052,
      "step": 157
    },
    {
      "epoch": 0.531986531986532,
      "grad_norm": 0.525205671787262,
      "learning_rate": 9.998880315549834e-05,
      "loss": 0.3133,
      "step": 158
    },
    {
      "epoch": 0.5353535353535354,
      "grad_norm": 0.40854206681251526,
      "learning_rate": 9.998617685620714e-05,
      "loss": 0.3076,
      "step": 159
    },
    {
      "epoch": 0.5387205387205387,
      "grad_norm": 0.5355719327926636,
      "learning_rate": 9.998327415786315e-05,
      "loss": 0.3052,
      "step": 160
    },
    {
      "epoch": 0.5420875420875421,
      "grad_norm": 0.3861645460128784,
      "learning_rate": 9.998009507651684e-05,
      "loss": 0.3099,
      "step": 161
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.5338487029075623,
      "learning_rate": 9.997663962974697e-05,
      "loss": 0.3052,
      "step": 162
    },
    {
      "epoch": 0.5488215488215489,
      "grad_norm": 0.45219364762306213,
      "learning_rate": 9.997290783666049e-05,
      "loss": 0.2948,
      "step": 163
    },
    {
      "epoch": 0.5521885521885522,
      "grad_norm": 0.5037462711334229,
      "learning_rate": 9.996889971789235e-05,
      "loss": 0.3019,
      "step": 164
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 0.3949816823005676,
      "learning_rate": 9.996461529560553e-05,
      "loss": 0.3028,
      "step": 165
    },
    {
      "epoch": 0.5589225589225589,
      "grad_norm": 0.3921789824962616,
      "learning_rate": 9.996005459349074e-05,
      "loss": 0.2982,
      "step": 166
    },
    {
      "epoch": 0.5622895622895623,
      "grad_norm": 0.4122919738292694,
      "learning_rate": 9.995521763676645e-05,
      "loss": 0.3071,
      "step": 167
    },
    {
      "epoch": 0.5656565656565656,
      "grad_norm": 0.4212525188922882,
      "learning_rate": 9.995010445217867e-05,
      "loss": 0.3086,
      "step": 168
    },
    {
      "epoch": 0.569023569023569,
      "grad_norm": 0.3997049033641815,
      "learning_rate": 9.994471506800079e-05,
      "loss": 0.2957,
      "step": 169
    },
    {
      "epoch": 0.5723905723905723,
      "grad_norm": 0.34380048513412476,
      "learning_rate": 9.993904951403344e-05,
      "loss": 0.3122,
      "step": 170
    },
    {
      "epoch": 0.5757575757575758,
      "grad_norm": 0.40532243251800537,
      "learning_rate": 9.99331078216044e-05,
      "loss": 0.3055,
      "step": 171
    },
    {
      "epoch": 0.5791245791245792,
      "grad_norm": 0.4095707833766937,
      "learning_rate": 9.992689002356828e-05,
      "loss": 0.2868,
      "step": 172
    },
    {
      "epoch": 0.5824915824915825,
      "grad_norm": 0.41159185767173767,
      "learning_rate": 9.992039615430648e-05,
      "loss": 0.318,
      "step": 173
    },
    {
      "epoch": 0.5858585858585859,
      "grad_norm": 0.3728049397468567,
      "learning_rate": 9.991362624972688e-05,
      "loss": 0.309,
      "step": 174
    },
    {
      "epoch": 0.5892255892255892,
      "grad_norm": 0.3249180018901825,
      "learning_rate": 9.990658034726379e-05,
      "loss": 0.2818,
      "step": 175
    },
    {
      "epoch": 0.5925925925925926,
      "grad_norm": 0.35090282559394836,
      "learning_rate": 9.989925848587756e-05,
      "loss": 0.2839,
      "step": 176
    },
    {
      "epoch": 0.5959595959595959,
      "grad_norm": 0.3364333212375641,
      "learning_rate": 9.989166070605447e-05,
      "loss": 0.3063,
      "step": 177
    },
    {
      "epoch": 0.5993265993265994,
      "grad_norm": 0.4135960340499878,
      "learning_rate": 9.988378704980656e-05,
      "loss": 0.3085,
      "step": 178
    },
    {
      "epoch": 0.6026936026936027,
      "grad_norm": 0.35615649819374084,
      "learning_rate": 9.987563756067129e-05,
      "loss": 0.2955,
      "step": 179
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 0.3038477897644043,
      "learning_rate": 9.986721228371129e-05,
      "loss": 0.291,
      "step": 180
    },
    {
      "epoch": 0.6094276094276094,
      "grad_norm": 0.4663616120815277,
      "learning_rate": 9.985851126551428e-05,
      "loss": 0.3043,
      "step": 181
    },
    {
      "epoch": 0.6127946127946128,
      "grad_norm": 0.42187029123306274,
      "learning_rate": 9.984953455419258e-05,
      "loss": 0.2747,
      "step": 182
    },
    {
      "epoch": 0.6161616161616161,
      "grad_norm": 0.4150826334953308,
      "learning_rate": 9.9840282199383e-05,
      "loss": 0.2854,
      "step": 183
    },
    {
      "epoch": 0.6195286195286195,
      "grad_norm": 0.36844050884246826,
      "learning_rate": 9.983075425224653e-05,
      "loss": 0.2848,
      "step": 184
    },
    {
      "epoch": 0.622895622895623,
      "grad_norm": 0.4171907603740692,
      "learning_rate": 9.982095076546807e-05,
      "loss": 0.3003,
      "step": 185
    },
    {
      "epoch": 0.6262626262626263,
      "grad_norm": 0.3620002269744873,
      "learning_rate": 9.981087179325608e-05,
      "loss": 0.3043,
      "step": 186
    },
    {
      "epoch": 0.6296296296296297,
      "grad_norm": 0.40760573744773865,
      "learning_rate": 9.980051739134233e-05,
      "loss": 0.3059,
      "step": 187
    },
    {
      "epoch": 0.632996632996633,
      "grad_norm": 0.32069963216781616,
      "learning_rate": 9.978988761698161e-05,
      "loss": 0.2947,
      "step": 188
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 0.3327488303184509,
      "learning_rate": 9.977898252895134e-05,
      "loss": 0.2805,
      "step": 189
    },
    {
      "epoch": 0.6397306397306397,
      "grad_norm": 0.3968160152435303,
      "learning_rate": 9.976780218755131e-05,
      "loss": 0.2891,
      "step": 190
    },
    {
      "epoch": 0.6430976430976431,
      "grad_norm": 0.4018626809120178,
      "learning_rate": 9.975634665460332e-05,
      "loss": 0.2965,
      "step": 191
    },
    {
      "epoch": 0.6464646464646465,
      "grad_norm": 0.37805649638175964,
      "learning_rate": 9.974461599345088e-05,
      "loss": 0.3008,
      "step": 192
    },
    {
      "epoch": 0.6498316498316499,
      "grad_norm": 0.44425806403160095,
      "learning_rate": 9.973261026895877e-05,
      "loss": 0.2921,
      "step": 193
    },
    {
      "epoch": 0.6531986531986532,
      "grad_norm": 0.375931054353714,
      "learning_rate": 9.972032954751279e-05,
      "loss": 0.296,
      "step": 194
    },
    {
      "epoch": 0.6565656565656566,
      "grad_norm": 0.44635701179504395,
      "learning_rate": 9.970777389701926e-05,
      "loss": 0.29,
      "step": 195
    },
    {
      "epoch": 0.6599326599326599,
      "grad_norm": 0.28897619247436523,
      "learning_rate": 9.969494338690481e-05,
      "loss": 0.2895,
      "step": 196
    },
    {
      "epoch": 0.6632996632996633,
      "grad_norm": 0.4542882740497589,
      "learning_rate": 9.968183808811586e-05,
      "loss": 0.2887,
      "step": 197
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.3715568780899048,
      "learning_rate": 9.966845807311829e-05,
      "loss": 0.3038,
      "step": 198
    },
    {
      "epoch": 0.67003367003367,
      "grad_norm": 0.36940261721611023,
      "learning_rate": 9.965480341589701e-05,
      "loss": 0.2934,
      "step": 199
    },
    {
      "epoch": 0.6734006734006734,
      "grad_norm": 0.43046656250953674,
      "learning_rate": 9.96408741919556e-05,
      "loss": 0.2951,
      "step": 200
    },
    {
      "epoch": 0.6734006734006734,
      "eval_loss": 0.1412619948387146,
      "eval_runtime": 32.6268,
      "eval_samples_per_second": 30.65,
      "eval_steps_per_second": 1.931,
      "step": 200
    },
    {
      "epoch": 0.6767676767676768,
      "grad_norm": 0.37590286135673523,
      "learning_rate": 9.962667047831584e-05,
      "loss": 0.2922,
      "step": 201
    },
    {
      "epoch": 0.6801346801346801,
      "grad_norm": 0.3418475389480591,
      "learning_rate": 9.961219235351729e-05,
      "loss": 0.2732,
      "step": 202
    },
    {
      "epoch": 0.6835016835016835,
      "grad_norm": 0.3605377674102783,
      "learning_rate": 9.95974398976169e-05,
      "loss": 0.2882,
      "step": 203
    },
    {
      "epoch": 0.6868686868686869,
      "grad_norm": 0.40477219223976135,
      "learning_rate": 9.958241319218848e-05,
      "loss": 0.2859,
      "step": 204
    },
    {
      "epoch": 0.6902356902356902,
      "grad_norm": 0.4034753143787384,
      "learning_rate": 9.95671123203224e-05,
      "loss": 0.2984,
      "step": 205
    },
    {
      "epoch": 0.6936026936026936,
      "grad_norm": 0.3650234043598175,
      "learning_rate": 9.955153736662493e-05,
      "loss": 0.2772,
      "step": 206
    },
    {
      "epoch": 0.696969696969697,
      "grad_norm": 0.47222810983657837,
      "learning_rate": 9.953568841721797e-05,
      "loss": 0.28,
      "step": 207
    },
    {
      "epoch": 0.7003367003367004,
      "grad_norm": 0.3858278691768646,
      "learning_rate": 9.95195655597384e-05,
      "loss": 0.2815,
      "step": 208
    },
    {
      "epoch": 0.7037037037037037,
      "grad_norm": 0.4259450435638428,
      "learning_rate": 9.950316888333775e-05,
      "loss": 0.2965,
      "step": 209
    },
    {
      "epoch": 0.7070707070707071,
      "grad_norm": 0.4309611916542053,
      "learning_rate": 9.948649847868159e-05,
      "loss": 0.2766,
      "step": 210
    },
    {
      "epoch": 0.7104377104377104,
      "grad_norm": 0.4742699861526489,
      "learning_rate": 9.946955443794908e-05,
      "loss": 0.2859,
      "step": 211
    },
    {
      "epoch": 0.7138047138047138,
      "grad_norm": 0.4079667329788208,
      "learning_rate": 9.945233685483246e-05,
      "loss": 0.283,
      "step": 212
    },
    {
      "epoch": 0.7171717171717171,
      "grad_norm": 0.42072775959968567,
      "learning_rate": 9.943484582453653e-05,
      "loss": 0.298,
      "step": 213
    },
    {
      "epoch": 0.7205387205387206,
      "grad_norm": 0.43136894702911377,
      "learning_rate": 9.941708144377813e-05,
      "loss": 0.2693,
      "step": 214
    },
    {
      "epoch": 0.7239057239057239,
      "grad_norm": 0.42598387598991394,
      "learning_rate": 9.939904381078553e-05,
      "loss": 0.2836,
      "step": 215
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.40432655811309814,
      "learning_rate": 9.938073302529804e-05,
      "loss": 0.2844,
      "step": 216
    },
    {
      "epoch": 0.7306397306397306,
      "grad_norm": 0.3417808413505554,
      "learning_rate": 9.93621491885653e-05,
      "loss": 0.2849,
      "step": 217
    },
    {
      "epoch": 0.734006734006734,
      "grad_norm": 0.35036516189575195,
      "learning_rate": 9.934329240334686e-05,
      "loss": 0.2619,
      "step": 218
    },
    {
      "epoch": 0.7373737373737373,
      "grad_norm": 0.38956964015960693,
      "learning_rate": 9.932416277391143e-05,
      "loss": 0.2802,
      "step": 219
    },
    {
      "epoch": 0.7407407407407407,
      "grad_norm": 0.36884164810180664,
      "learning_rate": 9.930476040603653e-05,
      "loss": 0.2961,
      "step": 220
    },
    {
      "epoch": 0.7441077441077442,
      "grad_norm": 0.4145122468471527,
      "learning_rate": 9.928508540700774e-05,
      "loss": 0.2789,
      "step": 221
    },
    {
      "epoch": 0.7474747474747475,
      "grad_norm": 0.36580273509025574,
      "learning_rate": 9.926513788561816e-05,
      "loss": 0.2824,
      "step": 222
    },
    {
      "epoch": 0.7508417508417509,
      "grad_norm": 0.2912370264530182,
      "learning_rate": 9.924491795216777e-05,
      "loss": 0.2811,
      "step": 223
    },
    {
      "epoch": 0.7542087542087542,
      "grad_norm": 0.480868399143219,
      "learning_rate": 9.922442571846293e-05,
      "loss": 0.2853,
      "step": 224
    },
    {
      "epoch": 0.7575757575757576,
      "grad_norm": 0.3405955135822296,
      "learning_rate": 9.920366129781564e-05,
      "loss": 0.2908,
      "step": 225
    },
    {
      "epoch": 0.7609427609427609,
      "grad_norm": 0.34814175963401794,
      "learning_rate": 9.918262480504295e-05,
      "loss": 0.2923,
      "step": 226
    },
    {
      "epoch": 0.7643097643097643,
      "grad_norm": 0.36179670691490173,
      "learning_rate": 9.916131635646635e-05,
      "loss": 0.276,
      "step": 227
    },
    {
      "epoch": 0.7676767676767676,
      "grad_norm": 0.3848663568496704,
      "learning_rate": 9.913973606991113e-05,
      "loss": 0.264,
      "step": 228
    },
    {
      "epoch": 0.7710437710437711,
      "grad_norm": 0.4516603648662567,
      "learning_rate": 9.911788406470569e-05,
      "loss": 0.2854,
      "step": 229
    },
    {
      "epoch": 0.7744107744107744,
      "grad_norm": 0.4367293119430542,
      "learning_rate": 9.90957604616809e-05,
      "loss": 0.2802,
      "step": 230
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 0.41222095489501953,
      "learning_rate": 9.907336538316944e-05,
      "loss": 0.275,
      "step": 231
    },
    {
      "epoch": 0.7811447811447811,
      "grad_norm": 0.4176308810710907,
      "learning_rate": 9.905069895300514e-05,
      "loss": 0.2854,
      "step": 232
    },
    {
      "epoch": 0.7845117845117845,
      "grad_norm": 0.4087597131729126,
      "learning_rate": 9.902776129652223e-05,
      "loss": 0.2868,
      "step": 233
    },
    {
      "epoch": 0.7878787878787878,
      "grad_norm": 0.41595739126205444,
      "learning_rate": 9.900455254055467e-05,
      "loss": 0.2835,
      "step": 234
    },
    {
      "epoch": 0.7912457912457912,
      "grad_norm": 0.5036376118659973,
      "learning_rate": 9.898107281343556e-05,
      "loss": 0.2775,
      "step": 235
    },
    {
      "epoch": 0.7946127946127947,
      "grad_norm": 0.46533098816871643,
      "learning_rate": 9.895732224499625e-05,
      "loss": 0.285,
      "step": 236
    },
    {
      "epoch": 0.797979797979798,
      "grad_norm": 0.4155175983905792,
      "learning_rate": 9.893330096656574e-05,
      "loss": 0.2877,
      "step": 237
    },
    {
      "epoch": 0.8013468013468014,
      "grad_norm": 0.34219178557395935,
      "learning_rate": 9.890900911096992e-05,
      "loss": 0.2751,
      "step": 238
    },
    {
      "epoch": 0.8047138047138047,
      "grad_norm": 0.39359742403030396,
      "learning_rate": 9.888444681253086e-05,
      "loss": 0.2758,
      "step": 239
    },
    {
      "epoch": 0.8080808080808081,
      "grad_norm": 0.3699426054954529,
      "learning_rate": 9.885961420706602e-05,
      "loss": 0.2758,
      "step": 240
    },
    {
      "epoch": 0.8114478114478114,
      "grad_norm": 0.3353779911994934,
      "learning_rate": 9.883451143188753e-05,
      "loss": 0.2891,
      "step": 241
    },
    {
      "epoch": 0.8148148148148148,
      "grad_norm": 0.3476376235485077,
      "learning_rate": 9.880913862580145e-05,
      "loss": 0.2699,
      "step": 242
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.37724611163139343,
      "learning_rate": 9.878349592910692e-05,
      "loss": 0.2759,
      "step": 243
    },
    {
      "epoch": 0.8215488215488216,
      "grad_norm": 0.3629307150840759,
      "learning_rate": 9.875758348359552e-05,
      "loss": 0.2741,
      "step": 244
    },
    {
      "epoch": 0.8249158249158249,
      "grad_norm": 0.35653156042099,
      "learning_rate": 9.873140143255036e-05,
      "loss": 0.2717,
      "step": 245
    },
    {
      "epoch": 0.8282828282828283,
      "grad_norm": 0.37418127059936523,
      "learning_rate": 9.870494992074533e-05,
      "loss": 0.2743,
      "step": 246
    },
    {
      "epoch": 0.8316498316498316,
      "grad_norm": 0.3299245238304138,
      "learning_rate": 9.867822909444434e-05,
      "loss": 0.2751,
      "step": 247
    },
    {
      "epoch": 0.835016835016835,
      "grad_norm": 0.3463493585586548,
      "learning_rate": 9.865123910140046e-05,
      "loss": 0.2778,
      "step": 248
    },
    {
      "epoch": 0.8383838383838383,
      "grad_norm": 0.5460504293441772,
      "learning_rate": 9.862398009085511e-05,
      "loss": 0.2799,
      "step": 249
    },
    {
      "epoch": 0.8417508417508418,
      "grad_norm": 0.3625568151473999,
      "learning_rate": 9.859645221353725e-05,
      "loss": 0.2641,
      "step": 250
    },
    {
      "epoch": 0.8451178451178452,
      "grad_norm": 0.37554433941841125,
      "learning_rate": 9.856865562166256e-05,
      "loss": 0.2625,
      "step": 251
    },
    {
      "epoch": 0.8484848484848485,
      "grad_norm": 0.4169292449951172,
      "learning_rate": 9.854059046893257e-05,
      "loss": 0.2754,
      "step": 252
    },
    {
      "epoch": 0.8518518518518519,
      "grad_norm": 0.4144760072231293,
      "learning_rate": 9.85122569105338e-05,
      "loss": 0.2764,
      "step": 253
    },
    {
      "epoch": 0.8552188552188552,
      "grad_norm": 0.3319230079650879,
      "learning_rate": 9.848365510313695e-05,
      "loss": 0.2812,
      "step": 254
    },
    {
      "epoch": 0.8585858585858586,
      "grad_norm": 0.31481847167015076,
      "learning_rate": 9.845478520489599e-05,
      "loss": 0.2534,
      "step": 255
    },
    {
      "epoch": 0.8619528619528619,
      "grad_norm": 0.37751275300979614,
      "learning_rate": 9.842564737544731e-05,
      "loss": 0.2796,
      "step": 256
    },
    {
      "epoch": 0.8653198653198653,
      "grad_norm": 0.4334275424480438,
      "learning_rate": 9.83962417759088e-05,
      "loss": 0.2829,
      "step": 257
    },
    {
      "epoch": 0.8686868686868687,
      "grad_norm": 0.4017227590084076,
      "learning_rate": 9.836656856887903e-05,
      "loss": 0.2667,
      "step": 258
    },
    {
      "epoch": 0.8720538720538721,
      "grad_norm": 0.42103585600852966,
      "learning_rate": 9.833662791843627e-05,
      "loss": 0.2631,
      "step": 259
    },
    {
      "epoch": 0.8754208754208754,
      "grad_norm": 0.34120380878448486,
      "learning_rate": 9.830641999013768e-05,
      "loss": 0.2613,
      "step": 260
    },
    {
      "epoch": 0.8787878787878788,
      "grad_norm": 0.49804946780204773,
      "learning_rate": 9.827594495101823e-05,
      "loss": 0.2675,
      "step": 261
    },
    {
      "epoch": 0.8821548821548821,
      "grad_norm": 0.3861115574836731,
      "learning_rate": 9.824520296959001e-05,
      "loss": 0.2708,
      "step": 262
    },
    {
      "epoch": 0.8855218855218855,
      "grad_norm": 0.39489248394966125,
      "learning_rate": 9.821419421584107e-05,
      "loss": 0.2831,
      "step": 263
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.3506355881690979,
      "learning_rate": 9.818291886123463e-05,
      "loss": 0.2784,
      "step": 264
    },
    {
      "epoch": 0.8922558922558923,
      "grad_norm": 0.35518354177474976,
      "learning_rate": 9.815137707870805e-05,
      "loss": 0.2671,
      "step": 265
    },
    {
      "epoch": 0.8956228956228957,
      "grad_norm": 0.35561174154281616,
      "learning_rate": 9.811956904267195e-05,
      "loss": 0.2784,
      "step": 266
    },
    {
      "epoch": 0.898989898989899,
      "grad_norm": 0.3117510974407196,
      "learning_rate": 9.808749492900918e-05,
      "loss": 0.2824,
      "step": 267
    },
    {
      "epoch": 0.9023569023569024,
      "grad_norm": 0.34295716881752014,
      "learning_rate": 9.805515491507382e-05,
      "loss": 0.2704,
      "step": 268
    },
    {
      "epoch": 0.9057239057239057,
      "grad_norm": 0.3531172275543213,
      "learning_rate": 9.802254917969032e-05,
      "loss": 0.2712,
      "step": 269
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.3834570646286011,
      "learning_rate": 9.798967790315244e-05,
      "loss": 0.285,
      "step": 270
    },
    {
      "epoch": 0.9124579124579124,
      "grad_norm": 0.2960718274116516,
      "learning_rate": 9.795654126722217e-05,
      "loss": 0.2786,
      "step": 271
    },
    {
      "epoch": 0.9158249158249159,
      "grad_norm": 0.3393447697162628,
      "learning_rate": 9.79231394551289e-05,
      "loss": 0.2841,
      "step": 272
    },
    {
      "epoch": 0.9191919191919192,
      "grad_norm": 0.313174843788147,
      "learning_rate": 9.788947265156827e-05,
      "loss": 0.2605,
      "step": 273
    },
    {
      "epoch": 0.9225589225589226,
      "grad_norm": 0.3173663914203644,
      "learning_rate": 9.785554104270118e-05,
      "loss": 0.2564,
      "step": 274
    },
    {
      "epoch": 0.9259259259259259,
      "grad_norm": 0.36193931102752686,
      "learning_rate": 9.782134481615281e-05,
      "loss": 0.2659,
      "step": 275
    },
    {
      "epoch": 0.9292929292929293,
      "grad_norm": 0.3565308451652527,
      "learning_rate": 9.778688416101154e-05,
      "loss": 0.2734,
      "step": 276
    },
    {
      "epoch": 0.9326599326599326,
      "grad_norm": 0.32475653290748596,
      "learning_rate": 9.775215926782788e-05,
      "loss": 0.2754,
      "step": 277
    },
    {
      "epoch": 0.936026936026936,
      "grad_norm": 0.4006199836730957,
      "learning_rate": 9.771717032861346e-05,
      "loss": 0.2662,
      "step": 278
    },
    {
      "epoch": 0.9393939393939394,
      "grad_norm": 0.31218966841697693,
      "learning_rate": 9.768191753683998e-05,
      "loss": 0.2442,
      "step": 279
    },
    {
      "epoch": 0.9427609427609428,
      "grad_norm": 0.3531815707683563,
      "learning_rate": 9.764640108743808e-05,
      "loss": 0.2485,
      "step": 280
    },
    {
      "epoch": 0.9461279461279462,
      "grad_norm": 0.38060134649276733,
      "learning_rate": 9.761062117679632e-05,
      "loss": 0.2797,
      "step": 281
    },
    {
      "epoch": 0.9494949494949495,
      "grad_norm": 0.416530966758728,
      "learning_rate": 9.757457800276006e-05,
      "loss": 0.2615,
      "step": 282
    },
    {
      "epoch": 0.9528619528619529,
      "grad_norm": 0.3815469741821289,
      "learning_rate": 9.75382717646304e-05,
      "loss": 0.255,
      "step": 283
    },
    {
      "epoch": 0.9562289562289562,
      "grad_norm": 0.41139233112335205,
      "learning_rate": 9.750170266316303e-05,
      "loss": 0.2615,
      "step": 284
    },
    {
      "epoch": 0.9595959595959596,
      "grad_norm": 0.374959260225296,
      "learning_rate": 9.746487090056713e-05,
      "loss": 0.2638,
      "step": 285
    },
    {
      "epoch": 0.9629629629629629,
      "grad_norm": 0.35468292236328125,
      "learning_rate": 9.742777668050434e-05,
      "loss": 0.2572,
      "step": 286
    },
    {
      "epoch": 0.9663299663299664,
      "grad_norm": 0.35659849643707275,
      "learning_rate": 9.739042020808746e-05,
      "loss": 0.266,
      "step": 287
    },
    {
      "epoch": 0.9696969696969697,
      "grad_norm": 0.37296387553215027,
      "learning_rate": 9.735280168987949e-05,
      "loss": 0.2677,
      "step": 288
    },
    {
      "epoch": 0.9730639730639731,
      "grad_norm": 0.34908655285835266,
      "learning_rate": 9.73149213338924e-05,
      "loss": 0.2732,
      "step": 289
    },
    {
      "epoch": 0.9764309764309764,
      "grad_norm": 0.3758234679698944,
      "learning_rate": 9.727677934958599e-05,
      "loss": 0.2738,
      "step": 290
    },
    {
      "epoch": 0.9797979797979798,
      "grad_norm": 0.35159602761268616,
      "learning_rate": 9.723837594786672e-05,
      "loss": 0.2684,
      "step": 291
    },
    {
      "epoch": 0.9831649831649831,
      "grad_norm": 0.33813127875328064,
      "learning_rate": 9.719971134108658e-05,
      "loss": 0.2682,
      "step": 292
    },
    {
      "epoch": 0.9865319865319865,
      "grad_norm": 0.32259315252304077,
      "learning_rate": 9.716078574304189e-05,
      "loss": 0.2721,
      "step": 293
    },
    {
      "epoch": 0.98989898989899,
      "grad_norm": 0.3467171788215637,
      "learning_rate": 9.712159936897213e-05,
      "loss": 0.264,
      "step": 294
    },
    {
      "epoch": 0.9932659932659933,
      "grad_norm": 0.33499976992607117,
      "learning_rate": 9.708215243555875e-05,
      "loss": 0.2636,
      "step": 295
    },
    {
      "epoch": 0.9966329966329966,
      "grad_norm": 0.2971048355102539,
      "learning_rate": 9.704244516092392e-05,
      "loss": 0.2644,
      "step": 296
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.3515819013118744,
      "learning_rate": 9.700247776462943e-05,
      "loss": 0.2587,
      "step": 297
    },
    {
      "epoch": 1.0033670033670035,
      "grad_norm": 0.31730222702026367,
      "learning_rate": 9.696225046767538e-05,
      "loss": 0.2544,
      "step": 298
    },
    {
      "epoch": 1.0067340067340067,
      "grad_norm": 0.3283955752849579,
      "learning_rate": 9.6921763492499e-05,
      "loss": 0.2547,
      "step": 299
    },
    {
      "epoch": 1.0101010101010102,
      "grad_norm": 0.3477189242839813,
      "learning_rate": 9.688101706297341e-05,
      "loss": 0.2639,
      "step": 300
    },
    {
      "epoch": 1.0101010101010102,
      "eval_loss": 0.12977388501167297,
      "eval_runtime": 33.026,
      "eval_samples_per_second": 30.279,
      "eval_steps_per_second": 1.908,
      "step": 300
    },
    {
      "epoch": 1.0134680134680134,
      "grad_norm": 0.37081846594810486,
      "learning_rate": 9.684001140440639e-05,
      "loss": 0.2451,
      "step": 301
    },
    {
      "epoch": 1.0168350168350169,
      "grad_norm": 0.36840128898620605,
      "learning_rate": 9.679874674353913e-05,
      "loss": 0.2619,
      "step": 302
    },
    {
      "epoch": 1.02020202020202,
      "grad_norm": 0.2802034020423889,
      "learning_rate": 9.6757223308545e-05,
      "loss": 0.2617,
      "step": 303
    },
    {
      "epoch": 1.0235690235690236,
      "grad_norm": 0.31149744987487793,
      "learning_rate": 9.67154413290282e-05,
      "loss": 0.2645,
      "step": 304
    },
    {
      "epoch": 1.026936026936027,
      "grad_norm": 0.2791181802749634,
      "learning_rate": 9.667340103602261e-05,
      "loss": 0.2485,
      "step": 305
    },
    {
      "epoch": 1.0303030303030303,
      "grad_norm": 0.28780505061149597,
      "learning_rate": 9.663110266199044e-05,
      "loss": 0.2481,
      "step": 306
    },
    {
      "epoch": 1.0336700336700337,
      "grad_norm": 0.3085958659648895,
      "learning_rate": 9.658854644082098e-05,
      "loss": 0.259,
      "step": 307
    },
    {
      "epoch": 1.037037037037037,
      "grad_norm": 0.3242364227771759,
      "learning_rate": 9.654573260782924e-05,
      "loss": 0.2638,
      "step": 308
    },
    {
      "epoch": 1.0404040404040404,
      "grad_norm": 0.3183690905570984,
      "learning_rate": 9.650266139975474e-05,
      "loss": 0.2682,
      "step": 309
    },
    {
      "epoch": 1.0437710437710437,
      "grad_norm": 0.2918904721736908,
      "learning_rate": 9.645933305476016e-05,
      "loss": 0.2465,
      "step": 310
    },
    {
      "epoch": 1.0471380471380471,
      "grad_norm": 0.3348250091075897,
      "learning_rate": 9.641574781242999e-05,
      "loss": 0.2609,
      "step": 311
    },
    {
      "epoch": 1.0505050505050506,
      "grad_norm": 0.3023556172847748,
      "learning_rate": 9.637190591376926e-05,
      "loss": 0.2496,
      "step": 312
    },
    {
      "epoch": 1.0538720538720538,
      "grad_norm": 0.31095656752586365,
      "learning_rate": 9.632780760120215e-05,
      "loss": 0.2665,
      "step": 313
    },
    {
      "epoch": 1.0572390572390573,
      "grad_norm": 0.2977689802646637,
      "learning_rate": 9.628345311857075e-05,
      "loss": 0.2475,
      "step": 314
    },
    {
      "epoch": 1.0606060606060606,
      "grad_norm": 0.35640037059783936,
      "learning_rate": 9.623884271113359e-05,
      "loss": 0.255,
      "step": 315
    },
    {
      "epoch": 1.063973063973064,
      "grad_norm": 0.3532579839229584,
      "learning_rate": 9.619397662556435e-05,
      "loss": 0.2587,
      "step": 316
    },
    {
      "epoch": 1.0673400673400673,
      "grad_norm": 0.38389119505882263,
      "learning_rate": 9.614885510995047e-05,
      "loss": 0.2744,
      "step": 317
    },
    {
      "epoch": 1.0707070707070707,
      "grad_norm": 0.3254542648792267,
      "learning_rate": 9.610347841379184e-05,
      "loss": 0.2505,
      "step": 318
    },
    {
      "epoch": 1.074074074074074,
      "grad_norm": 0.36234763264656067,
      "learning_rate": 9.605784678799934e-05,
      "loss": 0.259,
      "step": 319
    },
    {
      "epoch": 1.0774410774410774,
      "grad_norm": 0.3419968783855438,
      "learning_rate": 9.60119604848935e-05,
      "loss": 0.2428,
      "step": 320
    },
    {
      "epoch": 1.0808080808080809,
      "grad_norm": 0.3403080105781555,
      "learning_rate": 9.596581975820303e-05,
      "loss": 0.2593,
      "step": 321
    },
    {
      "epoch": 1.0841750841750841,
      "grad_norm": 0.3113173246383667,
      "learning_rate": 9.591942486306358e-05,
      "loss": 0.2659,
      "step": 322
    },
    {
      "epoch": 1.0875420875420876,
      "grad_norm": 0.3434092104434967,
      "learning_rate": 9.587277605601617e-05,
      "loss": 0.2601,
      "step": 323
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 0.36877501010894775,
      "learning_rate": 9.582587359500581e-05,
      "loss": 0.2633,
      "step": 324
    },
    {
      "epoch": 1.0942760942760943,
      "grad_norm": 0.3114217519760132,
      "learning_rate": 9.577871773938011e-05,
      "loss": 0.2459,
      "step": 325
    },
    {
      "epoch": 1.0976430976430978,
      "grad_norm": 0.3608934283256531,
      "learning_rate": 9.573130874988789e-05,
      "loss": 0.2553,
      "step": 326
    },
    {
      "epoch": 1.101010101010101,
      "grad_norm": 0.3732490539550781,
      "learning_rate": 9.568364688867757e-05,
      "loss": 0.2606,
      "step": 327
    },
    {
      "epoch": 1.1043771043771045,
      "grad_norm": 0.3304762840270996,
      "learning_rate": 9.563573241929588e-05,
      "loss": 0.2525,
      "step": 328
    },
    {
      "epoch": 1.1077441077441077,
      "grad_norm": 0.43853023648262024,
      "learning_rate": 9.558756560668636e-05,
      "loss": 0.2537,
      "step": 329
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 0.33033648133277893,
      "learning_rate": 9.553914671718787e-05,
      "loss": 0.2479,
      "step": 330
    },
    {
      "epoch": 1.1144781144781144,
      "grad_norm": 0.3931716978549957,
      "learning_rate": 9.549047601853311e-05,
      "loss": 0.2618,
      "step": 331
    },
    {
      "epoch": 1.1178451178451179,
      "grad_norm": 0.35425615310668945,
      "learning_rate": 9.544155377984722e-05,
      "loss": 0.264,
      "step": 332
    },
    {
      "epoch": 1.121212121212121,
      "grad_norm": 0.3464037775993347,
      "learning_rate": 9.539238027164619e-05,
      "loss": 0.2633,
      "step": 333
    },
    {
      "epoch": 1.1245791245791246,
      "grad_norm": 0.29783573746681213,
      "learning_rate": 9.534295576583538e-05,
      "loss": 0.2713,
      "step": 334
    },
    {
      "epoch": 1.127946127946128,
      "grad_norm": 0.3788033127784729,
      "learning_rate": 9.52932805357081e-05,
      "loss": 0.2402,
      "step": 335
    },
    {
      "epoch": 1.1313131313131313,
      "grad_norm": 0.33695003390312195,
      "learning_rate": 9.524335485594402e-05,
      "loss": 0.2645,
      "step": 336
    },
    {
      "epoch": 1.1346801346801347,
      "grad_norm": 0.2683010399341583,
      "learning_rate": 9.51931790026077e-05,
      "loss": 0.2613,
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.138047138047138, | |
| "grad_norm": 0.3648826479911804, | |
| "learning_rate": 9.514275325314694e-05, | |
| "loss": 0.2497, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.1414141414141414, | |
| "grad_norm": 0.3843471109867096, | |
| "learning_rate": 9.509207788639147e-05, | |
| "loss": 0.257, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.144781144781145, | |
| "grad_norm": 0.34863507747650146, | |
| "learning_rate": 9.504115318255122e-05, | |
| "loss": 0.2552, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.1481481481481481, | |
| "grad_norm": 0.292370080947876, | |
| "learning_rate": 9.498997942321483e-05, | |
| "loss": 0.2599, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.1515151515151516, | |
| "grad_norm": 0.34756267070770264, | |
| "learning_rate": 9.493855689134809e-05, | |
| "loss": 0.2477, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.1548821548821548, | |
| "grad_norm": 0.3180752098560333, | |
| "learning_rate": 9.488688587129242e-05, | |
| "loss": 0.2615, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.1582491582491583, | |
| "grad_norm": 0.36670762300491333, | |
| "learning_rate": 9.48349666487632e-05, | |
| "loss": 0.2648, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.1616161616161615, | |
| "grad_norm": 0.3237256109714508, | |
| "learning_rate": 9.47827995108483e-05, | |
| "loss": 0.2485, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.164983164983165, | |
| "grad_norm": 0.35194486379623413, | |
| "learning_rate": 9.47303847460064e-05, | |
| "loss": 0.2414, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.1683501683501682, | |
| "grad_norm": 0.3115532398223877, | |
| "learning_rate": 9.467772264406545e-05, | |
| "loss": 0.2593, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.1717171717171717, | |
| "grad_norm": 0.42125508189201355, | |
| "learning_rate": 9.462481349622108e-05, | |
| "loss": 0.254, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.1750841750841752, | |
| "grad_norm": 0.47456079721450806, | |
| "learning_rate": 9.457165759503493e-05, | |
| "loss": 0.269, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.1784511784511784, | |
| "grad_norm": 0.2964518666267395, | |
| "learning_rate": 9.451825523443307e-05, | |
| "loss": 0.2601, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.1818181818181819, | |
| "grad_norm": 0.38632655143737793, | |
| "learning_rate": 9.446460670970436e-05, | |
| "loss": 0.254, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.1851851851851851, | |
| "grad_norm": 0.30016183853149414, | |
| "learning_rate": 9.441071231749889e-05, | |
| "loss": 0.2625, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.1885521885521886, | |
| "grad_norm": 0.4124687910079956, | |
| "learning_rate": 9.435657235582616e-05, | |
| "loss": 0.2688, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.1919191919191918, | |
| "grad_norm": 0.33176562190055847, | |
| "learning_rate": 9.430218712405367e-05, | |
| "loss": 0.2558, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.1952861952861953, | |
| "grad_norm": 0.4196166396141052, | |
| "learning_rate": 9.424755692290507e-05, | |
| "loss": 0.2527, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.1986531986531987, | |
| "grad_norm": 0.3760242760181427, | |
| "learning_rate": 9.419268205445862e-05, | |
| "loss": 0.254, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.202020202020202, | |
| "grad_norm": 0.3473700284957886, | |
| "learning_rate": 9.413756282214537e-05, | |
| "loss": 0.2611, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.2053872053872055, | |
| "grad_norm": 0.3585781753063202, | |
| "learning_rate": 9.408219953074772e-05, | |
| "loss": 0.2573, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.2087542087542087, | |
| "grad_norm": 0.34065455198287964, | |
| "learning_rate": 9.402659248639749e-05, | |
| "loss": 0.2553, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.2121212121212122, | |
| "grad_norm": 0.350239634513855, | |
| "learning_rate": 9.397074199657441e-05, | |
| "loss": 0.2582, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.2154882154882154, | |
| "grad_norm": 0.3311632573604584, | |
| "learning_rate": 9.391464837010428e-05, | |
| "loss": 0.2551, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.2188552188552189, | |
| "grad_norm": 0.31825152039527893, | |
| "learning_rate": 9.385831191715735e-05, | |
| "loss": 0.2434, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.2222222222222223, | |
| "grad_norm": 0.3181901276111603, | |
| "learning_rate": 9.380173294924662e-05, | |
| "loss": 0.2738, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.2255892255892256, | |
| "grad_norm": 0.340090274810791, | |
| "learning_rate": 9.374491177922602e-05, | |
| "loss": 0.2413, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.228956228956229, | |
| "grad_norm": 0.3769470453262329, | |
| "learning_rate": 9.368784872128878e-05, | |
| "loss": 0.2511, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.2323232323232323, | |
| "grad_norm": 0.47145962715148926, | |
| "learning_rate": 9.363054409096561e-05, | |
| "loss": 0.2414, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.2356902356902357, | |
| "grad_norm": 0.3955315053462982, | |
| "learning_rate": 9.357299820512304e-05, | |
| "loss": 0.257, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.239057239057239, | |
| "grad_norm": 0.3811996281147003, | |
| "learning_rate": 9.35152113819616e-05, | |
| "loss": 0.2487, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.2424242424242424, | |
| "grad_norm": 0.3641284704208374, | |
| "learning_rate": 9.345718394101411e-05, | |
| "loss": 0.2511, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.2457912457912457, | |
| "grad_norm": 0.34376075863838196, | |
| "learning_rate": 9.339891620314387e-05, | |
| "loss": 0.2589, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.2491582491582491, | |
| "grad_norm": 0.400547593832016, | |
| "learning_rate": 9.334040849054289e-05, | |
| "loss": 0.2497, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.2525252525252526, | |
| "grad_norm": 0.3641180694103241, | |
| "learning_rate": 9.328166112673012e-05, | |
| "loss": 0.2494, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.2558922558922558, | |
| "grad_norm": 0.34279683232307434, | |
| "learning_rate": 9.322267443654972e-05, | |
| "loss": 0.2558, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.2592592592592593, | |
| "grad_norm": 0.36419054865837097, | |
| "learning_rate": 9.316344874616916e-05, | |
| "loss": 0.25, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.2626262626262625, | |
| "grad_norm": 0.34432828426361084, | |
| "learning_rate": 9.310398438307746e-05, | |
| "loss": 0.2495, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.265993265993266, | |
| "grad_norm": 0.35908621549606323, | |
| "learning_rate": 9.304428167608342e-05, | |
| "loss": 0.2523, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.2693602693602695, | |
| "grad_norm": 0.3936247229576111, | |
| "learning_rate": 9.29843409553137e-05, | |
| "loss": 0.2581, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.2727272727272727, | |
| "grad_norm": 0.4111359715461731, | |
| "learning_rate": 9.292416255221113e-05, | |
| "loss": 0.2383, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.2760942760942762, | |
| "grad_norm": 0.3538724482059479, | |
| "learning_rate": 9.286374679953279e-05, | |
| "loss": 0.2396, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.2794612794612794, | |
| "grad_norm": 0.34664541482925415, | |
| "learning_rate": 9.280309403134812e-05, | |
| "loss": 0.2523, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.2828282828282829, | |
| "grad_norm": 0.3238551914691925, | |
| "learning_rate": 9.274220458303727e-05, | |
| "loss": 0.2406, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.2861952861952861, | |
| "grad_norm": 0.3845024108886719, | |
| "learning_rate": 9.268107879128898e-05, | |
| "loss": 0.2545, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.2895622895622896, | |
| "grad_norm": 0.3516189754009247, | |
| "learning_rate": 9.261971699409893e-05, | |
| "loss": 0.2363, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.2929292929292928, | |
| "grad_norm": 0.5296482443809509, | |
| "learning_rate": 9.255811953076776e-05, | |
| "loss": 0.2629, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.2962962962962963, | |
| "grad_norm": 0.36364006996154785, | |
| "learning_rate": 9.249628674189927e-05, | |
| "loss": 0.2377, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.2996632996632997, | |
| "grad_norm": 0.315027117729187, | |
| "learning_rate": 9.243421896939848e-05, | |
| "loss": 0.2411, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.303030303030303, | |
| "grad_norm": 0.3599799871444702, | |
| "learning_rate": 9.237191655646972e-05, | |
| "loss": 0.2512, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.3063973063973064, | |
| "grad_norm": 0.3463782072067261, | |
| "learning_rate": 9.230937984761479e-05, | |
| "loss": 0.2477, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.3097643097643097, | |
| "grad_norm": 0.3749505877494812, | |
| "learning_rate": 9.224660918863104e-05, | |
| "loss": 0.2394, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.3131313131313131, | |
| "grad_norm": 0.39092373847961426, | |
| "learning_rate": 9.218360492660942e-05, | |
| "loss": 0.2446, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.3164983164983166, | |
| "grad_norm": 0.40045619010925293, | |
| "learning_rate": 9.212036740993266e-05, | |
| "loss": 0.238, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.3198653198653199, | |
| "grad_norm": 0.38552188873291016, | |
| "learning_rate": 9.205689698827318e-05, | |
| "loss": 0.2312, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.3232323232323233, | |
| "grad_norm": 0.394648015499115, | |
| "learning_rate": 9.199319401259131e-05, | |
| "loss": 0.2439, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.3265993265993266, | |
| "grad_norm": 0.3606700897216797, | |
| "learning_rate": 9.192925883513328e-05, | |
| "loss": 0.2359, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.32996632996633, | |
| "grad_norm": 0.4096723198890686, | |
| "learning_rate": 9.186509180942928e-05, | |
| "loss": 0.2464, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.3333333333333333, | |
| "grad_norm": 0.4110390543937683, | |
| "learning_rate": 9.18006932902915e-05, | |
| "loss": 0.2387, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.3367003367003367, | |
| "grad_norm": 0.37714695930480957, | |
| "learning_rate": 9.173606363381219e-05, | |
| "loss": 0.242, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.34006734006734, | |
| "grad_norm": 0.33103522658348083, | |
| "learning_rate": 9.167120319736164e-05, | |
| "loss": 0.2551, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.3434343434343434, | |
| "grad_norm": 0.34669333696365356, | |
| "learning_rate": 9.160611233958629e-05, | |
| "loss": 0.2413, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.3468013468013469, | |
| "grad_norm": 0.34055569767951965, | |
| "learning_rate": 9.154079142040668e-05, | |
| "loss": 0.2372, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.3468013468013469, | |
| "eval_loss": 0.11807812005281448, | |
| "eval_runtime": 32.9267, | |
| "eval_samples_per_second": 30.37, | |
| "eval_steps_per_second": 1.913, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.3501683501683501, | |
| "grad_norm": 0.3376658260822296, | |
| "learning_rate": 9.147524080101544e-05, | |
| "loss": 0.2428, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.3535353535353536, | |
| "grad_norm": 0.40585628151893616, | |
| "learning_rate": 9.140946084387538e-05, | |
| "loss": 0.2352, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.3569023569023568, | |
| "grad_norm": 0.34705591201782227, | |
| "learning_rate": 9.134345191271742e-05, | |
| "loss": 0.231, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.3602693602693603, | |
| "grad_norm": 0.3753274381160736, | |
| "learning_rate": 9.127721437253857e-05, | |
| "loss": 0.2366, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.3636363636363638, | |
| "grad_norm": 0.31101006269454956, | |
| "learning_rate": 9.121074858959997e-05, | |
| "loss": 0.2331, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.367003367003367, | |
| "grad_norm": 0.31866899132728577, | |
| "learning_rate": 9.114405493142481e-05, | |
| "loss": 0.2243, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.3703703703703702, | |
| "grad_norm": 0.4138029217720032, | |
| "learning_rate": 9.107713376679634e-05, | |
| "loss": 0.2406, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.3737373737373737, | |
| "grad_norm": 0.32557931542396545, | |
| "learning_rate": 9.100998546575576e-05, | |
| "loss": 0.2405, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.3771043771043772, | |
| "grad_norm": 0.29002058506011963, | |
| "learning_rate": 9.094261039960027e-05, | |
| "loss": 0.2364, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.3804713804713804, | |
| "grad_norm": 0.390799880027771, | |
| "learning_rate": 9.0875008940881e-05, | |
| "loss": 0.2358, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.3838383838383839, | |
| "grad_norm": 0.4212111830711365, | |
| "learning_rate": 9.08071814634008e-05, | |
| "loss": 0.2369, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.387205387205387, | |
| "grad_norm": 0.32682353258132935, | |
| "learning_rate": 9.073912834221241e-05, | |
| "loss": 0.2418, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.3905723905723906, | |
| "grad_norm": 0.29507341980934143, | |
| "learning_rate": 9.067084995361623e-05, | |
| "loss": 0.2418, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.393939393939394, | |
| "grad_norm": 0.36445966362953186, | |
| "learning_rate": 9.060234667515826e-05, | |
| "loss": 0.2354, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.3973063973063973, | |
| "grad_norm": 0.3630370795726776, | |
| "learning_rate": 9.053361888562807e-05, | |
| "loss": 0.2352, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.4006734006734007, | |
| "grad_norm": 0.3828338384628296, | |
| "learning_rate": 9.046466696505662e-05, | |
| "loss": 0.2287, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.404040404040404, | |
| "grad_norm": 0.43802979588508606, | |
| "learning_rate": 9.039549129471423e-05, | |
| "loss": 0.2327, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.4074074074074074, | |
| "grad_norm": 0.374553382396698, | |
| "learning_rate": 9.032609225710846e-05, | |
| "loss": 0.2369, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.410774410774411, | |
| "grad_norm": 0.4133577346801758, | |
| "learning_rate": 9.025647023598196e-05, | |
| "loss": 0.2309, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.4141414141414141, | |
| "grad_norm": 0.39232614636421204, | |
| "learning_rate": 9.018662561631036e-05, | |
| "loss": 0.227, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.4175084175084174, | |
| "grad_norm": 0.3980022370815277, | |
| "learning_rate": 9.011655878430019e-05, | |
| "loss": 0.2319, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.4208754208754208, | |
| "grad_norm": 0.4843936860561371, | |
| "learning_rate": 9.004627012738667e-05, | |
| "loss": 0.2291, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.4242424242424243, | |
| "grad_norm": 0.47197064757347107, | |
| "learning_rate": 8.997576003423159e-05, | |
| "loss": 0.2459, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.4276094276094276, | |
| "grad_norm": 0.48999035358428955, | |
| "learning_rate": 8.990502889472124e-05, | |
| "loss": 0.2397, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.430976430976431, | |
| "grad_norm": 0.3578794598579407, | |
| "learning_rate": 8.983407709996414e-05, | |
| "loss": 0.2268, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.4343434343434343, | |
| "grad_norm": 0.37743687629699707, | |
| "learning_rate": 8.97629050422889e-05, | |
| "loss": 0.2364, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.4377104377104377, | |
| "grad_norm": 0.39339321851730347, | |
| "learning_rate": 8.969151311524214e-05, | |
| "loss": 0.2433, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.4410774410774412, | |
| "grad_norm": 0.3382370173931122, | |
| "learning_rate": 8.96199017135862e-05, | |
| "loss": 0.2383, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.4444444444444444, | |
| "grad_norm": 0.4073336720466614, | |
| "learning_rate": 8.954807123329704e-05, | |
| "loss": 0.2383, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.4478114478114479, | |
| "grad_norm": 0.3423174023628235, | |
| "learning_rate": 8.947602207156198e-05, | |
| "loss": 0.2399, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.4511784511784511, | |
| "grad_norm": 0.29171693325042725, | |
| "learning_rate": 8.940375462677757e-05, | |
| "loss": 0.248, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.4545454545454546, | |
| "grad_norm": 0.3864852786064148, | |
| "learning_rate": 8.933126929854737e-05, | |
| "loss": 0.2533, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.457912457912458, | |
| "grad_norm": 0.36704379320144653, | |
| "learning_rate": 8.92585664876797e-05, | |
| "loss": 0.2319, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.4612794612794613, | |
| "grad_norm": 0.37201452255249023, | |
| "learning_rate": 8.918564659618544e-05, | |
| "loss": 0.2263, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.4646464646464645, | |
| "grad_norm": 0.39355430006980896, | |
| "learning_rate": 8.911251002727588e-05, | |
| "loss": 0.2463, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.468013468013468, | |
| "grad_norm": 0.34966838359832764, | |
| "learning_rate": 8.903915718536036e-05, | |
| "loss": 0.2319, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.4713804713804715, | |
| "grad_norm": 0.3407142162322998, | |
| "learning_rate": 8.896558847604414e-05, | |
| "loss": 0.2316, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.4747474747474747, | |
| "grad_norm": 0.32372531294822693, | |
| "learning_rate": 8.889180430612612e-05, | |
| "loss": 0.235, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.4781144781144782, | |
| "grad_norm": 0.315107136964798, | |
| "learning_rate": 8.88178050835966e-05, | |
| "loss": 0.2458, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.4814814814814814, | |
| "grad_norm": 0.32456645369529724, | |
| "learning_rate": 8.8743591217635e-05, | |
| "loss": 0.2274, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.4848484848484849, | |
| "grad_norm": 0.39389902353286743, | |
| "learning_rate": 8.86691631186076e-05, | |
| "loss": 0.2337, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.4882154882154883, | |
| "grad_norm": 0.334153413772583, | |
| "learning_rate": 8.859452119806532e-05, | |
| "loss": 0.2387, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.4915824915824916, | |
| "grad_norm": 0.3052537739276886, | |
| "learning_rate": 8.851966586874138e-05, | |
| "loss": 0.2394, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.494949494949495, | |
| "grad_norm": 0.3574841022491455, | |
| "learning_rate": 8.844459754454903e-05, | |
| "loss": 0.2351, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.4983164983164983, | |
| "grad_norm": 0.450410395860672, | |
| "learning_rate": 8.836931664057935e-05, | |
| "loss": 0.233, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.5016835016835017, | |
| "grad_norm": 0.46233242750167847, | |
| "learning_rate": 8.82938235730988e-05, | |
| "loss": 0.2497, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.5050505050505052, | |
| "grad_norm": 0.3989076316356659, | |
| "learning_rate": 8.821811875954704e-05, | |
| "loss": 0.227, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.5084175084175084, | |
| "grad_norm": 0.3404209315776825, | |
| "learning_rate": 8.814220261853456e-05, | |
| "loss": 0.2391, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.5117845117845117, | |
| "grad_norm": 0.4542754590511322, | |
| "learning_rate": 8.806607556984044e-05, | |
| "loss": 0.2229, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.5151515151515151, | |
| "grad_norm": 0.334740549325943, | |
| "learning_rate": 8.798973803440991e-05, | |
| "loss": 0.2319, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.5185185185185186, | |
| "grad_norm": 0.3982342779636383, | |
| "learning_rate": 8.791319043435214e-05, | |
| "loss": 0.2295, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.5218855218855218, | |
| "grad_norm": 0.298423171043396, | |
| "learning_rate": 8.78364331929378e-05, | |
| "loss": 0.2255, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.5252525252525253, | |
| "grad_norm": 0.4019949734210968, | |
| "learning_rate": 8.775946673459681e-05, | |
| "loss": 0.2311, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.5286195286195285, | |
| "grad_norm": 0.33627232909202576, | |
| "learning_rate": 8.768229148491598e-05, | |
| "loss": 0.2086, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.531986531986532, | |
| "grad_norm": 0.335822731256485, | |
| "learning_rate": 8.760490787063659e-05, | |
| "loss": 0.2272, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.5353535353535355, | |
| "grad_norm": 0.29926082491874695, | |
| "learning_rate": 8.75273163196521e-05, | |
| "loss": 0.2349, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.5387205387205387, | |
| "grad_norm": 0.31576985120773315, | |
| "learning_rate": 8.744951726100573e-05, | |
| "loss": 0.2224, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.542087542087542, | |
| "grad_norm": 0.3465363681316376, | |
| "learning_rate": 8.737151112488813e-05, | |
| "loss": 0.2153, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.5454545454545454, | |
| "grad_norm": 0.370771586894989, | |
| "learning_rate": 8.729329834263503e-05, | |
| "loss": 0.2303, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.5488215488215489, | |
| "grad_norm": 0.3440144956111908, | |
| "learning_rate": 8.721487934672473e-05, | |
| "loss": 0.2203, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.5521885521885523, | |
| "grad_norm": 0.4985616207122803, | |
| "learning_rate": 8.713625457077585e-05, | |
| "loss": 0.2334, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.5555555555555556, | |
| "grad_norm": 0.36030417680740356, | |
| "learning_rate": 8.705742444954488e-05, | |
| "loss": 0.2216, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.5589225589225588, | |
| "grad_norm": 0.3390983045101166, | |
| "learning_rate": 8.69783894189237e-05, | |
| "loss": 0.2336, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.5622895622895623, | |
| "grad_norm": 0.2929445505142212, | |
| "learning_rate": 8.68991499159373e-05, | |
| "loss": 0.2307, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.5656565656565657, | |
| "grad_norm": 0.3234863579273224, | |
| "learning_rate": 8.681970637874132e-05, | |
| "loss": 0.2249, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.569023569023569, | |
| "grad_norm": 0.3130185306072235, | |
| "learning_rate": 8.674005924661952e-05, | |
| "loss": 0.2341, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.5723905723905722, | |
| "grad_norm": 0.27616122364997864, | |
| "learning_rate": 8.666020895998153e-05, | |
| "loss": 0.229, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.5757575757575757, | |
| "grad_norm": 0.33888301253318787, | |
| "learning_rate": 8.658015596036028e-05, | |
| "loss": 0.2307, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.5791245791245792, | |
| "grad_norm": 0.31470242142677307, | |
| "learning_rate": 8.649990069040961e-05, | |
| "loss": 0.2195, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.5824915824915826, | |
| "grad_norm": 0.2968938946723938, | |
| "learning_rate": 8.641944359390182e-05, | |
| "loss": 0.2193, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.5858585858585859, | |
| "grad_norm": 0.3504616320133209, | |
| "learning_rate": 8.63387851157252e-05, | |
| "loss": 0.2286, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.589225589225589, | |
| "grad_norm": 0.3612460196018219, | |
| "learning_rate": 8.62579257018816e-05, | |
| "loss": 0.2183, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.5925925925925926, | |
| "grad_norm": 0.4213543236255646, | |
| "learning_rate": 8.617686579948397e-05, | |
| "loss": 0.22, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.595959595959596, | |
| "grad_norm": 0.3147771656513214, | |
| "learning_rate": 8.609560585675379e-05, | |
| "loss": 0.2121, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.5993265993265995, | |
| "grad_norm": 0.39270174503326416, | |
| "learning_rate": 8.601414632301869e-05, | |
| "loss": 0.2521, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.6026936026936027, | |
| "grad_norm": 0.4034063220024109, | |
| "learning_rate": 8.593248764871e-05, | |
| "loss": 0.2255, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.606060606060606, | |
| "grad_norm": 0.29727184772491455, | |
| "learning_rate": 8.585063028536016e-05, | |
| "loss": 0.2247, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.6094276094276094, | |
| "grad_norm": 0.38096803426742554, | |
| "learning_rate": 8.576857468560022e-05, | |
| "loss": 0.2255, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.612794612794613, | |
| "grad_norm": 0.4196261465549469, | |
| "learning_rate": 8.568632130315745e-05, | |
| "loss": 0.2241, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.6161616161616161, | |
| "grad_norm": 0.28777551651000977, | |
| "learning_rate": 8.560387059285273e-05, | |
| "loss": 0.2232, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.6195286195286194, | |
| "grad_norm": 0.37960875034332275, | |
| "learning_rate": 8.552122301059806e-05, | |
| "loss": 0.2206, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.6228956228956228, | |
| "grad_norm": 0.3538387417793274, | |
| "learning_rate": 8.543837901339404e-05, | |
| "loss": 0.2317, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.6262626262626263, | |
| "grad_norm": 0.31286677718162537, | |
| "learning_rate": 8.535533905932738e-05, | |
| "loss": 0.2178, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.6296296296296298, | |
| "grad_norm": 0.42340192198753357, | |
| "learning_rate": 8.52721036075683e-05, | |
| "loss": 0.2245, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.632996632996633, | |
| "grad_norm": 0.37731871008872986, | |
| "learning_rate": 8.518867311836808e-05, | |
| "loss": 0.2237, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.6363636363636362, | |
| "grad_norm": 0.3666956126689911, | |
| "learning_rate": 8.510504805305639e-05, | |
| "loss": 0.2243, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.6397306397306397, | |
| "grad_norm": 0.32843998074531555, | |
| "learning_rate": 8.502122887403883e-05, | |
| "loss": 0.2109, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.6430976430976432, | |
| "grad_norm": 0.3238717019557953, | |
| "learning_rate": 8.49372160447944e-05, | |
| "loss": 0.2181, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.6464646464646466, | |
| "grad_norm": 0.38621988892555237, | |
| "learning_rate": 8.485301002987284e-05, | |
| "loss": 0.2228, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.6498316498316499, | |
| "grad_norm": 0.3646789491176605, | |
| "learning_rate": 8.476861129489217e-05, | |
| "loss": 0.2248, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.6531986531986531, | |
| "grad_norm": 0.323712557554245, | |
| "learning_rate": 8.468402030653597e-05, | |
| "loss": 0.2119, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.6565656565656566, | |
| "grad_norm": 0.3144836723804474, | |
| "learning_rate": 8.459923753255097e-05, | |
| "loss": 0.2108, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.65993265993266, | |
| "grad_norm": 0.40645068883895874, | |
| "learning_rate": 8.451426344174433e-05, | |
| "loss": 0.2182, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.6632996632996633, | |
| "grad_norm": 0.41164469718933105, | |
| "learning_rate": 8.44290985039811e-05, | |
| "loss": 0.2176, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.6666666666666665, | |
| "grad_norm": 0.3617885708808899, | |
| "learning_rate": 8.434374319018165e-05, | |
| "loss": 0.2439, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.67003367003367, | |
| "grad_norm": 0.5637879371643066, | |
| "learning_rate": 8.425819797231904e-05, | |
| "loss": 0.216, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.6734006734006734, | |
| "grad_norm": 0.5605575442314148, | |
| "learning_rate": 8.417246332341637e-05, | |
| "loss": 0.2173, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.676767676767677, | |
| "grad_norm": 0.4026080369949341, | |
| "learning_rate": 8.408653971754421e-05, | |
| "loss": 0.2186, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.6801346801346801, | |
| "grad_norm": 0.40690577030181885, | |
| "learning_rate": 8.400042762981799e-05, | |
| "loss": 0.2142, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.6835016835016834, | |
| "grad_norm": 0.37610065937042236, | |
| "learning_rate": 8.391412753639534e-05, | |
| "loss": 0.2167, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.6835016835016834, | |
| "eval_loss": 0.10807202011346817, | |
| "eval_runtime": 33.2575, | |
| "eval_samples_per_second": 30.068, | |
| "eval_steps_per_second": 1.894, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.6868686868686869, | |
| "grad_norm": 0.3314391076564789, | |
| "learning_rate": 8.382763991447344e-05, | |
| "loss": 0.2269, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.6902356902356903, | |
| "grad_norm": 0.36875322461128235, | |
| "learning_rate": 8.374096524228647e-05, | |
| "loss": 0.2017, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.6936026936026936, | |
| "grad_norm": 0.4028531610965729, | |
| "learning_rate": 8.365410399910288e-05, | |
| "loss": 0.2184, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.696969696969697, | |
| "grad_norm": 0.3435775637626648, | |
| "learning_rate": 8.356705666522275e-05, | |
| "loss": 0.227, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.7003367003367003, | |
| "grad_norm": 0.3759492039680481, | |
| "learning_rate": 8.347982372197514e-05, | |
| "loss": 0.2087, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.7037037037037037, | |
| "grad_norm": 0.3366954028606415, | |
| "learning_rate": 8.339240565171551e-05, | |
| "loss": 0.2337, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.7070707070707072, | |
| "grad_norm": 0.34926289319992065, | |
| "learning_rate": 8.33048029378229e-05, | |
| "loss": 0.2097, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.7104377104377104, | |
| "grad_norm": 0.36330562829971313, | |
| "learning_rate": 8.321701606469736e-05, | |
| "loss": 0.2278, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.7138047138047137, | |
| "grad_norm": 0.3466736376285553, | |
| "learning_rate": 8.312904551775731e-05, | |
| "loss": 0.2247, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.7171717171717171, | |
| "grad_norm": 0.46628567576408386, | |
| "learning_rate": 8.30408917834367e-05, | |
| "loss": 0.2239, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.7205387205387206, | |
| "grad_norm": 0.34879177808761597, | |
| "learning_rate": 8.295255534918248e-05, | |
| "loss": 0.2125, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.723905723905724, | |
| "grad_norm": 0.35456812381744385, | |
| "learning_rate": 8.286403670345184e-05, | |
| "loss": 0.2171, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.7272727272727273, | |
| "grad_norm": 0.31053483486175537, | |
| "learning_rate": 8.277533633570948e-05, | |
| "loss": 0.2023, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.7306397306397305, | |
| "grad_norm": 0.4170824885368347, | |
| "learning_rate": 8.268645473642493e-05, | |
| "loss": 0.2221, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.734006734006734, | |
| "grad_norm": 0.46031665802001953, | |
| "learning_rate": 8.259739239706991e-05, | |
| "loss": 0.2156, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.7373737373737375, | |
| "grad_norm": 0.5810372829437256, | |
| "learning_rate": 8.250814981011545e-05, | |
| "loss": 0.2172, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.7407407407407407, | |
| "grad_norm": 0.42679181694984436, | |
| "learning_rate": 8.241872746902935e-05, | |
| "loss": 0.2097, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.7441077441077442, | |
| "grad_norm": 0.3723827600479126, | |
| "learning_rate": 8.232912586827326e-05, | |
| "loss": 0.207, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.7474747474747474, | |
| "grad_norm": 0.39440205693244934, | |
| "learning_rate": 8.223934550330015e-05, | |
| "loss": 0.2155, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.7508417508417509, | |
| "grad_norm": 0.36913472414016724, | |
| "learning_rate": 8.21493868705514e-05, | |
| "loss": 0.2217, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.7542087542087543, | |
| "grad_norm": 0.45470842719078064, | |
| "learning_rate": 8.205925046745419e-05, | |
| "loss": 0.2105, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.7575757575757576, | |
| "grad_norm": 0.4667101204395294, | |
| "learning_rate": 8.196893679241858e-05, | |
| "loss": 0.2184, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.7609427609427608, | |
| "grad_norm": 0.3914218544960022, | |
| "learning_rate": 8.187844634483496e-05, | |
| "loss": 0.2203, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.7643097643097643, | |
| "grad_norm": 0.41950666904449463, | |
| "learning_rate": 8.178777962507112e-05, | |
| "loss": 0.2235, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.7676767676767677, | |
| "grad_norm": 0.46251434087753296, | |
| "learning_rate": 8.169693713446959e-05, | |
| "loss": 0.2118, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.7710437710437712, | |
| "grad_norm": 0.3726736009120941, | |
| "learning_rate": 8.16059193753448e-05, | |
| "loss": 0.2084, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.7744107744107744, | |
| "grad_norm": 0.3658943474292755, | |
| "learning_rate": 8.151472685098036e-05, | |
| "loss": 0.2119, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.7777777777777777, | |
| "grad_norm": 0.3458796739578247, | |
| "learning_rate": 8.142336006562618e-05, | |
| "loss": 0.2217, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.7811447811447811, | |
| "grad_norm": 0.4397581219673157, | |
| "learning_rate": 8.133181952449582e-05, | |
| "loss": 0.2067, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.7845117845117846, | |
| "grad_norm": 0.3916018009185791, | |
| "learning_rate": 8.124010573376357e-05, | |
| "loss": 0.2233, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.7878787878787878, | |
| "grad_norm": 0.33343255519866943, | |
| "learning_rate": 8.114821920056177e-05, | |
| "loss": 0.2164, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.791245791245791, | |
| "grad_norm": 0.3748774826526642, | |
| "learning_rate": 8.105616043297787e-05, | |
| "loss": 0.2115, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.7946127946127945, | |
| "grad_norm": 0.3121494948863983, | |
| "learning_rate": 8.096392994005177e-05, | |
| "loss": 0.218, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.797979797979798, | |
| "grad_norm": 0.5338030457496643, | |
| "learning_rate": 8.087152823177281e-05, | |
| "loss": 0.2121, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.8013468013468015, | |
| "grad_norm": 0.5407494902610779, | |
| "learning_rate": 8.077895581907718e-05, | |
| "loss": 0.2184, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.8047138047138047, | |
| "grad_norm": 0.334550142288208, | |
| "learning_rate": 8.068621321384496e-05, | |
| "loss": 0.2119, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.808080808080808, | |
| "grad_norm": 0.4660499095916748, | |
| "learning_rate": 8.059330092889723e-05, | |
| "loss": 0.2172, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.8114478114478114, | |
| "grad_norm": 0.30550065636634827, | |
| "learning_rate": 8.050021947799342e-05, | |
| "loss": 0.1999, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.8148148148148149, | |
| "grad_norm": 0.33308038115501404, | |
| "learning_rate": 8.040696937582832e-05, | |
| "loss": 0.2176, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.8181818181818183, | |
| "grad_norm": 0.34143635630607605, | |
| "learning_rate": 8.031355113802927e-05, | |
| "loss": 0.2068, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.8215488215488216, | |
| "grad_norm": 0.398154079914093, | |
| "learning_rate": 8.021996528115335e-05, | |
| "loss": 0.2188, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.8249158249158248, | |
| "grad_norm": 0.3586583137512207, | |
| "learning_rate": 8.012621232268443e-05, | |
| "loss": 0.1982, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.8282828282828283, | |
| "grad_norm": 0.3652949035167694, | |
| "learning_rate": 8.003229278103043e-05, | |
| "loss": 0.2047, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.8316498316498318, | |
| "grad_norm": 0.42600250244140625, | |
| "learning_rate": 7.993820717552037e-05, | |
| "loss": 0.1962, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.835016835016835, | |
| "grad_norm": 0.43108507990837097, | |
| "learning_rate": 7.984395602640153e-05, | |
| "loss": 0.2144, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.8383838383838382, | |
| "grad_norm": 0.3814637362957001, | |
| "learning_rate": 7.974953985483655e-05, | |
| "loss": 0.2074, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.8417508417508417, | |
| "grad_norm": 0.3342503607273102, | |
| "learning_rate": 7.96549591829006e-05, | |
| "loss": 0.2034, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.8451178451178452, | |
| "grad_norm": 0.3812967836856842, | |
| "learning_rate": 7.956021453357838e-05, | |
| "loss": 0.2067, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.8484848484848486, | |
| "grad_norm": 0.3528459370136261, | |
| "learning_rate": 7.946530643076138e-05, | |
| "loss": 0.2072, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.8518518518518519, | |
| "grad_norm": 0.29267165064811707, | |
| "learning_rate": 7.937023539924486e-05, | |
| "loss": 0.2093, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.855218855218855, | |
| "grad_norm": 0.3306651711463928, | |
| "learning_rate": 7.927500196472506e-05, | |
| "loss": 0.1972, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.8585858585858586, | |
| "grad_norm": 0.38230523467063904, | |
| "learning_rate": 7.917960665379616e-05, | |
| "loss": 0.2082, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.861952861952862, | |
| "grad_norm": 0.3726579546928406, | |
| "learning_rate": 7.908404999394746e-05, | |
| "loss": 0.2176, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.8653198653198653, | |
| "grad_norm": 0.38537707924842834, | |
| "learning_rate": 7.898833251356045e-05, | |
| "loss": 0.2144, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.8686868686868687, | |
| "grad_norm": 0.39219149947166443, | |
| "learning_rate": 7.889245474190588e-05, | |
| "loss": 0.2153, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.872053872053872, | |
| "grad_norm": 0.30300959944725037, | |
| "learning_rate": 7.879641720914079e-05, | |
| "loss": 0.2022, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.8754208754208754, | |
| "grad_norm": 0.3848994970321655, | |
| "learning_rate": 7.870022044630569e-05, | |
| "loss": 0.213, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.878787878787879, | |
| "grad_norm": 0.24942323565483093, | |
| "learning_rate": 7.860386498532151e-05, | |
| "loss": 0.2037, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.8821548821548821, | |
| "grad_norm": 0.3252245783805847, | |
| "learning_rate": 7.85073513589867e-05, | |
| "loss": 0.2084, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.8855218855218854, | |
| "grad_norm": 0.37132528424263, | |
| "learning_rate": 7.84106801009743e-05, | |
| "loss": 0.209, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.8888888888888888, | |
| "grad_norm": 0.3328373432159424, | |
| "learning_rate": 7.831385174582901e-05, | |
| "loss": 0.1987, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.8922558922558923, | |
| "grad_norm": 0.41517242789268494, | |
| "learning_rate": 7.821686682896412e-05, | |
| "loss": 0.204, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.8956228956228958, | |
| "grad_norm": 0.3530445694923401, | |
| "learning_rate": 7.81197258866587e-05, | |
| "loss": 0.189, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.898989898989899, | |
| "grad_norm": 0.380113422870636, | |
| "learning_rate": 7.802242945605452e-05, | |
| "loss": 0.2092, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.9023569023569022, | |
| "grad_norm": 0.35453280806541443, | |
| "learning_rate": 7.792497807515317e-05, | |
| "loss": 0.2088, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.9057239057239057, | |
| "grad_norm": 0.3634714186191559, | |
| "learning_rate": 7.782737228281299e-05, | |
| "loss": 0.2189, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.9090909090909092, | |
| "grad_norm": 0.3585050702095032, | |
| "learning_rate": 7.772961261874615e-05, | |
| "loss": 0.2085, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.9124579124579124, | |
| "grad_norm": 0.3739174008369446, | |
| "learning_rate": 7.763169962351571e-05, | |
| "loss": 0.1955, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.9158249158249159, | |
| "grad_norm": 0.4537617862224579, | |
| "learning_rate": 7.753363383853249e-05, | |
| "loss": 0.2135, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.9191919191919191, | |
| "grad_norm": 0.49453261494636536, | |
| "learning_rate": 7.743541580605221e-05, | |
| "loss": 0.2129, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.9225589225589226, | |
| "grad_norm": 0.3620549142360687, | |
| "learning_rate": 7.733704606917247e-05, | |
| "loss": 0.1981, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.925925925925926, | |
| "grad_norm": 0.4411380887031555, | |
| "learning_rate": 7.723852517182966e-05, | |
| "loss": 0.2001, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.9292929292929293, | |
| "grad_norm": 0.5122190713882446, | |
| "learning_rate": 7.713985365879606e-05, | |
| "loss": 0.2153, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.9326599326599325, | |
| "grad_norm": 0.44053155183792114, | |
| "learning_rate": 7.704103207567676e-05, | |
| "loss": 0.2161, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.936026936026936, | |
| "grad_norm": 0.3606555461883545, | |
| "learning_rate": 7.694206096890666e-05, | |
| "loss": 0.207, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.9393939393939394, | |
| "grad_norm": 0.47892236709594727, | |
| "learning_rate": 7.684294088574748e-05, | |
| "loss": 0.2023, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.942760942760943, | |
| "grad_norm": 0.4973437488079071, | |
| "learning_rate": 7.674367237428466e-05, | |
| "loss": 0.2071, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.9461279461279462, | |
| "grad_norm": 0.344251811504364, | |
| "learning_rate": 7.664425598342442e-05, | |
| "loss": 0.207, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.9494949494949494, | |
| "grad_norm": 0.4177689850330353, | |
| "learning_rate": 7.654469226289067e-05, | |
| "loss": 0.2012, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.9528619528619529, | |
| "grad_norm": 0.38338756561279297, | |
| "learning_rate": 7.644498176322196e-05, | |
| "loss": 0.2109, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.9562289562289563, | |
| "grad_norm": 0.4217558801174164, | |
| "learning_rate": 7.63451250357685e-05, | |
| "loss": 0.2104, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.9595959595959596, | |
| "grad_norm": 0.3722991943359375, | |
| "learning_rate": 7.6245122632689e-05, | |
| "loss": 0.1995, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.9629629629629628, | |
| "grad_norm": 0.3889348804950714, | |
| "learning_rate": 7.614497510694774e-05, | |
| "loss": 0.201, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.9663299663299663, | |
| "grad_norm": 0.3091539740562439, | |
| "learning_rate": 7.604468301231143e-05, | |
| "loss": 0.1914, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.9696969696969697, | |
| "grad_norm": 0.38198480010032654, | |
| "learning_rate": 7.59442469033462e-05, | |
| "loss": 0.1928, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.9730639730639732, | |
| "grad_norm": 0.4082641303539276, | |
| "learning_rate": 7.58436673354145e-05, | |
| "loss": 0.1995, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.9764309764309764, | |
| "grad_norm": 0.4138404130935669, | |
| "learning_rate": 7.574294486467204e-05, | |
| "loss": 0.2016, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.9797979797979797, | |
| "grad_norm": 0.32085567712783813, | |
| "learning_rate": 7.564208004806467e-05, | |
| "loss": 0.2048, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.9831649831649831, | |
| "grad_norm": 0.30701011419296265, | |
| "learning_rate": 7.55410734433254e-05, | |
| "loss": 0.1922, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.9865319865319866, | |
| "grad_norm": 0.33269554376602173, | |
| "learning_rate": 7.543992560897124e-05, | |
| "loss": 0.2134, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.98989898989899, | |
| "grad_norm": 0.3297766447067261, | |
| "learning_rate": 7.533863710430012e-05, | |
| "loss": 0.1952, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.9932659932659933, | |
| "grad_norm": 0.3603309690952301, | |
| "learning_rate": 7.523720848938781e-05, | |
| "loss": 0.2022, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.9966329966329965, | |
| "grad_norm": 0.40898165106773376, | |
| "learning_rate": 7.513564032508484e-05, | |
| "loss": 0.2075, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.39114904403686523, | |
| "learning_rate": 7.503393317301337e-05, | |
| "loss": 0.1897, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.0033670033670035, | |
| "grad_norm": 0.35284826159477234, | |
| "learning_rate": 7.493208759556406e-05, | |
| "loss": 0.1964, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.006734006734007, | |
| "grad_norm": 0.42649611830711365, | |
| "learning_rate": 7.483010415589306e-05, | |
| "loss": 0.1973, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.01010101010101, | |
| "grad_norm": 0.32770025730133057, | |
| "learning_rate": 7.472798341791877e-05, | |
| "loss": 0.1949, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.0134680134680134, | |
| "grad_norm": 0.39396432042121887, | |
| "learning_rate": 7.462572594631882e-05, | |
| "loss": 0.2072, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.016835016835017, | |
| "grad_norm": 0.38193821907043457, | |
| "learning_rate": 7.452333230652688e-05, | |
| "loss": 0.1888, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.0202020202020203, | |
| "grad_norm": 0.41221076250076294, | |
| "learning_rate": 7.442080306472962e-05, | |
| "loss": 0.1993, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.0202020202020203, | |
| "eval_loss": 0.09800498932600021, | |
| "eval_runtime": 32.7986, | |
| "eval_samples_per_second": 30.489, | |
| "eval_steps_per_second": 1.921, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.0235690235690234, | |
| "grad_norm": 0.34845221042633057, | |
| "learning_rate": 7.431813878786343e-05, | |
| "loss": 0.1866, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.026936026936027, | |
| "grad_norm": 0.398992121219635, | |
| "learning_rate": 7.421534004361148e-05, | |
| "loss": 0.2037, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.0303030303030303, | |
| "grad_norm": 0.4208246171474457, | |
| "learning_rate": 7.41124074004004e-05, | |
| "loss": 0.2019, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.0336700336700337, | |
| "grad_norm": 0.3810597062110901, | |
| "learning_rate": 7.400934142739725e-05, | |
| "loss": 0.2012, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.037037037037037, | |
| "grad_norm": 0.4348938465118408, | |
| "learning_rate": 7.390614269450634e-05, | |
| "loss": 0.1936, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.04040404040404, | |
| "grad_norm": 0.43880605697631836, | |
| "learning_rate": 7.380281177236608e-05, | |
| "loss": 0.1999, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.0437710437710437, | |
| "grad_norm": 0.32157278060913086, | |
| "learning_rate": 7.369934923234577e-05, | |
| "loss": 0.1835, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.047138047138047, | |
| "grad_norm": 0.40847641229629517, | |
| "learning_rate": 7.359575564654259e-05, | |
| "loss": 0.1873, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.0505050505050506, | |
| "grad_norm": 0.3321189880371094, | |
| "learning_rate": 7.349203158777826e-05, | |
| "loss": 0.1874, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.053872053872054, | |
| "grad_norm": 0.4147774875164032, | |
| "learning_rate": 7.338817762959595e-05, | |
| "loss": 0.1899, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.057239057239057, | |
| "grad_norm": 0.391746461391449, | |
| "learning_rate": 7.32841943462572e-05, | |
| "loss": 0.1871, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.0606060606060606, | |
| "grad_norm": 0.41342151165008545, | |
| "learning_rate": 7.318008231273851e-05, | |
| "loss": 0.1887, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.063973063973064, | |
| "grad_norm": 0.4507507383823395, | |
| "learning_rate": 7.307584210472844e-05, | |
| "loss": 0.1862, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.0673400673400675, | |
| "grad_norm": 0.37700155377388, | |
| "learning_rate": 7.297147429862424e-05, | |
| "loss": 0.1895, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.0707070707070705, | |
| "grad_norm": 0.46107348799705505, | |
| "learning_rate": 7.286697947152867e-05, | |
| "loss": 0.1911, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.074074074074074, | |
| "grad_norm": 0.3295687139034271, | |
| "learning_rate": 7.276235820124694e-05, | |
| "loss": 0.2021, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.0774410774410774, | |
| "grad_norm": 0.34536072611808777, | |
| "learning_rate": 7.265761106628337e-05, | |
| "loss": 0.1944, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.080808080808081, | |
| "grad_norm": 0.3414456248283386, | |
| "learning_rate": 7.255273864583825e-05, | |
| "loss": 0.1827, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.0841750841750843, | |
| "grad_norm": 0.3441462516784668, | |
| "learning_rate": 7.244774151980466e-05, | |
| "loss": 0.1802, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.0875420875420874, | |
| "grad_norm": 0.3795068562030792, | |
| "learning_rate": 7.234262026876524e-05, | |
| "loss": 0.1868, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.090909090909091, | |
| "grad_norm": 0.3172759711742401, | |
| "learning_rate": 7.223737547398898e-05, | |
| "loss": 0.1891, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.0942760942760943, | |
| "grad_norm": 0.34847378730773926, | |
| "learning_rate": 7.213200771742798e-05, | |
| "loss": 0.1887, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.0976430976430978, | |
| "grad_norm": 0.37406644225120544, | |
| "learning_rate": 7.202651758171431e-05, | |
| "loss": 0.1818, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.101010101010101, | |
| "grad_norm": 0.40993303060531616, | |
| "learning_rate": 7.192090565015668e-05, | |
| "loss": 0.175, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.1043771043771042, | |
| "grad_norm": 0.3788850009441376, | |
| "learning_rate": 7.181517250673728e-05, | |
| "loss": 0.1869, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.1077441077441077, | |
| "grad_norm": 0.4065912961959839, | |
| "learning_rate": 7.170931873610859e-05, | |
| "loss": 0.1849, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.111111111111111, | |
| "grad_norm": 0.32020193338394165, | |
| "learning_rate": 7.160334492359007e-05, | |
| "loss": 0.1962, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.1144781144781146, | |
| "grad_norm": 0.3998248875141144, | |
| "learning_rate": 7.149725165516494e-05, | |
| "loss": 0.1973, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.1178451178451176, | |
| "grad_norm": 0.35018956661224365, | |
| "learning_rate": 7.139103951747695e-05, | |
| "loss": 0.1826, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.121212121212121, | |
| "grad_norm": 0.4554973542690277, | |
| "learning_rate": 7.128470909782716e-05, | |
| "loss": 0.1991, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.1245791245791246, | |
| "grad_norm": 0.3799019753932953, | |
| "learning_rate": 7.117826098417068e-05, | |
| "loss": 0.1966, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.127946127946128, | |
| "grad_norm": 0.4118320345878601, | |
| "learning_rate": 7.107169576511338e-05, | |
| "loss": 0.2056, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.1313131313131315, | |
| "grad_norm": 0.34512877464294434, | |
| "learning_rate": 7.096501402990865e-05, | |
| "loss": 0.1769, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.1346801346801345, | |
| "grad_norm": 0.4270040690898895, | |
| "learning_rate": 7.085821636845425e-05, | |
| "loss": 0.1944, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.138047138047138, | |
| "grad_norm": 0.3624259829521179, | |
| "learning_rate": 7.075130337128884e-05, | |
| "loss": 0.1817, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.1414141414141414, | |
| "grad_norm": 0.447008341550827, | |
| "learning_rate": 7.064427562958889e-05, | |
| "loss": 0.1832, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.144781144781145, | |
| "grad_norm": 0.3551187813282013, | |
| "learning_rate": 7.053713373516538e-05, | |
| "loss": 0.189, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.148148148148148, | |
| "grad_norm": 0.426107794046402, | |
| "learning_rate": 7.04298782804604e-05, | |
| "loss": 0.1996, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.1515151515151514, | |
| "grad_norm": 0.4078080356121063, | |
| "learning_rate": 7.032250985854409e-05, | |
| "loss": 0.1813, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.154882154882155, | |
| "grad_norm": 0.35285472869873047, | |
| "learning_rate": 7.021502906311113e-05, | |
| "loss": 0.1851, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.1582491582491583, | |
| "grad_norm": 0.44015195965766907, | |
| "learning_rate": 7.01074364884777e-05, | |
| "loss": 0.2016, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.1616161616161618, | |
| "grad_norm": 0.31542298197746277, | |
| "learning_rate": 6.999973272957793e-05, | |
| "loss": 0.1854, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.164983164983165, | |
| "grad_norm": 0.5345585942268372, | |
| "learning_rate": 6.989191838196082e-05, | |
| "loss": 0.1873, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.1683501683501682, | |
| "grad_norm": 0.3206551969051361, | |
| "learning_rate": 6.978399404178688e-05, | |
| "loss": 0.177, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.1717171717171717, | |
| "grad_norm": 0.46700677275657654, | |
| "learning_rate": 6.967596030582478e-05, | |
| "loss": 0.1805, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.175084175084175, | |
| "grad_norm": 0.3999563455581665, | |
| "learning_rate": 6.956781777144813e-05, | |
| "loss": 0.1904, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.1784511784511786, | |
| "grad_norm": 0.383768767118454, | |
| "learning_rate": 6.945956703663211e-05, | |
| "loss": 0.1852, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.1818181818181817, | |
| "grad_norm": 0.42654767632484436, | |
| "learning_rate": 6.935120869995023e-05, | |
| "loss": 0.2006, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.185185185185185, | |
| "grad_norm": 0.3127792775630951, | |
| "learning_rate": 6.924274336057099e-05, | |
| "loss": 0.1908, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.1885521885521886, | |
| "grad_norm": 0.4523990750312805, | |
| "learning_rate": 6.91341716182545e-05, | |
| "loss": 0.1943, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.191919191919192, | |
| "grad_norm": 0.3176407217979431, | |
| "learning_rate": 6.902549407334929e-05, | |
| "loss": 0.1707, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.1952861952861955, | |
| "grad_norm": 0.4285120666027069, | |
| "learning_rate": 6.891671132678892e-05, | |
| "loss": 0.1879, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.1986531986531985, | |
| "grad_norm": 0.3872413635253906, | |
| "learning_rate": 6.880782398008862e-05, | |
| "loss": 0.1979, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.202020202020202, | |
| "grad_norm": 0.49398812651634216, | |
| "learning_rate": 6.869883263534205e-05, | |
| "loss": 0.1923, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.2053872053872055, | |
| "grad_norm": 0.43145304918289185, | |
| "learning_rate": 6.858973789521793e-05, | |
| "loss": 0.1964, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.208754208754209, | |
| "grad_norm": 0.4020119309425354, | |
| "learning_rate": 6.848054036295666e-05, | |
| "loss": 0.1822, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.212121212121212, | |
| "grad_norm": 0.29532957077026367, | |
| "learning_rate": 6.837124064236709e-05, | |
| "loss": 0.1928, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.2154882154882154, | |
| "grad_norm": 0.3485638201236725, | |
| "learning_rate": 6.826183933782307e-05, | |
| "loss": 0.2078, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.218855218855219, | |
| "grad_norm": 0.3406836986541748, | |
| "learning_rate": 6.815233705426019e-05, | |
| "loss": 0.1844, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.2222222222222223, | |
| "grad_norm": 0.2849566638469696, | |
| "learning_rate": 6.80427343971724e-05, | |
| "loss": 0.1914, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.225589225589226, | |
| "grad_norm": 0.34194567799568176, | |
| "learning_rate": 6.793303197260864e-05, | |
| "loss": 0.1879, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.228956228956229, | |
| "grad_norm": 0.39901259541511536, | |
| "learning_rate": 6.782323038716956e-05, | |
| "loss": 0.1963, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.2323232323232323, | |
| "grad_norm": 0.37613964080810547, | |
| "learning_rate": 6.771333024800411e-05, | |
| "loss": 0.1827, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.2356902356902357, | |
| "grad_norm": 0.3915823996067047, | |
| "learning_rate": 6.760333216280617e-05, | |
| "loss": 0.1988, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.239057239057239, | |
| "grad_norm": 0.3280683755874634, | |
| "learning_rate": 6.74932367398112e-05, | |
| "loss": 0.1798, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.242424242424242, | |
| "grad_norm": 0.4062442481517792, | |
| "learning_rate": 6.738304458779293e-05, | |
| "loss": 0.1935, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.2457912457912457, | |
| "grad_norm": 0.3807975947856903, | |
| "learning_rate": 6.727275631605995e-05, | |
| "loss": 0.19, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.249158249158249, | |
| "grad_norm": 0.33653566241264343, | |
| "learning_rate": 6.716237253445235e-05, | |
| "loss": 0.1783, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.2525252525252526, | |
| "grad_norm": 0.33660075068473816, | |
| "learning_rate": 6.70518938533383e-05, | |
| "loss": 0.1798, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 2.255892255892256, | |
| "grad_norm": 0.33384770154953003, | |
| "learning_rate": 6.694132088361075e-05, | |
| "loss": 0.1796, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.259259259259259, | |
| "grad_norm": 0.3794649839401245, | |
| "learning_rate": 6.683065423668403e-05, | |
| "loss": 0.194, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 2.2626262626262625, | |
| "grad_norm": 0.3536298871040344, | |
| "learning_rate": 6.671989452449043e-05, | |
| "loss": 0.1814, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 2.265993265993266, | |
| "grad_norm": 0.33530497550964355, | |
| "learning_rate": 6.660904235947687e-05, | |
| "loss": 0.1766, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 2.2693602693602695, | |
| "grad_norm": 0.38430067896842957, | |
| "learning_rate": 6.649809835460147e-05, | |
| "loss": 0.1834, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 2.2727272727272725, | |
| "grad_norm": 0.4088013470172882, | |
| "learning_rate": 6.638706312333018e-05, | |
| "loss": 0.184, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 2.276094276094276, | |
| "grad_norm": 0.4313696324825287, | |
| "learning_rate": 6.627593727963342e-05, | |
| "loss": 0.195, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 2.2794612794612794, | |
| "grad_norm": 0.3910023272037506, | |
| "learning_rate": 6.616472143798261e-05, | |
| "loss": 0.1955, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 2.282828282828283, | |
| "grad_norm": 0.3856333792209625, | |
| "learning_rate": 6.605341621334683e-05, | |
| "loss": 0.1825, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 2.2861952861952863, | |
| "grad_norm": 0.3617812395095825, | |
| "learning_rate": 6.594202222118942e-05, | |
| "loss": 0.1866, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 2.28956228956229, | |
| "grad_norm": 0.38682255148887634, | |
| "learning_rate": 6.583054007746452e-05, | |
| "loss": 0.1892, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 2.292929292929293, | |
| "grad_norm": 0.40745124220848083, | |
| "learning_rate": 6.571897039861377e-05, | |
| "loss": 0.1917, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 2.2962962962962963, | |
| "grad_norm": 0.3059500455856323, | |
| "learning_rate": 6.560731380156275e-05, | |
| "loss": 0.1807, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 2.2996632996632997, | |
| "grad_norm": 0.3456449806690216, | |
| "learning_rate": 6.549557090371776e-05, | |
| "loss": 0.1798, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 2.303030303030303, | |
| "grad_norm": 0.4222232699394226, | |
| "learning_rate": 6.538374232296221e-05, | |
| "loss": 0.1939, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 2.3063973063973062, | |
| "grad_norm": 0.32959309220314026, | |
| "learning_rate": 6.527182867765332e-05, | |
| "loss": 0.1839, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 2.3097643097643097, | |
| "grad_norm": 0.30231377482414246, | |
| "learning_rate": 6.515983058661872e-05, | |
| "loss": 0.1772, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 2.313131313131313, | |
| "grad_norm": 0.42110297083854675, | |
| "learning_rate": 6.50477486691529e-05, | |
| "loss": 0.1911, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 2.3164983164983166, | |
| "grad_norm": 0.39979901909828186, | |
| "learning_rate": 6.493558354501397e-05, | |
| "loss": 0.1828, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 2.31986531986532, | |
| "grad_norm": 0.3712976276874542, | |
| "learning_rate": 6.482333583442002e-05, | |
| "loss": 0.1841, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 2.323232323232323, | |
| "grad_norm": 0.4817717373371124, | |
| "learning_rate": 6.471100615804591e-05, | |
| "loss": 0.1844, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 2.3265993265993266, | |
| "grad_norm": 0.4232378304004669, | |
| "learning_rate": 6.459859513701967e-05, | |
| "loss": 0.1748, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 2.32996632996633, | |
| "grad_norm": 0.3409404158592224, | |
| "learning_rate": 6.448610339291912e-05, | |
| "loss": 0.1944, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 2.3333333333333335, | |
| "grad_norm": 0.3255225121974945, | |
| "learning_rate": 6.437353154776849e-05, | |
| "loss": 0.196, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 2.3367003367003365, | |
| "grad_norm": 0.3025372624397278, | |
| "learning_rate": 6.426088022403485e-05, | |
| "loss": 0.1706, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 2.34006734006734, | |
| "grad_norm": 0.3823414146900177, | |
| "learning_rate": 6.414815004462483e-05, | |
| "loss": 0.1814, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 2.3434343434343434, | |
| "grad_norm": 0.4448075294494629, | |
| "learning_rate": 6.403534163288105e-05, | |
| "loss": 0.1966, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 2.346801346801347, | |
| "grad_norm": 0.36923545598983765, | |
| "learning_rate": 6.392245561257871e-05, | |
| "loss": 0.1896, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 2.3501683501683504, | |
| "grad_norm": 0.3588995933532715, | |
| "learning_rate": 6.380949260792217e-05, | |
| "loss": 0.1817, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 2.3535353535353534, | |
| "grad_norm": 0.4320705235004425, | |
| "learning_rate": 6.369645324354149e-05, | |
| "loss": 0.1918, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 2.356902356902357, | |
| "grad_norm": 0.4778941869735718, | |
| "learning_rate": 6.358333814448891e-05, | |
| "loss": 0.1923, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 2.356902356902357, | |
| "eval_loss": 0.09319520741701126, | |
| "eval_runtime": 32.7439, | |
| "eval_samples_per_second": 30.54, | |
| "eval_steps_per_second": 1.924, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 2.3602693602693603, | |
| "grad_norm": 0.4958076775074005, | |
| "learning_rate": 6.347014793623547e-05, | |
| "loss": 0.1977, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 2.3636363636363638, | |
| "grad_norm": 0.3545849025249481, | |
| "learning_rate": 6.335688324466756e-05, | |
| "loss": 0.1878, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 2.3670033670033668, | |
| "grad_norm": 0.40290120244026184, | |
| "learning_rate": 6.324354469608335e-05, | |
| "loss": 0.1789, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 2.3703703703703702, | |
| "grad_norm": 0.3689594864845276, | |
| "learning_rate": 6.313013291718952e-05, | |
| "loss": 0.1869, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 2.3737373737373737, | |
| "grad_norm": 0.34308475255966187, | |
| "learning_rate": 6.301664853509754e-05, | |
| "loss": 0.1877, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 2.377104377104377, | |
| "grad_norm": 0.3406205475330353, | |
| "learning_rate": 6.290309217732045e-05, | |
| "loss": 0.1835, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 2.3804713804713806, | |
| "grad_norm": 0.39080610871315, | |
| "learning_rate": 6.278946447176923e-05, | |
| "loss": 0.1968, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 2.3838383838383836, | |
| "grad_norm": 0.3915717303752899, | |
| "learning_rate": 6.267576604674938e-05, | |
| "loss": 0.1887, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 2.387205387205387, | |
| "grad_norm": 0.3860926628112793, | |
| "learning_rate": 6.256199753095745e-05, | |
| "loss": 0.1856, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 2.3905723905723906, | |
| "grad_norm": 0.3959527313709259, | |
| "learning_rate": 6.244815955347756e-05, | |
| "loss": 0.2006, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 2.393939393939394, | |
| "grad_norm": 0.3251626491546631, | |
| "learning_rate": 6.233425274377794e-05, | |
| "loss": 0.1742, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 2.3973063973063975, | |
| "grad_norm": 0.34222376346588135, | |
| "learning_rate": 6.222027773170737e-05, | |
| "loss": 0.1938, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 2.4006734006734005, | |
| "grad_norm": 0.2963486313819885, | |
| "learning_rate": 6.21062351474918e-05, | |
| "loss": 0.1775, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 2.404040404040404, | |
| "grad_norm": 0.33488693833351135, | |
| "learning_rate": 6.199212562173084e-05, | |
| "loss": 0.1859, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 2.4074074074074074, | |
| "grad_norm": 0.37009695172309875, | |
| "learning_rate": 6.187794978539419e-05, | |
| "loss": 0.1854, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 2.410774410774411, | |
| "grad_norm": 0.29789450764656067, | |
| "learning_rate": 6.176370826981828e-05, | |
| "loss": 0.1782, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 2.4141414141414144, | |
| "grad_norm": 0.3971637189388275, | |
| "learning_rate": 6.164940170670266e-05, | |
| "loss": 0.1846, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 2.4175084175084174, | |
| "grad_norm": 0.3055313229560852, | |
| "learning_rate": 6.153503072810662e-05, | |
| "loss": 0.1892, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 2.420875420875421, | |
| "grad_norm": 0.3349987864494324, | |
| "learning_rate": 6.142059596644558e-05, | |
| "loss": 0.1864, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 2.4242424242424243, | |
| "grad_norm": 0.3594989478588104, | |
| "learning_rate": 6.130609805448766e-05, | |
| "loss": 0.1745, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 2.4276094276094278, | |
| "grad_norm": 0.32809680700302124, | |
| "learning_rate": 6.119153762535021e-05, | |
| "loss": 0.1874, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 2.430976430976431, | |
| "grad_norm": 0.39594766497612, | |
| "learning_rate": 6.107691531249623e-05, | |
| "loss": 0.1838, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 2.4343434343434343, | |
| "grad_norm": 0.31413084268569946, | |
| "learning_rate": 6.09622317497309e-05, | |
| "loss": 0.1872, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 2.4377104377104377, | |
| "grad_norm": 0.33545947074890137, | |
| "learning_rate": 6.084748757119811e-05, | |
| "loss": 0.1831, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 2.441077441077441, | |
| "grad_norm": 0.3555618226528168, | |
| "learning_rate": 6.0732683411376935e-05, | |
| "loss": 0.1773, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 2.4444444444444446, | |
| "grad_norm": 0.31497722864151, | |
| "learning_rate": 6.0617819905078075e-05, | |
| "loss": 0.1957, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 2.4478114478114477, | |
| "grad_norm": 0.3945048451423645, | |
| "learning_rate": 6.050289768744042e-05, | |
| "loss": 0.1844, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 2.451178451178451, | |
| "grad_norm": 0.3777752220630646, | |
| "learning_rate": 6.038791739392748e-05, | |
| "loss": 0.1765, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 2.4545454545454546, | |
| "grad_norm": 0.3376706540584564, | |
| "learning_rate": 6.0272879660323934e-05, | |
| "loss": 0.1857, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 2.457912457912458, | |
| "grad_norm": 0.33173221349716187, | |
| "learning_rate": 6.015778512273203e-05, | |
| "loss": 0.1877, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 2.461279461279461, | |
| "grad_norm": 0.26840299367904663, | |
| "learning_rate": 6.004263441756815e-05, | |
| "loss": 0.1898, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 2.4646464646464645, | |
| "grad_norm": 0.3570701479911804, | |
| "learning_rate": 5.992742818155923e-05, | |
| "loss": 0.1794, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 2.468013468013468, | |
| "grad_norm": 0.2803226113319397, | |
| "learning_rate": 5.98121670517393e-05, | |
| "loss": 0.1826, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 2.4713804713804715, | |
| "grad_norm": 0.36934715509414673, | |
| "learning_rate": 5.9696851665445875e-05, | |
| "loss": 0.1855, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 2.474747474747475, | |
| "grad_norm": 0.38208091259002686, | |
| "learning_rate": 5.958148266031654e-05, | |
| "loss": 0.1872, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 2.478114478114478, | |
| "grad_norm": 0.2921079099178314, | |
| "learning_rate": 5.9466060674285294e-05, | |
| "loss": 0.1887, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 2.4814814814814814, | |
| "grad_norm": 0.4000777006149292, | |
| "learning_rate": 5.9350586345579165e-05, | |
| "loss": 0.1932, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 2.484848484848485, | |
| "grad_norm": 0.3915836215019226, | |
| "learning_rate": 5.9235060312714564e-05, | |
| "loss": 0.1905, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 2.4882154882154883, | |
| "grad_norm": 0.3251096308231354, | |
| "learning_rate": 5.9119483214493844e-05, | |
| "loss": 0.1763, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 2.4915824915824913, | |
| "grad_norm": 0.3224780261516571, | |
| "learning_rate": 5.900385569000166e-05, | |
| "loss": 0.1734, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 2.494949494949495, | |
| "grad_norm": 0.35873523354530334, | |
| "learning_rate": 5.8888178378601565e-05, | |
| "loss": 0.1821, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 2.4983164983164983, | |
| "grad_norm": 0.31311190128326416, | |
| "learning_rate": 5.877245191993239e-05, | |
| "loss": 0.1913, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 2.5016835016835017, | |
| "grad_norm": 0.30053961277008057, | |
| "learning_rate": 5.865667695390468e-05, | |
| "loss": 0.175, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 2.505050505050505, | |
| "grad_norm": 0.3176406919956207, | |
| "learning_rate": 5.854085412069726e-05, | |
| "loss": 0.1776, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 2.5084175084175087, | |
| "grad_norm": 0.31768059730529785, | |
| "learning_rate": 5.842498406075363e-05, | |
| "loss": 0.1767, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 2.5117845117845117, | |
| "grad_norm": 0.32042112946510315, | |
| "learning_rate": 5.8309067414778404e-05, | |
| "loss": 0.1783, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 2.515151515151515, | |
| "grad_norm": 0.2806006669998169, | |
| "learning_rate": 5.81931048237338e-05, | |
| "loss": 0.1839, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 2.5185185185185186, | |
| "grad_norm": 0.3236968517303467, | |
| "learning_rate": 5.807709692883612e-05, | |
| "loss": 0.1798, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 2.5218855218855216, | |
| "grad_norm": 0.3542146682739258, | |
| "learning_rate": 5.796104437155213e-05, | |
| "loss": 0.1768, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 2.525252525252525, | |
| "grad_norm": 0.3283354938030243, | |
| "learning_rate": 5.7844947793595584e-05, | |
| "loss": 0.1817, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 2.5286195286195285, | |
| "grad_norm": 0.35863611102104187, | |
| "learning_rate": 5.7728807836923624e-05, | |
| "loss": 0.1886, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 2.531986531986532, | |
| "grad_norm": 0.32639971375465393, | |
| "learning_rate": 5.7612625143733325e-05, | |
| "loss": 0.1835, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 2.5353535353535355, | |
| "grad_norm": 0.36184388399124146, | |
| "learning_rate": 5.749640035645798e-05, | |
| "loss": 0.1858, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 2.538720538720539, | |
| "grad_norm": 0.35223841667175293, | |
| "learning_rate": 5.73801341177637e-05, | |
| "loss": 0.1745, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 2.542087542087542, | |
| "grad_norm": 0.40928134322166443, | |
| "learning_rate": 5.7263827070545775e-05, | |
| "loss": 0.1889, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 2.5454545454545454, | |
| "grad_norm": 0.33686789870262146, | |
| "learning_rate": 5.714747985792516e-05, | |
| "loss": 0.1782, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 2.548821548821549, | |
| "grad_norm": 0.36779022216796875, | |
| "learning_rate": 5.7031093123244925e-05, | |
| "loss": 0.1824, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 2.5521885521885523, | |
| "grad_norm": 0.412527471780777, | |
| "learning_rate": 5.691466751006662e-05, | |
| "loss": 0.1761, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 2.5555555555555554, | |
| "grad_norm": 0.49104923009872437, | |
| "learning_rate": 5.679820366216684e-05, | |
| "loss": 0.1861, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 2.558922558922559, | |
| "grad_norm": 0.3897760808467865, | |
| "learning_rate": 5.668170222353355e-05, | |
| "loss": 0.188, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 2.5622895622895623, | |
| "grad_norm": 0.37621814012527466, | |
| "learning_rate": 5.656516383836262e-05, | |
| "loss": 0.1682, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 2.5656565656565657, | |
| "grad_norm": 0.4133719801902771, | |
| "learning_rate": 5.644858915105414e-05, | |
| "loss": 0.189, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 2.569023569023569, | |
| "grad_norm": 0.4049244225025177, | |
| "learning_rate": 5.633197880620904e-05, | |
| "loss": 0.1889, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 2.5723905723905722, | |
| "grad_norm": 0.46882614493370056, | |
| "learning_rate": 5.621533344862531e-05, | |
| "loss": 0.1825, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 2.5757575757575757, | |
| "grad_norm": 0.3228312134742737, | |
| "learning_rate": 5.6098653723294604e-05, | |
| "loss": 0.1775, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 2.579124579124579, | |
| "grad_norm": 0.4066309630870819, | |
| "learning_rate": 5.598194027539862e-05, | |
| "loss": 0.1818, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 2.5824915824915826, | |
| "grad_norm": 0.32752707600593567, | |
| "learning_rate": 5.586519375030549e-05, | |
| "loss": 0.1806, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 2.5858585858585856, | |
| "grad_norm": 0.35238194465637207, | |
| "learning_rate": 5.574841479356627e-05, | |
| "loss": 0.1719, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 2.589225589225589, | |
| "grad_norm": 0.3612888753414154, | |
| "learning_rate": 5.563160405091136e-05, | |
| "loss": 0.1646, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 2.5925925925925926, | |
| "grad_norm": 0.34333643317222595, | |
| "learning_rate": 5.551476216824687e-05, | |
| "loss": 0.1727, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 2.595959595959596, | |
| "grad_norm": 0.47696343064308167, | |
| "learning_rate": 5.5397889791651145e-05, | |
| "loss": 0.1844, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 2.5993265993265995, | |
| "grad_norm": 0.3781977891921997, | |
| "learning_rate": 5.5280987567371134e-05, | |
| "loss": 0.1753, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 2.602693602693603, | |
| "grad_norm": 0.4676324129104614, | |
| "learning_rate": 5.516405614181883e-05, | |
| "loss": 0.1839, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 2.606060606060606, | |
| "grad_norm": 0.3599601686000824, | |
| "learning_rate": 5.504709616156768e-05, | |
| "loss": 0.1786, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 2.6094276094276094, | |
| "grad_norm": 0.3672841191291809, | |
| "learning_rate": 5.493010827334904e-05, | |
| "loss": 0.185, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 2.612794612794613, | |
| "grad_norm": 0.44677501916885376, | |
| "learning_rate": 5.4813093124048585e-05, | |
| "loss": 0.1951, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 2.616161616161616, | |
| "grad_norm": 0.3104342818260193, | |
| "learning_rate": 5.4696051360702725e-05, | |
| "loss": 0.1767, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 2.6195286195286194, | |
| "grad_norm": 0.3832162618637085, | |
| "learning_rate": 5.457898363049504e-05, | |
| "loss": 0.1808, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 2.622895622895623, | |
| "grad_norm": 0.4450579583644867, | |
| "learning_rate": 5.446189058075265e-05, | |
| "loss": 0.1829, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 2.6262626262626263, | |
| "grad_norm": 0.31873807311058044, | |
| "learning_rate": 5.434477285894277e-05, | |
| "loss": 0.182, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 2.6296296296296298, | |
| "grad_norm": 0.42338353395462036, | |
| "learning_rate": 5.4227631112668955e-05, | |
| "loss": 0.1931, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 2.6329966329966332, | |
| "grad_norm": 0.2841234803199768, | |
| "learning_rate": 5.411046598966764e-05, | |
| "loss": 0.1716, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 2.6363636363636362, | |
| "grad_norm": 0.4316200315952301, | |
| "learning_rate": 5.3993278137804505e-05, | |
| "loss": 0.1821, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 2.6397306397306397, | |
| "grad_norm": 0.3666791021823883, | |
| "learning_rate": 5.387606820507095e-05, | |
| "loss": 0.1828, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 2.643097643097643, | |
| "grad_norm": 0.37707844376564026, | |
| "learning_rate": 5.375883683958041e-05, | |
| "loss": 0.1731, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 2.6464646464646466, | |
| "grad_norm": 0.41289880871772766, | |
| "learning_rate": 5.3641584689564875e-05, | |
| "loss": 0.188, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 2.6498316498316496, | |
| "grad_norm": 0.3792349696159363, | |
| "learning_rate": 5.3524312403371257e-05, | |
| "loss": 0.1859, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 2.653198653198653, | |
| "grad_norm": 0.33042725920677185, | |
| "learning_rate": 5.34070206294578e-05, | |
| "loss": 0.1802, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 2.6565656565656566, | |
| "grad_norm": 0.423698365688324, | |
| "learning_rate": 5.3289710016390535e-05, | |
| "loss": 0.1786, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 2.65993265993266, | |
| "grad_norm": 0.3488188683986664, | |
| "learning_rate": 5.3172381212839614e-05, | |
| "loss": 0.174, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 2.6632996632996635, | |
| "grad_norm": 0.4271824359893799, | |
| "learning_rate": 5.3055034867575826e-05, | |
| "loss": 0.182, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 2.6666666666666665, | |
| "grad_norm": 0.3261590003967285, | |
| "learning_rate": 5.293767162946691e-05, | |
| "loss": 0.1556, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 2.67003367003367, | |
| "grad_norm": 0.35868170857429504, | |
| "learning_rate": 5.282029214747404e-05, | |
| "loss": 0.1693, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 2.6734006734006734, | |
| "grad_norm": 0.44755569100379944, | |
| "learning_rate": 5.270289707064822e-05, | |
| "loss": 0.1895, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 2.676767676767677, | |
| "grad_norm": 0.42198193073272705, | |
| "learning_rate": 5.258548704812667e-05, | |
| "loss": 0.1873, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 2.68013468013468, | |
| "grad_norm": 0.518511176109314, | |
| "learning_rate": 5.246806272912925e-05, | |
| "loss": 0.1769, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 2.6835016835016834, | |
| "grad_norm": 0.38407737016677856, | |
| "learning_rate": 5.2350624762954884e-05, | |
| "loss": 0.1836, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 2.686868686868687, | |
| "grad_norm": 0.3742847740650177, | |
| "learning_rate": 5.223317379897794e-05, | |
| "loss": 0.1726, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 2.6902356902356903, | |
| "grad_norm": 0.43547365069389343, | |
| "learning_rate": 5.211571048664469e-05, | |
| "loss": 0.1772, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 2.6936026936026938, | |
| "grad_norm": 0.31277012825012207, | |
| "learning_rate": 5.199823547546963e-05, | |
| "loss": 0.179, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 2.6936026936026938, | |
| "eval_loss": 0.0905195027589798, | |
| "eval_runtime": 33.1678, | |
| "eval_samples_per_second": 30.15, | |
| "eval_steps_per_second": 1.899, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 2.6969696969696972, | |
| "grad_norm": 0.4289379119873047, | |
| "learning_rate": 5.188074941503203e-05, | |
| "loss": 0.1824, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 2.7003367003367003, | |
| "grad_norm": 0.3402915596961975, | |
| "learning_rate": 5.1763252954972167e-05, | |
| "loss": 0.1723, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 2.7037037037037037, | |
| "grad_norm": 0.36200371384620667, | |
| "learning_rate": 5.164574674498788e-05, | |
| "loss": 0.1871, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 2.707070707070707, | |
| "grad_norm": 0.39011457562446594, | |
| "learning_rate": 5.152823143483092e-05, | |
| "loss": 0.1686, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 2.71043771043771, | |
| "grad_norm": 0.2998808026313782, | |
| "learning_rate": 5.14107076743033e-05, | |
| "loss": 0.1728, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 2.7138047138047137, | |
| "grad_norm": 0.3582398593425751, | |
| "learning_rate": 5.129317611325385e-05, | |
| "loss": 0.1764, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 2.717171717171717, | |
| "grad_norm": 0.36014533042907715, | |
| "learning_rate": 5.117563740157444e-05, | |
| "loss": 0.1752, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 2.7205387205387206, | |
| "grad_norm": 0.38851645588874817, | |
| "learning_rate": 5.105809218919656e-05, | |
| "loss": 0.1873, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 2.723905723905724, | |
| "grad_norm": 0.40050944685935974, | |
| "learning_rate": 5.094054112608758e-05, | |
| "loss": 0.1839, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 2.7272727272727275, | |
| "grad_norm": 0.4239293038845062, | |
| "learning_rate": 5.082298486224728e-05, | |
| "loss": 0.174, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 2.7306397306397305, | |
| "grad_norm": 0.34228378534317017, | |
| "learning_rate": 5.070542404770413e-05, | |
| "loss": 0.1751, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 2.734006734006734, | |
| "grad_norm": 0.3773192763328552, | |
| "learning_rate": 5.058785933251184e-05, | |
| "loss": 0.1814, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 2.7373737373737375, | |
| "grad_norm": 0.3601987659931183, | |
| "learning_rate": 5.047029136674563e-05, | |
| "loss": 0.1854, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 2.7407407407407405, | |
| "grad_norm": 0.33943113684654236, | |
| "learning_rate": 5.035272080049871e-05, | |
| "loss": 0.1849, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 2.744107744107744, | |
| "grad_norm": 0.4696902334690094, | |
| "learning_rate": 5.0235148283878675e-05, | |
| "loss": 0.1879, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 2.7474747474747474, | |
| "grad_norm": 0.3241429626941681, | |
| "learning_rate": 5.011757446700392e-05, | |
| "loss": 0.1746, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 2.750841750841751, | |
| "grad_norm": 0.4660000205039978, | |
| "learning_rate": 5e-05, | |
| "loss": 0.1808, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 2.7542087542087543, | |
| "grad_norm": 0.3961378335952759, | |
| "learning_rate": 4.988242553299608e-05, | |
| "loss": 0.1779, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 2.757575757575758, | |
| "grad_norm": 0.41490015387535095, | |
| "learning_rate": 4.9764851716121337e-05, | |
| "loss": 0.1785, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 2.760942760942761, | |
| "grad_norm": 0.4009009003639221, | |
| "learning_rate": 4.964727919950131e-05, | |
| "loss": 0.1722, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 2.7643097643097643, | |
| "grad_norm": 0.3752220869064331, | |
| "learning_rate": 4.95297086332544e-05, | |
| "loss": 0.1829, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 2.7676767676767677, | |
| "grad_norm": 0.33247944712638855, | |
| "learning_rate": 4.941214066748818e-05, | |
| "loss": 0.1805, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 2.771043771043771, | |
| "grad_norm": 0.36337146162986755, | |
| "learning_rate": 4.929457595229589e-05, | |
| "loss": 0.1823, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 2.774410774410774, | |
| "grad_norm": 0.3234810531139374, | |
| "learning_rate": 4.9177015137752726e-05, | |
| "loss": 0.1699, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 2.7777777777777777, | |
| "grad_norm": 0.37526530027389526, | |
| "learning_rate": 4.905945887391242e-05, | |
| "loss": 0.1706, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 2.781144781144781, | |
| "grad_norm": 0.3150089383125305, | |
| "learning_rate": 4.8941907810803436e-05, | |
| "loss": 0.1742, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 2.7845117845117846, | |
| "grad_norm": 0.441749632358551, | |
| "learning_rate": 4.882436259842556e-05, | |
| "loss": 0.1766, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 2.787878787878788, | |
| "grad_norm": 0.3168331980705261, | |
| "learning_rate": 4.870682388674616e-05, | |
| "loss": 0.1777, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 2.791245791245791, | |
| "grad_norm": 0.3491107225418091, | |
| "learning_rate": 4.858929232569671e-05, | |
| "loss": 0.1798, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 2.7946127946127945, | |
| "grad_norm": 0.32505708932876587, | |
| "learning_rate": 4.84717685651691e-05, | |
| "loss": 0.1691, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 2.797979797979798, | |
| "grad_norm": 0.3730446994304657, | |
| "learning_rate": 4.8354253255012134e-05, | |
| "loss": 0.1851, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 2.8013468013468015, | |
| "grad_norm": 0.3792212903499603, | |
| "learning_rate": 4.8236747045027845e-05, | |
| "loss": 0.1855, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 2.8047138047138045, | |
| "grad_norm": 0.35931986570358276, | |
| "learning_rate": 4.811925058496798e-05, | |
| "loss": 0.1814, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 2.808080808080808, | |
| "grad_norm": 0.3025000989437103, | |
| "learning_rate": 4.800176452453038e-05, | |
| "loss": 0.1808, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 2.8114478114478114, | |
| "grad_norm": 0.35131269693374634, | |
| "learning_rate": 4.788428951335534e-05, | |
| "loss": 0.1779, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 2.814814814814815, | |
| "grad_norm": 0.3271830379962921, | |
| "learning_rate": 4.776682620102208e-05, | |
| "loss": 0.1872, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 2.8181818181818183, | |
| "grad_norm": 0.33299174904823303, | |
| "learning_rate": 4.7649375237045135e-05, | |
| "loss": 0.1696, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 2.821548821548822, | |
| "grad_norm": 0.36610648036003113, | |
| "learning_rate": 4.7531937270870755e-05, | |
| "loss": 0.1811, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 2.824915824915825, | |
| "grad_norm": 0.3686729371547699, | |
| "learning_rate": 4.741451295187332e-05, | |
| "loss": 0.183, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 2.8282828282828283, | |
| "grad_norm": 0.40744999051094055, | |
| "learning_rate": 4.729710292935178e-05, | |
| "loss": 0.1658, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 2.8316498316498318, | |
| "grad_norm": 0.3003140985965729, | |
| "learning_rate": 4.717970785252595e-05, | |
| "loss": 0.1709, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 2.8350168350168348, | |
| "grad_norm": 0.3362986147403717, | |
| "learning_rate": 4.706232837053311e-05, | |
| "loss": 0.1538, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 2.8383838383838382, | |
| "grad_norm": 0.3673784136772156, | |
| "learning_rate": 4.6944965132424185e-05, | |
| "loss": 0.1718, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 2.8417508417508417, | |
| "grad_norm": 0.37941762804985046, | |
| "learning_rate": 4.682761878716039e-05, | |
| "loss": 0.1683, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 2.845117845117845, | |
| "grad_norm": 0.4599365293979645, | |
| "learning_rate": 4.671028998360947e-05, | |
| "loss": 0.1746, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 2.8484848484848486, | |
| "grad_norm": 0.40842747688293457, | |
| "learning_rate": 4.659297937054221e-05, | |
| "loss": 0.1743, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 2.851851851851852, | |
| "grad_norm": 0.44149237871170044, | |
| "learning_rate": 4.647568759662876e-05, | |
| "loss": 0.1658, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 2.855218855218855, | |
| "grad_norm": 0.33786505460739136, | |
| "learning_rate": 4.635841531043514e-05, | |
| "loss": 0.1715, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 2.8585858585858586, | |
| "grad_norm": 0.38703563809394836, | |
| "learning_rate": 4.6241163160419616e-05, | |
| "loss": 0.1885, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 2.861952861952862, | |
| "grad_norm": 0.33466836810112, | |
| "learning_rate": 4.612393179492907e-05, | |
| "loss": 0.1727, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 2.865319865319865, | |
| "grad_norm": 0.3569240868091583, | |
| "learning_rate": 4.600672186219551e-05, | |
| "loss": 0.1594, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 2.8686868686868685, | |
| "grad_norm": 0.3640967905521393, | |
| "learning_rate": 4.588953401033237e-05, | |
| "loss": 0.1716, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 2.872053872053872, | |
| "grad_norm": 0.3225606679916382, | |
| "learning_rate": 4.577236888733105e-05, | |
| "loss": 0.1737, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 2.8754208754208754, | |
| "grad_norm": 0.4322353005409241, | |
| "learning_rate": 4.565522714105723e-05, | |
| "loss": 0.1818, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 2.878787878787879, | |
| "grad_norm": 0.43442046642303467, | |
| "learning_rate": 4.553810941924735e-05, | |
| "loss": 0.1707, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 2.8821548821548824, | |
| "grad_norm": 0.4079204797744751, | |
| "learning_rate": 4.542101636950497e-05, | |
| "loss": 0.1686, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 2.8855218855218854, | |
| "grad_norm": 0.4360505938529968, | |
| "learning_rate": 4.5303948639297287e-05, | |
| "loss": 0.1755, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 2.888888888888889, | |
| "grad_norm": 0.3284655213356018, | |
| "learning_rate": 4.518690687595142e-05, | |
| "loss": 0.1743, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 2.8922558922558923, | |
| "grad_norm": 0.41207629442214966, | |
| "learning_rate": 4.5069891726650974e-05, | |
| "loss": 0.1805, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 2.8956228956228958, | |
| "grad_norm": 0.42025890946388245, | |
| "learning_rate": 4.495290383843233e-05, | |
| "loss": 0.1646, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 2.898989898989899, | |
| "grad_norm": 0.2703207731246948, | |
| "learning_rate": 4.483594385818118e-05, | |
| "loss": 0.1772, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 2.9023569023569022, | |
| "grad_norm": 0.4024050831794739, | |
| "learning_rate": 4.4719012432628884e-05, | |
| "loss": 0.1713, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 2.9057239057239057, | |
| "grad_norm": 0.32182058691978455, | |
| "learning_rate": 4.460211020834887e-05, | |
| "loss": 0.164, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 2.909090909090909, | |
| "grad_norm": 0.3172072172164917, | |
| "learning_rate": 4.448523783175315e-05, | |
| "loss": 0.1803, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 2.9124579124579126, | |
| "grad_norm": 0.3312835395336151, | |
| "learning_rate": 4.436839594908866e-05, | |
| "loss": 0.1772, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 2.915824915824916, | |
| "grad_norm": 0.32331568002700806, | |
| "learning_rate": 4.425158520643372e-05, | |
| "loss": 0.1831, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 2.919191919191919, | |
| "grad_norm": 0.38694921135902405, | |
| "learning_rate": 4.413480624969452e-05, | |
| "loss": 0.1845, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 2.9225589225589226, | |
| "grad_norm": 0.34721943736076355, | |
| "learning_rate": 4.401805972460139e-05, | |
| "loss": 0.1687, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 2.925925925925926, | |
| "grad_norm": 0.3954653739929199, | |
| "learning_rate": 4.39013462767054e-05, | |
| "loss": 0.1742, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 2.929292929292929, | |
| "grad_norm": 0.4040646553039551, | |
| "learning_rate": 4.378466655137471e-05, | |
| "loss": 0.1741, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 2.9326599326599325, | |
| "grad_norm": 0.38577964901924133, | |
| "learning_rate": 4.3668021193790974e-05, | |
| "loss": 0.1679, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 2.936026936026936, | |
| "grad_norm": 0.42223554849624634, | |
| "learning_rate": 4.3551410848945865e-05, | |
| "loss": 0.1729, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 2.9393939393939394, | |
| "grad_norm": 0.32160666584968567, | |
| "learning_rate": 4.343483616163739e-05, | |
| "loss": 0.179, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 2.942760942760943, | |
| "grad_norm": 0.4601185917854309, | |
| "learning_rate": 4.3318297776466456e-05, | |
| "loss": 0.1827, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 2.9461279461279464, | |
| "grad_norm": 0.3302989900112152, | |
| "learning_rate": 4.320179633783317e-05, | |
| "loss": 0.1769, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 2.9494949494949494, | |
| "grad_norm": 0.36257827281951904, | |
| "learning_rate": 4.3085332489933384e-05, | |
| "loss": 0.1715, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 2.952861952861953, | |
| "grad_norm": 0.33515387773513794, | |
| "learning_rate": 4.29689068767551e-05, | |
| "loss": 0.1802, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 2.9562289562289563, | |
| "grad_norm": 0.41977766156196594, | |
| "learning_rate": 4.285252014207485e-05, | |
| "loss": 0.1766, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 2.9595959595959593, | |
| "grad_norm": 0.2881879210472107, | |
| "learning_rate": 4.273617292945425e-05, | |
| "loss": 0.1807, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 2.962962962962963, | |
| "grad_norm": 0.42628830671310425, | |
| "learning_rate": 4.2619865882236313e-05, | |
| "loss": 0.1803, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 2.9663299663299663, | |
| "grad_norm": 0.35057729482650757, | |
| "learning_rate": 4.2503599643542024e-05, | |
| "loss": 0.1603, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 2.9696969696969697, | |
| "grad_norm": 0.3465060889720917, | |
| "learning_rate": 4.2387374856266686e-05, | |
| "loss": 0.1803, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 2.973063973063973, | |
| "grad_norm": 0.33382153511047363, | |
| "learning_rate": 4.227119216307637e-05, | |
| "loss": 0.1637, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 2.9764309764309766, | |
| "grad_norm": 0.3177237808704376, | |
| "learning_rate": 4.215505220640442e-05, | |
| "loss": 0.17, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 2.9797979797979797, | |
| "grad_norm": 0.3879055678844452, | |
| "learning_rate": 4.203895562844789e-05, | |
| "loss": 0.1637, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 2.983164983164983, | |
| "grad_norm": 0.367330402135849, | |
| "learning_rate": 4.1922903071163886e-05, | |
| "loss": 0.1722, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 2.9865319865319866, | |
| "grad_norm": 0.38031652569770813, | |
| "learning_rate": 4.18068951762662e-05, | |
| "loss": 0.162, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 2.98989898989899, | |
| "grad_norm": 0.2925989031791687, | |
| "learning_rate": 4.16909325852216e-05, | |
| "loss": 0.1675, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 2.993265993265993, | |
| "grad_norm": 0.4009501338005066, | |
| "learning_rate": 4.1575015939246384e-05, | |
| "loss": 0.1783, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 2.9966329966329965, | |
| "grad_norm": 0.32625624537467957, | |
| "learning_rate": 4.145914587930275e-05, | |
| "loss": 0.1718, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.4165663421154022, | |
| "learning_rate": 4.134332304609533e-05, | |
| "loss": 0.1755, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 3.0033670033670035, | |
| "grad_norm": 0.2938865125179291, | |
| "learning_rate": 4.1227548080067635e-05, | |
| "loss": 0.1674, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 3.006734006734007, | |
| "grad_norm": 0.3969976305961609, | |
| "learning_rate": 4.1111821621398446e-05, | |
| "loss": 0.1654, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 3.01010101010101, | |
| "grad_norm": 0.3079482316970825, | |
| "learning_rate": 4.099614430999834e-05, | |
| "loss": 0.1669, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 3.0134680134680134, | |
| "grad_norm": 0.3691355288028717, | |
| "learning_rate": 4.088051678550617e-05, | |
| "loss": 0.1634, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 3.016835016835017, | |
| "grad_norm": 0.30337515473365784, | |
| "learning_rate": 4.0764939687285434e-05, | |
| "loss": 0.1673, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 3.0202020202020203, | |
| "grad_norm": 0.3505813181400299, | |
| "learning_rate": 4.064941365442084e-05, | |
| "loss": 0.1639, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 3.0235690235690234, | |
| "grad_norm": 0.46245038509368896, | |
| "learning_rate": 4.053393932571472e-05, | |
| "loss": 0.1533, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 3.026936026936027, | |
| "grad_norm": 0.3629229962825775, | |
| "learning_rate": 4.0418517339683474e-05, | |
| "loss": 0.1758, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 3.0303030303030303, | |
| "grad_norm": 0.3285175859928131, | |
| "learning_rate": 4.030314833455413e-05, | |
| "loss": 0.1636, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 3.0303030303030303, | |
| "eval_loss": 0.0878610908985138, | |
| "eval_runtime": 33.3117, | |
| "eval_samples_per_second": 30.019, | |
| "eval_steps_per_second": 1.891, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 3.0336700336700337, | |
| "grad_norm": 0.3374682366847992, | |
| "learning_rate": 4.0187832948260705e-05, | |
| "loss": 0.1574, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 3.037037037037037, | |
| "grad_norm": 0.337251752614975, | |
| "learning_rate": 4.007257181844078e-05, | |
| "loss": 0.1621, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 3.04040404040404, | |
| "grad_norm": 0.3177763521671295, | |
| "learning_rate": 3.995736558243186e-05, | |
| "loss": 0.1577, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 3.0437710437710437, | |
| "grad_norm": 0.32198965549468994, | |
| "learning_rate": 3.9842214877267986e-05, | |
| "loss": 0.1523, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 3.047138047138047, | |
| "grad_norm": 0.27595996856689453, | |
| "learning_rate": 3.9727120339676084e-05, | |
| "loss": 0.1611, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 3.0505050505050506, | |
| "grad_norm": 0.30028364062309265, | |
| "learning_rate": 3.961208260607253e-05, | |
| "loss": 0.1419, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 3.053872053872054, | |
| "grad_norm": 0.34587720036506653, | |
| "learning_rate": 3.94971023125596e-05, | |
| "loss": 0.1682, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 3.057239057239057, | |
| "grad_norm": 0.35993483662605286, | |
| "learning_rate": 3.938218009492193e-05, | |
| "loss": 0.1607, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 3.0606060606060606, | |
| "grad_norm": 0.390819787979126, | |
| "learning_rate": 3.926731658862307e-05, | |
| "loss": 0.1628, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 3.063973063973064, | |
| "grad_norm": 0.40493595600128174, | |
| "learning_rate": 3.91525124288019e-05, | |
| "loss": 0.1388, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 3.0673400673400675, | |
| "grad_norm": 0.4477592408657074, | |
| "learning_rate": 3.903776825026911e-05, | |
| "loss": 0.1616, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 3.0707070707070705, | |
| "grad_norm": 0.42329877614974976, | |
| "learning_rate": 3.892308468750379e-05, | |
| "loss": 0.1538, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 3.074074074074074, | |
| "grad_norm": 0.40350863337516785, | |
| "learning_rate": 3.8808462374649803e-05, | |
| "loss": 0.1525, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 3.0774410774410774, | |
| "grad_norm": 0.41857612133026123, | |
| "learning_rate": 3.869390194551235e-05, | |
| "loss": 0.1557, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 3.080808080808081, | |
| "grad_norm": 0.35566312074661255, | |
| "learning_rate": 3.857940403355444e-05, | |
| "loss": 0.1592, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 3.0841750841750843, | |
| "grad_norm": 0.4137890636920929, | |
| "learning_rate": 3.846496927189339e-05, | |
| "loss": 0.15, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 3.0875420875420874, | |
| "grad_norm": 0.3321918845176697, | |
| "learning_rate": 3.835059829329735e-05, | |
| "loss": 0.1572, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 3.090909090909091, | |
| "grad_norm": 0.31146201491355896, | |
| "learning_rate": 3.823629173018173e-05, | |
| "loss": 0.1598, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 3.0942760942760943, | |
| "grad_norm": 0.3829994797706604, | |
| "learning_rate": 3.812205021460582e-05, | |
| "loss": 0.1709, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 3.0976430976430978, | |
| "grad_norm": 0.3082868158817291, | |
| "learning_rate": 3.800787437826918e-05, | |
| "loss": 0.1711, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 3.101010101010101, | |
| "grad_norm": 0.3517824709415436, | |
| "learning_rate": 3.789376485250821e-05, | |
| "loss": 0.1628, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 3.1043771043771042, | |
| "grad_norm": 0.4294639825820923, | |
| "learning_rate": 3.777972226829264e-05, | |
| "loss": 0.1722, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 3.1077441077441077, | |
| "grad_norm": 0.4236588478088379, | |
| "learning_rate": 3.7665747256222075e-05, | |
| "loss": 0.1624, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 3.111111111111111, | |
| "grad_norm": 0.4085109829902649, | |
| "learning_rate": 3.755184044652244e-05, | |
| "loss": 0.1568, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 3.1144781144781146, | |
| "grad_norm": 0.48974359035491943, | |
| "learning_rate": 3.7438002469042565e-05, | |
| "loss": 0.1492, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 3.1178451178451176, | |
| "grad_norm": 0.4154088497161865, | |
| "learning_rate": 3.732423395325063e-05, | |
| "loss": 0.1571, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 3.121212121212121, | |
| "grad_norm": 0.3798883259296417, | |
| "learning_rate": 3.721053552823078e-05, | |
| "loss": 0.1578, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 3.1245791245791246, | |
| "grad_norm": 0.40717121958732605, | |
| "learning_rate": 3.7096907822679564e-05, | |
| "loss": 0.1617, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 3.127946127946128, | |
| "grad_norm": 0.3714147210121155, | |
| "learning_rate": 3.698335146490246e-05, | |
| "loss": 0.1618, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 3.1313131313131315, | |
| "grad_norm": 0.47017040848731995, | |
| "learning_rate": 3.68698670828105e-05, | |
| "loss": 0.1665, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 3.1346801346801345, | |
| "grad_norm": 0.3854427635669708, | |
| "learning_rate": 3.675645530391665e-05, | |
| "loss": 0.1585, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 3.138047138047138, | |
| "grad_norm": 0.36705338954925537, | |
| "learning_rate": 3.664311675533247e-05, | |
| "loss": 0.1648, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 3.1414141414141414, | |
| "grad_norm": 0.3947654366493225, | |
| "learning_rate": 3.6529852063764545e-05, | |
| "loss": 0.1763, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 3.144781144781145, | |
| "grad_norm": 0.4430708885192871, | |
| "learning_rate": 3.641666185551111e-05, | |
| "loss": 0.173, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 3.148148148148148, | |
| "grad_norm": 0.3862123191356659, | |
| "learning_rate": 3.630354675645853e-05, | |
| "loss": 0.1509, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 3.1515151515151514, | |
| "grad_norm": 0.4604793190956116, | |
| "learning_rate": 3.619050739207782e-05, | |
| "loss": 0.1625, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 3.154882154882155, | |
| "grad_norm": 0.37315207719802856, | |
| "learning_rate": 3.607754438742129e-05, | |
| "loss": 0.1561, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 3.1582491582491583, | |
| "grad_norm": 0.4637494385242462, | |
| "learning_rate": 3.596465836711896e-05, | |
| "loss": 0.1523, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 3.1616161616161618, | |
| "grad_norm": 0.45701348781585693, | |
| "learning_rate": 3.585184995537518e-05, | |
| "loss": 0.1577, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 3.164983164983165, | |
| "grad_norm": 0.41691821813583374, | |
| "learning_rate": 3.5739119775965165e-05, | |
| "loss": 0.1571, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 3.1683501683501682, | |
| "grad_norm": 0.3542643189430237, | |
| "learning_rate": 3.562646845223153e-05, | |
| "loss": 0.1487, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 3.1717171717171717, | |
| "grad_norm": 0.42012447118759155, | |
| "learning_rate": 3.551389660708088e-05, | |
| "loss": 0.157, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 3.175084175084175, | |
| "grad_norm": 0.3715340793132782, | |
| "learning_rate": 3.540140486298035e-05, | |
| "loss": 0.1562, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 3.1784511784511786, | |
| "grad_norm": 0.4073311388492584, | |
| "learning_rate": 3.528899384195409e-05, | |
| "loss": 0.1602, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 3.1818181818181817, | |
| "grad_norm": 0.3359941244125366, | |
| "learning_rate": 3.5176664165579986e-05, | |
| "loss": 0.1635, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 3.185185185185185, | |
| "grad_norm": 0.3938975930213928, | |
| "learning_rate": 3.506441645498605e-05, | |
| "loss": 0.159, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 3.1885521885521886, | |
| "grad_norm": 0.42345231771469116, | |
| "learning_rate": 3.495225133084712e-05, | |
| "loss": 0.1541, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 3.191919191919192, | |
| "grad_norm": 0.36092641949653625, | |
| "learning_rate": 3.4840169413381305e-05, | |
| "loss": 0.158, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 3.1952861952861955, | |
| "grad_norm": 0.3029501438140869, | |
| "learning_rate": 3.4728171322346694e-05, | |
| "loss": 0.1599, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 3.1986531986531985, | |
| "grad_norm": 0.3518286645412445, | |
| "learning_rate": 3.46162576770378e-05, | |
| "loss": 0.1656, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 3.202020202020202, | |
| "grad_norm": 0.3297809362411499, | |
| "learning_rate": 3.450442909628224e-05, | |
| "loss": 0.1712, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 3.2053872053872055, | |
| "grad_norm": 0.3591551184654236, | |
| "learning_rate": 3.439268619843724e-05, | |
| "loss": 0.1665, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 3.208754208754209, | |
| "grad_norm": 0.31120550632476807, | |
| "learning_rate": 3.428102960138625e-05, | |
| "loss": 0.1604, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 3.212121212121212, | |
| "grad_norm": 0.3053344488143921, | |
| "learning_rate": 3.4169459922535485e-05, | |
| "loss": 0.1567, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 3.2154882154882154, | |
| "grad_norm": 0.3286115229129791, | |
| "learning_rate": 3.405797777881059e-05, | |
| "loss": 0.1516, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 3.218855218855219, | |
| "grad_norm": 0.313930869102478, | |
| "learning_rate": 3.3946583786653184e-05, | |
| "loss": 0.1624, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 3.2222222222222223, | |
| "grad_norm": 0.3518214821815491, | |
| "learning_rate": 3.38352785620174e-05, | |
| "loss": 0.1501, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 3.225589225589226, | |
| "grad_norm": 0.4037775993347168, | |
| "learning_rate": 3.37240627203666e-05, | |
| "loss": 0.1508, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 3.228956228956229, | |
| "grad_norm": 0.3002485930919647, | |
| "learning_rate": 3.3612936876669834e-05, | |
| "loss": 0.1667, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 3.2323232323232323, | |
| "grad_norm": 0.35778674483299255, | |
| "learning_rate": 3.350190164539855e-05, | |
| "loss": 0.1659, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 3.2356902356902357, | |
| "grad_norm": 0.3302362263202667, | |
| "learning_rate": 3.3390957640523145e-05, | |
| "loss": 0.161, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 3.239057239057239, | |
| "grad_norm": 0.3861044943332672, | |
| "learning_rate": 3.328010547550959e-05, | |
| "loss": 0.1678, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 3.242424242424242, | |
| "grad_norm": 0.3668946921825409, | |
| "learning_rate": 3.316934576331598e-05, | |
| "loss": 0.1487, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 3.2457912457912457, | |
| "grad_norm": 0.3028615117073059, | |
| "learning_rate": 3.3058679116389244e-05, | |
| "loss": 0.1576, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 3.249158249158249, | |
| "grad_norm": 0.4418306350708008, | |
| "learning_rate": 3.29481061466617e-05, | |
| "loss": 0.1471, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 3.2525252525252526, | |
| "grad_norm": 0.3218114674091339, | |
| "learning_rate": 3.283762746554766e-05, | |
| "loss": 0.1595, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 3.255892255892256, | |
| "grad_norm": 0.3931596875190735, | |
| "learning_rate": 3.2727243683940045e-05, | |
| "loss": 0.1651, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 3.259259259259259, | |
| "grad_norm": 0.38177919387817383, | |
| "learning_rate": 3.2616955412207086e-05, | |
| "loss": 0.1512, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 3.2626262626262625, | |
| "grad_norm": 0.3416683077812195, | |
| "learning_rate": 3.250676326018882e-05, | |
| "loss": 0.1479, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 3.265993265993266, | |
| "grad_norm": 0.478951632976532, | |
| "learning_rate": 3.239666783719385e-05, | |
| "loss": 0.1668, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 3.2693602693602695, | |
| "grad_norm": 0.3602767586708069, | |
| "learning_rate": 3.2286669751995904e-05, | |
| "loss": 0.1482, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 3.2727272727272725, | |
| "grad_norm": 0.4089204668998718, | |
| "learning_rate": 3.217676961283044e-05, | |
| "loss": 0.1549, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 3.276094276094276, | |
| "grad_norm": 0.40424418449401855, | |
| "learning_rate": 3.2066968027391374e-05, | |
| "loss": 0.1688, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 3.2794612794612794, | |
| "grad_norm": 0.31489500403404236, | |
| "learning_rate": 3.195726560282763e-05, | |
| "loss": 0.1583, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 3.282828282828283, | |
| "grad_norm": 0.4403363764286041, | |
| "learning_rate": 3.184766294573983e-05, | |
| "loss": 0.1546, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 3.2861952861952863, | |
| "grad_norm": 0.29121315479278564, | |
| "learning_rate": 3.1738160662176956e-05, | |
| "loss": 0.1608, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 3.28956228956229, | |
| "grad_norm": 0.4291994571685791, | |
| "learning_rate": 3.162875935763294e-05, | |
| "loss": 0.1569, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 3.292929292929293, | |
| "grad_norm": 0.3442822992801666, | |
| "learning_rate": 3.151945963704334e-05, | |
| "loss": 0.1695, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 3.2962962962962963, | |
| "grad_norm": 0.5204855799674988, | |
| "learning_rate": 3.1410262104782085e-05, | |
| "loss": 0.1514, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 3.2996632996632997, | |
| "grad_norm": 0.6250646710395813, | |
| "learning_rate": 3.130116736465795e-05, | |
| "loss": 0.1695, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 3.303030303030303, | |
| "grad_norm": 0.3862772285938263, | |
| "learning_rate": 3.119217601991139e-05, | |
| "loss": 0.1487, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 3.3063973063973062, | |
| "grad_norm": 0.5631003975868225, | |
| "learning_rate": 3.1083288673211095e-05, | |
| "loss": 0.1699, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 3.3097643097643097, | |
| "grad_norm": 0.39467087388038635, | |
| "learning_rate": 3.0974505926650724e-05, | |
| "loss": 0.1582, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 3.313131313131313, | |
| "grad_norm": 0.5696914196014404, | |
| "learning_rate": 3.086582838174551e-05, | |
| "loss": 0.1658, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 3.3164983164983166, | |
| "grad_norm": 0.37162837386131287, | |
| "learning_rate": 3.0757256639429025e-05, | |
| "loss": 0.1472, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 3.31986531986532, | |
| "grad_norm": 0.3216390609741211, | |
| "learning_rate": 3.064879130004978e-05, | |
| "loss": 0.1607, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 3.323232323232323, | |
| "grad_norm": 0.6373658180236816, | |
| "learning_rate": 3.0540432963367905e-05, | |
| "loss": 0.1627, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 3.3265993265993266, | |
| "grad_norm": 0.2934598922729492, | |
| "learning_rate": 3.04321822285519e-05, | |
| "loss": 0.1593, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 3.32996632996633, | |
| "grad_norm": 0.5632359981536865, | |
| "learning_rate": 3.0324039694175233e-05, | |
| "loss": 0.1612, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 3.3333333333333335, | |
| "grad_norm": 0.5589274764060974, | |
| "learning_rate": 3.0216005958213144e-05, | |
| "loss": 0.1473, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 3.3367003367003365, | |
| "grad_norm": 0.4226700961589813, | |
| "learning_rate": 3.0108081618039167e-05, | |
| "loss": 0.1575, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 3.34006734006734, | |
| "grad_norm": 0.4167967438697815, | |
| "learning_rate": 3.0000267270422076e-05, | |
| "loss": 0.1575, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 3.3434343434343434, | |
| "grad_norm": 0.3462510406970978, | |
| "learning_rate": 2.9892563511522304e-05, | |
| "loss": 0.1445, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 3.346801346801347, | |
| "grad_norm": 0.40887391567230225, | |
| "learning_rate": 2.9784970936888857e-05, | |
| "loss": 0.1665, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 3.3501683501683504, | |
| "grad_norm": 0.44167807698249817, | |
| "learning_rate": 2.9677490141455916e-05, | |
| "loss": 0.1659, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 3.3535353535353534, | |
| "grad_norm": 0.423782080411911, | |
| "learning_rate": 2.9570121719539602e-05, | |
| "loss": 0.1613, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 3.356902356902357, | |
| "grad_norm": 0.3622366487979889, | |
| "learning_rate": 2.946286626483463e-05, | |
| "loss": 0.1549, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 3.3602693602693603, | |
| "grad_norm": 0.3742324113845825, | |
| "learning_rate": 2.935572437041111e-05, | |
| "loss": 0.166, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 3.3636363636363638, | |
| "grad_norm": 0.37756288051605225, | |
| "learning_rate": 2.924869662871117e-05, | |
| "loss": 0.1701, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 3.3670033670033668, | |
| "grad_norm": 0.3715890645980835, | |
| "learning_rate": 2.9141783631545773e-05, | |
| "loss": 0.1576, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 3.3670033670033668, | |
| "eval_loss": 0.08614125102758408, | |
| "eval_runtime": 32.8393, | |
| "eval_samples_per_second": 30.451, | |
| "eval_steps_per_second": 1.918, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 3.3703703703703702, | |
| "grad_norm": 0.34478187561035156, | |
| "learning_rate": 2.9034985970091355e-05, | |
| "loss": 0.1575, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 3.3737373737373737, | |
| "grad_norm": 0.3495587706565857, | |
| "learning_rate": 2.8928304234886644e-05, | |
| "loss": 0.1606, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 3.377104377104377, | |
| "grad_norm": 0.3603619337081909, | |
| "learning_rate": 2.8821739015829337e-05, | |
| "loss": 0.1589, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 3.3804713804713806, | |
| "grad_norm": 0.3719029128551483, | |
| "learning_rate": 2.8715290902172863e-05, | |
| "loss": 0.159, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 3.3838383838383836, | |
| "grad_norm": 0.36092978715896606, | |
| "learning_rate": 2.8608960482523056e-05, | |
| "loss": 0.1618, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 3.387205387205387, | |
| "grad_norm": 0.3863452672958374, | |
| "learning_rate": 2.8502748344835063e-05, | |
| "loss": 0.1644, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 3.3905723905723906, | |
| "grad_norm": 0.3570246398448944, | |
| "learning_rate": 2.8396655076409923e-05, | |
| "loss": 0.1556, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 3.393939393939394, | |
| "grad_norm": 0.4031378924846649, | |
| "learning_rate": 2.8290681263891412e-05, | |
| "loss": 0.1538, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 3.3973063973063975, | |
| "grad_norm": 0.35926946997642517, | |
| "learning_rate": 2.818482749326272e-05, | |
| "loss": 0.1669, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 3.4006734006734005, | |
| "grad_norm": 0.46241873502731323, | |
| "learning_rate": 2.8079094349843334e-05, | |
| "loss": 0.1601, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 3.404040404040404, | |
| "grad_norm": 0.32303571701049805, | |
| "learning_rate": 2.797348241828569e-05, | |
| "loss": 0.1596, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 3.4074074074074074, | |
| "grad_norm": 0.3822338879108429, | |
| "learning_rate": 2.786799228257203e-05, | |
| "loss": 0.1578, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 3.410774410774411, | |
| "grad_norm": 0.3558747172355652, | |
| "learning_rate": 2.7762624526011038e-05, | |
| "loss": 0.1588, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 3.4141414141414144, | |
| "grad_norm": 0.33147427439689636, | |
| "learning_rate": 2.7657379731234768e-05, | |
| "loss": 0.1606, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 3.4175084175084174, | |
| "grad_norm": 0.4226522743701935, | |
| "learning_rate": 2.7552258480195347e-05, | |
| "loss": 0.1589, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 3.420875420875421, | |
| "grad_norm": 0.33748775720596313, | |
| "learning_rate": 2.744726135416177e-05, | |
| "loss": 0.1559, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 3.4242424242424243, | |
| "grad_norm": 0.3910854160785675, | |
| "learning_rate": 2.7342388933716668e-05, | |
| "loss": 0.1573, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 3.4276094276094278, | |
| "grad_norm": 0.32681888341903687, | |
| "learning_rate": 2.7237641798753083e-05, | |
| "loss": 0.1524, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 3.430976430976431, | |
| "grad_norm": 0.4060154855251312, | |
| "learning_rate": 2.713302052847132e-05, | |
| "loss": 0.155, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 3.4343434343434343, | |
| "grad_norm": 0.271331787109375, | |
| "learning_rate": 2.702852570137576e-05, | |
| "loss": 0.1557, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 3.4377104377104377, | |
| "grad_norm": 0.3912033438682556, | |
| "learning_rate": 2.6924157895271563e-05, | |
| "loss": 0.1625, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 3.441077441077441, | |
| "grad_norm": 0.3229612708091736, | |
| "learning_rate": 2.6819917687261493e-05, | |
| "loss": 0.1614, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 3.4444444444444446, | |
| "grad_norm": 0.36763250827789307, | |
| "learning_rate": 2.671580565374282e-05, | |
| "loss": 0.1677, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 3.4478114478114477, | |
| "grad_norm": 0.3430008888244629, | |
| "learning_rate": 2.6611822370404037e-05, | |
| "loss": 0.1617, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 3.451178451178451, | |
| "grad_norm": 0.33642029762268066, | |
| "learning_rate": 2.650796841222176e-05, | |
| "loss": 0.1556, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 3.4545454545454546, | |
| "grad_norm": 0.4070379137992859, | |
| "learning_rate": 2.6404244353457424e-05, | |
| "loss": 0.1642, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 3.457912457912458, | |
| "grad_norm": 0.33227312564849854, | |
| "learning_rate": 2.6300650767654234e-05, | |
| "loss": 0.1437, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 3.461279461279461, | |
| "grad_norm": 0.38948434591293335, | |
| "learning_rate": 2.619718822763394e-05, | |
| "loss": 0.1569, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 3.4646464646464645, | |
| "grad_norm": 0.4101947844028473, | |
| "learning_rate": 2.6093857305493664e-05, | |
| "loss": 0.158, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 3.468013468013468, | |
| "grad_norm": 0.44636788964271545, | |
| "learning_rate": 2.5990658572602773e-05, | |
| "loss": 0.1587, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 3.4713804713804715, | |
| "grad_norm": 0.32242777943611145, | |
| "learning_rate": 2.5887592599599618e-05, | |
| "loss": 0.1661, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 3.474747474747475, | |
| "grad_norm": 0.4262060821056366, | |
| "learning_rate": 2.5784659956388535e-05, | |
| "loss": 0.1476, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 3.478114478114478, | |
| "grad_norm": 0.32246485352516174, | |
| "learning_rate": 2.5681861212136578e-05, | |
| "loss": 0.1547, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 3.4814814814814814, | |
| "grad_norm": 0.3994286060333252, | |
| "learning_rate": 2.55791969352704e-05, | |
| "loss": 0.157, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 3.484848484848485, | |
| "grad_norm": 0.33445990085601807, | |
| "learning_rate": 2.547666769347312e-05, | |
| "loss": 0.1557, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 3.4882154882154883, | |
| "grad_norm": 0.3687197268009186, | |
| "learning_rate": 2.537427405368119e-05, | |
| "loss": 0.158, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 3.4915824915824913, | |
| "grad_norm": 0.3803139925003052, | |
| "learning_rate": 2.5272016582081236e-05, | |
| "loss": 0.1501, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 3.494949494949495, | |
| "grad_norm": 0.3825187087059021, | |
| "learning_rate": 2.516989584410696e-05, | |
| "loss": 0.154, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 3.4983164983164983, | |
| "grad_norm": 0.3470314145088196, | |
| "learning_rate": 2.506791240443595e-05, | |
| "loss": 0.1538, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 3.5016835016835017, | |
| "grad_norm": 0.33760249614715576, | |
| "learning_rate": 2.4966066826986644e-05, | |
| "loss": 0.1584, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 3.505050505050505, | |
| "grad_norm": 0.3067149817943573, | |
| "learning_rate": 2.486435967491516e-05, | |
| "loss": 0.1458, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 3.5084175084175087, | |
| "grad_norm": 0.3700965642929077, | |
| "learning_rate": 2.476279151061221e-05, | |
| "loss": 0.1578, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 3.5117845117845117, | |
| "grad_norm": 0.30760011076927185, | |
| "learning_rate": 2.4661362895699903e-05, | |
| "loss": 0.1584, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 3.515151515151515, | |
| "grad_norm": 0.3704482316970825, | |
| "learning_rate": 2.456007439102878e-05, | |
| "loss": 0.1498, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 3.5185185185185186, | |
| "grad_norm": 0.3368256688117981, | |
| "learning_rate": 2.4458926556674615e-05, | |
| "loss": 0.1587, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 3.5218855218855216, | |
| "grad_norm": 0.33141207695007324, | |
| "learning_rate": 2.4357919951935343e-05, | |
| "loss": 0.1626, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 3.525252525252525, | |
| "grad_norm": 0.4236656725406647, | |
| "learning_rate": 2.4257055135327976e-05, | |
| "loss": 0.1594, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 3.5286195286195285, | |
| "grad_norm": 0.4164685308933258, | |
| "learning_rate": 2.4156332664585495e-05, | |
| "loss": 0.1531, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 3.531986531986532, | |
| "grad_norm": 0.379982590675354, | |
| "learning_rate": 2.4055753096653794e-05, | |
| "loss": 0.1511, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 3.5353535353535355, | |
| "grad_norm": 0.41391053795814514, | |
| "learning_rate": 2.395531698768857e-05, | |
| "loss": 0.15, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 3.538720538720539, | |
| "grad_norm": 0.40213122963905334, | |
| "learning_rate": 2.3855024893052285e-05, | |
| "loss": 0.1444, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 3.542087542087542, | |
| "grad_norm": 0.38797199726104736, | |
| "learning_rate": 2.375487736731102e-05, | |
| "loss": 0.1476, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 3.5454545454545454, | |
| "grad_norm": 0.38671404123306274, | |
| "learning_rate": 2.3654874964231518e-05, | |
| "loss": 0.1594, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 3.548821548821549, | |
| "grad_norm": 0.40808653831481934, | |
| "learning_rate": 2.355501823677803e-05, | |
| "loss": 0.1529, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 3.5521885521885523, | |
| "grad_norm": 0.44210532307624817, | |
| "learning_rate": 2.345530773710934e-05, | |
| "loss": 0.1681, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 3.5555555555555554, | |
| "grad_norm": 0.3211992681026459, | |
| "learning_rate": 2.3355744016575585e-05, | |
| "loss": 0.1552, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 3.558922558922559, | |
| "grad_norm": 0.4301251769065857, | |
| "learning_rate": 2.3256327625715347e-05, | |
| "loss": 0.1549, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 3.5622895622895623, | |
| "grad_norm": 0.3025391697883606, | |
| "learning_rate": 2.3157059114252534e-05, | |
| "loss": 0.1501, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 3.5656565656565657, | |
| "grad_norm": 0.34864532947540283, | |
| "learning_rate": 2.3057939031093344e-05, | |
| "loss": 0.154, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 3.569023569023569, | |
| "grad_norm": 0.3500463366508484, | |
| "learning_rate": 2.295896792432326e-05, | |
| "loss": 0.1564, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 3.5723905723905722, | |
| "grad_norm": 0.3177832067012787, | |
| "learning_rate": 2.2860146341203937e-05, | |
| "loss": 0.1471, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 3.5757575757575757, | |
| "grad_norm": 0.3671930134296417, | |
| "learning_rate": 2.2761474828170338e-05, | |
| "loss": 0.1634, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 3.579124579124579, | |
| "grad_norm": 0.36658650636672974, | |
| "learning_rate": 2.2662953930827546e-05, | |
| "loss": 0.1585, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 3.5824915824915826, | |
| "grad_norm": 0.40321019291877747, | |
| "learning_rate": 2.2564584193947797e-05, | |
| "loss": 0.154, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 3.5858585858585856, | |
| "grad_norm": 0.3584071099758148, | |
| "learning_rate": 2.246636616146753e-05, | |
| "loss": 0.153, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 3.589225589225589, | |
| "grad_norm": 0.4597926139831543, | |
| "learning_rate": 2.2368300376484302e-05, | |
| "loss": 0.1477, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 3.5925925925925926, | |
| "grad_norm": 0.34057462215423584, | |
| "learning_rate": 2.227038738125385e-05, | |
| "loss": 0.152, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 3.595959595959596, | |
| "grad_norm": 0.4538710117340088, | |
| "learning_rate": 2.2172627717187034e-05, | |
| "loss": 0.1692, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 3.5993265993265995, | |
| "grad_norm": 0.45110630989074707, | |
| "learning_rate": 2.207502192484685e-05, | |
| "loss": 0.1612, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 3.602693602693603, | |
| "grad_norm": 0.4063069224357605, | |
| "learning_rate": 2.1977570543945486e-05, | |
| "loss": 0.1532, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 3.606060606060606, | |
| "grad_norm": 0.32669079303741455, | |
| "learning_rate": 2.188027411334131e-05, | |
| "loss": 0.156, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 3.6094276094276094, | |
| "grad_norm": 0.42182454466819763, | |
| "learning_rate": 2.1783133171035886e-05, | |
| "loss": 0.1615, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 3.612794612794613, | |
| "grad_norm": 0.36189764738082886, | |
| "learning_rate": 2.1686148254171013e-05, | |
| "loss": 0.1623, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 3.616161616161616, | |
| "grad_norm": 0.391677588224411, | |
| "learning_rate": 2.1589319899025705e-05, | |
| "loss": 0.1584, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 3.6195286195286194, | |
| "grad_norm": 0.32365673780441284, | |
| "learning_rate": 2.14926486410133e-05, | |
| "loss": 0.1572, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 3.622895622895623, | |
| "grad_norm": 0.4265686273574829, | |
| "learning_rate": 2.1396135014678507e-05, | |
| "loss": 0.164, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 3.6262626262626263, | |
| "grad_norm": 0.40638405084609985, | |
| "learning_rate": 2.1299779553694323e-05, | |
| "loss": 0.1634, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 3.6296296296296298, | |
| "grad_norm": 0.4131862223148346, | |
| "learning_rate": 2.1203582790859222e-05, | |
| "loss": 0.1608, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 3.6329966329966332, | |
| "grad_norm": 0.36556342244148254, | |
| "learning_rate": 2.1107545258094135e-05, | |
| "loss": 0.1606, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 3.6363636363636362, | |
| "grad_norm": 0.44753018021583557, | |
| "learning_rate": 2.101166748643955e-05, | |
| "loss": 0.1562, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 3.6397306397306397, | |
| "grad_norm": 0.2866370379924774, | |
| "learning_rate": 2.0915950006052553e-05, | |
| "loss": 0.1569, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 3.643097643097643, | |
| "grad_norm": 0.3483537435531616, | |
| "learning_rate": 2.0820393346203852e-05, | |
| "loss": 0.1494, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 3.6464646464646466, | |
| "grad_norm": 0.35355475544929504, | |
| "learning_rate": 2.0724998035274945e-05, | |
| "loss": 0.1478, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 3.6498316498316496, | |
| "grad_norm": 0.3692944049835205, | |
| "learning_rate": 2.0629764600755135e-05, | |
| "loss": 0.1469, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 3.653198653198653, | |
| "grad_norm": 0.3002285361289978, | |
| "learning_rate": 2.053469356923865e-05, | |
| "loss": 0.1543, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 3.6565656565656566, | |
| "grad_norm": 0.3803352117538452, | |
| "learning_rate": 2.0439785466421653e-05, | |
| "loss": 0.1662, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 3.65993265993266, | |
| "grad_norm": 0.4002717137336731, | |
| "learning_rate": 2.034504081709943e-05, | |
| "loss": 0.1421, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 3.6632996632996635, | |
| "grad_norm": 0.3593309223651886, | |
| "learning_rate": 2.025046014516346e-05, | |
| "loss": 0.1538, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 3.6666666666666665, | |
| "grad_norm": 0.34992143511772156, | |
| "learning_rate": 2.0156043973598476e-05, | |
| "loss": 0.1519, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 3.67003367003367, | |
| "grad_norm": 0.29695653915405273, | |
| "learning_rate": 2.006179282447963e-05, | |
| "loss": 0.1375, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 3.6734006734006734, | |
| "grad_norm": 0.40330347418785095, | |
| "learning_rate": 1.996770721896957e-05, | |
| "loss": 0.1492, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 3.676767676767677, | |
| "grad_norm": 0.3325386643409729, | |
| "learning_rate": 1.987378767731557e-05, | |
| "loss": 0.1532, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 3.68013468013468, | |
| "grad_norm": 0.36293134093284607, | |
| "learning_rate": 1.978003471884665e-05, | |
| "loss": 0.1432, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 3.6835016835016834, | |
| "grad_norm": 0.31937187910079956, | |
| "learning_rate": 1.968644886197073e-05, | |
| "loss": 0.1445, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 3.686868686868687, | |
| "grad_norm": 0.3382173776626587, | |
| "learning_rate": 1.9593030624171683e-05, | |
| "loss": 0.152, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 3.6902356902356903, | |
| "grad_norm": 0.4315609335899353, | |
| "learning_rate": 1.949978052200658e-05, | |
| "loss": 0.1544, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 3.6936026936026938, | |
| "grad_norm": 0.33818742632865906, | |
| "learning_rate": 1.9406699071102774e-05, | |
| "loss": 0.1487, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 3.6969696969696972, | |
| "grad_norm": 0.40022012591362, | |
| "learning_rate": 1.9313786786155076e-05, | |
| "loss": 0.1453, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 3.7003367003367003, | |
| "grad_norm": 0.37208884954452515, | |
| "learning_rate": 1.922104418092283e-05, | |
| "loss": 0.1578, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 3.7037037037037037, | |
| "grad_norm": 0.3757951855659485, | |
| "learning_rate": 1.9128471768227203e-05, | |
| "loss": 0.1545, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 3.7037037037037037, | |
| "eval_loss": 0.0845775306224823, | |
| "eval_runtime": 32.7177, | |
| "eval_samples_per_second": 30.564, | |
| "eval_steps_per_second": 1.926, | |
| "step": 1100 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1485, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.4735886679130767e+18, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
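
The state above is the standard `trainer_state.json` layout produced by the Hugging Face `Trainer`: a `log_history` array of per-step training records (each with `loss`, `grad_norm`, `learning_rate`, `step`) interleaved with periodic evaluation records (each with `eval_loss` and runtime metrics), followed by run-level metadata. A minimal sketch of how such a file could be inspected offline is shown below; the filename `trainer_state.json` and the use of matplotlib are assumptions for illustration, not something recorded in the file itself.

```python
# Minimal sketch (assumptions: the state is saved as "trainer_state.json"
# and matplotlib is available). Reads the log_history array and plots the
# training loss and eval loss against the global step.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Training records carry "loss"; evaluation records carry "eval_loss" instead.
train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

plt.plot(*zip(*train), label="train loss")
plt.plot(*zip(*evals), marker="o", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.show()
```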