{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.995454545454545,
  "eval_steps": 500,
  "global_step": 730,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006818181818181818,
      "grad_norm": 2.020231246948242,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 0.8736,
      "step": 1
    },
    {
      "epoch": 0.013636363636363636,
      "grad_norm": 1.9368854761123657,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 0.8543,
      "step": 2
    },
    {
      "epoch": 0.020454545454545454,
      "grad_norm": 1.908379077911377,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 0.8339,
      "step": 3
    },
    {
      "epoch": 0.02727272727272727,
      "grad_norm": 1.9313658475875854,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.834,
      "step": 4
    },
    {
      "epoch": 0.03409090909090909,
      "grad_norm": 1.983262062072754,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 0.8726,
      "step": 5
    },
    {
      "epoch": 0.04090909090909091,
      "grad_norm": 1.9994115829467773,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 0.8135,
      "step": 6
    },
    {
      "epoch": 0.04772727272727273,
      "grad_norm": 1.8889026641845703,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 0.8439,
      "step": 7
    },
    {
      "epoch": 0.05454545454545454,
      "grad_norm": 1.9258317947387695,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.84,
      "step": 8
    },
    {
      "epoch": 0.06136363636363636,
      "grad_norm": 1.9654443264007568,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 0.8609,
      "step": 9
    },
    {
      "epoch": 0.06818181818181818,
      "grad_norm": 1.9278813600540161,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.8515,
      "step": 10
    },
    {
      "epoch": 0.075,
      "grad_norm": 1.8698863983154297,
      "learning_rate": 5.5e-07,
      "loss": 0.8632,
      "step": 11
    },
    {
      "epoch": 0.08181818181818182,
      "grad_norm": 2.7078285217285156,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.8643,
      "step": 12
    },
    {
      "epoch": 0.08863636363636364,
      "grad_norm": 1.959272027015686,
      "learning_rate": 6.5e-07,
      "loss": 0.861,
      "step": 13
    },
    {
      "epoch": 0.09545454545454546,
      "grad_norm": 1.8952852487564087,
      "learning_rate": 7.000000000000001e-07,
      "loss": 0.8599,
      "step": 14
    },
    {
      "epoch": 0.10227272727272728,
      "grad_norm": 1.9469462633132935,
      "learning_rate": 7.5e-07,
      "loss": 0.8467,
      "step": 15
    },
    {
      "epoch": 0.10909090909090909,
      "grad_norm": 2.0111300945281982,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.8078,
      "step": 16
    },
    {
      "epoch": 0.1159090909090909,
      "grad_norm": 1.739009141921997,
      "learning_rate": 8.500000000000001e-07,
      "loss": 0.8114,
      "step": 17
    },
    {
      "epoch": 0.12272727272727273,
      "grad_norm": 1.7578809261322021,
      "learning_rate": 9.000000000000001e-07,
      "loss": 0.8489,
      "step": 18
    },
    {
      "epoch": 0.12954545454545455,
      "grad_norm": 1.9787132740020752,
      "learning_rate": 9.500000000000001e-07,
      "loss": 0.8052,
      "step": 19
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 1.6380164623260498,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.7882,
      "step": 20
    },
    {
      "epoch": 0.1431818181818182,
      "grad_norm": 1.686877727508545,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 0.8126,
      "step": 21
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.690590739250183,
      "learning_rate": 1.1e-06,
      "loss": 0.8248,
      "step": 22
    },
    {
      "epoch": 0.15681818181818183,
      "grad_norm": 1.612255334854126,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 0.8024,
      "step": 23
    },
    {
      "epoch": 0.16363636363636364,
      "grad_norm": 1.6669739484786987,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.818,
      "step": 24
    },
    {
      "epoch": 0.17045454545454544,
      "grad_norm": 1.7726502418518066,
      "learning_rate": 1.25e-06,
      "loss": 0.8158,
      "step": 25
    },
    {
      "epoch": 0.17727272727272728,
      "grad_norm": 1.5930036306381226,
      "learning_rate": 1.3e-06,
      "loss": 0.7886,
      "step": 26
    },
    {
      "epoch": 0.18409090909090908,
      "grad_norm": 1.5480856895446777,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.7747,
      "step": 27
    },
    {
      "epoch": 0.19090909090909092,
      "grad_norm": 1.3537116050720215,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.7891,
      "step": 28
    },
    {
      "epoch": 0.19772727272727272,
      "grad_norm": 1.3412829637527466,
      "learning_rate": 1.45e-06,
      "loss": 0.7799,
      "step": 29
    },
    {
      "epoch": 0.20454545454545456,
      "grad_norm": 1.237492322921753,
      "learning_rate": 1.5e-06,
      "loss": 0.7768,
      "step": 30
    },
    {
      "epoch": 0.21136363636363636,
      "grad_norm": 1.3467835187911987,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.7344,
      "step": 31
    },
    {
      "epoch": 0.21818181818181817,
      "grad_norm": 1.1665314435958862,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.7152,
      "step": 32
    },
    {
      "epoch": 0.225,
      "grad_norm": 1.2461743354797363,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.758,
      "step": 33
    },
    {
      "epoch": 0.2318181818181818,
      "grad_norm": 1.0343059301376343,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.7362,
      "step": 34
    },
    {
      "epoch": 0.23863636363636365,
      "grad_norm": 1.112117052078247,
      "learning_rate": 1.75e-06,
      "loss": 0.7771,
      "step": 35
    },
    {
      "epoch": 0.24545454545454545,
      "grad_norm": 1.104337453842163,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.7479,
      "step": 36
    },
    {
      "epoch": 0.25227272727272726,
      "grad_norm": 0.9324578642845154,
      "learning_rate": 1.85e-06,
      "loss": 0.6936,
      "step": 37
    },
    {
      "epoch": 0.2590909090909091,
      "grad_norm": 1.0733048915863037,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.7302,
      "step": 38
    },
    {
      "epoch": 0.26590909090909093,
      "grad_norm": 0.8614453673362732,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.7466,
      "step": 39
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 0.7729659080505371,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.725,
      "step": 40
    },
    {
      "epoch": 0.27954545454545454,
      "grad_norm": 0.9341004490852356,
      "learning_rate": 2.05e-06,
      "loss": 0.6949,
      "step": 41
    },
    {
      "epoch": 0.2863636363636364,
      "grad_norm": 0.6668925881385803,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.6587,
      "step": 42
    },
    {
      "epoch": 0.29318181818181815,
      "grad_norm": 0.681847333908081,
      "learning_rate": 2.15e-06,
      "loss": 0.6984,
      "step": 43
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.6222082376480103,
      "learning_rate": 2.2e-06,
      "loss": 0.7063,
      "step": 44
    },
    {
      "epoch": 0.3068181818181818,
      "grad_norm": 0.790607213973999,
      "learning_rate": 2.25e-06,
      "loss": 0.6804,
      "step": 45
    },
    {
      "epoch": 0.31363636363636366,
      "grad_norm": 0.6110427379608154,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.6694,
      "step": 46
    },
    {
      "epoch": 0.32045454545454544,
      "grad_norm": 0.6481993794441223,
      "learning_rate": 2.35e-06,
      "loss": 0.7321,
      "step": 47
    },
    {
      "epoch": 0.32727272727272727,
      "grad_norm": 0.5973438620567322,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.6508,
      "step": 48
    },
    {
      "epoch": 0.3340909090909091,
      "grad_norm": 0.6252740621566772,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.6911,
      "step": 49
    },
    {
      "epoch": 0.3409090909090909,
      "grad_norm": 0.6826481819152832,
      "learning_rate": 2.5e-06,
      "loss": 0.6855,
      "step": 50
    },
    {
      "epoch": 0.3477272727272727,
      "grad_norm": 0.6307975649833679,
      "learning_rate": 2.55e-06,
      "loss": 0.714,
      "step": 51
    },
    {
      "epoch": 0.35454545454545455,
      "grad_norm": 0.5900976061820984,
      "learning_rate": 2.6e-06,
      "loss": 0.6619,
      "step": 52
    },
    {
      "epoch": 0.3613636363636364,
      "grad_norm": 0.6203920841217041,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.6822,
      "step": 53
    },
    {
      "epoch": 0.36818181818181817,
      "grad_norm": 0.6197589039802551,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.6584,
      "step": 54
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.5921110510826111,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.6739,
      "step": 55
    },
    {
      "epoch": 0.38181818181818183,
      "grad_norm": 0.6215619444847107,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.6631,
      "step": 56
    },
    {
      "epoch": 0.3886363636363636,
      "grad_norm": 0.5495648980140686,
      "learning_rate": 2.85e-06,
      "loss": 0.6531,
      "step": 57
    },
    {
      "epoch": 0.39545454545454545,
      "grad_norm": 0.531947910785675,
      "learning_rate": 2.9e-06,
      "loss": 0.6365,
      "step": 58
    },
    {
      "epoch": 0.4022727272727273,
      "grad_norm": 0.559112548828125,
      "learning_rate": 2.95e-06,
      "loss": 0.6391,
      "step": 59
    },
    {
      "epoch": 0.4090909090909091,
      "grad_norm": 0.5377560257911682,
      "learning_rate": 3e-06,
      "loss": 0.6775,
      "step": 60
    },
    {
      "epoch": 0.4159090909090909,
      "grad_norm": 0.5167352557182312,
      "learning_rate": 3.05e-06,
      "loss": 0.6518,
      "step": 61
    },
    {
      "epoch": 0.42272727272727273,
      "grad_norm": 0.5483390092849731,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.5972,
      "step": 62
    },
    {
      "epoch": 0.42954545454545456,
      "grad_norm": 0.5687050223350525,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.6515,
      "step": 63
    },
    {
      "epoch": 0.43636363636363634,
      "grad_norm": 0.9046968817710876,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.6497,
      "step": 64
    },
    {
      "epoch": 0.4431818181818182,
      "grad_norm": 0.5231667757034302,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.6364,
      "step": 65
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.49699342250823975,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.6144,
      "step": 66
    },
    {
      "epoch": 0.45681818181818185,
      "grad_norm": 0.5390080213546753,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.6051,
      "step": 67
    },
    {
      "epoch": 0.4636363636363636,
      "grad_norm": 0.5252938270568848,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.6353,
      "step": 68
    },
    {
      "epoch": 0.47045454545454546,
      "grad_norm": 0.6291780471801758,
      "learning_rate": 3.45e-06,
      "loss": 0.6326,
      "step": 69
    },
    {
      "epoch": 0.4772727272727273,
      "grad_norm": 0.5545375943183899,
      "learning_rate": 3.5e-06,
      "loss": 0.647,
      "step": 70
    },
    {
      "epoch": 0.48409090909090907,
      "grad_norm": 0.47775956988334656,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.5993,
      "step": 71
    },
    {
      "epoch": 0.4909090909090909,
      "grad_norm": 0.5016375184059143,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.6056,
      "step": 72
    },
    {
      "epoch": 0.49772727272727274,
      "grad_norm": 0.4780517816543579,
      "learning_rate": 3.65e-06,
      "loss": 0.6189,
      "step": 73
    },
    {
      "epoch": 0.5045454545454545,
      "grad_norm": 0.5763716697692871,
      "learning_rate": 3.7e-06,
      "loss": 0.6262,
      "step": 74
    },
    {
      "epoch": 0.5113636363636364,
      "grad_norm": 0.5321404337882996,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.6336,
      "step": 75
    },
    {
      "epoch": 0.5181818181818182,
      "grad_norm": 0.4641991853713989,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.6317,
      "step": 76
    },
    {
      "epoch": 0.525,
      "grad_norm": 0.45118412375450134,
      "learning_rate": 3.85e-06,
      "loss": 0.5927,
      "step": 77
    },
    {
      "epoch": 0.5318181818181819,
      "grad_norm": 0.5500645041465759,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.62,
      "step": 78
    },
    {
      "epoch": 0.5386363636363637,
      "grad_norm": 0.5197616219520569,
      "learning_rate": 3.95e-06,
      "loss": 0.6063,
      "step": 79
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.4672992527484894,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.6156,
      "step": 80
    },
    {
      "epoch": 0.5522727272727272,
      "grad_norm": 0.5995073318481445,
      "learning_rate": 4.05e-06,
      "loss": 0.6219,
      "step": 81
    },
    {
      "epoch": 0.5590909090909091,
      "grad_norm": 0.5593804717063904,
      "learning_rate": 4.1e-06,
      "loss": 0.6282,
      "step": 82
    },
    {
      "epoch": 0.5659090909090909,
      "grad_norm": 0.7544977068901062,
      "learning_rate": 4.15e-06,
      "loss": 0.6246,
      "step": 83
    },
    {
      "epoch": 0.5727272727272728,
      "grad_norm": 0.45378655195236206,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.5881,
      "step": 84
    },
    {
      "epoch": 0.5795454545454546,
      "grad_norm": 0.4407907724380493,
      "learning_rate": 4.25e-06,
      "loss": 0.6026,
      "step": 85
    },
    {
      "epoch": 0.5863636363636363,
      "grad_norm": 0.4333001673221588,
      "learning_rate": 4.3e-06,
      "loss": 0.6098,
      "step": 86
    },
    {
      "epoch": 0.5931818181818181,
      "grad_norm": 0.45020997524261475,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.5858,
      "step": 87
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.424003541469574,
      "learning_rate": 4.4e-06,
      "loss": 0.6186,
      "step": 88
    },
    {
      "epoch": 0.6068181818181818,
      "grad_norm": 0.427081435918808,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.6164,
      "step": 89
    },
    {
      "epoch": 0.6136363636363636,
      "grad_norm": 0.4430546164512634,
      "learning_rate": 4.5e-06,
      "loss": 0.6049,
      "step": 90
    },
    {
      "epoch": 0.6204545454545455,
      "grad_norm": 0.5469494462013245,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.608,
      "step": 91
    },
    {
      "epoch": 0.6272727272727273,
      "grad_norm": 0.4481559693813324,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.5532,
      "step": 92
    },
    {
      "epoch": 0.634090909090909,
      "grad_norm": 0.6563135981559753,
      "learning_rate": 4.65e-06,
      "loss": 0.6105,
      "step": 93
    },
    {
      "epoch": 0.6409090909090909,
      "grad_norm": 0.4572807252407074,
      "learning_rate": 4.7e-06,
      "loss": 0.5976,
      "step": 94
    },
    {
      "epoch": 0.6477272727272727,
      "grad_norm": 0.4423324167728424,
      "learning_rate": 4.75e-06,
      "loss": 0.616,
      "step": 95
    },
    {
      "epoch": 0.6545454545454545,
      "grad_norm": 0.6230632066726685,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.5879,
      "step": 96
    },
    {
      "epoch": 0.6613636363636364,
      "grad_norm": 0.461467981338501,
      "learning_rate": 4.85e-06,
      "loss": 0.5992,
      "step": 97
    },
    {
      "epoch": 0.6681818181818182,
      "grad_norm": 0.44455233216285706,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.6138,
      "step": 98
    },
    {
      "epoch": 0.675,
      "grad_norm": 0.47281715273857117,
      "learning_rate": 4.95e-06,
      "loss": 0.59,
      "step": 99
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.46114516258239746,
      "learning_rate": 5e-06,
      "loss": 0.6216,
      "step": 100
    },
    {
      "epoch": 0.6886363636363636,
      "grad_norm": 0.49900761246681213,
      "learning_rate": 4.9999795126530275e-06,
      "loss": 0.6222,
      "step": 101
    },
    {
      "epoch": 0.6954545454545454,
      "grad_norm": 0.4764839708805084,
      "learning_rate": 4.999918050947891e-06,
      "loss": 0.5829,
      "step": 102
    },
    {
      "epoch": 0.7022727272727273,
      "grad_norm": 0.43502742052078247,
      "learning_rate": 4.999815615891943e-06,
      "loss": 0.5982,
      "step": 103
    },
    {
      "epoch": 0.7090909090909091,
      "grad_norm": 0.43318599462509155,
      "learning_rate": 4.9996722091640805e-06,
      "loss": 0.5827,
      "step": 104
    },
    {
      "epoch": 0.7159090909090909,
      "grad_norm": 0.47218361496925354,
      "learning_rate": 4.9994878331147225e-06,
      "loss": 0.5907,
      "step": 105
    },
    {
      "epoch": 0.7227272727272728,
      "grad_norm": 0.48492228984832764,
      "learning_rate": 4.99926249076577e-06,
      "loss": 0.5459,
      "step": 106
    },
    {
      "epoch": 0.7295454545454545,
      "grad_norm": 0.4019850194454193,
      "learning_rate": 4.998996185810557e-06,
      "loss": 0.5957,
      "step": 107
    },
    {
      "epoch": 0.7363636363636363,
      "grad_norm": 0.47361984848976135,
      "learning_rate": 4.998688922613788e-06,
      "loss": 0.579,
      "step": 108
    },
    {
      "epoch": 0.7431818181818182,
      "grad_norm": 0.4612269401550293,
      "learning_rate": 4.9983407062114695e-06,
      "loss": 0.5814,
      "step": 109
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.45558398962020874,
      "learning_rate": 4.9979515423108255e-06,
      "loss": 0.6036,
      "step": 110
    },
    {
      "epoch": 0.7568181818181818,
      "grad_norm": 0.41492199897766113,
      "learning_rate": 4.997521437290205e-06,
      "loss": 0.5891,
      "step": 111
    },
    {
      "epoch": 0.7636363636363637,
      "grad_norm": 0.4484409689903259,
      "learning_rate": 4.997050398198977e-06,
      "loss": 0.6031,
      "step": 112
    },
    {
      "epoch": 0.7704545454545455,
      "grad_norm": 0.42671382427215576,
      "learning_rate": 4.996538432757414e-06,
      "loss": 0.6106,
      "step": 113
    },
    {
      "epoch": 0.7772727272727272,
      "grad_norm": 0.4222075343132019,
      "learning_rate": 4.995985549356568e-06,
      "loss": 0.5774,
      "step": 114
    },
    {
      "epoch": 0.7840909090909091,
      "grad_norm": 0.421021431684494,
      "learning_rate": 4.995391757058129e-06,
      "loss": 0.5754,
      "step": 115
    },
    {
      "epoch": 0.7909090909090909,
      "grad_norm": 0.4215952754020691,
      "learning_rate": 4.99475706559428e-06,
      "loss": 0.5895,
      "step": 116
    },
    {
      "epoch": 0.7977272727272727,
      "grad_norm": 0.42652982473373413,
      "learning_rate": 4.994081485367537e-06,
      "loss": 0.5646,
      "step": 117
    },
    {
      "epoch": 0.8045454545454546,
      "grad_norm": 0.4145742654800415,
      "learning_rate": 4.993365027450576e-06,
      "loss": 0.5862,
      "step": 118
    },
    {
      "epoch": 0.8113636363636364,
      "grad_norm": 0.4754612445831299,
      "learning_rate": 4.992607703586058e-06,
      "loss": 0.5645,
      "step": 119
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.4694959223270416,
      "learning_rate": 4.991809526186424e-06,
      "loss": 0.5989,
      "step": 120
    },
    {
      "epoch": 0.825,
      "grad_norm": 0.4573237895965576,
      "learning_rate": 4.990970508333707e-06,
      "loss": 0.5769,
      "step": 121
    },
    {
      "epoch": 0.8318181818181818,
      "grad_norm": 0.4592607915401459,
      "learning_rate": 4.990090663779305e-06,
      "loss": 0.5529,
      "step": 122
    },
    {
      "epoch": 0.8386363636363636,
      "grad_norm": 0.4325920045375824,
      "learning_rate": 4.9891700069437635e-06,
      "loss": 0.5846,
      "step": 123
    },
    {
      "epoch": 0.8454545454545455,
      "grad_norm": 0.44623467326164246,
      "learning_rate": 4.988208552916535e-06,
      "loss": 0.5997,
      "step": 124
    },
    {
      "epoch": 0.8522727272727273,
      "grad_norm": 0.46071872115135193,
      "learning_rate": 4.987206317455734e-06,
      "loss": 0.5687,
      "step": 125
    },
    {
      "epoch": 0.8590909090909091,
      "grad_norm": 0.43121206760406494,
      "learning_rate": 4.986163316987877e-06,
      "loss": 0.5277,
      "step": 126
    },
    {
      "epoch": 0.865909090909091,
      "grad_norm": 0.4711630046367645,
      "learning_rate": 4.985079568607613e-06,
      "loss": 0.592,
      "step": 127
    },
    {
      "epoch": 0.8727272727272727,
      "grad_norm": 0.4450221657752991,
      "learning_rate": 4.983955090077445e-06,
      "loss": 0.5656,
      "step": 128
    },
    {
      "epoch": 0.8795454545454545,
      "grad_norm": 0.6546564102172852,
      "learning_rate": 4.982789899827439e-06,
      "loss": 0.572,
      "step": 129
    },
    {
      "epoch": 0.8863636363636364,
      "grad_norm": 0.49622273445129395,
      "learning_rate": 4.9815840169549216e-06,
      "loss": 0.5719,
      "step": 130
    },
    {
      "epoch": 0.8931818181818182,
      "grad_norm": 0.5478544235229492,
      "learning_rate": 4.980337461224164e-06,
      "loss": 0.5774,
      "step": 131
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.4340018033981323,
      "learning_rate": 4.979050253066064e-06,
      "loss": 0.5444,
      "step": 132
    },
    {
      "epoch": 0.9068181818181819,
      "grad_norm": 0.4243793189525604,
      "learning_rate": 4.977722413577802e-06,
      "loss": 0.5759,
      "step": 133
    },
    {
      "epoch": 0.9136363636363637,
      "grad_norm": 0.45201942324638367,
      "learning_rate": 4.976353964522509e-06,
      "loss": 0.5746,
      "step": 134
    },
    {
      "epoch": 0.9204545454545454,
      "grad_norm": 0.42892220616340637,
      "learning_rate": 4.974944928328894e-06,
      "loss": 0.544,
      "step": 135
    },
    {
      "epoch": 0.9272727272727272,
      "grad_norm": 0.43185824155807495,
      "learning_rate": 4.973495328090891e-06,
      "loss": 0.5803,
      "step": 136
    },
    {
      "epoch": 0.9340909090909091,
      "grad_norm": 0.5512542724609375,
      "learning_rate": 4.972005187567267e-06,
      "loss": 0.5694,
      "step": 137
    },
    {
      "epoch": 0.9409090909090909,
      "grad_norm": 0.4836059808731079,
      "learning_rate": 4.970474531181245e-06,
      "loss": 0.5581,
      "step": 138
    },
    {
      "epoch": 0.9477272727272728,
      "grad_norm": 0.4508282244205475,
      "learning_rate": 4.968903384020095e-06,
      "loss": 0.5553,
      "step": 139
    },
    {
      "epoch": 0.9545454545454546,
      "grad_norm": 0.4764733612537384,
      "learning_rate": 4.967291771834727e-06,
      "loss": 0.5798,
      "step": 140
    },
    {
      "epoch": 0.9613636363636363,
      "grad_norm": 0.463468074798584,
      "learning_rate": 4.965639721039267e-06,
      "loss": 0.559,
      "step": 141
    },
    {
      "epoch": 0.9681818181818181,
      "grad_norm": 0.4655650854110718,
      "learning_rate": 4.963947258710626e-06,
      "loss": 0.5576,
      "step": 142
    },
    {
      "epoch": 0.975,
      "grad_norm": 0.4651234745979309,
      "learning_rate": 4.962214412588053e-06,
      "loss": 0.5792,
      "step": 143
    },
    {
      "epoch": 0.9818181818181818,
      "grad_norm": 0.43080538511276245,
      "learning_rate": 4.960441211072686e-06,
      "loss": 0.5314,
      "step": 144
    },
    {
      "epoch": 0.9886363636363636,
      "grad_norm": 0.4403238594532013,
      "learning_rate": 4.9586276832270785e-06,
      "loss": 0.5592,
      "step": 145
    },
    {
      "epoch": 0.9954545454545455,
      "grad_norm": 0.43968647718429565,
      "learning_rate": 4.9567738587747314e-06,
      "loss": 0.5573,
      "step": 146
    },
    {
      "epoch": 1.0068181818181818,
      "grad_norm": 0.8337184190750122,
      "learning_rate": 4.954879768099599e-06,
      "loss": 1.1118,
      "step": 147
    },
    {
      "epoch": 1.0136363636363637,
      "grad_norm": 0.4042762815952301,
      "learning_rate": 4.952945442245598e-06,
      "loss": 0.5731,
      "step": 148
    },
    {
      "epoch": 1.0204545454545455,
      "grad_norm": 0.5666297078132629,
      "learning_rate": 4.95097091291609e-06,
      "loss": 0.5415,
      "step": 149
    },
    {
      "epoch": 1.0272727272727273,
      "grad_norm": 0.6148372888565063,
      "learning_rate": 4.948956212473371e-06,
      "loss": 0.5486,
      "step": 150
    },
    {
      "epoch": 1.0340909090909092,
      "grad_norm": 0.5780720710754395,
      "learning_rate": 4.946901373938132e-06,
      "loss": 0.5373,
      "step": 151
    },
    {
      "epoch": 1.040909090909091,
      "grad_norm": 0.4648835361003876,
      "learning_rate": 4.944806430988927e-06,
      "loss": 0.5419,
      "step": 152
    },
    {
      "epoch": 1.0477272727272728,
      "grad_norm": 0.41131219267845154,
      "learning_rate": 4.942671417961615e-06,
      "loss": 0.5514,
      "step": 153
    },
    {
      "epoch": 1.0545454545454545,
      "grad_norm": 0.4733349084854126,
      "learning_rate": 4.940496369848795e-06,
      "loss": 0.5563,
      "step": 154
    },
    {
      "epoch": 1.0613636363636363,
      "grad_norm": 0.41179028153419495,
      "learning_rate": 4.938281322299243e-06,
      "loss": 0.5413,
      "step": 155
    },
    {
      "epoch": 1.0681818181818181,
      "grad_norm": 0.4770941138267517,
      "learning_rate": 4.936026311617316e-06,
      "loss": 0.5561,
      "step": 156
    },
    {
      "epoch": 1.075,
      "grad_norm": 0.7752397060394287,
      "learning_rate": 4.933731374762361e-06,
      "loss": 0.5439,
      "step": 157
    },
    {
      "epoch": 1.0818181818181818,
      "grad_norm": 0.4323958456516266,
      "learning_rate": 4.931396549348115e-06,
      "loss": 0.5401,
      "step": 158
    },
    {
      "epoch": 1.0886363636363636,
      "grad_norm": 0.4497937858104706,
      "learning_rate": 4.9290218736420795e-06,
      "loss": 0.523,
      "step": 159
    },
    {
      "epoch": 1.0954545454545455,
      "grad_norm": 0.4881570339202881,
      "learning_rate": 4.926607386564898e-06,
      "loss": 0.5441,
      "step": 160
    },
    {
      "epoch": 1.1022727272727273,
      "grad_norm": 0.43357932567596436,
      "learning_rate": 4.9241531276897196e-06,
      "loss": 0.5499,
      "step": 161
    },
    {
      "epoch": 1.1090909090909091,
      "grad_norm": 1.1125361919403076,
      "learning_rate": 4.921659137241544e-06,
      "loss": 0.5265,
      "step": 162
    },
    {
      "epoch": 1.115909090909091,
      "grad_norm": 0.43150508403778076,
      "learning_rate": 4.919125456096574e-06,
      "loss": 0.5193,
      "step": 163
    },
    {
      "epoch": 1.1227272727272728,
      "grad_norm": 0.40771809220314026,
      "learning_rate": 4.916552125781529e-06,
      "loss": 0.538,
      "step": 164
    },
    {
      "epoch": 1.1295454545454546,
      "grad_norm": 0.5830567479133606,
      "learning_rate": 4.913939188472979e-06,
      "loss": 0.5548,
      "step": 165
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 0.4776526391506195,
      "learning_rate": 4.911286686996648e-06,
      "loss": 0.5328,
      "step": 166
    },
    {
      "epoch": 1.143181818181818,
      "grad_norm": 0.41657742857933044,
      "learning_rate": 4.908594664826708e-06,
      "loss": 0.5336,
      "step": 167
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.4651948809623718,
      "learning_rate": 4.905863166085076e-06,
      "loss": 0.5812,
      "step": 168
    },
    {
      "epoch": 1.1568181818181817,
      "grad_norm": 0.4399661719799042,
      "learning_rate": 4.903092235540679e-06,
      "loss": 0.5143,
      "step": 169
    },
    {
      "epoch": 1.1636363636363636,
      "grad_norm": 0.4548935294151306,
      "learning_rate": 4.900281918608732e-06,
      "loss": 0.5425,
      "step": 170
    },
    {
      "epoch": 1.1704545454545454,
      "grad_norm": 0.43515875935554504,
      "learning_rate": 4.897432261349984e-06,
      "loss": 0.534,
      "step": 171
    },
    {
      "epoch": 1.1772727272727272,
      "grad_norm": 0.42954760789871216,
      "learning_rate": 4.894543310469968e-06,
      "loss": 0.5436,
      "step": 172
    },
    {
      "epoch": 1.184090909090909,
      "grad_norm": 0.4063640534877777,
      "learning_rate": 4.891615113318236e-06,
      "loss": 0.5481,
      "step": 173
    },
    {
      "epoch": 1.190909090909091,
      "grad_norm": 0.49416476488113403,
      "learning_rate": 4.888647717887582e-06,
      "loss": 0.5298,
      "step": 174
    },
    {
      "epoch": 1.1977272727272728,
      "grad_norm": 1.4082964658737183,
      "learning_rate": 4.8856411728132526e-06,
      "loss": 0.5366,
      "step": 175
    },
    {
      "epoch": 1.2045454545454546,
      "grad_norm": 0.44356465339660645,
      "learning_rate": 4.8825955273721524e-06,
      "loss": 0.5563,
      "step": 176
    },
    {
      "epoch": 1.2113636363636364,
      "grad_norm": 0.40925371646881104,
      "learning_rate": 4.879510831482039e-06,
      "loss": 0.5444,
      "step": 177
    },
    {
      "epoch": 1.2181818181818183,
      "grad_norm": 0.44379451870918274,
      "learning_rate": 4.876387135700701e-06,
      "loss": 0.5311,
      "step": 178
    },
    {
      "epoch": 1.225,
      "grad_norm": 0.41487446427345276,
      "learning_rate": 4.873224491225128e-06,
      "loss": 0.5637,
      "step": 179
    },
    {
      "epoch": 1.231818181818182,
      "grad_norm": 0.4520554542541504,
      "learning_rate": 4.870022949890676e-06,
      "loss": 0.5677,
      "step": 180
    },
    {
      "epoch": 1.2386363636363638,
      "grad_norm": 0.4528166949748993,
      "learning_rate": 4.866782564170217e-06,
      "loss": 0.5095,
      "step": 181
    },
    {
      "epoch": 1.2454545454545454,
      "grad_norm": 0.4409842789173126,
      "learning_rate": 4.863503387173276e-06,
      "loss": 0.5423,
      "step": 182
    },
    {
      "epoch": 1.2522727272727272,
      "grad_norm": 0.4355996251106262,
      "learning_rate": 4.860185472645161e-06,
      "loss": 0.5422,
      "step": 183
    },
    {
      "epoch": 1.259090909090909,
      "grad_norm": 0.4871126115322113,
      "learning_rate": 4.856828874966086e-06,
      "loss": 0.5293,
      "step": 184
    },
    {
      "epoch": 1.2659090909090909,
      "grad_norm": 0.442348450422287,
      "learning_rate": 4.853433649150276e-06,
      "loss": 0.5616,
      "step": 185
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 0.44014668464660645,
      "learning_rate": 4.849999850845066e-06,
      "loss": 0.5354,
      "step": 186
    },
    {
      "epoch": 1.2795454545454545,
      "grad_norm": 0.4496249854564667,
      "learning_rate": 4.8465275363299905e-06,
      "loss": 0.5298,
      "step": 187
    },
    {
      "epoch": 1.2863636363636364,
      "grad_norm": 0.46060171723365784,
      "learning_rate": 4.84301676251586e-06,
      "loss": 0.554,
      "step": 188
    },
    {
      "epoch": 1.2931818181818182,
      "grad_norm": 0.7829347252845764,
      "learning_rate": 4.839467586943825e-06,
      "loss": 0.5606,
      "step": 189
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.45566731691360474,
      "learning_rate": 4.835880067784441e-06,
      "loss": 0.5118,
      "step": 190
    },
    {
      "epoch": 1.3068181818181819,
      "grad_norm": 0.43211469054222107,
      "learning_rate": 4.832254263836708e-06,
      "loss": 0.535,
      "step": 191
    },
    {
      "epoch": 1.3136363636363637,
      "grad_norm": 0.4350590705871582,
      "learning_rate": 4.828590234527107e-06,
      "loss": 0.5595,
      "step": 192
    },
    {
      "epoch": 1.3204545454545453,
      "grad_norm": 0.4056399464607239,
      "learning_rate": 4.82488803990863e-06,
      "loss": 0.55,
      "step": 193
    },
    {
      "epoch": 1.3272727272727272,
      "grad_norm": 0.4458341598510742,
      "learning_rate": 4.821147740659795e-06,
      "loss": 0.501,
      "step": 194
    },
    {
      "epoch": 1.334090909090909,
      "grad_norm": 0.446504145860672,
      "learning_rate": 4.817369398083648e-06,
      "loss": 0.5393,
      "step": 195
    },
    {
      "epoch": 1.3409090909090908,
      "grad_norm": 0.47838401794433594,
      "learning_rate": 4.813553074106761e-06,
      "loss": 0.5354,
      "step": 196
    },
    {
      "epoch": 1.3477272727272727,
      "grad_norm": 0.4675792157649994,
      "learning_rate": 4.809698831278217e-06,
      "loss": 0.5202,
      "step": 197
    },
    {
      "epoch": 1.3545454545454545,
      "grad_norm": 0.4351370632648468,
      "learning_rate": 4.805806732768585e-06,
      "loss": 0.5256,
      "step": 198
    },
    {
      "epoch": 1.3613636363636363,
      "grad_norm": 0.4466511607170105,
      "learning_rate": 4.801876842368882e-06,
      "loss": 0.5238,
      "step": 199
    },
    {
      "epoch": 1.3681818181818182,
      "grad_norm": 0.45672544836997986,
      "learning_rate": 4.797909224489531e-06,
      "loss": 0.5426,
      "step": 200
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.422638863325119,
      "learning_rate": 4.793903944159303e-06,
      "loss": 0.5385,
      "step": 201
    },
    {
      "epoch": 1.3818181818181818,
      "grad_norm": 0.4254551827907562,
      "learning_rate": 4.789861067024253e-06,
      "loss": 0.503,
      "step": 202
    },
    {
      "epoch": 1.3886363636363637,
      "grad_norm": 0.43860334157943726,
      "learning_rate": 4.785780659346642e-06,
      "loss": 0.519,
      "step": 203
    },
    {
      "epoch": 1.3954545454545455,
      "grad_norm": 0.43294480443000793,
      "learning_rate": 4.781662788003851e-06,
      "loss": 0.5537,
      "step": 204
    },
    {
      "epoch": 1.4022727272727273,
      "grad_norm": 0.45006492733955383,
      "learning_rate": 4.777507520487289e-06,
      "loss": 0.5318,
      "step": 205
    },
    {
      "epoch": 1.4090909090909092,
      "grad_norm": 0.4178854823112488,
      "learning_rate": 4.773314924901281e-06,
      "loss": 0.5287,
      "step": 206
    },
    {
      "epoch": 1.415909090909091,
      "grad_norm": 0.45662209391593933,
      "learning_rate": 4.769085069961955e-06,
      "loss": 0.5609,
      "step": 207
    },
    {
      "epoch": 1.4227272727272728,
      "grad_norm": 0.41578733921051025,
      "learning_rate": 4.764818024996117e-06,
      "loss": 0.5249,
      "step": 208
    },
    {
      "epoch": 1.4295454545454547,
      "grad_norm": 0.48305758833885193,
      "learning_rate": 4.760513859940112e-06,
      "loss": 0.515,
      "step": 209
    },
    {
      "epoch": 1.4363636363636363,
      "grad_norm": 0.4564506709575653,
      "learning_rate": 4.756172645338675e-06,
      "loss": 0.535,
      "step": 210
    },
    {
      "epoch": 1.4431818181818181,
      "grad_norm": 0.4499240815639496,
      "learning_rate": 4.751794452343785e-06,
      "loss": 0.5236,
      "step": 211
    },
    {
      "epoch": 1.45,
      "grad_norm": 0.41851088404655457,
      "learning_rate": 4.747379352713489e-06,
      "loss": 0.5254,
      "step": 212
    },
    {
      "epoch": 1.4568181818181818,
      "grad_norm": 0.4690937399864197,
      "learning_rate": 4.7429274188107275e-06,
      "loss": 0.5312,
      "step": 213
    },
    {
      "epoch": 1.4636363636363636,
      "grad_norm": 0.5516958832740784,
      "learning_rate": 4.738438723602154e-06,
      "loss": 0.5315,
      "step": 214
    },
    {
      "epoch": 1.4704545454545455,
      "grad_norm": 1.2040799856185913,
      "learning_rate": 4.733913340656933e-06,
      "loss": 0.544,
      "step": 215
    },
    {
      "epoch": 1.4772727272727273,
      "grad_norm": 0.445425808429718,
      "learning_rate": 4.729351344145536e-06,
      "loss": 0.5309,
      "step": 216
    },
    {
      "epoch": 1.4840909090909091,
      "grad_norm": 0.4186754822731018,
      "learning_rate": 4.7247528088385296e-06,
      "loss": 0.5138,
      "step": 217
    },
    {
      "epoch": 1.490909090909091,
      "grad_norm": 0.43284520506858826,
      "learning_rate": 4.720117810105341e-06,
      "loss": 0.521,
      "step": 218
    },
    {
      "epoch": 1.4977272727272728,
      "grad_norm": 0.4308505058288574,
      "learning_rate": 4.715446423913036e-06,
      "loss": 0.5436,
      "step": 219
    },
    {
      "epoch": 1.5045454545454544,
      "grad_norm": 0.4471941888332367,
      "learning_rate": 4.710738726825059e-06,
      "loss": 0.5528,
      "step": 220
    },
    {
      "epoch": 1.5113636363636362,
      "grad_norm": 0.4374965727329254,
      "learning_rate": 4.705994795999991e-06,
      "loss": 0.5238,
      "step": 221
    },
    {
      "epoch": 1.518181818181818,
      "grad_norm": 0.6026428937911987,
      "learning_rate": 4.701214709190277e-06,
      "loss": 0.5268,
      "step": 222
    },
    {
      "epoch": 1.525,
      "grad_norm": 0.4413348138332367,
      "learning_rate": 4.696398544740955e-06,
      "loss": 0.5534,
      "step": 223
    },
    {
      "epoch": 1.5318181818181817,
      "grad_norm": 0.45917898416519165,
      "learning_rate": 4.69154638158837e-06,
      "loss": 0.511,
      "step": 224
    },
    {
      "epoch": 1.5386363636363636,
      "grad_norm": 0.5339583158493042,
      "learning_rate": 4.686658299258881e-06,
      "loss": 0.5395,
      "step": 225
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.4617757201194763,
      "learning_rate": 4.681734377867562e-06,
      "loss": 0.5425,
      "step": 226
    },
    {
      "epoch": 1.5522727272727272,
      "grad_norm": 0.4513031840324402,
      "learning_rate": 4.67677469811688e-06,
      "loss": 0.5241,
      "step": 227
    },
    {
      "epoch": 1.559090909090909,
      "grad_norm": 0.43548423051834106,
      "learning_rate": 4.671779341295378e-06,
      "loss": 0.5224,
      "step": 228
    },
    {
      "epoch": 1.565909090909091,
      "grad_norm": 0.5516494512557983,
      "learning_rate": 4.666748389276344e-06,
      "loss": 0.5346,
      "step": 229
    },
    {
      "epoch": 1.5727272727272728,
      "grad_norm": 0.4441164433956146,
      "learning_rate": 4.661681924516466e-06,
      "loss": 0.5208,
      "step": 230
    },
    {
      "epoch": 1.5795454545454546,
      "grad_norm": 0.4057452380657196,
      "learning_rate": 4.6565800300544805e-06,
      "loss": 0.5173,
      "step": 231
    },
    {
      "epoch": 1.5863636363636364,
      "grad_norm": 0.4162004590034485,
      "learning_rate": 4.651442789509813e-06,
      "loss": 0.5307,
      "step": 232
    },
    {
      "epoch": 1.5931818181818183,
      "grad_norm": 0.43314146995544434,
      "learning_rate": 4.646270287081208e-06,
      "loss": 0.5359,
      "step": 233
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.5786082148551941,
      "learning_rate": 4.641062607545347e-06,
      "loss": 0.5307,
      "step": 234
    },
    {
      "epoch": 1.606818181818182,
      "grad_norm": 0.43811941146850586,
      "learning_rate": 4.6358198362554585e-06,
      "loss": 0.5556,
      "step": 235
    },
    {
      "epoch": 1.6136363636363638,
      "grad_norm": 0.4374074935913086,
      "learning_rate": 4.630542059139923e-06,
      "loss": 0.5253,
      "step": 236
    },
    {
      "epoch": 1.6204545454545456,
      "grad_norm": 0.4387815594673157,
      "learning_rate": 4.625229362700863e-06,
      "loss": 0.534,
      "step": 237
    },
    {
      "epoch": 1.6272727272727274,
      "grad_norm": 0.4976162016391754,
      "learning_rate": 4.61988183401272e-06,
      "loss": 0.5255,
      "step": 238
    },
    {
      "epoch": 1.634090909090909,
      "grad_norm": 0.4603639543056488,
      "learning_rate": 4.614499560720837e-06,
      "loss": 0.511,
      "step": 239
    },
    {
      "epoch": 1.6409090909090909,
      "grad_norm": 0.43136921525001526,
      "learning_rate": 4.609082631040012e-06,
      "loss": 0.5099,
      "step": 240
    },
    {
      "epoch": 1.6477272727272727,
      "grad_norm": 0.4573090970516205,
      "learning_rate": 4.603631133753061e-06,
      "loss": 0.5515,
      "step": 241
    },
    {
      "epoch": 1.6545454545454545,
      "grad_norm": 0.4292807877063751,
      "learning_rate": 4.598145158209356e-06,
      "loss": 0.5048,
      "step": 242
    },
    {
      "epoch": 1.6613636363636364,
      "grad_norm": 0.6377942562103271,
      "learning_rate": 4.592624794323366e-06,
      "loss": 0.5333,
      "step": 243
    },
    {
      "epoch": 1.6681818181818182,
      "grad_norm": 0.49924615025520325,
      "learning_rate": 4.587070132573178e-06,
      "loss": 0.5127,
      "step": 244
    },
    {
      "epoch": 1.675,
      "grad_norm": 0.4723946154117584,
      "learning_rate": 4.581481263999019e-06,
      "loss": 0.5127,
      "step": 245
    },
    {
      "epoch": 1.6818181818181817,
      "grad_norm": 1.1846898794174194,
      "learning_rate": 4.575858280201761e-06,
      "loss": 0.5367,
      "step": 246
    },
    {
      "epoch": 1.6886363636363635,
      "grad_norm": 0.4306166172027588,
      "learning_rate": 4.570201273341418e-06,
      "loss": 0.52,
      "step": 247
    },
    {
      "epoch": 1.6954545454545453,
      "grad_norm": 0.45308929681777954,
      "learning_rate": 4.564510336135642e-06,
      "loss": 0.5589,
      "step": 248
    },
    {
      "epoch": 1.7022727272727272,
      "grad_norm": 0.4482448399066925,
      "learning_rate": 4.558785561858196e-06,
      "loss": 0.5161,
      "step": 249
    },
    {
      "epoch": 1.709090909090909,
      "grad_norm": 0.44250935316085815,
      "learning_rate": 4.5530270443374305e-06,
      "loss": 0.5342,
      "step": 250
    },
    {
      "epoch": 1.7159090909090908,
      "grad_norm": 0.8177540898323059,
      "learning_rate": 4.547234877954741e-06,
      "loss": 0.5402,
      "step": 251
    },
    {
      "epoch": 1.7227272727272727,
      "grad_norm": 0.5169854760169983,
      "learning_rate": 4.541409157643027e-06,
      "loss": 0.5301,
      "step": 252
    },
    {
      "epoch": 1.7295454545454545,
      "grad_norm": 0.4544285237789154,
      "learning_rate": 4.535549978885132e-06,
      "loss": 0.5314,
      "step": 253
    },
    {
      "epoch": 1.7363636363636363,
      "grad_norm": 0.42860639095306396,
      "learning_rate": 4.5296574377122765e-06,
      "loss": 0.5249,
      "step": 254
    },
    {
      "epoch": 1.7431818181818182,
      "grad_norm": 0.526492178440094,
      "learning_rate": 4.5237316307024895e-06,
      "loss": 0.5142,
      "step": 255
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.4208003878593445,
      "learning_rate": 4.517772654979024e-06,
      "loss": 0.5151,
      "step": 256
    },
    {
      "epoch": 1.7568181818181818,
      "grad_norm": 0.4974212944507599,
      "learning_rate": 4.51178060820876e-06,
      "loss": 0.5161,
      "step": 257
    },
    {
      "epoch": 1.7636363636363637,
      "grad_norm": 0.4148060083389282,
      "learning_rate": 4.505755588600613e-06,
      "loss": 0.5099,
      "step": 258
    },
    {
      "epoch": 1.7704545454545455,
      "grad_norm": 0.4106168746948242,
      "learning_rate": 4.499697694903915e-06,
      "loss": 0.5376,
      "step": 259
    },
    {
      "epoch": 1.7772727272727273,
      "grad_norm": 0.49017220735549927,
      "learning_rate": 4.493607026406802e-06,
      "loss": 0.525,
      "step": 260
    },
    {
      "epoch": 1.7840909090909092,
      "grad_norm": 0.4141218662261963,
      "learning_rate": 4.487483682934587e-06,
      "loss": 0.5303,
      "step": 261
    },
    {
      "epoch": 1.790909090909091,
      "grad_norm": 0.44826585054397583,
      "learning_rate": 4.481327764848118e-06,
      "loss": 0.4886,
      "step": 262
    },
    {
      "epoch": 1.7977272727272728,
      "grad_norm": 0.42013809084892273,
      "learning_rate": 4.47513937304214e-06,
      "loss": 0.5204,
      "step": 263
    },
    {
      "epoch": 1.8045454545454547,
      "grad_norm": 0.44327741861343384,
      "learning_rate": 4.4689186089436365e-06,
      "loss": 0.5097,
      "step": 264
    },
    {
      "epoch": 1.8113636363636365,
      "grad_norm": 0.39498746395111084,
      "learning_rate": 4.462665574510169e-06,
      "loss": 0.5102,
      "step": 265
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.48296087980270386,
      "learning_rate": 4.456380372228208e-06,
      "loss": 0.5299,
      "step": 266
    },
    {
      "epoch": 1.825,
      "grad_norm": 0.4398634433746338,
      "learning_rate": 4.450063105111447e-06,
      "loss": 0.5419,
      "step": 267
    },
    {
      "epoch": 1.8318181818181818,
      "grad_norm": 0.43333035707473755,
      "learning_rate": 4.443713876699124e-06,
      "loss": 0.5047,
      "step": 268
    },
    {
      "epoch": 1.8386363636363636,
      "grad_norm": 0.5823659300804138,
      "learning_rate": 4.4373327910543125e-06,
      "loss": 0.5082,
      "step": 269
    },
    {
      "epoch": 1.8454545454545455,
      "grad_norm": 0.44385817646980286,
      "learning_rate": 4.430919952762226e-06,
      "loss": 0.5171,
      "step": 270
    },
    {
      "epoch": 1.8522727272727273,
      "grad_norm": 0.4444347023963928,
      "learning_rate": 4.424475466928499e-06,
      "loss": 0.5351,
      "step": 271
    },
    {
      "epoch": 1.8590909090909091,
      "grad_norm": 0.549674391746521,
      "learning_rate": 4.417999439177465e-06,
      "loss": 0.498,
      "step": 272
    },
    {
      "epoch": 1.865909090909091,
      "grad_norm": 0.4340274930000305,
      "learning_rate": 4.4114919756504275e-06,
      "loss": 0.5429,
      "step": 273
    },
    {
      "epoch": 1.8727272727272726,
      "grad_norm": 0.44019854068756104,
      "learning_rate": 4.404953183003916e-06,
      "loss": 0.5133,
      "step": 274
    },
    {
      "epoch": 1.8795454545454544,
      "grad_norm": 0.4473486542701721,
      "learning_rate": 4.398383168407941e-06,
      "loss": 0.5413,
      "step": 275
    },
    {
      "epoch": 1.8863636363636362,
      "grad_norm": 0.44450780749320984,
      "learning_rate": 4.391782039544239e-06,
      "loss": 0.4998,
      "step": 276
    },
    {
      "epoch": 1.893181818181818,
      "grad_norm": 0.4310123920440674,
      "learning_rate": 4.385149904604502e-06,
      "loss": 0.5134,
      "step": 277
    },
    {
      "epoch": 1.9,
      "grad_norm": 0.48953917622566223,
      "learning_rate": 4.378486872288611e-06,
      "loss": 0.5102,
      "step": 278
    },
    {
      "epoch": 1.9068181818181817,
      "grad_norm": 0.49219778180122375,
      "learning_rate": 4.371793051802849e-06,
      "loss": 0.5213,
      "step": 279
    },
    {
      "epoch": 1.9136363636363636,
      "grad_norm": 0.818645715713501,
      "learning_rate": 4.365068552858116e-06,
      "loss": 0.5251,
      "step": 280
    },
    {
      "epoch": 1.9204545454545454,
      "grad_norm": 0.4407028257846832,
      "learning_rate": 4.358313485668124e-06,
      "loss": 0.5191,
      "step": 281
    },
    {
      "epoch": 1.9272727272727272,
      "grad_norm": 0.5301506519317627,
      "learning_rate": 4.3515279609476e-06,
      "loss": 0.5003,
      "step": 282
    },
    {
      "epoch": 1.934090909090909,
      "grad_norm": 0.4673819839954376,
      "learning_rate": 4.3447120899104615e-06,
      "loss": 0.521,
      "step": 283
    },
    {
      "epoch": 1.940909090909091,
      "grad_norm": 0.4106168746948242,
      "learning_rate": 4.337865984268002e-06,
      "loss": 0.5199,
      "step": 284
    },
    {
      "epoch": 1.9477272727272728,
      "grad_norm": 0.43583595752716064,
      "learning_rate": 4.3309897562270525e-06,
      "loss": 0.5367,
      "step": 285
    },
    {
      "epoch": 1.9545454545454546,
      "grad_norm": 0.4984910488128662,
      "learning_rate": 4.324083518488151e-06,
      "loss": 0.5202,
      "step": 286
    },
    {
      "epoch": 1.9613636363636364,
      "grad_norm": 0.4516090452671051,
      "learning_rate": 4.317147384243688e-06,
      "loss": 0.5432,
      "step": 287
    },
    {
      "epoch": 1.9681818181818183,
      "grad_norm": 0.43753015995025635,
      "learning_rate": 4.3101814671760546e-06,
      "loss": 0.5309,
      "step": 288
    },
    {
      "epoch": 1.975,
      "grad_norm": 0.4472252130508423,
      "learning_rate": 4.303185881455778e-06,
      "loss": 0.5344,
      "step": 289
    },
    {
      "epoch": 1.981818181818182,
      "grad_norm": 1.178377389907837,
      "learning_rate": 4.296160741739652e-06,
      "loss": 0.5283,
      "step": 290
    },
    {
      "epoch": 1.9886363636363638,
      "grad_norm": 0.5085738301277161,
      "learning_rate": 4.289106163168858e-06,
      "loss": 0.5133,
      "step": 291
    },
    {
      "epoch": 1.9954545454545456,
      "grad_norm": 0.52077716588974,
      "learning_rate": 4.282022261367074e-06,
      "loss": 0.5155,
      "step": 292
    },
    {
      "epoch": 2.006818181818182,
      "grad_norm": 0.8534443378448486,
      "learning_rate": 4.274909152438582e-06,
      "loss": 1.003,
      "step": 293
    },
    {
      "epoch": 2.0136363636363637,
      "grad_norm": 0.46160876750946045,
      "learning_rate": 4.267766952966369e-06,
      "loss": 0.4958,
      "step": 294
    },
    {
      "epoch": 2.0204545454545455,
      "grad_norm": 0.5113462209701538,
      "learning_rate": 4.260595780010209e-06,
      "loss": 0.4895,
      "step": 295
    },
    {
      "epoch": 2.0272727272727273,
      "grad_norm": 0.4553380608558655,
      "learning_rate": 4.2533957511047485e-06,
      "loss": 0.4826,
      "step": 296
    },
    {
      "epoch": 2.034090909090909,
      "grad_norm": 0.43557822704315186,
      "learning_rate": 4.24616698425758e-06,
      "loss": 0.4591,
      "step": 297
    },
    {
      "epoch": 2.040909090909091,
      "grad_norm": 0.45537200570106506,
      "learning_rate": 4.238909597947307e-06,
      "loss": 0.4811,
      "step": 298
    },
    {
      "epoch": 2.047727272727273,
      "grad_norm": 0.48833370208740234,
      "learning_rate": 4.231623711121603e-06,
      "loss": 0.4967,
      "step": 299
    },
    {
      "epoch": 2.0545454545454547,
      "grad_norm": 0.45504844188690186,
      "learning_rate": 4.224309443195261e-06,
      "loss": 0.4964,
      "step": 300
    },
    {
      "epoch": 2.0613636363636365,
      "grad_norm": 0.4313275218009949,
      "learning_rate": 4.2169669140482365e-06,
      "loss": 0.4832,
      "step": 301
    },
    {
      "epoch": 2.0681818181818183,
      "grad_norm": 0.4298369884490967,
      "learning_rate": 4.2095962440236846e-06,
      "loss": 0.5071,
      "step": 302
    },
    {
      "epoch": 2.075,
      "grad_norm": 0.4188523590564728,
      "learning_rate": 4.202197553925983e-06,
      "loss": 0.4859,
      "step": 303
    },
    {
      "epoch": 2.081818181818182,
      "grad_norm": 0.39195290207862854,
      "learning_rate": 4.194770965018758e-06,
      "loss": 0.5081,
      "step": 304
    },
    {
      "epoch": 2.088636363636364,
      "grad_norm": 0.45844170451164246,
      "learning_rate": 4.187316599022892e-06,
      "loss": 0.508,
      "step": 305
    },
    {
      "epoch": 2.0954545454545457,
      "grad_norm": 0.6743896007537842,
      "learning_rate": 4.179834578114531e-06,
      "loss": 0.4748,
      "step": 306
    },
    {
      "epoch": 2.102272727272727,
      "grad_norm": 0.4317929446697235,
      "learning_rate": 4.172325024923083e-06,
      "loss": 0.504,
      "step": 307
    },
    {
      "epoch": 2.109090909090909,
      "grad_norm": 0.4857175350189209,
      "learning_rate": 4.164788062529203e-06,
      "loss": 0.4987,
      "step": 308
    },
    {
      "epoch": 2.1159090909090907,
      "grad_norm": 0.4454379677772522,
      "learning_rate": 4.157223814462784e-06,
      "loss": 0.4938,
      "step": 309
    },
    {
      "epoch": 2.1227272727272726,
      "grad_norm": 0.5128078460693359,
      "learning_rate": 4.149632404700925e-06,
      "loss": 0.4917,
      "step": 310
    },
    {
      "epoch": 2.1295454545454544,
      "grad_norm": 0.5195722579956055,
      "learning_rate": 4.142013957665903e-06,
      "loss": 0.4753,
      "step": 311
    },
    {
      "epoch": 2.1363636363636362,
      "grad_norm": 0.4242931604385376,
      "learning_rate": 4.134368598223132e-06,
      "loss": 0.4832,
      "step": 312
    },
    {
      "epoch": 2.143181818181818,
      "grad_norm": 0.4419071674346924,
      "learning_rate": 4.126696451679116e-06,
      "loss": 0.4783,
      "step": 313
    },
    {
      "epoch": 2.15,
      "grad_norm": 0.42852380871772766,
      "learning_rate": 4.118997643779401e-06,
      "loss": 0.5034,
      "step": 314
    },
    {
      "epoch": 2.1568181818181817,
      "grad_norm": 0.7373576164245605,
      "learning_rate": 4.111272300706502e-06,
      "loss": 0.4934,
      "step": 315
    },
    {
      "epoch": 2.1636363636363636,
      "grad_norm": 0.4435337781906128,
      "learning_rate": 4.1035205490778505e-06,
      "loss": 0.5089,
      "step": 316
    },
    {
      "epoch": 2.1704545454545454,
      "grad_norm": 0.4714502692222595,
      "learning_rate": 4.095742515943703e-06,
      "loss": 0.502,
      "step": 317
    },
    {
      "epoch": 2.1772727272727272,
      "grad_norm": 0.4732494056224823,
      "learning_rate": 4.087938328785071e-06,
      "loss": 0.5073,
      "step": 318
    },
    {
      "epoch": 2.184090909090909,
      "grad_norm": 0.45177504420280457,
      "learning_rate": 4.080108115511629e-06,
      "loss": 0.5076,
      "step": 319
    },
    {
      "epoch": 2.190909090909091,
      "grad_norm": 0.506157398223877,
      "learning_rate": 4.072252004459612e-06,
      "loss": 0.4837,
      "step": 320
    },
    {
      "epoch": 2.1977272727272728,
      "grad_norm": 0.43044376373291016,
      "learning_rate": 4.064370124389718e-06,
      "loss": 0.5167,
      "step": 321
    },
    {
      "epoch": 2.2045454545454546,
      "grad_norm": 0.45878180861473083,
      "learning_rate": 4.056462604484998e-06,
      "loss": 0.4936,
      "step": 322
    },
    {
      "epoch": 2.2113636363636364,
      "grad_norm": 0.43078047037124634,
      "learning_rate": 4.048529574348734e-06,
      "loss": 0.4783,
      "step": 323
    },
    {
      "epoch": 2.2181818181818183,
      "grad_norm": 0.42789366841316223,
      "learning_rate": 4.040571164002319e-06,
      "loss": 0.4853,
      "step": 324
    },
    {
      "epoch": 2.225,
      "grad_norm": 0.44888222217559814,
      "learning_rate": 4.032587503883124e-06,
      "loss": 0.5097,
      "step": 325
    },
    {
      "epoch": 2.231818181818182,
      "grad_norm": 0.446851521730423,
      "learning_rate": 4.0245787248423614e-06,
      "loss": 0.4844,
      "step": 326
    },
    {
      "epoch": 2.2386363636363638,
      "grad_norm": 0.436567097902298,
      "learning_rate": 4.0165449581429404e-06,
      "loss": 0.4549,
      "step": 327
    },
    {
      "epoch": 2.2454545454545456,
      "grad_norm": 0.4813944697380066,
      "learning_rate": 4.008486335457312e-06,
      "loss": 0.49,
      "step": 328
    },
    {
      "epoch": 2.2522727272727274,
      "grad_norm": 0.4935426115989685,
      "learning_rate": 4.000402988865316e-06,
      "loss": 0.4995,
      "step": 329
    },
    {
      "epoch": 2.2590909090909093,
      "grad_norm": 1.1695573329925537,
      "learning_rate": 3.992295050852013e-06,
      "loss": 0.5033,
      "step": 330
    },
    {
      "epoch": 2.265909090909091,
      "grad_norm": 0.41830316185951233,
      "learning_rate": 3.984162654305516e-06,
      "loss": 0.4828,
      "step": 331
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 0.4522007703781128,
      "learning_rate": 3.976005932514807e-06,
      "loss": 0.4948,
      "step": 332
    },
    {
      "epoch": 2.2795454545454543,
      "grad_norm": 0.8304370045661926,
      "learning_rate": 3.967825019167559e-06,
      "loss": 0.504,
      "step": 333
    },
    {
      "epoch": 2.286363636363636,
      "grad_norm": 0.4533236622810364,
      "learning_rate": 3.959620048347938e-06,
      "loss": 0.4487,
      "step": 334
    },
    {
      "epoch": 2.293181818181818,
      "grad_norm": 0.43578940629959106,
      "learning_rate": 3.951391154534415e-06,
      "loss": 0.4702,
      "step": 335
    },
    {
      "epoch": 2.3,
      "grad_norm": 0.5146592259407043,
      "learning_rate": 3.943138472597549e-06,
      "loss": 0.4984,
      "step": 336
    },
    {
      "epoch": 2.3068181818181817,
      "grad_norm": 0.6412249207496643,
      "learning_rate": 3.934862137797788e-06,
      "loss": 0.4761,
      "step": 337
    },
    {
      "epoch": 2.3136363636363635,
      "grad_norm": 1.3352930545806885,
      "learning_rate": 3.9265622857832455e-06,
      "loss": 0.499,
      "step": 338
    },
    {
      "epoch": 2.3204545454545453,
      "grad_norm": 0.4465159475803375,
      "learning_rate": 3.918239052587481e-06,
      "loss": 0.4525,
      "step": 339
    },
    {
      "epoch": 2.327272727272727,
      "grad_norm": 0.43597185611724854,
      "learning_rate": 3.909892574627267e-06,
      "loss": 0.4952,
      "step": 340
    },
    {
      "epoch": 2.334090909090909,
      "grad_norm": 0.7057136297225952,
      "learning_rate": 3.901522988700355e-06,
      "loss": 0.4576,
      "step": 341
    },
    {
      "epoch": 2.340909090909091,
      "grad_norm": 0.45416319370269775,
      "learning_rate": 3.893130431983234e-06,
      "loss": 0.4902,
      "step": 342
    },
    {
      "epoch": 2.3477272727272727,
      "grad_norm": 0.43900424242019653,
      "learning_rate": 3.884715042028882e-06,
      "loss": 0.495,
      "step": 343
    },
    {
      "epoch": 2.3545454545454545,
      "grad_norm": 0.4553733468055725,
      "learning_rate": 3.876276956764509e-06,
      "loss": 0.4861,
      "step": 344
    },
    {
      "epoch": 2.3613636363636363,
      "grad_norm": 0.5471370816230774,
      "learning_rate": 3.867816314489301e-06,
      "loss": 0.5043,
      "step": 345
    },
    {
      "epoch": 2.368181818181818,
      "grad_norm": 0.43938082456588745,
      "learning_rate": 3.8593332538721465e-06,
| "loss": 0.4678, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 2.375, | |
| "grad_norm": 0.4583636224269867, | |
| "learning_rate": 3.8508279139493736e-06, | |
| "loss": 0.4679, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 2.381818181818182, | |
| "grad_norm": 0.44170665740966797, | |
| "learning_rate": 3.84230043412246e-06, | |
| "loss": 0.4647, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 2.3886363636363637, | |
| "grad_norm": 0.4952428340911865, | |
| "learning_rate": 3.833750954155757e-06, | |
| "loss": 0.4796, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 2.3954545454545455, | |
| "grad_norm": 0.42052993178367615, | |
| "learning_rate": 3.825179614174195e-06, | |
| "loss": 0.4606, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.4022727272727273, | |
| "grad_norm": 0.5792545676231384, | |
| "learning_rate": 3.816586554660987e-06, | |
| "loss": 0.4805, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 2.409090909090909, | |
| "grad_norm": 0.4477898180484772, | |
| "learning_rate": 3.807971916455325e-06, | |
| "loss": 0.4793, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 2.415909090909091, | |
| "grad_norm": 0.45617541670799255, | |
| "learning_rate": 3.799335840750077e-06, | |
| "loss": 0.502, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 2.422727272727273, | |
| "grad_norm": 0.45021289587020874, | |
| "learning_rate": 3.790678469089465e-06, | |
| "loss": 0.478, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 2.4295454545454547, | |
| "grad_norm": 0.4506435990333557, | |
| "learning_rate": 3.7819999433667503e-06, | |
| "loss": 0.4828, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 2.4363636363636365, | |
| "grad_norm": 0.48021042346954346, | |
| "learning_rate": 3.773300405821908e-06, | |
| "loss": 0.4827, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 2.4431818181818183, | |
| "grad_norm": 0.4759495258331299, | |
| "learning_rate": 3.764579999039293e-06, | |
| "loss": 0.4633, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "grad_norm": 0.4734678268432617, | |
| "learning_rate": 3.7558388659453052e-06, | |
| "loss": 0.4982, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 2.456818181818182, | |
| "grad_norm": 0.4502730369567871, | |
| "learning_rate": 3.7470771498060455e-06, | |
| "loss": 0.5009, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 2.463636363636364, | |
| "grad_norm": 0.444940447807312, | |
| "learning_rate": 3.7382949942249695e-06, | |
| "loss": 0.4994, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 2.4704545454545457, | |
| "grad_norm": 0.4882519245147705, | |
| "learning_rate": 3.7294925431405306e-06, | |
| "loss": 0.4607, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 2.4772727272727275, | |
| "grad_norm": 0.51338791847229, | |
| "learning_rate": 3.720669940823827e-06, | |
| "loss": 0.4801, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 2.484090909090909, | |
| "grad_norm": 0.495211124420166, | |
| "learning_rate": 3.7118273318762275e-06, | |
| "loss": 0.4984, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 2.4909090909090907, | |
| "grad_norm": 0.4894484281539917, | |
| "learning_rate": 3.702964861227013e-06, | |
| "loss": 0.4834, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 2.4977272727272726, | |
| "grad_norm": 0.5554991364479065, | |
| "learning_rate": 3.694082674130991e-06, | |
| "loss": 0.4874, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 2.5045454545454544, | |
| "grad_norm": 0.4586203396320343, | |
| "learning_rate": 3.6851809161661206e-06, | |
| "loss": 0.5117, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.5113636363636362, | |
| "grad_norm": 0.7342172861099243, | |
| "learning_rate": 3.6762597332311254e-06, | |
| "loss": 0.5184, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 2.518181818181818, | |
| "grad_norm": 0.46638190746307373, | |
| "learning_rate": 3.6673192715431016e-06, | |
| "loss": 0.47, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 2.525, | |
| "grad_norm": 0.4584850072860718, | |
| "learning_rate": 3.658359677635122e-06, | |
| "loss": 0.45, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 2.5318181818181817, | |
| "grad_norm": 0.4285255968570709, | |
| "learning_rate": 3.649381098353834e-06, | |
| "loss": 0.513, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.5386363636363636, | |
| "grad_norm": 0.49487683176994324, | |
| "learning_rate": 3.6403836808570512e-06, | |
| "loss": 0.5048, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.5454545454545454, | |
| "grad_norm": 0.44076815247535706, | |
| "learning_rate": 3.631367572611348e-06, | |
| "loss": 0.4927, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.5522727272727272, | |
| "grad_norm": 0.44529926776885986, | |
| "learning_rate": 3.6223329213896313e-06, | |
| "loss": 0.4863, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.559090909090909, | |
| "grad_norm": 0.5416815876960754, | |
| "learning_rate": 3.613279875268731e-06, | |
| "loss": 0.4808, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.565909090909091, | |
| "grad_norm": 0.43630415201187134, | |
| "learning_rate": 3.604208582626964e-06, | |
| "loss": 0.4792, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.5727272727272728, | |
| "grad_norm": 0.47734326124191284, | |
| "learning_rate": 3.5951191921417063e-06, | |
| "loss": 0.4462, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.5795454545454546, | |
| "grad_norm": 0.4540305435657501, | |
| "learning_rate": 3.586011852786955e-06, | |
| "loss": 0.4915, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.5863636363636364, | |
| "grad_norm": 0.47168779373168945, | |
| "learning_rate": 3.5768867138308872e-06, | |
| "loss": 0.47, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.5931818181818183, | |
| "grad_norm": 0.46372920274734497, | |
| "learning_rate": 3.5677439248334133e-06, | |
| "loss": 0.4775, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "grad_norm": 0.41653329133987427, | |
| "learning_rate": 3.5585836356437266e-06, | |
| "loss": 0.4875, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 2.606818181818182, | |
| "grad_norm": 0.4495103657245636, | |
| "learning_rate": 3.5494059963978433e-06, | |
| "loss": 0.4637, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 2.6136363636363638, | |
| "grad_norm": 0.432577520608902, | |
| "learning_rate": 3.540211157516149e-06, | |
| "loss": 0.4991, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 2.6204545454545456, | |
| "grad_norm": 0.4517219662666321, | |
| "learning_rate": 3.530999269700927e-06, | |
| "loss": 0.4785, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 2.6272727272727274, | |
| "grad_norm": 0.519922137260437, | |
| "learning_rate": 3.521770483933891e-06, | |
| "loss": 0.5026, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 2.634090909090909, | |
| "grad_norm": 0.48750364780426025, | |
| "learning_rate": 3.5125249514737093e-06, | |
| "loss": 0.4805, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 2.6409090909090907, | |
| "grad_norm": 0.6416886448860168, | |
| "learning_rate": 3.503262823853527e-06, | |
| "loss": 0.4849, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 2.6477272727272725, | |
| "grad_norm": 0.5748171806335449, | |
| "learning_rate": 3.493984252878483e-06, | |
| "loss": 0.4885, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 2.6545454545454543, | |
| "grad_norm": 0.5148530006408691, | |
| "learning_rate": 3.484689390623218e-06, | |
| "loss": 0.484, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 2.661363636363636, | |
| "grad_norm": 0.526839017868042, | |
| "learning_rate": 3.4753783894293886e-06, | |
| "loss": 0.4758, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 2.668181818181818, | |
| "grad_norm": 0.7565727829933167, | |
| "learning_rate": 3.466051401903162e-06, | |
| "loss": 0.479, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 2.675, | |
| "grad_norm": 0.4598790109157562, | |
| "learning_rate": 3.4567085809127247e-06, | |
| "loss": 0.4972, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 2.6818181818181817, | |
| "grad_norm": 0.4911792278289795, | |
| "learning_rate": 3.4473500795857674e-06, | |
| "loss": 0.4714, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 2.6886363636363635, | |
| "grad_norm": 0.4214136600494385, | |
| "learning_rate": 3.4379760513069804e-06, | |
| "loss": 0.4906, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 2.6954545454545453, | |
| "grad_norm": 0.5070644021034241, | |
| "learning_rate": 3.428586649715542e-06, | |
| "loss": 0.4936, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 2.702272727272727, | |
| "grad_norm": 0.4591734707355499, | |
| "learning_rate": 3.4191820287025916e-06, | |
| "loss": 0.4751, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 2.709090909090909, | |
| "grad_norm": 0.579781711101532, | |
| "learning_rate": 3.4097623424087196e-06, | |
| "loss": 0.4935, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 2.715909090909091, | |
| "grad_norm": 0.4178840219974518, | |
| "learning_rate": 3.4003277452214284e-06, | |
| "loss": 0.4771, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 2.7227272727272727, | |
| "grad_norm": 0.5002609491348267, | |
| "learning_rate": 3.3908783917726123e-06, | |
| "loss": 0.4739, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 2.7295454545454545, | |
| "grad_norm": 0.5176806449890137, | |
| "learning_rate": 3.381414436936018e-06, | |
| "loss": 0.4965, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 2.7363636363636363, | |
| "grad_norm": 0.8039212226867676, | |
| "learning_rate": 3.3719360358247054e-06, | |
| "loss": 0.4822, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 2.743181818181818, | |
| "grad_norm": 0.48943471908569336, | |
| "learning_rate": 3.36244334378851e-06, | |
| "loss": 0.4815, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.43710342049598694, | |
| "learning_rate": 3.3529365164114903e-06, | |
| "loss": 0.4932, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 2.756818181818182, | |
| "grad_norm": 0.5585265159606934, | |
| "learning_rate": 3.3434157095093846e-06, | |
| "loss": 0.5125, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 2.7636363636363637, | |
| "grad_norm": 0.4626101553440094, | |
| "learning_rate": 3.333881079127052e-06, | |
| "loss": 0.4934, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 2.7704545454545455, | |
| "grad_norm": 0.4675556421279907, | |
| "learning_rate": 3.3243327815359168e-06, | |
| "loss": 0.4903, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 2.7772727272727273, | |
| "grad_norm": 0.45658671855926514, | |
| "learning_rate": 3.314770973231408e-06, | |
| "loss": 0.4778, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 2.784090909090909, | |
| "grad_norm": 0.4911910593509674, | |
| "learning_rate": 3.305195810930393e-06, | |
| "loss": 0.4795, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 2.790909090909091, | |
| "grad_norm": 0.4397219121456146, | |
| "learning_rate": 3.2956074515686105e-06, | |
| "loss": 0.4589, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 2.797727272727273, | |
| "grad_norm": 0.5184643864631653, | |
| "learning_rate": 3.2860060522980945e-06, | |
| "loss": 0.4827, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 2.8045454545454547, | |
| "grad_norm": 0.45334312319755554, | |
| "learning_rate": 3.276391770484606e-06, | |
| "loss": 0.4821, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 2.8113636363636365, | |
| "grad_norm": 0.41846391558647156, | |
| "learning_rate": 3.266764763705046e-06, | |
| "loss": 0.5157, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 2.8181818181818183, | |
| "grad_norm": 0.45868799090385437, | |
| "learning_rate": 3.257125189744877e-06, | |
| "loss": 0.4876, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 2.825, | |
| "grad_norm": 0.4785497486591339, | |
| "learning_rate": 3.247473206595536e-06, | |
| "loss": 0.4809, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 2.831818181818182, | |
| "grad_norm": 0.5683679580688477, | |
| "learning_rate": 3.2378089724518464e-06, | |
| "loss": 0.505, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 2.838636363636364, | |
| "grad_norm": 0.48336583375930786, | |
| "learning_rate": 3.228132645709421e-06, | |
| "loss": 0.4816, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.8454545454545457, | |
| "grad_norm": 0.4236341416835785, | |
| "learning_rate": 3.218444384962071e-06, | |
| "loss": 0.4885, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.8522727272727275, | |
| "grad_norm": 0.4892752766609192, | |
| "learning_rate": 3.2087443489992043e-06, | |
| "loss": 0.4782, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.8590909090909093, | |
| "grad_norm": 0.4632202088832855, | |
| "learning_rate": 3.1990326968032225e-06, | |
| "loss": 0.5211, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.865909090909091, | |
| "grad_norm": 0.8842809796333313, | |
| "learning_rate": 3.189309587546917e-06, | |
| "loss": 0.4716, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.8727272727272726, | |
| "grad_norm": 0.46771398186683655, | |
| "learning_rate": 3.1795751805908578e-06, | |
| "loss": 0.4745, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.8795454545454544, | |
| "grad_norm": 0.8178832530975342, | |
| "learning_rate": 3.169829635480783e-06, | |
| "loss": 0.4917, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.8863636363636362, | |
| "grad_norm": 0.43057572841644287, | |
| "learning_rate": 3.160073111944983e-06, | |
| "loss": 0.4926, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.893181818181818, | |
| "grad_norm": 0.4258928894996643, | |
| "learning_rate": 3.150305769891686e-06, | |
| "loss": 0.4698, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 0.4559054374694824, | |
| "learning_rate": 3.1405277694064306e-06, | |
| "loss": 0.4537, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.9068181818181817, | |
| "grad_norm": 0.4342961609363556, | |
| "learning_rate": 3.13073927074945e-06, | |
| "loss": 0.4834, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.9136363636363636, | |
| "grad_norm": 0.48813825845718384, | |
| "learning_rate": 3.1209404343530374e-06, | |
| "loss": 0.4853, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.9204545454545454, | |
| "grad_norm": 0.47864893078804016, | |
| "learning_rate": 3.111131420818922e-06, | |
| "loss": 0.4627, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.9272727272727272, | |
| "grad_norm": 0.4394248425960541, | |
| "learning_rate": 3.1013123909156347e-06, | |
| "loss": 0.4817, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.934090909090909, | |
| "grad_norm": 0.6629825830459595, | |
| "learning_rate": 3.091483505575873e-06, | |
| "loss": 0.4738, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.940909090909091, | |
| "grad_norm": 0.47470518946647644, | |
| "learning_rate": 3.081644925893866e-06, | |
| "loss": 0.4943, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.9477272727272728, | |
| "grad_norm": 0.44007545709609985, | |
| "learning_rate": 3.0717968131227285e-06, | |
| "loss": 0.4805, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.9545454545454546, | |
| "grad_norm": 0.4425961375236511, | |
| "learning_rate": 3.061939328671824e-06, | |
| "loss": 0.5092, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.9613636363636364, | |
| "grad_norm": 0.4215753972530365, | |
| "learning_rate": 3.0520726341041165e-06, | |
| "loss": 0.4616, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.9681818181818183, | |
| "grad_norm": 0.4265761077404022, | |
| "learning_rate": 3.0421968911335196e-06, | |
| "loss": 0.4764, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.975, | |
| "grad_norm": 0.45009252429008484, | |
| "learning_rate": 3.032312261622255e-06, | |
| "loss": 0.4865, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.981818181818182, | |
| "grad_norm": 0.5175682306289673, | |
| "learning_rate": 3.0224189075781886e-06, | |
| "loss": 0.5085, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.9886363636363638, | |
| "grad_norm": 0.4518980383872986, | |
| "learning_rate": 3.012516991152181e-06, | |
| "loss": 0.4568, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.9954545454545456, | |
| "grad_norm": 0.4789412021636963, | |
| "learning_rate": 3.002606674635432e-06, | |
| "loss": 0.4942, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 3.006818181818182, | |
| "grad_norm": 0.495550274848938, | |
| "learning_rate": 2.9926881204568153e-06, | |
| "loss": 0.9793, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 3.0136363636363637, | |
| "grad_norm": 0.47357404232025146, | |
| "learning_rate": 2.9827614911802205e-06, | |
| "loss": 0.4689, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 3.0204545454545455, | |
| "grad_norm": 0.4523085355758667, | |
| "learning_rate": 2.972826949501884e-06, | |
| "loss": 0.4632, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 3.0272727272727273, | |
| "grad_norm": 0.4381036162376404, | |
| "learning_rate": 2.9628846582477305e-06, | |
| "loss": 0.4508, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 3.034090909090909, | |
| "grad_norm": 0.5367632508277893, | |
| "learning_rate": 2.9529347803706943e-06, | |
| "loss": 0.4472, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 3.040909090909091, | |
| "grad_norm": 0.5390172600746155, | |
| "learning_rate": 2.9429774789480576e-06, | |
| "loss": 0.4475, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 3.047727272727273, | |
| "grad_norm": 0.44644543528556824, | |
| "learning_rate": 2.9330129171787704e-06, | |
| "loss": 0.497, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 3.0545454545454547, | |
| "grad_norm": 0.46184423565864563, | |
| "learning_rate": 2.923041258380779e-06, | |
| "loss": 0.4545, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 3.0613636363636365, | |
| "grad_norm": 0.45963627099990845, | |
| "learning_rate": 2.9130626659883537e-06, | |
| "loss": 0.4571, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 3.0681818181818183, | |
| "grad_norm": 0.4504947364330292, | |
| "learning_rate": 2.9030773035493997e-06, | |
| "loss": 0.4563, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 3.075, | |
| "grad_norm": 0.4393884241580963, | |
| "learning_rate": 2.893085334722786e-06, | |
| "loss": 0.4433, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 3.081818181818182, | |
| "grad_norm": 0.7640544772148132, | |
| "learning_rate": 2.883086923275658e-06, | |
| "loss": 0.4598, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 3.088636363636364, | |
| "grad_norm": 0.44159695506095886, | |
| "learning_rate": 2.8730822330807556e-06, | |
| "loss": 0.4443, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 3.0954545454545457, | |
| "grad_norm": 0.4525455832481384, | |
| "learning_rate": 2.8630714281137263e-06, | |
| "loss": 0.4692, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 3.102272727272727, | |
| "grad_norm": 0.44391655921936035, | |
| "learning_rate": 2.853054672450437e-06, | |
| "loss": 0.4412, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 3.109090909090909, | |
| "grad_norm": 0.45541912317276, | |
| "learning_rate": 2.8430321302642887e-06, | |
| "loss": 0.4417, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 3.1159090909090907, | |
| "grad_norm": 0.43590644001960754, | |
| "learning_rate": 2.8330039658235194e-06, | |
| "loss": 0.4529, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 3.1227272727272726, | |
| "grad_norm": 0.48097532987594604, | |
| "learning_rate": 2.8229703434885165e-06, | |
| "loss": 0.4519, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 3.1295454545454544, | |
| "grad_norm": 0.43121981620788574, | |
| "learning_rate": 2.8129314277091224e-06, | |
| "loss": 0.4663, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 3.1363636363636362, | |
| "grad_norm": 0.46553748846054077, | |
| "learning_rate": 2.8028873830219373e-06, | |
| "loss": 0.4521, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 3.143181818181818, | |
| "grad_norm": 0.449290007352829, | |
| "learning_rate": 2.7928383740476247e-06, | |
| "loss": 0.4586, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "grad_norm": 0.679371178150177, | |
| "learning_rate": 2.7827845654882112e-06, | |
| "loss": 0.4476, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 3.1568181818181817, | |
| "grad_norm": 0.49499261379241943, | |
| "learning_rate": 2.7727261221243875e-06, | |
| "loss": 0.4752, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 3.1636363636363636, | |
| "grad_norm": 0.4675534963607788, | |
| "learning_rate": 2.76266320881281e-06, | |
| "loss": 0.478, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 3.1704545454545454, | |
| "grad_norm": 0.44854965806007385, | |
| "learning_rate": 2.7525959904833955e-06, | |
| "loss": 0.4289, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 3.1772727272727272, | |
| "grad_norm": 0.4300600290298462, | |
| "learning_rate": 2.7425246321366205e-06, | |
| "loss": 0.4588, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 3.184090909090909, | |
| "grad_norm": 0.46728187799453735, | |
| "learning_rate": 2.7324492988408146e-06, | |
| "loss": 0.4498, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 3.190909090909091, | |
| "grad_norm": 0.46769359707832336, | |
| "learning_rate": 2.7223701557294574e-06, | |
| "loss": 0.4324, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 3.1977272727272728, | |
| "grad_norm": 0.4275517463684082, | |
| "learning_rate": 2.712287367998471e-06, | |
| "loss": 0.4574, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 3.2045454545454546, | |
| "grad_norm": 0.5888628959655762, | |
| "learning_rate": 2.702201100903511e-06, | |
| "loss": 0.4397, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 3.2113636363636364, | |
| "grad_norm": 0.4613579511642456, | |
| "learning_rate": 2.692111519757261e-06, | |
| "loss": 0.4391, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 3.2181818181818183, | |
| "grad_norm": 0.4581657946109772, | |
| "learning_rate": 2.6820187899267203e-06, | |
| "loss": 0.4734, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 3.225, | |
| "grad_norm": 0.4940917491912842, | |
| "learning_rate": 2.671923076830496e-06, | |
| "loss": 0.4726, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 3.231818181818182, | |
| "grad_norm": 0.7280719876289368, | |
| "learning_rate": 2.6618245459360896e-06, | |
| "loss": 0.4741, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 3.2386363636363638, | |
| "grad_norm": 0.519322395324707, | |
| "learning_rate": 2.651723362757186e-06, | |
| "loss": 0.4549, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 3.2454545454545456, | |
| "grad_norm": 0.47500962018966675, | |
| "learning_rate": 2.641619692850941e-06, | |
| "loss": 0.4598, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 3.2522727272727274, | |
| "grad_norm": 0.4661007821559906, | |
| "learning_rate": 2.631513701815267e-06, | |
| "loss": 0.4536, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 3.2590909090909093, | |
| "grad_norm": 0.48142874240875244, | |
| "learning_rate": 2.6214055552861213e-06, | |
| "loss": 0.474, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 3.265909090909091, | |
| "grad_norm": 0.5227756500244141, | |
| "learning_rate": 2.611295418934786e-06, | |
| "loss": 0.4516, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 3.2727272727272725, | |
| "grad_norm": 0.4693076014518738, | |
| "learning_rate": 2.6011834584651597e-06, | |
| "loss": 0.4409, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 3.2795454545454543, | |
| "grad_norm": 0.44575586915016174, | |
| "learning_rate": 2.591069839611036e-06, | |
| "loss": 0.4382, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 3.286363636363636, | |
| "grad_norm": 0.4569104313850403, | |
| "learning_rate": 2.5809547281333904e-06, | |
| "loss": 0.476, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 3.293181818181818, | |
| "grad_norm": 0.4542261064052582, | |
| "learning_rate": 2.570838289817661e-06, | |
| "loss": 0.4542, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "grad_norm": 0.5086669921875, | |
| "learning_rate": 2.560720690471033e-06, | |
| "loss": 0.4673, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 3.3068181818181817, | |
| "grad_norm": 0.46243855357170105, | |
| "learning_rate": 2.5506020959197218e-06, | |
| "loss": 0.4462, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 3.3136363636363635, | |
| "grad_norm": 0.4877456724643707, | |
| "learning_rate": 2.5404826720062544e-06, | |
| "loss": 0.4463, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 3.3204545454545453, | |
| "grad_norm": 0.4592823088169098, | |
| "learning_rate": 2.5303625845867475e-06, | |
| "loss": 0.4674, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 3.327272727272727, | |
| "grad_norm": 0.4744127690792084, | |
| "learning_rate": 2.5202419995281966e-06, | |
| "loss": 0.4502, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 3.334090909090909, | |
| "grad_norm": 0.4267658591270447, | |
| "learning_rate": 2.5101210827057516e-06, | |
| "loss": 0.457, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 3.340909090909091, | |
| "grad_norm": 0.5804760456085205, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.4809, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 3.3477272727272727, | |
| "grad_norm": 0.5594801306724548, | |
| "learning_rate": 2.4898789172942492e-06, | |
| "loss": 0.4522, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 3.3545454545454545, | |
| "grad_norm": 0.4550947844982147, | |
| "learning_rate": 2.4797580004718038e-06, | |
| "loss": 0.4528, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 3.3613636363636363, | |
| "grad_norm": 0.430431991815567, | |
| "learning_rate": 2.4696374154132533e-06, | |
| "loss": 0.4657, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 3.368181818181818, | |
| "grad_norm": 0.4511764645576477, | |
| "learning_rate": 2.4595173279937464e-06, | |
| "loss": 0.4494, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 3.375, | |
| "grad_norm": 0.47958365082740784, | |
| "learning_rate": 2.4493979040802786e-06, | |
| "loss": 0.4508, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 3.381818181818182, | |
| "grad_norm": 0.4632541835308075, | |
| "learning_rate": 2.4392793095289677e-06, | |
| "loss": 0.4444, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 3.3886363636363637, | |
| "grad_norm": 0.46840110421180725, | |
| "learning_rate": 2.42916171018234e-06, | |
| "loss": 0.4676, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 3.3954545454545455, | |
| "grad_norm": 0.49999886751174927, | |
| "learning_rate": 2.419045271866611e-06, | |
| "loss": 0.4589, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 3.4022727272727273, | |
| "grad_norm": 0.44237902760505676, | |
| "learning_rate": 2.408930160388965e-06, | |
| "loss": 0.4569, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 3.409090909090909, | |
| "grad_norm": 0.4862026572227478, | |
| "learning_rate": 2.3988165415348416e-06, | |
| "loss": 0.4534, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 3.415909090909091, | |
| "grad_norm": 0.48773396015167236, | |
| "learning_rate": 2.388704581065215e-06, | |
| "loss": 0.4411, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 3.422727272727273, | |
| "grad_norm": 0.4380181133747101, | |
| "learning_rate": 2.3785944447138804e-06, | |
| "loss": 0.4529, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 3.4295454545454547, | |
| "grad_norm": 0.4616672992706299, | |
| "learning_rate": 2.368486298184733e-06, | |
| "loss": 0.4596, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 3.4363636363636365, | |
| "grad_norm": 0.4307732582092285, | |
| "learning_rate": 2.358380307149059e-06, | |
| "loss": 0.4626, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 3.4431818181818183, | |
| "grad_norm": 0.4402921795845032, | |
| "learning_rate": 2.348276637242814e-06, | |
| "loss": 0.4606, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "grad_norm": 0.4782681465148926, | |
| "learning_rate": 2.3381754540639108e-06, | |
| "loss": 0.4508, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 3.456818181818182, | |
| "grad_norm": 0.43719062209129333, | |
| "learning_rate": 2.328076923169504e-06, | |
| "loss": 0.4476, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 3.463636363636364, | |
| "grad_norm": 0.4890111982822418, | |
| "learning_rate": 2.31798121007328e-06, | |
| "loss": 0.4484, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 3.4704545454545457, | |
| "grad_norm": 0.47682395577430725, | |
| "learning_rate": 2.3078884802427394e-06, | |
| "loss": 0.4494, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 3.4772727272727275, | |
| "grad_norm": 0.566819429397583, | |
| "learning_rate": 2.29779889909649e-06, | |
| "loss": 0.4539, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 3.484090909090909, | |
| "grad_norm": 0.4956795871257782, | |
| "learning_rate": 2.2877126320015295e-06, | |
| "loss": 0.4476, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 3.4909090909090907, | |
| "grad_norm": 0.5630896687507629, | |
| "learning_rate": 2.2776298442705434e-06, | |
| "loss": 0.4436, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 3.4977272727272726, | |
| "grad_norm": 0.9436272382736206, | |
| "learning_rate": 2.267550701159186e-06, | |
| "loss": 0.4558, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 3.5045454545454544, | |
| "grad_norm": 0.43885400891304016, | |
| "learning_rate": 2.25747536786338e-06, | |
| "loss": 0.4375, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 3.5113636363636362, | |
| "grad_norm": 0.762102484703064, | |
| "learning_rate": 2.247404009516605e-06, | |
| "loss": 0.4491, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 3.518181818181818, | |
| "grad_norm": 0.46883025765419006, | |
| "learning_rate": 2.2373367911871904e-06, | |
| "loss": 0.4393, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 3.525, | |
| "grad_norm": 0.4758882522583008, | |
| "learning_rate": 2.227273877875613e-06, | |
| "loss": 0.4446, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 3.5318181818181817, | |
| "grad_norm": 0.4799712300300598, | |
| "learning_rate": 2.2172154345117896e-06, | |
| "loss": 0.455, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 3.5386363636363636, | |
| "grad_norm": 0.46632879972457886, | |
| "learning_rate": 2.207161625952376e-06, | |
| "loss": 0.4481, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 3.5454545454545454, | |
| "grad_norm": 0.6286160945892334, | |
| "learning_rate": 2.1971126169780636e-06, | |
| "loss": 0.4436, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 3.5522727272727272, | |
| "grad_norm": 0.4679605960845947, | |
| "learning_rate": 2.1870685722908784e-06, | |
| "loss": 0.4575, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 3.559090909090909, | |
| "grad_norm": 0.495728075504303, | |
| "learning_rate": 2.1770296565114847e-06, | |
| "loss": 0.4399, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 3.565909090909091, | |
| "grad_norm": 0.4634799361228943, | |
| "learning_rate": 2.166996034176482e-06, | |
| "loss": 0.4553, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 3.5727272727272728, | |
| "grad_norm": 0.4464644491672516, | |
| "learning_rate": 2.1569678697357126e-06, | |
| "loss": 0.4693, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 3.5795454545454546, | |
| "grad_norm": 0.4530065655708313, | |
| "learning_rate": 2.1469453275495634e-06, | |
| "loss": 0.4418, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 3.5863636363636364, | |
| "grad_norm": 0.4806428551673889, | |
| "learning_rate": 2.136928571886275e-06, | |
| "loss": 0.4412, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 3.5931818181818183, | |
| "grad_norm": 0.46114400029182434, | |
| "learning_rate": 2.126917766919245e-06, | |
| "loss": 0.4502, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "grad_norm": 0.7423635125160217, | |
| "learning_rate": 2.1169130767243424e-06, | |
| "loss": 0.4573, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 3.606818181818182, | |
| "grad_norm": 0.47871625423431396, | |
| "learning_rate": 2.1069146652772142e-06, | |
| "loss": 0.4427, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 3.6136363636363638, | |
| "grad_norm": 0.45554301142692566, | |
| "learning_rate": 2.0969226964506007e-06, | |
| "loss": 0.4429, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 3.6204545454545456, | |
| "grad_norm": 0.44822901487350464, | |
| "learning_rate": 2.0869373340116467e-06, | |
| "loss": 0.4616, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 3.6272727272727274, | |
| "grad_norm": 0.4922308027744293, | |
| "learning_rate": 2.0769587416192212e-06, | |
| "loss": 0.4482, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 3.634090909090909, | |
| "grad_norm": 0.4700371026992798, | |
| "learning_rate": 2.066987082821231e-06, | |
| "loss": 0.4652, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 3.6409090909090907, | |
| "grad_norm": 0.5513899326324463, | |
| "learning_rate": 2.0570225210519433e-06, | |
| "loss": 0.4542, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 3.6477272727272725, | |
| "grad_norm": 0.47292518615722656, | |
| "learning_rate": 2.047065219629306e-06, | |
| "loss": 0.4675, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 3.6545454545454543, | |
| "grad_norm": 0.5037868618965149, | |
| "learning_rate": 2.0371153417522703e-06, | |
| "loss": 0.4637, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 3.661363636363636, | |
| "grad_norm": 0.4578365087509155, | |
| "learning_rate": 2.0271730504981165e-06, | |
| "loss": 0.4432, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 3.668181818181818, | |
| "grad_norm": 0.518247127532959, | |
| "learning_rate": 2.0172385088197804e-06, | |
| "loss": 0.4574, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 3.675, | |
| "grad_norm": 0.5211175680160522, | |
| "learning_rate": 2.007311879543185e-06, | |
| "loss": 0.4607, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 3.6818181818181817, | |
| "grad_norm": 0.4695160686969757, | |
| "learning_rate": 1.9973933253645684e-06, | |
| "loss": 0.447, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 3.6886363636363635, | |
| "grad_norm": 0.5875166654586792, | |
| "learning_rate": 1.9874830088478196e-06, | |
| "loss": 0.4615, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 3.6954545454545453, | |
| "grad_norm": 0.4762527644634247, | |
| "learning_rate": 1.9775810924218126e-06, | |
| "loss": 0.4677, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 3.702272727272727, | |
| "grad_norm": 0.47306928038597107, | |
| "learning_rate": 1.967687738377746e-06, | |
| "loss": 0.4526, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 3.709090909090909, | |
| "grad_norm": 0.44723549485206604, | |
| "learning_rate": 1.9578031088664812e-06, | |
| "loss": 0.469, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 3.715909090909091, | |
| "grad_norm": 0.4447214901447296, | |
| "learning_rate": 1.9479273658958852e-06, | |
| "loss": 0.4375, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 3.7227272727272727, | |
| "grad_norm": 0.5299004316329956, | |
| "learning_rate": 1.9380606713281773e-06, | |
| "loss": 0.4544, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 3.7295454545454545, | |
| "grad_norm": 0.4843369424343109, | |
| "learning_rate": 1.928203186877273e-06, | |
| "loss": 0.4692, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 3.7363636363636363, | |
| "grad_norm": 0.4805986285209656, | |
| "learning_rate": 1.9183550741061354e-06, | |
| "loss": 0.4498, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 3.743181818181818, | |
| "grad_norm": 0.6539422273635864, | |
| "learning_rate": 1.9085164944241275e-06, | |
| "loss": 0.4398, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "grad_norm": 0.47688615322113037, | |
| "learning_rate": 1.8986876090843668e-06, | |
| "loss": 0.4642, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 3.756818181818182, | |
| "grad_norm": 0.4808521270751953, | |
| "learning_rate": 1.8888685791810784e-06, | |
| "loss": 0.4166, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 3.7636363636363637, | |
| "grad_norm": 0.49430814385414124, | |
| "learning_rate": 1.8790595656469628e-06, | |
| "loss": 0.4404, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 3.7704545454545455, | |
| "grad_norm": 0.5284092426300049, | |
| "learning_rate": 1.86926072925055e-06, | |
| "loss": 0.445, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 3.7772727272727273, | |
| "grad_norm": 0.4650699496269226, | |
| "learning_rate": 1.8594722305935691e-06, | |
| "loss": 0.4381, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 3.784090909090909, | |
| "grad_norm": 0.4954371452331543, | |
| "learning_rate": 1.8496942301083142e-06, | |
| "loss": 0.4534, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 3.790909090909091, | |
| "grad_norm": 0.4756702482700348, | |
| "learning_rate": 1.8399268880550174e-06, | |
| "loss": 0.4563, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 3.797727272727273, | |
| "grad_norm": 0.4566079378128052, | |
| "learning_rate": 1.8301703645192178e-06, | |
| "loss": 0.4422, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 3.8045454545454547, | |
| "grad_norm": 1.6503181457519531, | |
| "learning_rate": 1.8204248194091429e-06, | |
| "loss": 0.4548, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 3.8113636363636365, | |
| "grad_norm": 0.5488978028297424, | |
| "learning_rate": 1.8106904124530839e-06, | |
| "loss": 0.4398, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 3.8181818181818183, | |
| "grad_norm": 0.44948896765708923, | |
| "learning_rate": 1.800967303196778e-06, | |
| "loss": 0.4331, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 3.825, | |
| "grad_norm": 0.49597036838531494, | |
| "learning_rate": 1.7912556510007967e-06, | |
| "loss": 0.4409, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 3.831818181818182, | |
| "grad_norm": 0.5093109011650085, | |
| "learning_rate": 1.7815556150379298e-06, | |
| "loss": 0.4615, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 3.838636363636364, | |
| "grad_norm": 0.4719341993331909, | |
| "learning_rate": 1.77186735429058e-06, | |
| "loss": 0.4316, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 3.8454545454545457, | |
| "grad_norm": 0.5281686782836914, | |
| "learning_rate": 1.7621910275481544e-06, | |
| "loss": 0.4474, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 3.8522727272727275, | |
| "grad_norm": 0.4955421984195709, | |
| "learning_rate": 1.7525267934044642e-06, | |
| "loss": 0.462, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 3.8590909090909093, | |
| "grad_norm": 0.4997479021549225, | |
| "learning_rate": 1.7428748102551237e-06, | |
| "loss": 0.4546, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 3.865909090909091, | |
| "grad_norm": 0.47572800517082214, | |
| "learning_rate": 1.7332352362949546e-06, | |
| "loss": 0.4587, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 3.8727272727272726, | |
| "grad_norm": 0.5156065225601196, | |
| "learning_rate": 1.7236082295153948e-06, | |
| "loss": 0.4375, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 3.8795454545454544, | |
| "grad_norm": 0.44926485419273376, | |
| "learning_rate": 1.7139939477019057e-06, | |
| "loss": 0.45, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 3.8863636363636362, | |
| "grad_norm": 0.5433095693588257, | |
| "learning_rate": 1.7043925484313911e-06, | |
| "loss": 0.4464, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 3.893181818181818, | |
| "grad_norm": 0.4719317555427551, | |
| "learning_rate": 1.6948041890696076e-06, | |
| "loss": 0.4467, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "grad_norm": 0.4679432511329651, | |
| "learning_rate": 1.685229026768593e-06, | |
| "loss": 0.4577, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 3.9068181818181817, | |
| "grad_norm": 0.5053668022155762, | |
| "learning_rate": 1.6756672184640847e-06, | |
| "loss": 0.4379, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 3.9136363636363636, | |
| "grad_norm": 0.4880099594593048, | |
| "learning_rate": 1.6661189208729492e-06, | |
| "loss": 0.4585, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 3.9204545454545454, | |
| "grad_norm": 0.44481492042541504, | |
| "learning_rate": 1.6565842904906154e-06, | |
| "loss": 0.4319, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 3.9272727272727272, | |
| "grad_norm": 0.4998759329319, | |
| "learning_rate": 1.6470634835885097e-06, | |
| "loss": 0.4349, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 3.934090909090909, | |
| "grad_norm": 0.4702436625957489, | |
| "learning_rate": 1.6375566562114903e-06, | |
| "loss": 0.4409, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 3.940909090909091, | |
| "grad_norm": 1.1932134628295898, | |
| "learning_rate": 1.6280639641752944e-06, | |
| "loss": 0.4435, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 3.9477272727272728, | |
| "grad_norm": 0.46057313680648804, | |
| "learning_rate": 1.6185855630639818e-06, | |
| "loss": 0.451, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 3.9545454545454546, | |
| "grad_norm": 0.5858541131019592, | |
| "learning_rate": 1.6091216082273875e-06, | |
| "loss": 0.4662, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 3.9613636363636364, | |
| "grad_norm": 0.46951207518577576, | |
| "learning_rate": 1.5996722547785722e-06, | |
| "loss": 0.4561, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 3.9681818181818183, | |
| "grad_norm": 0.4747212529182434, | |
| "learning_rate": 1.5902376575912815e-06, | |
| "loss": 0.4507, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 3.975, | |
| "grad_norm": 0.5081924200057983, | |
| "learning_rate": 1.580817971297409e-06, | |
| "loss": 0.4649, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 3.981818181818182, | |
| "grad_norm": 0.4900786280632019, | |
| "learning_rate": 1.5714133502844591e-06, | |
| "loss": 0.4587, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 3.9886363636363638, | |
| "grad_norm": 0.4827897250652313, | |
| "learning_rate": 1.56202394869302e-06, | |
| "loss": 0.441, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 3.9954545454545456, | |
| "grad_norm": 0.499607652425766, | |
| "learning_rate": 1.5526499204142332e-06, | |
| "loss": 0.4559, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 4.006818181818182, | |
| "grad_norm": 1.06562340259552, | |
| "learning_rate": 1.5432914190872757e-06, | |
| "loss": 0.928, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 4.013636363636364, | |
| "grad_norm": 0.5927309393882751, | |
| "learning_rate": 1.5339485980968383e-06, | |
| "loss": 0.4446, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 4.0204545454545455, | |
| "grad_norm": 0.46158862113952637, | |
| "learning_rate": 1.5246216105706124e-06, | |
| "loss": 0.4394, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 4.027272727272727, | |
| "grad_norm": 0.46964529156684875, | |
| "learning_rate": 1.5153106093767827e-06, | |
| "loss": 0.4364, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 4.034090909090909, | |
| "grad_norm": 0.4228534698486328, | |
| "learning_rate": 1.506015747121518e-06, | |
| "loss": 0.4234, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 4.040909090909091, | |
| "grad_norm": 0.5211766362190247, | |
| "learning_rate": 1.4967371761464738e-06, | |
| "loss": 0.4428, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 4.047727272727273, | |
| "grad_norm": 0.4786047637462616, | |
| "learning_rate": 1.4874750485262917e-06, | |
| "loss": 0.4416, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 4.054545454545455, | |
| "grad_norm": 2.0014617443084717, | |
| "learning_rate": 1.4782295160661103e-06, | |
| "loss": 0.4338, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 4.0613636363636365, | |
| "grad_norm": 0.5174881219863892, | |
| "learning_rate": 1.469000730299074e-06, | |
| "loss": 0.431, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 4.068181818181818, | |
| "grad_norm": 0.4879564344882965, | |
| "learning_rate": 1.4597888424838519e-06, | |
| "loss": 0.4236, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 4.075, | |
| "grad_norm": 0.4452327787876129, | |
| "learning_rate": 1.450594003602158e-06, | |
| "loss": 0.4447, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 4.081818181818182, | |
| "grad_norm": 0.6323989033699036, | |
| "learning_rate": 1.4414163643562755e-06, | |
| "loss": 0.4241, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 4.088636363636364, | |
| "grad_norm": 0.532921314239502, | |
| "learning_rate": 1.4322560751665873e-06, | |
| "loss": 0.4388, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 4.095454545454546, | |
| "grad_norm": 0.49364256858825684, | |
| "learning_rate": 1.4231132861691128e-06, | |
| "loss": 0.4244, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 4.1022727272727275, | |
| "grad_norm": 0.47677481174468994, | |
| "learning_rate": 1.4139881472130453e-06, | |
| "loss": 0.4235, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 4.109090909090909, | |
| "grad_norm": 0.4729495644569397, | |
| "learning_rate": 1.4048808078582943e-06, | |
| "loss": 0.4177, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 4.115909090909091, | |
| "grad_norm": 0.44931760430336, | |
| "learning_rate": 1.3957914173730366e-06, | |
| "loss": 0.4429, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 4.122727272727273, | |
| "grad_norm": 0.5063349604606628, | |
| "learning_rate": 1.3867201247312697e-06, | |
| "loss": 0.4432, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 4.129545454545455, | |
| "grad_norm": 0.5439795255661011, | |
| "learning_rate": 1.3776670786103685e-06, | |
| "loss": 0.4455, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 4.136363636363637, | |
| "grad_norm": 0.4854280650615692, | |
| "learning_rate": 1.3686324273886531e-06, | |
| "loss": 0.4334, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 4.1431818181818185, | |
| "grad_norm": 0.47003456950187683, | |
| "learning_rate": 1.359616319142949e-06, | |
| "loss": 0.438, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "grad_norm": 0.6236258149147034, | |
| "learning_rate": 1.3506189016461674e-06, | |
| "loss": 0.4356, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 4.156818181818182, | |
| "grad_norm": 0.9478527903556824, | |
| "learning_rate": 1.341640322364878e-06, | |
| "loss": 0.4353, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 4.163636363636364, | |
| "grad_norm": 0.48970767855644226, | |
| "learning_rate": 1.3326807284568984e-06, | |
| "loss": 0.4319, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 4.170454545454546, | |
| "grad_norm": 0.5102277994155884, | |
| "learning_rate": 1.323740266768875e-06, | |
| "loss": 0.438, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 4.177272727272728, | |
| "grad_norm": 0.48640403151512146, | |
| "learning_rate": 1.3148190838338804e-06, | |
| "loss": 0.4001, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 4.184090909090909, | |
| "grad_norm": 0.4933972656726837, | |
| "learning_rate": 1.3059173258690102e-06, | |
| "loss": 0.451, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 4.190909090909091, | |
| "grad_norm": 0.4632134437561035, | |
| "learning_rate": 1.2970351387729875e-06, | |
| "loss": 0.4396, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 4.197727272727272, | |
| "grad_norm": 0.5672054290771484, | |
| "learning_rate": 1.2881726681237727e-06, | |
| "loss": 0.4433, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 4.204545454545454, | |
| "grad_norm": 0.4600079655647278, | |
| "learning_rate": 1.2793300591761742e-06, | |
| "loss": 0.4038, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 4.211363636363636, | |
| "grad_norm": 0.44386839866638184, | |
| "learning_rate": 1.27050745685947e-06, | |
| "loss": 0.4442, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 4.218181818181818, | |
| "grad_norm": 0.793684184551239, | |
| "learning_rate": 1.2617050057750322e-06, | |
| "loss": 0.4309, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 4.225, | |
| "grad_norm": 0.4929650127887726, | |
| "learning_rate": 1.252922850193955e-06, | |
| "loss": 0.4304, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 4.2318181818181815, | |
| "grad_norm": 0.5073612332344055, | |
| "learning_rate": 1.2441611340546958e-06, | |
| "loss": 0.4271, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 4.238636363636363, | |
| "grad_norm": 0.4528202414512634, | |
| "learning_rate": 1.2354200009607081e-06, | |
| "loss": 0.4384, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 4.245454545454545, | |
| "grad_norm": 0.46402111649513245, | |
| "learning_rate": 1.2266995941780934e-06, | |
| "loss": 0.43, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 4.252272727272727, | |
| "grad_norm": 0.567254364490509, | |
| "learning_rate": 1.2180000566332503e-06, | |
| "loss": 0.3905, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 4.259090909090909, | |
| "grad_norm": 0.5210692286491394, | |
| "learning_rate": 1.2093215309105352e-06, | |
| "loss": 0.4262, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 4.265909090909091, | |
| "grad_norm": 1.3842746019363403, | |
| "learning_rate": 1.2006641592499233e-06, | |
| "loss": 0.4268, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 4.2727272727272725, | |
| "grad_norm": 0.49402037262916565, | |
| "learning_rate": 1.192028083544675e-06, | |
| "loss": 0.4244, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 4.279545454545454, | |
| "grad_norm": 0.4855121374130249, | |
| "learning_rate": 1.1834134453390136e-06, | |
| "loss": 0.4274, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 4.286363636363636, | |
| "grad_norm": 0.5327234864234924, | |
| "learning_rate": 1.1748203858258056e-06, | |
| "loss": 0.429, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 4.293181818181818, | |
| "grad_norm": 0.482337087392807, | |
| "learning_rate": 1.166249045844243e-06, | |
| "loss": 0.4253, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "grad_norm": 0.9390714168548584, | |
| "learning_rate": 1.1576995658775405e-06, | |
| "loss": 0.4224, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 4.306818181818182, | |
| "grad_norm": 0.5063149929046631, | |
| "learning_rate": 1.1491720860506273e-06, | |
| "loss": 0.4243, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 4.3136363636363635, | |
| "grad_norm": 0.4565083980560303, | |
| "learning_rate": 1.140666746127854e-06, | |
| "loss": 0.4137, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 4.320454545454545, | |
| "grad_norm": 0.48249998688697815, | |
| "learning_rate": 1.1321836855107007e-06, | |
| "loss": 0.4143, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 4.327272727272727, | |
| "grad_norm": 0.5024334788322449, | |
| "learning_rate": 1.1237230432354912e-06, | |
| "loss": 0.4302, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 4.334090909090909, | |
| "grad_norm": 0.4903137981891632, | |
| "learning_rate": 1.1152849579711187e-06, | |
| "loss": 0.4522, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 4.340909090909091, | |
| "grad_norm": 0.47344261407852173, | |
| "learning_rate": 1.1068695680167665e-06, | |
| "loss": 0.4487, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 4.347727272727273, | |
| "grad_norm": 0.5437475442886353, | |
| "learning_rate": 1.0984770112996463e-06, | |
| "loss": 0.4255, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 4.3545454545454545, | |
| "grad_norm": 0.4634874165058136, | |
| "learning_rate": 1.0901074253727338e-06, | |
| "loss": 0.4345, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 4.361363636363636, | |
| "grad_norm": 0.5247130393981934, | |
| "learning_rate": 1.0817609474125195e-06, | |
| "loss": 0.4296, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 4.368181818181818, | |
| "grad_norm": 0.503746747970581, | |
| "learning_rate": 1.0734377142167549e-06, | |
| "loss": 0.4251, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 4.375, | |
| "grad_norm": 0.5096430778503418, | |
| "learning_rate": 1.065137862202213e-06, | |
| "loss": 0.4186, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 4.381818181818182, | |
| "grad_norm": 0.4813026785850525, | |
| "learning_rate": 1.0568615274024521e-06, | |
| "loss": 0.4265, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 4.388636363636364, | |
| "grad_norm": 0.45982855558395386, | |
| "learning_rate": 1.0486088454655856e-06, | |
| "loss": 0.4188, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 4.3954545454545455, | |
| "grad_norm": 0.5047855377197266, | |
| "learning_rate": 1.0403799516520619e-06, | |
| "loss": 0.4231, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 4.402272727272727, | |
| "grad_norm": 0.46901944279670715, | |
| "learning_rate": 1.0321749808324425e-06, | |
| "loss": 0.4173, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 4.409090909090909, | |
| "grad_norm": 0.48727908730506897, | |
| "learning_rate": 1.0239940674851943e-06, | |
| "loss": 0.4631, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 4.415909090909091, | |
| "grad_norm": 0.6558519005775452, | |
| "learning_rate": 1.0158373456944856e-06, | |
| "loss": 0.4177, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 4.422727272727273, | |
| "grad_norm": 0.4979148805141449, | |
| "learning_rate": 1.0077049491479874e-06, | |
| "loss": 0.4046, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 4.429545454545455, | |
| "grad_norm": 0.5404463410377502, | |
| "learning_rate": 9.995970111346842e-07, | |
| "loss": 0.4314, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 4.4363636363636365, | |
| "grad_norm": 0.7192148566246033, | |
| "learning_rate": 9.915136645426885e-07, | |
| "loss": 0.429, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 4.443181818181818, | |
| "grad_norm": 0.47399184107780457, | |
| "learning_rate": 9.834550418570602e-07, | |
| "loss": 0.4392, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "grad_norm": 0.48218435049057007, | |
| "learning_rate": 9.754212751576386e-07, | |
| "loss": 0.4282, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 4.456818181818182, | |
| "grad_norm": 0.5437904596328735, | |
| "learning_rate": 9.67412496116876e-07, | |
| "loss": 0.4031, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 4.463636363636364, | |
| "grad_norm": 0.45721930265426636, | |
| "learning_rate": 9.594288359976817e-07, | |
| "loss": 0.4268, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 4.470454545454546, | |
| "grad_norm": 0.48765209317207336, | |
| "learning_rate": 9.514704256512669e-07, | |
| "loss": 0.4491, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 4.4772727272727275, | |
| "grad_norm": 0.45831185579299927, | |
| "learning_rate": 9.435373955150032e-07, | |
| "loss": 0.423, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 4.484090909090909, | |
| "grad_norm": 0.50241619348526, | |
| "learning_rate": 9.35629875610283e-07, | |
| "loss": 0.4334, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 4.490909090909091, | |
| "grad_norm": 0.4767427444458008, | |
| "learning_rate": 9.277479955403887e-07, | |
| "loss": 0.4475, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 4.497727272727273, | |
| "grad_norm": 0.48379838466644287, | |
| "learning_rate": 9.198918844883714e-07, | |
| "loss": 0.4353, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 4.504545454545455, | |
| "grad_norm": 0.5098893046379089, | |
| "learning_rate": 9.120616712149291e-07, | |
| "loss": 0.4285, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 4.511363636363637, | |
| "grad_norm": 0.5217880010604858, | |
| "learning_rate": 9.042574840562982e-07, | |
| "loss": 0.4395, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 4.5181818181818185, | |
| "grad_norm": 0.449683278799057, | |
| "learning_rate": 8.964794509221508e-07, | |
| "loss": 0.4382, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 4.525, | |
| "grad_norm": 0.5583493113517761, | |
| "learning_rate": 8.887276992934976e-07, | |
| "loss": 0.4378, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 4.531818181818182, | |
| "grad_norm": 0.503844678401947, | |
| "learning_rate": 8.810023562206e-07, | |
| "loss": 0.4222, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 4.538636363636364, | |
| "grad_norm": 0.46325281262397766, | |
| "learning_rate": 8.733035483208841e-07, | |
| "loss": 0.441, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 4.545454545454545, | |
| "grad_norm": 0.5824127197265625, | |
| "learning_rate": 8.656314017768694e-07, | |
| "loss": 0.4224, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 4.552272727272728, | |
| "grad_norm": 0.5601843595504761, | |
| "learning_rate": 8.579860423340977e-07, | |
| "loss": 0.4348, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 4.559090909090909, | |
| "grad_norm": 0.45808762311935425, | |
| "learning_rate": 8.503675952990756e-07, | |
| "loss": 0.4445, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 4.565909090909091, | |
| "grad_norm": 0.494069367647171, | |
| "learning_rate": 8.427761855372169e-07, | |
| "loss": 0.4271, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 4.572727272727272, | |
| "grad_norm": 0.5897441506385803, | |
| "learning_rate": 8.352119374707979e-07, | |
| "loss": 0.4218, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 4.579545454545455, | |
| "grad_norm": 0.5399529337882996, | |
| "learning_rate": 8.276749750769186e-07, | |
| "loss": 0.4295, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 4.586363636363636, | |
| "grad_norm": 0.44742587208747864, | |
| "learning_rate": 8.20165421885469e-07, | |
| "loss": 0.4406, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 4.593181818181818, | |
| "grad_norm": 0.4736122786998749, | |
| "learning_rate": 8.126834009771079e-07, | |
| "loss": 0.4255, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "grad_norm": 0.4699339270591736, | |
| "learning_rate": 8.052290349812419e-07, | |
| "loss": 0.4388, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 4.6068181818181815, | |
| "grad_norm": 1.0179539918899536, | |
| "learning_rate": 7.978024460740169e-07, | |
| "loss": 0.445, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 4.613636363636363, | |
| "grad_norm": 0.4838411211967468, | |
| "learning_rate": 7.904037559763162e-07, | |
| "loss": 0.4518, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 4.620454545454545, | |
| "grad_norm": 0.4670931398868561, | |
| "learning_rate": 7.83033085951764e-07, | |
| "loss": 0.4294, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 4.627272727272727, | |
| "grad_norm": 0.47013846039772034, | |
| "learning_rate": 7.756905568047393e-07, | |
| "loss": 0.4395, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 4.634090909090909, | |
| "grad_norm": 0.875905454158783, | |
| "learning_rate": 7.683762888783977e-07, | |
| "loss": 0.4288, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 4.640909090909091, | |
| "grad_norm": 0.47206172347068787, | |
| "learning_rate": 7.610904020526938e-07, | |
| "loss": 0.4244, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 4.6477272727272725, | |
| "grad_norm": 0.6244398355484009, | |
| "learning_rate": 7.538330157424212e-07, | |
| "loss": 0.4444, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 4.654545454545454, | |
| "grad_norm": 0.5075065493583679, | |
| "learning_rate": 7.466042488952521e-07, | |
| "loss": 0.433, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 4.661363636363636, | |
| "grad_norm": 0.7290046811103821, | |
| "learning_rate": 7.394042199897916e-07, | |
| "loss": 0.4509, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 4.668181818181818, | |
| "grad_norm": 0.4735564589500427, | |
| "learning_rate": 7.322330470336314e-07, | |
| "loss": 0.425, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 4.675, | |
| "grad_norm": 0.4939814805984497, | |
| "learning_rate": 7.250908475614185e-07, | |
| "loss": 0.4058, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 4.681818181818182, | |
| "grad_norm": 0.44935867190361023, | |
| "learning_rate": 7.179777386329276e-07, | |
| "loss": 0.4132, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 4.6886363636363635, | |
| "grad_norm": 0.48104918003082275, | |
| "learning_rate": 7.108938368311424e-07, | |
| "loss": 0.4581, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 4.695454545454545, | |
| "grad_norm": 0.5192425847053528, | |
| "learning_rate": 7.038392582603481e-07, | |
| "loss": 0.4542, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 4.702272727272727, | |
| "grad_norm": 0.5787845253944397, | |
| "learning_rate": 6.968141185442229e-07, | |
| "loss": 0.421, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 4.709090909090909, | |
| "grad_norm": 0.4911254942417145, | |
| "learning_rate": 6.898185328239468e-07, | |
| "loss": 0.412, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 4.715909090909091, | |
| "grad_norm": 0.5646374821662903, | |
| "learning_rate": 6.828526157563126e-07, | |
| "loss": 0.428, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 4.722727272727273, | |
| "grad_norm": 0.4580458402633667, | |
| "learning_rate": 6.759164815118493e-07, | |
| "loss": 0.4342, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 4.7295454545454545, | |
| "grad_norm": 0.5442156791687012, | |
| "learning_rate": 6.690102437729481e-07, | |
| "loss": 0.442, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 4.736363636363636, | |
| "grad_norm": 0.4839988052845001, | |
| "learning_rate": 6.621340157319998e-07, | |
| "loss": 0.4384, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 4.743181818181818, | |
| "grad_norm": 0.5340309143066406, | |
| "learning_rate": 6.552879100895396e-07, | |
| "loss": 0.4224, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 4.75, | |
| "grad_norm": 0.5196749567985535, | |
| "learning_rate": 6.484720390524008e-07, | |
| "loss": 0.4317, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 4.756818181818182, | |
| "grad_norm": 0.9252122044563293, | |
| "learning_rate": 6.416865143318757e-07, | |
| "loss": 0.4375, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 4.763636363636364, | |
| "grad_norm": 0.7175036668777466, | |
| "learning_rate": 6.349314471418849e-07, | |
| "loss": 0.4475, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 4.7704545454545455, | |
| "grad_norm": 0.5099776387214661, | |
| "learning_rate": 6.282069481971514e-07, | |
| "loss": 0.4231, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 4.777272727272727, | |
| "grad_norm": 0.6147667169570923, | |
| "learning_rate": 6.2151312771139e-07, | |
| "loss": 0.4437, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 4.784090909090909, | |
| "grad_norm": 0.45040184259414673, | |
| "learning_rate": 6.148500953954992e-07, | |
| "loss": 0.4086, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 4.790909090909091, | |
| "grad_norm": 0.45515206456184387, | |
| "learning_rate": 6.082179604557617e-07, | |
| "loss": 0.4147, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 4.797727272727273, | |
| "grad_norm": 0.5819535255432129, | |
| "learning_rate": 6.016168315920593e-07, | |
| "loss": 0.4126, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 4.804545454545455, | |
| "grad_norm": 0.5001727342605591, | |
| "learning_rate": 5.950468169960846e-07, | |
| "loss": 0.4498, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 4.8113636363636365, | |
| "grad_norm": 0.5675827860832214, | |
| "learning_rate": 5.885080243495731e-07, | |
| "loss": 0.443, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 4.818181818181818, | |
| "grad_norm": 0.4939325749874115, | |
| "learning_rate": 5.820005608225345e-07, | |
| "loss": 0.4102, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 4.825, | |
| "grad_norm": 0.5586816072463989, | |
| "learning_rate": 5.755245330715014e-07, | |
| "loss": 0.4322, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 4.831818181818182, | |
| "grad_norm": 0.4761182367801666, | |
| "learning_rate": 5.690800472377747e-07, | |
| "loss": 0.4386, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 4.838636363636364, | |
| "grad_norm": 0.4821193218231201, | |
| "learning_rate": 5.626672089456887e-07, | |
| "loss": 0.4397, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 4.845454545454546, | |
| "grad_norm": 0.5768219828605652, | |
| "learning_rate": 5.562861233008774e-07, | |
| "loss": 0.4328, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 4.8522727272727275, | |
| "grad_norm": 0.49105873703956604, | |
| "learning_rate": 5.499368948885528e-07, | |
| "loss": 0.4331, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 4.859090909090909, | |
| "grad_norm": 0.5111908316612244, | |
| "learning_rate": 5.436196277717928e-07, | |
| "loss": 0.4165, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 4.865909090909091, | |
| "grad_norm": 0.49336686730384827, | |
| "learning_rate": 5.373344254898313e-07, | |
| "loss": 0.4326, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 4.872727272727273, | |
| "grad_norm": 0.5113787651062012, | |
| "learning_rate": 5.310813910563645e-07, | |
| "loss": 0.4259, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 4.879545454545455, | |
| "grad_norm": 1.0626885890960693, | |
| "learning_rate": 5.24860626957861e-07, | |
| "loss": 0.4279, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 4.886363636363637, | |
| "grad_norm": 0.5634323954582214, | |
| "learning_rate": 5.186722351518822e-07, | |
| "loss": 0.4315, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 4.8931818181818185, | |
| "grad_norm": 0.5127065181732178, | |
| "learning_rate": 5.125163170654138e-07, | |
| "loss": 0.4142, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 4.9, | |
| "grad_norm": 0.4855251610279083, | |
| "learning_rate": 5.063929735931985e-07, | |
| "loss": 0.4024, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 4.906818181818182, | |
| "grad_norm": 0.9671811461448669, | |
| "learning_rate": 5.003023050960865e-07, | |
| "loss": 0.4201, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 4.913636363636364, | |
| "grad_norm": 0.5237546563148499, | |
| "learning_rate": 4.94244411399388e-07, | |
| "loss": 0.4175, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 4.920454545454545, | |
| "grad_norm": 0.46599382162094116, | |
| "learning_rate": 4.882193917912398e-07, | |
| "loss": 0.4611, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 4.927272727272728, | |
| "grad_norm": 0.5131009221076965, | |
| "learning_rate": 4.822273450209767e-07, | |
| "loss": 0.4081, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 4.934090909090909, | |
| "grad_norm": 0.504088819026947, | |
| "learning_rate": 4.7626836929751035e-07, | |
| "loss": 0.4533, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 4.940909090909091, | |
| "grad_norm": 0.5421441197395325, | |
| "learning_rate": 4.703425622877239e-07, | |
| "loss": 0.4275, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 4.947727272727272, | |
| "grad_norm": 0.5401121377944946, | |
| "learning_rate": 4.6445002111486866e-07, | |
| "loss": 0.4449, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 4.954545454545455, | |
| "grad_norm": 0.46559831500053406, | |
| "learning_rate": 4.5859084235697236e-07, | |
| "loss": 0.4449, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 4.961363636363636, | |
| "grad_norm": 0.5192095041275024, | |
| "learning_rate": 4.527651220452589e-07, | |
| "loss": 0.4192, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 4.968181818181818, | |
| "grad_norm": 0.4992344379425049, | |
| "learning_rate": 4.469729556625704e-07, | |
| "loss": 0.4212, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 4.975, | |
| "grad_norm": 0.5019513964653015, | |
| "learning_rate": 4.412144381418049e-07, | |
| "loss": 0.4549, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 4.9818181818181815, | |
| "grad_norm": 0.47997716069221497, | |
| "learning_rate": 4.354896638643591e-07, | |
| "loss": 0.4397, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 4.988636363636363, | |
| "grad_norm": 0.5492618680000305, | |
| "learning_rate": 4.2979872665858266e-07, | |
| "loss": 0.4301, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 4.995454545454545, | |
| "grad_norm": 0.48399415612220764, | |
| "learning_rate": 4.2414171979824e-07, | |
| "loss": 0.4062, | |
| "step": 730 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 876, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 6, | |
| "save_steps": 146, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 4.704801364671201e+19, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
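
The object above has the shape of the `trainer_state.json` that the Hugging Face `Trainer` writes into each checkpoint directory: a `log_history` list with one entry per logged step (here `logging_steps` is 1, so every optimizer step from 1 through 730 appears), followed by the run-level counters and callback state. A minimal sketch for loading and summarizing such a file is shown below; it uses only the Python standard library, and the path `checkpoint-730/trainer_state.json` is an assumption for illustration (with `global_step` 730, the enclosing directory would typically be named `checkpoint-730`).

```python
# Minimal sketch: summarize a Transformers trainer_state.json.
# The path below is hypothetical; point it at any checkpoint directory.
import json

STATE_PATH = "checkpoint-730/trainer_state.json"  # assumed location

with open(STATE_PATH) as f:
    state = json.load(f)

# Training entries carry "loss"; eval entries (written every
# `eval_steps`) would carry "eval_loss" instead and are skipped here.
train_logs = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
lrs = [e["learning_rate"] for e in train_logs]

print(f"steps logged : {steps[0]}..{steps[-1]} of {state['max_steps']}")
print(f"last epoch   : {state['epoch']:.4f} / {state['num_train_epochs']}")
print(f"last loss    : {losses[-1]:.4f} at lr {lrs[-1]:.3e}")
print(f"max grad_norm: {max(e['grad_norm'] for e in train_logs):.3f}")
```

Note that the log stops at step 730 = 5 × `save_steps` (146), and `epoch` is ≈ 5 of `num_train_epochs` 6, consistent with this state having been serialized at the fifth periodic checkpoint rather than at the end of the 876-step run.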