{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.9968652037617556,
  "eval_steps": 500,
  "global_step": 848,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004702194357366771,
      "grad_norm": 2.658230781555176,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 0.8839,
      "step": 1
    },
    {
      "epoch": 0.009404388714733543,
      "grad_norm": 2.568281650543213,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 0.8864,
      "step": 2
    },
    {
      "epoch": 0.014106583072100314,
      "grad_norm": 2.142171859741211,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 0.8549,
      "step": 3
    },
    {
      "epoch": 0.018808777429467086,
      "grad_norm": 2.254922866821289,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.8955,
      "step": 4
    },
    {
      "epoch": 0.023510971786833857,
      "grad_norm": 2.330303430557251,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 0.8506,
      "step": 5
    },
    {
      "epoch": 0.02821316614420063,
      "grad_norm": 2.304783344268799,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 0.8773,
      "step": 6
    },
    {
      "epoch": 0.032915360501567396,
      "grad_norm": 2.2242815494537354,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 0.7988,
      "step": 7
    },
    {
      "epoch": 0.03761755485893417,
      "grad_norm": 2.267207622528076,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.8603,
      "step": 8
    },
    {
      "epoch": 0.04231974921630094,
      "grad_norm": 2.4641835689544678,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 0.8803,
      "step": 9
    },
    {
      "epoch": 0.047021943573667714,
      "grad_norm": 2.6719448566436768,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.8245,
      "step": 10
    },
    {
      "epoch": 0.05172413793103448,
      "grad_norm": 2.502462148666382,
      "learning_rate": 5.5e-07,
      "loss": 0.856,
      "step": 11
    },
    {
      "epoch": 0.05642633228840126,
      "grad_norm": 2.3637027740478516,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.8867,
      "step": 12
    },
    {
      "epoch": 0.061128526645768025,
      "grad_norm": 2.157748222351074,
      "learning_rate": 6.5e-07,
      "loss": 0.8473,
      "step": 13
    },
    {
      "epoch": 0.06583072100313479,
      "grad_norm": 2.151967763900757,
      "learning_rate": 7.000000000000001e-07,
      "loss": 0.8424,
      "step": 14
    },
    {
      "epoch": 0.07053291536050156,
      "grad_norm": 2.308736562728882,
      "learning_rate": 7.5e-07,
      "loss": 0.8623,
      "step": 15
    },
    {
      "epoch": 0.07523510971786834,
      "grad_norm": 2.247464179992676,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.8209,
      "step": 16
    },
    {
      "epoch": 0.07993730407523511,
      "grad_norm": 2.0756967067718506,
      "learning_rate": 8.500000000000001e-07,
      "loss": 0.8815,
      "step": 17
    },
    {
      "epoch": 0.08463949843260188,
      "grad_norm": 2.0295917987823486,
      "learning_rate": 9.000000000000001e-07,
      "loss": 0.8166,
      "step": 18
    },
    {
      "epoch": 0.08934169278996865,
      "grad_norm": 2.0582873821258545,
      "learning_rate": 9.500000000000001e-07,
      "loss": 0.8118,
      "step": 19
    },
    {
      "epoch": 0.09404388714733543,
      "grad_norm": 2.036282777786255,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.8054,
      "step": 20
    },
    {
      "epoch": 0.0987460815047022,
      "grad_norm": 1.8246358633041382,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 0.8301,
      "step": 21
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 1.9658900499343872,
      "learning_rate": 1.1e-06,
      "loss": 0.7796,
      "step": 22
    },
    {
      "epoch": 0.10815047021943573,
      "grad_norm": 2.4345409870147705,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 0.7841,
      "step": 23
    },
    {
      "epoch": 0.11285266457680251,
      "grad_norm": 2.0551528930664062,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.7503,
      "step": 24
    },
    {
      "epoch": 0.11755485893416928,
      "grad_norm": 1.8872517347335815,
      "learning_rate": 1.25e-06,
      "loss": 0.7527,
      "step": 25
    },
    {
      "epoch": 0.12225705329153605,
      "grad_norm": 1.7877418994903564,
      "learning_rate": 1.3e-06,
      "loss": 0.7346,
      "step": 26
    },
    {
      "epoch": 0.12695924764890282,
      "grad_norm": 1.773200511932373,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.7255,
      "step": 27
    },
    {
      "epoch": 0.13166144200626959,
      "grad_norm": 1.3603609800338745,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.707,
      "step": 28
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 1.5466724634170532,
      "learning_rate": 1.45e-06,
      "loss": 0.7433,
      "step": 29
    },
    {
      "epoch": 0.14106583072100312,
      "grad_norm": 1.2189550399780273,
      "learning_rate": 1.5e-06,
      "loss": 0.7345,
      "step": 30
    },
    {
      "epoch": 0.14576802507836992,
      "grad_norm": 1.0862981081008911,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.7405,
      "step": 31
    },
    {
      "epoch": 0.15047021943573669,
      "grad_norm": 1.024996042251587,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.7333,
      "step": 32
    },
    {
      "epoch": 0.15517241379310345,
      "grad_norm": 0.9142002463340759,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.6663,
      "step": 33
    },
    {
      "epoch": 0.15987460815047022,
      "grad_norm": 0.9509531259536743,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.6502,
      "step": 34
    },
    {
      "epoch": 0.164576802507837,
      "grad_norm": 1.0602822303771973,
      "learning_rate": 1.75e-06,
      "loss": 0.7449,
      "step": 35
    },
    {
      "epoch": 0.16927899686520376,
      "grad_norm": 0.9628905057907104,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.6664,
      "step": 36
    },
    {
      "epoch": 0.17398119122257052,
      "grad_norm": 0.8804571628570557,
      "learning_rate": 1.85e-06,
      "loss": 0.693,
      "step": 37
    },
    {
      "epoch": 0.1786833855799373,
      "grad_norm": 0.8594156503677368,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.7439,
      "step": 38
    },
    {
      "epoch": 0.1833855799373041,
      "grad_norm": 1.092551827430725,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.6681,
      "step": 39
    },
    {
      "epoch": 0.18808777429467086,
      "grad_norm": 0.9434524774551392,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.6981,
      "step": 40
    },
    {
      "epoch": 0.19278996865203762,
      "grad_norm": 0.7898667454719543,
      "learning_rate": 2.05e-06,
      "loss": 0.6602,
      "step": 41
    },
    {
      "epoch": 0.1974921630094044,
      "grad_norm": 0.843589186668396,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.7026,
      "step": 42
    },
    {
      "epoch": 0.20219435736677116,
      "grad_norm": 0.7910621166229248,
      "learning_rate": 2.15e-06,
      "loss": 0.6721,
      "step": 43
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 0.7569350004196167,
      "learning_rate": 2.2e-06,
      "loss": 0.6601,
      "step": 44
    },
    {
      "epoch": 0.2115987460815047,
      "grad_norm": 0.9156045913696289,
      "learning_rate": 2.25e-06,
      "loss": 0.6407,
      "step": 45
    },
    {
      "epoch": 0.21630094043887146,
      "grad_norm": 0.7267714738845825,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.6459,
      "step": 46
    },
    {
      "epoch": 0.22100313479623823,
      "grad_norm": 0.6335541009902954,
      "learning_rate": 2.35e-06,
      "loss": 0.6244,
      "step": 47
    },
    {
      "epoch": 0.22570532915360503,
      "grad_norm": 0.6519566178321838,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.6137,
      "step": 48
    },
    {
      "epoch": 0.2304075235109718,
      "grad_norm": 0.7315823435783386,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.6603,
      "step": 49
    },
    {
      "epoch": 0.23510971786833856,
      "grad_norm": 0.684124767780304,
      "learning_rate": 2.5e-06,
      "loss": 0.6726,
      "step": 50
    },
    {
      "epoch": 0.23981191222570533,
      "grad_norm": 0.6655241250991821,
      "learning_rate": 2.55e-06,
      "loss": 0.6716,
      "step": 51
    },
    {
      "epoch": 0.2445141065830721,
      "grad_norm": 0.6261876821517944,
      "learning_rate": 2.6e-06,
      "loss": 0.6267,
      "step": 52
    },
    {
      "epoch": 0.24921630094043887,
      "grad_norm": 0.8665097951889038,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.6291,
      "step": 53
    },
    {
      "epoch": 0.25391849529780564,
      "grad_norm": 0.6287381052970886,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.6223,
      "step": 54
    },
    {
      "epoch": 0.25862068965517243,
      "grad_norm": 0.661234438419342,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.6655,
      "step": 55
    },
    {
      "epoch": 0.26332288401253917,
      "grad_norm": 0.6680541634559631,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.629,
      "step": 56
    },
    {
      "epoch": 0.26802507836990597,
      "grad_norm": 0.6534361243247986,
      "learning_rate": 2.85e-06,
      "loss": 0.6305,
      "step": 57
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 0.5792294144630432,
      "learning_rate": 2.9e-06,
      "loss": 0.6034,
      "step": 58
    },
    {
      "epoch": 0.2774294670846395,
      "grad_norm": 0.6130394339561462,
      "learning_rate": 2.95e-06,
      "loss": 0.6418,
      "step": 59
    },
    {
      "epoch": 0.28213166144200624,
      "grad_norm": 0.6305087804794312,
      "learning_rate": 3e-06,
      "loss": 0.6687,
      "step": 60
    },
    {
      "epoch": 0.28683385579937304,
      "grad_norm": 0.5708836913108826,
      "learning_rate": 3.05e-06,
      "loss": 0.5886,
      "step": 61
    },
    {
      "epoch": 0.29153605015673983,
      "grad_norm": 0.5345668196678162,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.585,
      "step": 62
    },
    {
      "epoch": 0.2962382445141066,
      "grad_norm": 0.6531270742416382,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.6274,
      "step": 63
    },
    {
      "epoch": 0.30094043887147337,
      "grad_norm": 0.5387993454933167,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.6393,
      "step": 64
    },
    {
      "epoch": 0.3056426332288401,
      "grad_norm": 0.5837404131889343,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.5805,
      "step": 65
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 0.668530285358429,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.5982,
      "step": 66
    },
    {
      "epoch": 0.31504702194357365,
      "grad_norm": 0.5391478538513184,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.627,
      "step": 67
    },
    {
      "epoch": 0.31974921630094044,
      "grad_norm": 0.5848458409309387,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.5944,
      "step": 68
    },
    {
      "epoch": 0.32445141065830724,
      "grad_norm": 0.5370898246765137,
      "learning_rate": 3.45e-06,
      "loss": 0.5803,
      "step": 69
    },
    {
      "epoch": 0.329153605015674,
      "grad_norm": 0.5453668832778931,
      "learning_rate": 3.5e-06,
      "loss": 0.6497,
      "step": 70
    },
    {
      "epoch": 0.3338557993730408,
      "grad_norm": 0.5806694626808167,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.6194,
      "step": 71
    },
    {
      "epoch": 0.3385579937304075,
      "grad_norm": 0.4973330497741699,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.598,
      "step": 72
    },
    {
      "epoch": 0.3432601880877743,
      "grad_norm": 0.5627994537353516,
      "learning_rate": 3.65e-06,
      "loss": 0.5883,
      "step": 73
    },
    {
      "epoch": 0.34796238244514105,
      "grad_norm": 0.7145661115646362,
      "learning_rate": 3.7e-06,
      "loss": 0.6033,
      "step": 74
    },
    {
      "epoch": 0.35266457680250785,
      "grad_norm": 0.49009037017822266,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.5978,
      "step": 75
    },
    {
      "epoch": 0.3573667711598746,
      "grad_norm": 0.5787577629089355,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.5981,
      "step": 76
    },
    {
      "epoch": 0.3620689655172414,
      "grad_norm": 0.5676442384719849,
      "learning_rate": 3.85e-06,
      "loss": 0.6101,
      "step": 77
    },
    {
      "epoch": 0.3667711598746082,
      "grad_norm": 0.49533793330192566,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.5564,
      "step": 78
    },
    {
      "epoch": 0.3714733542319749,
      "grad_norm": 0.6033879518508911,
      "learning_rate": 3.95e-06,
      "loss": 0.6175,
      "step": 79
    },
    {
      "epoch": 0.3761755485893417,
      "grad_norm": 0.502393901348114,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.5394,
      "step": 80
    },
    {
      "epoch": 0.38087774294670845,
      "grad_norm": 0.49090903997421265,
      "learning_rate": 4.05e-06,
      "loss": 0.5998,
      "step": 81
    },
    {
      "epoch": 0.38557993730407525,
      "grad_norm": 0.8119765520095825,
      "learning_rate": 4.1e-06,
      "loss": 0.5704,
      "step": 82
    },
    {
      "epoch": 0.390282131661442,
      "grad_norm": 0.561651349067688,
      "learning_rate": 4.15e-06,
      "loss": 0.5784,
      "step": 83
    },
    {
      "epoch": 0.3949843260188088,
      "grad_norm": 0.5209910273551941,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.5832,
      "step": 84
    },
    {
      "epoch": 0.3996865203761755,
      "grad_norm": 0.5189244151115417,
      "learning_rate": 4.25e-06,
      "loss": 0.6044,
      "step": 85
    },
    {
      "epoch": 0.4043887147335423,
      "grad_norm": 0.5183352828025818,
      "learning_rate": 4.3e-06,
      "loss": 0.6107,
      "step": 86
    },
    {
      "epoch": 0.4090909090909091,
      "grad_norm": 0.505588710308075,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.5637,
      "step": 87
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 0.5937759280204773,
      "learning_rate": 4.4e-06,
      "loss": 0.59,
      "step": 88
    },
    {
      "epoch": 0.41849529780564265,
      "grad_norm": 0.6988442540168762,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.5878,
      "step": 89
    },
    {
      "epoch": 0.4231974921630094,
      "grad_norm": 0.5292773842811584,
      "learning_rate": 4.5e-06,
      "loss": 0.5675,
      "step": 90
    },
    {
      "epoch": 0.4278996865203762,
      "grad_norm": 0.5222378373146057,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.5941,
      "step": 91
    },
    {
      "epoch": 0.43260188087774293,
      "grad_norm": 0.5161038637161255,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.5974,
      "step": 92
    },
    {
      "epoch": 0.4373040752351097,
      "grad_norm": 0.4662926495075226,
      "learning_rate": 4.65e-06,
      "loss": 0.5258,
      "step": 93
    },
    {
      "epoch": 0.44200626959247646,
      "grad_norm": 0.5594052672386169,
      "learning_rate": 4.7e-06,
      "loss": 0.5452,
      "step": 94
    },
    {
      "epoch": 0.44670846394984326,
      "grad_norm": 0.5721049904823303,
      "learning_rate": 4.75e-06,
      "loss": 0.5486,
      "step": 95
    },
    {
      "epoch": 0.45141065830721006,
      "grad_norm": 0.47619950771331787,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.5495,
      "step": 96
    },
    {
      "epoch": 0.4561128526645768,
      "grad_norm": 0.5395726561546326,
      "learning_rate": 4.85e-06,
      "loss": 0.5543,
      "step": 97
    },
    {
      "epoch": 0.4608150470219436,
      "grad_norm": 0.5578956007957458,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.5709,
      "step": 98
    },
    {
      "epoch": 0.46551724137931033,
      "grad_norm": 0.565992534160614,
      "learning_rate": 4.95e-06,
      "loss": 0.5527,
      "step": 99
    },
    {
      "epoch": 0.4702194357366771,
      "grad_norm": 0.5071743726730347,
      "learning_rate": 5e-06,
      "loss": 0.5778,
      "step": 100
    },
    {
      "epoch": 0.47492163009404387,
      "grad_norm": 0.504084050655365,
      "learning_rate": 4.9999910183883085e-06,
      "loss": 0.5575,
      "step": 101
    },
    {
      "epoch": 0.47962382445141066,
      "grad_norm": 0.5246759653091431,
      "learning_rate": 4.999964073617768e-06,
      "loss": 0.5666,
      "step": 102
    },
    {
      "epoch": 0.4843260188087774,
      "grad_norm": 0.464106023311615,
      "learning_rate": 4.999919165881985e-06,
      "loss": 0.5541,
      "step": 103
    },
    {
      "epoch": 0.4890282131661442,
      "grad_norm": 0.48115217685699463,
      "learning_rate": 4.999856295503635e-06,
      "loss": 0.5646,
      "step": 104
    },
    {
      "epoch": 0.493730407523511,
      "grad_norm": 0.48950889706611633,
      "learning_rate": 4.9997754629344596e-06,
      "loss": 0.5424,
      "step": 105
    },
    {
      "epoch": 0.49843260188087773,
      "grad_norm": 0.691669762134552,
      "learning_rate": 4.999676668755263e-06,
      "loss": 0.5315,
      "step": 106
    },
    {
      "epoch": 0.5031347962382445,
      "grad_norm": 0.49993181228637695,
      "learning_rate": 4.999559913675912e-06,
      "loss": 0.5638,
      "step": 107
    },
    {
      "epoch": 0.5078369905956113,
      "grad_norm": 0.5278601050376892,
      "learning_rate": 4.999425198535325e-06,
      "loss": 0.5441,
      "step": 108
    },
    {
      "epoch": 0.512539184952978,
      "grad_norm": 0.5409471392631531,
      "learning_rate": 4.999272524301469e-06,
      "loss": 0.5474,
      "step": 109
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 0.5190234780311584,
      "learning_rate": 4.9991018920713505e-06,
      "loss": 0.5638,
      "step": 110
    },
    {
      "epoch": 0.5219435736677116,
      "grad_norm": 0.5064226388931274,
      "learning_rate": 4.9989133030710154e-06,
      "loss": 0.5474,
      "step": 111
    },
    {
      "epoch": 0.5266457680250783,
      "grad_norm": 0.584128201007843,
      "learning_rate": 4.9987067586555275e-06,
      "loss": 0.546,
      "step": 112
    },
    {
      "epoch": 0.5313479623824452,
      "grad_norm": 0.5325257778167725,
      "learning_rate": 4.998482260308969e-06,
      "loss": 0.5643,
      "step": 113
    },
    {
      "epoch": 0.5360501567398119,
      "grad_norm": 0.5349085330963135,
      "learning_rate": 4.998239809644427e-06,
      "loss": 0.5585,
      "step": 114
    },
    {
      "epoch": 0.5407523510971787,
      "grad_norm": 0.5952833890914917,
      "learning_rate": 4.9979794084039755e-06,
      "loss": 0.5711,
      "step": 115
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.48140987753868103,
      "learning_rate": 4.997701058458677e-06,
      "loss": 0.5319,
      "step": 116
    },
    {
      "epoch": 0.5501567398119123,
      "grad_norm": 0.6170802712440491,
      "learning_rate": 4.997404761808554e-06,
      "loss": 0.5659,
      "step": 117
    },
    {
      "epoch": 0.554858934169279,
      "grad_norm": 0.506353497505188,
      "learning_rate": 4.9970905205825845e-06,
      "loss": 0.5721,
      "step": 118
    },
    {
      "epoch": 0.5595611285266457,
      "grad_norm": 0.5027800798416138,
      "learning_rate": 4.996758337038683e-06,
      "loss": 0.5618,
      "step": 119
    },
    {
      "epoch": 0.5642633228840125,
      "grad_norm": 0.5272043943405151,
      "learning_rate": 4.996408213563684e-06,
      "loss": 0.5548,
      "step": 120
    },
    {
      "epoch": 0.5689655172413793,
      "grad_norm": 0.4655597507953644,
      "learning_rate": 4.996040152673326e-06,
      "loss": 0.522,
      "step": 121
    },
    {
      "epoch": 0.5736677115987461,
      "grad_norm": 0.49152645468711853,
      "learning_rate": 4.995654157012233e-06,
      "loss": 0.5353,
      "step": 122
    },
    {
      "epoch": 0.5783699059561128,
      "grad_norm": 0.5532233119010925,
      "learning_rate": 4.995250229353895e-06,
      "loss": 0.529,
      "step": 123
    },
    {
      "epoch": 0.5830721003134797,
      "grad_norm": 0.4706387221813202,
      "learning_rate": 4.99482837260065e-06,
      "loss": 0.5438,
      "step": 124
    },
    {
      "epoch": 0.5877742946708464,
      "grad_norm": 0.4653231203556061,
      "learning_rate": 4.99438858978366e-06,
      "loss": 0.5735,
      "step": 125
    },
    {
      "epoch": 0.5924764890282131,
      "grad_norm": 0.7020223736763,
      "learning_rate": 4.993930884062892e-06,
      "loss": 0.5557,
      "step": 126
    },
    {
      "epoch": 0.5971786833855799,
      "grad_norm": 0.6187618970870972,
      "learning_rate": 4.993455258727094e-06,
      "loss": 0.5557,
      "step": 127
    },
    {
      "epoch": 0.6018808777429467,
      "grad_norm": 0.5230109691619873,
      "learning_rate": 4.992961717193773e-06,
      "loss": 0.5575,
      "step": 128
    },
    {
      "epoch": 0.6065830721003135,
      "grad_norm": 1.7068945169448853,
      "learning_rate": 4.9924502630091655e-06,
      "loss": 0.565,
      "step": 129
    },
    {
      "epoch": 0.6112852664576802,
      "grad_norm": 0.5217270255088806,
      "learning_rate": 4.99192089984822e-06,
      "loss": 0.549,
      "step": 130
    },
    {
      "epoch": 0.6159874608150471,
      "grad_norm": 0.5645350217819214,
      "learning_rate": 4.9913736315145614e-06,
      "loss": 0.5568,
      "step": 131
    },
    {
      "epoch": 0.6206896551724138,
      "grad_norm": 0.5037111639976501,
      "learning_rate": 4.990808461940474e-06,
      "loss": 0.5375,
      "step": 132
    },
    {
      "epoch": 0.6253918495297806,
      "grad_norm": 0.5933152437210083,
      "learning_rate": 4.990225395186862e-06,
      "loss": 0.5443,
      "step": 133
    },
    {
      "epoch": 0.6300940438871473,
      "grad_norm": 0.532664954662323,
      "learning_rate": 4.9896244354432314e-06,
      "loss": 0.5411,
      "step": 134
    },
    {
      "epoch": 0.6347962382445141,
      "grad_norm": 1.2658472061157227,
      "learning_rate": 4.98900558702765e-06,
      "loss": 0.5506,
      "step": 135
    },
    {
      "epoch": 0.6394984326018809,
      "grad_norm": 0.4731752872467041,
      "learning_rate": 4.9883688543867225e-06,
      "loss": 0.5609,
      "step": 136
    },
    {
      "epoch": 0.6442006269592476,
      "grad_norm": 0.542219340801239,
      "learning_rate": 4.987714242095558e-06,
      "loss": 0.5306,
      "step": 137
    },
    {
      "epoch": 0.6489028213166145,
      "grad_norm": 0.502611517906189,
      "learning_rate": 4.9870417548577355e-06,
      "loss": 0.5353,
      "step": 138
    },
    {
      "epoch": 0.6536050156739812,
      "grad_norm": 0.4692250192165375,
      "learning_rate": 4.9863513975052696e-06,
      "loss": 0.5434,
      "step": 139
    },
    {
      "epoch": 0.658307210031348,
      "grad_norm": 0.5601999163627625,
      "learning_rate": 4.985643174998578e-06,
      "loss": 0.5654,
      "step": 140
    },
    {
      "epoch": 0.6630094043887147,
      "grad_norm": 0.5011409521102905,
      "learning_rate": 4.984917092426445e-06,
      "loss": 0.5518,
      "step": 141
    },
    {
      "epoch": 0.6677115987460815,
      "grad_norm": 0.5076945424079895,
      "learning_rate": 4.984173155005982e-06,
      "loss": 0.5407,
      "step": 142
    },
    {
      "epoch": 0.6724137931034483,
      "grad_norm": 0.4602852165699005,
      "learning_rate": 4.983411368082597e-06,
      "loss": 0.5465,
      "step": 143
    },
    {
      "epoch": 0.677115987460815,
      "grad_norm": 0.7861032485961914,
      "learning_rate": 4.982631737129948e-06,
      "loss": 0.529,
      "step": 144
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.48418697714805603,
      "learning_rate": 4.98183426774991e-06,
      "loss": 0.5684,
      "step": 145
    },
    {
      "epoch": 0.6865203761755486,
      "grad_norm": 0.5176032781600952,
      "learning_rate": 4.981018965672529e-06,
      "loss": 0.5686,
      "step": 146
    },
    {
      "epoch": 0.6912225705329154,
      "grad_norm": 0.5493810772895813,
      "learning_rate": 4.98018583675599e-06,
      "loss": 0.5496,
      "step": 147
    },
    {
      "epoch": 0.6959247648902821,
      "grad_norm": 0.6066202521324158,
      "learning_rate": 4.979334886986562e-06,
      "loss": 0.5503,
      "step": 148
    },
    {
      "epoch": 0.700626959247649,
      "grad_norm": 0.6061354875564575,
      "learning_rate": 4.978466122478567e-06,
      "loss": 0.5648,
      "step": 149
    },
    {
      "epoch": 0.7053291536050157,
      "grad_norm": 0.48728325963020325,
      "learning_rate": 4.97757954947433e-06,
      "loss": 0.5327,
      "step": 150
    },
    {
      "epoch": 0.7100313479623824,
      "grad_norm": 0.5088427662849426,
      "learning_rate": 4.976675174344132e-06,
      "loss": 0.5469,
      "step": 151
    },
    {
      "epoch": 0.7147335423197492,
      "grad_norm": 1.0665165185928345,
      "learning_rate": 4.975753003586172e-06,
      "loss": 0.5268,
      "step": 152
    },
    {
      "epoch": 0.719435736677116,
      "grad_norm": 0.528404712677002,
      "learning_rate": 4.974813043826513e-06,
      "loss": 0.52,
      "step": 153
    },
    {
      "epoch": 0.7241379310344828,
      "grad_norm": 0.5167615413665771,
      "learning_rate": 4.973855301819039e-06,
      "loss": 0.5376,
      "step": 154
    },
    {
      "epoch": 0.7288401253918495,
      "grad_norm": 0.4985736906528473,
      "learning_rate": 4.972879784445402e-06,
      "loss": 0.5192,
      "step": 155
    },
    {
      "epoch": 0.7335423197492164,
      "grad_norm": 0.5043352842330933,
      "learning_rate": 4.971886498714978e-06,
      "loss": 0.5413,
      "step": 156
    },
    {
      "epoch": 0.7382445141065831,
      "grad_norm": 0.5240288376808167,
      "learning_rate": 4.97087545176481e-06,
      "loss": 0.5184,
      "step": 157
    },
    {
      "epoch": 0.7429467084639498,
      "grad_norm": 0.8111078143119812,
      "learning_rate": 4.9698466508595655e-06,
      "loss": 0.5467,
      "step": 158
    },
    {
      "epoch": 0.7476489028213166,
      "grad_norm": 0.48973363637924194,
      "learning_rate": 4.9688001033914756e-06,
      "loss": 0.5341,
      "step": 159
    },
    {
      "epoch": 0.7523510971786834,
      "grad_norm": 0.5965595841407776,
      "learning_rate": 4.967735816880286e-06,
      "loss": 0.5452,
      "step": 160
    },
    {
      "epoch": 0.7570532915360502,
      "grad_norm": 0.5201161503791809,
      "learning_rate": 4.966653798973205e-06,
      "loss": 0.5246,
      "step": 161
    },
    {
      "epoch": 0.7617554858934169,
      "grad_norm": 0.5854963660240173,
      "learning_rate": 4.965554057444842e-06,
      "loss": 0.5178,
      "step": 162
    },
    {
      "epoch": 0.7664576802507836,
      "grad_norm": 0.5000066161155701,
      "learning_rate": 4.964436600197161e-06,
      "loss": 0.5413,
      "step": 163
    },
    {
      "epoch": 0.7711598746081505,
      "grad_norm": 0.49025341868400574,
      "learning_rate": 4.963301435259413e-06,
      "loss": 0.5111,
      "step": 164
    },
    {
      "epoch": 0.7758620689655172,
      "grad_norm": 0.5398334860801697,
      "learning_rate": 4.962148570788088e-06,
      "loss": 0.5313,
      "step": 165
    },
    {
      "epoch": 0.780564263322884,
      "grad_norm": 0.6545300483703613,
      "learning_rate": 4.96097801506685e-06,
      "loss": 0.52,
      "step": 166
    },
    {
      "epoch": 0.7852664576802508,
      "grad_norm": 0.6166910529136658,
      "learning_rate": 4.959789776506482e-06,
      "loss": 0.5288,
      "step": 167
    },
    {
      "epoch": 0.7899686520376176,
      "grad_norm": 0.5018202662467957,
      "learning_rate": 4.958583863644821e-06,
      "loss": 0.5517,
      "step": 168
    },
    {
      "epoch": 0.7946708463949843,
      "grad_norm": 0.5005012154579163,
      "learning_rate": 4.9573602851466985e-06,
      "loss": 0.517,
      "step": 169
    },
    {
      "epoch": 0.799373040752351,
      "grad_norm": 0.51115882396698,
      "learning_rate": 4.9561190498038815e-06,
      "loss": 0.5404,
      "step": 170
    },
    {
      "epoch": 0.8040752351097179,
      "grad_norm": 0.533501148223877,
      "learning_rate": 4.954860166535005e-06,
      "loss": 0.5304,
      "step": 171
    },
    {
      "epoch": 0.8087774294670846,
      "grad_norm": 0.4484115242958069,
      "learning_rate": 4.95358364438551e-06,
      "loss": 0.5355,
      "step": 172
    },
    {
      "epoch": 0.8134796238244514,
      "grad_norm": 0.48717421293258667,
      "learning_rate": 4.952289492527576e-06,
      "loss": 0.5608,
      "step": 173
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.780621349811554,
      "learning_rate": 4.9509777202600605e-06,
      "loss": 0.4935,
      "step": 174
    },
    {
      "epoch": 0.822884012539185,
      "grad_norm": 0.49938517808914185,
      "learning_rate": 4.949648337008425e-06,
      "loss": 0.5409,
      "step": 175
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 0.5497399568557739,
      "learning_rate": 4.948301352324674e-06,
      "loss": 0.5343,
      "step": 176
    },
    {
      "epoch": 0.8322884012539185,
      "grad_norm": 0.49721062183380127,
      "learning_rate": 4.946936775887281e-06,
      "loss": 0.53,
      "step": 177
    },
    {
      "epoch": 0.8369905956112853,
      "grad_norm": 0.48117756843566895,
      "learning_rate": 4.945554617501124e-06,
      "loss": 0.5238,
      "step": 178
    },
    {
      "epoch": 0.841692789968652,
      "grad_norm": 0.49232199788093567,
      "learning_rate": 4.944154887097411e-06,
      "loss": 0.5554,
      "step": 179
    },
    {
      "epoch": 0.8463949843260188,
      "grad_norm": 0.49600890278816223,
      "learning_rate": 4.942737594733608e-06,
      "loss": 0.5265,
      "step": 180
    },
    {
      "epoch": 0.8510971786833855,
      "grad_norm": 0.502390444278717,
      "learning_rate": 4.941302750593373e-06,
      "loss": 0.5427,
      "step": 181
    },
    {
      "epoch": 0.8557993730407524,
      "grad_norm": 0.4632537066936493,
      "learning_rate": 4.939850364986475e-06,
      "loss": 0.4842,
      "step": 182
    },
    {
      "epoch": 0.8605015673981191,
      "grad_norm": 0.5433545708656311,
      "learning_rate": 4.938380448348725e-06,
      "loss": 0.4928,
      "step": 183
    },
    {
      "epoch": 0.8652037617554859,
      "grad_norm": 0.5086951851844788,
      "learning_rate": 4.9368930112419e-06,
      "loss": 0.5333,
      "step": 184
    },
    {
      "epoch": 0.8699059561128527,
      "grad_norm": 0.47532474994659424,
      "learning_rate": 4.935388064353665e-06,
      "loss": 0.5357,
      "step": 185
    },
    {
      "epoch": 0.8746081504702194,
      "grad_norm": 0.49426794052124023,
      "learning_rate": 4.9338656184975e-06,
      "loss": 0.5316,
      "step": 186
    },
    {
      "epoch": 0.8793103448275862,
      "grad_norm": 0.49909746646881104,
      "learning_rate": 4.932325684612618e-06,
      "loss": 0.543,
      "step": 187
    },
    {
      "epoch": 0.8840125391849529,
      "grad_norm": 0.5019994378089905,
      "learning_rate": 4.93076827376389e-06,
      "loss": 0.5464,
      "step": 188
    },
    {
      "epoch": 0.8887147335423198,
      "grad_norm": 0.5011009573936462,
      "learning_rate": 4.9291933971417635e-06,
      "loss": 0.543,
      "step": 189
    },
    {
      "epoch": 0.8934169278996865,
      "grad_norm": 0.4558373987674713,
      "learning_rate": 4.9276010660621835e-06,
      "loss": 0.5298,
      "step": 190
    },
    {
      "epoch": 0.8981191222570533,
      "grad_norm": 0.5847108960151672,
      "learning_rate": 4.925991291966508e-06,
      "loss": 0.5089,
      "step": 191
    },
    {
      "epoch": 0.9028213166144201,
      "grad_norm": 0.5339157581329346,
      "learning_rate": 4.92436408642143e-06,
      "loss": 0.5466,
      "step": 192
    },
    {
      "epoch": 0.9075235109717869,
      "grad_norm": 0.5474189519882202,
      "learning_rate": 4.9227194611188934e-06,
      "loss": 0.5224,
      "step": 193
    },
    {
      "epoch": 0.9122257053291536,
      "grad_norm": 0.4983315169811249,
      "learning_rate": 4.921057427876007e-06,
      "loss": 0.4919,
      "step": 194
    },
    {
      "epoch": 0.9169278996865203,
      "grad_norm": 0.5565828680992126,
      "learning_rate": 4.919377998634959e-06,
      "loss": 0.5499,
      "step": 195
    },
    {
      "epoch": 0.9216300940438872,
      "grad_norm": 0.9578695893287659,
      "learning_rate": 4.917681185462934e-06,
      "loss": 0.5356,
      "step": 196
    },
    {
      "epoch": 0.9263322884012539,
      "grad_norm": 0.5272259712219238,
      "learning_rate": 4.915967000552028e-06,
      "loss": 0.526,
      "step": 197
    },
    {
      "epoch": 0.9310344827586207,
      "grad_norm": 1.1794627904891968,
      "learning_rate": 4.914235456219154e-06,
      "loss": 0.5193,
      "step": 198
    },
    {
      "epoch": 0.9357366771159875,
      "grad_norm": 0.49176672101020813,
      "learning_rate": 4.912486564905959e-06,
      "loss": 0.5477,
      "step": 199
    },
    {
      "epoch": 0.9404388714733543,
      "grad_norm": 0.48104146122932434,
      "learning_rate": 4.910720339178735e-06,
      "loss": 0.5303,
      "step": 200
    },
    {
      "epoch": 0.945141065830721,
      "grad_norm": 0.48861178755760193,
      "learning_rate": 4.908936791728323e-06,
      "loss": 0.5353,
      "step": 201
    },
    {
      "epoch": 0.9498432601880877,
      "grad_norm": 0.5078539848327637,
      "learning_rate": 4.907135935370027e-06,
      "loss": 0.5336,
      "step": 202
    },
    {
      "epoch": 0.9545454545454546,
      "grad_norm": 0.5397623777389526,
      "learning_rate": 4.905317783043523e-06,
      "loss": 0.5422,
      "step": 203
    },
    {
      "epoch": 0.9592476489028213,
      "grad_norm": 0.5742900371551514,
      "learning_rate": 4.9034823478127605e-06,
      "loss": 0.5225,
      "step": 204
    },
    {
      "epoch": 0.9639498432601881,
      "grad_norm": 0.4920111298561096,
      "learning_rate": 4.901629642865872e-06,
      "loss": 0.5001,
      "step": 205
    },
    {
      "epoch": 0.9686520376175548,
      "grad_norm": 0.5811502933502197,
      "learning_rate": 4.89975968151508e-06,
      "loss": 0.5237,
      "step": 206
    },
    {
      "epoch": 0.9733542319749217,
      "grad_norm": 0.9640730619430542,
      "learning_rate": 4.8978724771965965e-06,
      "loss": 0.5136,
      "step": 207
    },
    {
      "epoch": 0.9780564263322884,
      "grad_norm": 0.5389460921287537,
      "learning_rate": 4.895968043470532e-06,
      "loss": 0.5306,
      "step": 208
    },
    {
      "epoch": 0.9827586206896551,
      "grad_norm": 0.6080204248428345,
      "learning_rate": 4.894046394020794e-06,
      "loss": 0.4957,
      "step": 209
    },
    {
      "epoch": 0.987460815047022,
      "grad_norm": 0.5144377946853638,
      "learning_rate": 4.892107542654988e-06,
      "loss": 0.5275,
      "step": 210
    },
    {
      "epoch": 0.9921630094043887,
      "grad_norm": 0.4928152859210968,
      "learning_rate": 4.890151503304325e-06,
      "loss": 0.5496,
      "step": 211
    },
    {
      "epoch": 0.9968652037617555,
      "grad_norm": 0.4954489469528198,
      "learning_rate": 4.88817829002351e-06,
      "loss": 0.5247,
      "step": 212
    },
    {
      "epoch": 1.0047021943573669,
      "grad_norm": 1.0198856592178345,
      "learning_rate": 4.886187916990653e-06,
      "loss": 1.0557,
      "step": 213
    },
    {
      "epoch": 1.0094043887147335,
      "grad_norm": 0.4713587462902069,
      "learning_rate": 4.884180398507163e-06,
      "loss": 0.5031,
      "step": 214
    },
    {
      "epoch": 1.0141065830721003,
      "grad_norm": 0.46703630685806274,
      "learning_rate": 4.882155748997636e-06,
      "loss": 0.4952,
      "step": 215
    },
    {
      "epoch": 1.0188087774294672,
      "grad_norm": 0.48665955662727356,
      "learning_rate": 4.8801139830097685e-06,
      "loss": 0.5024,
      "step": 216
    },
    {
      "epoch": 1.0235109717868338,
      "grad_norm": 0.5477427840232849,
      "learning_rate": 4.878055115214238e-06,
      "loss": 0.5124,
      "step": 217
    },
    {
      "epoch": 1.0282131661442007,
      "grad_norm": 0.4587133228778839,
      "learning_rate": 4.875979160404607e-06,
      "loss": 0.5098,
      "step": 218
    },
    {
      "epoch": 1.0329153605015673,
      "grad_norm": 0.5163841247558594,
      "learning_rate": 4.873886133497209e-06,
      "loss": 0.5187,
      "step": 219
    },
    {
      "epoch": 1.0376175548589341,
      "grad_norm": 0.4965426027774811,
      "learning_rate": 4.87177604953105e-06,
      "loss": 0.5148,
      "step": 220
    },
    {
      "epoch": 1.042319749216301,
      "grad_norm": 0.5206760764122009,
      "learning_rate": 4.869648923667694e-06,
      "loss": 0.4681,
      "step": 221
    },
    {
      "epoch": 1.0470219435736676,
      "grad_norm": 2.3893775939941406,
      "learning_rate": 4.867504771191154e-06,
      "loss": 0.4942,
      "step": 222
    },
    {
      "epoch": 1.0517241379310345,
      "grad_norm": 0.4828566312789917,
      "learning_rate": 4.865343607507788e-06,
      "loss": 0.5019,
      "step": 223
    },
    {
      "epoch": 1.0564263322884013,
      "grad_norm": 0.5338858962059021,
      "learning_rate": 4.86316544814618e-06,
      "loss": 0.5172,
      "step": 224
    },
    {
      "epoch": 1.061128526645768,
      "grad_norm": 0.5287072062492371,
      "learning_rate": 4.860970308757038e-06,
      "loss": 0.462,
      "step": 225
    },
    {
      "epoch": 1.0658307210031348,
      "grad_norm": 0.4908182621002197,
      "learning_rate": 4.858758205113072e-06,
      "loss": 0.4908,
      "step": 226
    },
    {
      "epoch": 1.0705329153605017,
      "grad_norm": 0.4617731273174286,
      "learning_rate": 4.856529153108888e-06,
      "loss": 0.5229,
      "step": 227
    },
    {
      "epoch": 1.0752351097178683,
      "grad_norm": 0.8010035753250122,
      "learning_rate": 4.854283168760868e-06,
      "loss": 0.5016,
      "step": 228
    },
    {
      "epoch": 1.0799373040752351,
      "grad_norm": 0.5060340762138367,
      "learning_rate": 4.85202026820706e-06,
      "loss": 0.4996,
      "step": 229
    },
    {
      "epoch": 1.084639498432602,
      "grad_norm": 0.5193985104560852,
      "learning_rate": 4.84974046770706e-06,
      "loss": 0.5361,
      "step": 230
    },
    {
      "epoch": 1.0893416927899686,
      "grad_norm": 0.4898582100868225,
      "learning_rate": 4.847443783641893e-06,
      "loss": 0.4466,
      "step": 231
    },
    {
      "epoch": 1.0940438871473355,
      "grad_norm": 0.49226534366607666,
      "learning_rate": 4.845130232513901e-06,
      "loss": 0.4921,
      "step": 232
    },
    {
      "epoch": 1.098746081504702,
      "grad_norm": 0.531612753868103,
      "learning_rate": 4.842799830946615e-06,
      "loss": 0.4883,
      "step": 233
    },
    {
      "epoch": 1.103448275862069,
      "grad_norm": 0.48687735199928284,
      "learning_rate": 4.840452595684646e-06,
      "loss": 0.4845,
      "step": 234
    },
    {
      "epoch": 1.1081504702194358,
      "grad_norm": 0.5371798276901245,
      "learning_rate": 4.83808854359356e-06,
      "loss": 0.4788,
      "step": 235
    },
    {
      "epoch": 1.1128526645768024,
      "grad_norm": 0.4869842529296875,
      "learning_rate": 4.835707691659753e-06,
      "loss": 0.4821,
      "step": 236
    },
    {
      "epoch": 1.1175548589341693,
      "grad_norm": 0.6263965368270874,
      "learning_rate": 4.8333100569903365e-06,
      "loss": 0.4936,
      "step": 237
    },
    {
      "epoch": 1.1222570532915361,
      "grad_norm": 0.5065476298332214,
      "learning_rate": 4.8308956568130094e-06,
      "loss": 0.5136,
      "step": 238
    },
    {
      "epoch": 1.1269592476489028,
      "grad_norm": 0.5552216172218323,
      "learning_rate": 4.828464508475934e-06,
      "loss": 0.5072,
      "step": 239
    },
    {
      "epoch": 1.1316614420062696,
      "grad_norm": 0.5835661888122559,
      "learning_rate": 4.826016629447616e-06,
      "loss": 0.509,
      "step": 240
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 0.4653923511505127,
      "learning_rate": 4.823552037316775e-06,
      "loss": 0.4845,
      "step": 241
    },
    {
      "epoch": 1.141065830721003,
      "grad_norm": 0.6101334095001221,
      "learning_rate": 4.821070749792218e-06,
      "loss": 0.5401,
      "step": 242
    },
    {
      "epoch": 1.14576802507837,
      "grad_norm": 0.6516410708427429,
      "learning_rate": 4.818572784702713e-06,
      "loss": 0.5107,
      "step": 243
    },
    {
      "epoch": 1.1504702194357366,
      "grad_norm": 0.5194690823554993,
      "learning_rate": 4.816058159996863e-06,
      "loss": 0.521,
      "step": 244
    },
    {
      "epoch": 1.1551724137931034,
      "grad_norm": 0.4790078401565552,
      "learning_rate": 4.813526893742972e-06,
      "loss": 0.5038,
      "step": 245
    },
    {
      "epoch": 1.1598746081504703,
      "grad_norm": 0.4948838949203491,
      "learning_rate": 4.810979004128924e-06,
      "loss": 0.4966,
      "step": 246
    },
    {
      "epoch": 1.164576802507837,
      "grad_norm": 0.49500057101249695,
      "learning_rate": 4.808414509462042e-06,
      "loss": 0.5171,
      "step": 247
    },
    {
      "epoch": 1.1692789968652038,
      "grad_norm": 0.46993380784988403,
      "learning_rate": 4.80583342816896e-06,
      "loss": 0.484,
      "step": 248
    },
    {
      "epoch": 1.1739811912225706,
      "grad_norm": 0.5349460244178772,
      "learning_rate": 4.803235778795496e-06,
      "loss": 0.5232,
      "step": 249
    },
    {
      "epoch": 1.1786833855799372,
      "grad_norm": 0.7706441283226013,
      "learning_rate": 4.800621580006511e-06,
      "loss": 0.4677,
      "step": 250
    },
    {
      "epoch": 1.183385579937304,
      "grad_norm": 0.4796987473964691,
      "learning_rate": 4.797990850585782e-06,
      "loss": 0.5151,
      "step": 251
    },
    {
      "epoch": 1.188087774294671,
      "grad_norm": 1.0548434257507324,
      "learning_rate": 4.79534360943586e-06,
      "loss": 0.4954,
      "step": 252
    },
    {
      "epoch": 1.1927899686520376,
      "grad_norm": 0.6616916060447693,
      "learning_rate": 4.792679875577937e-06,
      "loss": 0.4803,
      "step": 253
    },
    {
      "epoch": 1.1974921630094044,
      "grad_norm": 0.5972627997398376,
      "learning_rate": 4.789999668151714e-06,
      "loss": 0.5145,
      "step": 254
    },
    {
      "epoch": 1.2021943573667713,
      "grad_norm": 0.46703872084617615,
      "learning_rate": 4.7873030064152545e-06,
      "loss": 0.4946,
      "step": 255
    },
    {
      "epoch": 1.206896551724138,
      "grad_norm": 0.477717787027359,
      "learning_rate": 4.784589909744856e-06,
      "loss": 0.4891,
      "step": 256
    },
    {
      "epoch": 1.2115987460815048,
      "grad_norm": 0.5048542022705078,
      "learning_rate": 4.7818603976349005e-06,
      "loss": 0.5018,
      "step": 257
    },
    {
      "epoch": 1.2163009404388714,
      "grad_norm": 0.4797464907169342,
      "learning_rate": 4.779114489697724e-06,
      "loss": 0.4978,
      "step": 258
    },
    {
      "epoch": 1.2210031347962382,
      "grad_norm": 0.5975466966629028,
      "learning_rate": 4.776352205663469e-06,
      "loss": 0.5009,
      "step": 259
    },
    {
      "epoch": 1.225705329153605,
      "grad_norm": 0.9671257734298706,
      "learning_rate": 4.773573565379947e-06,
      "loss": 0.5118,
      "step": 260
    },
    {
      "epoch": 1.2304075235109717,
      "grad_norm": 0.5943232178688049,
      "learning_rate": 4.770778588812489e-06,
      "loss": 0.4772,
      "step": 261
    },
    {
      "epoch": 1.2351097178683386,
      "grad_norm": 0.4887758493423462,
      "learning_rate": 4.7679672960438135e-06,
      "loss": 0.5038,
      "step": 262
    },
    {
      "epoch": 1.2398119122257054,
      "grad_norm": 0.6083370447158813,
      "learning_rate": 4.765139707273872e-06,
      "loss": 0.4916,
      "step": 263
    },
    {
      "epoch": 1.244514106583072,
      "grad_norm": 0.49385467171669006,
      "learning_rate": 4.762295842819707e-06,
      "loss": 0.5036,
      "step": 264
    },
    {
      "epoch": 1.249216300940439,
      "grad_norm": 0.5786624550819397,
      "learning_rate": 4.759435723115308e-06,
      "loss": 0.4692,
      "step": 265
    },
    {
      "epoch": 1.2539184952978055,
      "grad_norm": 0.48176029324531555,
      "learning_rate": 4.756559368711463e-06,
      "loss": 0.5065,
      "step": 266
    },
    {
      "epoch": 1.2586206896551724,
      "grad_norm": 0.5976678729057312,
      "learning_rate": 4.75366680027561e-06,
      "loss": 0.4879,
      "step": 267
    },
    {
      "epoch": 1.2633228840125392,
      "grad_norm": 0.5139954686164856,
      "learning_rate": 4.7507580385916906e-06,
      "loss": 0.4906,
      "step": 268
    },
    {
      "epoch": 1.2680250783699059,
      "grad_norm": 0.5675197839736938,
      "learning_rate": 4.747833104559999e-06,
      "loss": 0.4667,
      "step": 269
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 0.48431986570358276,
      "learning_rate": 4.744892019197033e-06,
      "loss": 0.4809,
      "step": 270
    },
    {
      "epoch": 1.2774294670846396,
      "grad_norm": 0.5017510056495667,
      "learning_rate": 4.74193480363534e-06,
      "loss": 0.4874,
      "step": 271
    },
    {
      "epoch": 1.2821316614420062,
      "grad_norm": 0.5467999577522278,
      "learning_rate": 4.738961479123373e-06,
      "loss": 0.497,
      "step": 272
    },
    {
      "epoch": 1.286833855799373,
      "grad_norm": 1.3385310173034668,
      "learning_rate": 4.735972067025326e-06,
      "loss": 0.5027,
      "step": 273
    },
    {
      "epoch": 1.29153605015674,
      "grad_norm": 0.5216737389564514,
      "learning_rate": 4.732966588820991e-06,
      "loss": 0.4953,
      "step": 274
    },
    {
      "epoch": 1.2962382445141065,
      "grad_norm": 0.5195981860160828,
      "learning_rate": 4.729945066105599e-06,
      "loss": 0.474,
      "step": 275
    },
    {
      "epoch": 1.3009404388714734,
      "grad_norm": 0.47055330872535706,
      "learning_rate": 4.726907520589664e-06,
      "loss": 0.4659,
      "step": 276
    },
    {
      "epoch": 1.3056426332288402,
      "grad_norm": 0.5389472246170044,
      "learning_rate": 4.72385397409883e-06,
      "loss": 0.5076,
      "step": 277
    },
    {
      "epoch": 1.3103448275862069,
      "grad_norm": 0.506074070930481,
      "learning_rate": 4.720784448573712e-06,
      "loss": 0.4993,
      "step": 278
    },
    {
      "epoch": 1.3150470219435737,
      "grad_norm": 0.5159019827842712,
      "learning_rate": 4.717698966069739e-06,
      "loss": 0.5292,
      "step": 279
    },
    {
      "epoch": 1.3197492163009406,
      "grad_norm": 0.5021179914474487,
      "learning_rate": 4.7145975487569965e-06,
      "loss": 0.5076,
      "step": 280
    },
    {
      "epoch": 1.3244514106583072,
      "grad_norm": 0.5160107016563416,
      "learning_rate": 4.711480218920064e-06,
      "loss": 0.4693,
      "step": 281
    },
    {
      "epoch": 1.329153605015674,
      "grad_norm": 0.5437175631523132,
      "learning_rate": 4.708346998957859e-06,
      "loss": 0.5179,
      "step": 282
    },
    {
      "epoch": 1.3338557993730409,
      "grad_norm": 0.5456722378730774,
      "learning_rate": 4.705197911383473e-06,
      "loss": 0.489,
      "step": 283
    },
    {
      "epoch": 1.3385579937304075,
      "grad_norm": 0.4638400375843048,
      "learning_rate": 4.7020329788240115e-06,
      "loss": 0.4752,
      "step": 284
    },
    {
      "epoch": 1.3432601880877744,
      "grad_norm": 0.5053242444992065,
      "learning_rate": 4.6988522240204325e-06,
      "loss": 0.4847,
      "step": 285
    },
    {
      "epoch": 1.347962382445141,
      "grad_norm": 0.5066615343093872,
      "learning_rate": 4.695655669827377e-06,
      "loss": 0.4992,
      "step": 286
    },
    {
      "epoch": 1.3526645768025078,
      "grad_norm": 0.5127485990524292,
      "learning_rate": 4.6924433392130135e-06,
      "loss": 0.4915,
      "step": 287
    },
    {
      "epoch": 1.3573667711598745,
      "grad_norm": 0.5612887144088745,
      "learning_rate": 4.689215255258866e-06,
      "loss": 0.5093,
      "step": 288
    },
    {
      "epoch": 1.3620689655172413,
      "grad_norm": 0.47511914372444153,
      "learning_rate": 4.685971441159653e-06,
      "loss": 0.4784,
      "step": 289
    },
    {
      "epoch": 1.3667711598746082,
      "grad_norm": 0.5002272129058838,
      "learning_rate": 4.682711920223115e-06,
      "loss": 0.4774,
      "step": 290
    },
    {
      "epoch": 1.3714733542319748,
      "grad_norm": 0.5161554217338562,
      "learning_rate": 4.679436715869856e-06,
      "loss": 0.4977,
      "step": 291
    },
    {
      "epoch": 1.3761755485893417,
      "grad_norm": 0.5877403020858765,
      "learning_rate": 4.676145851633166e-06,
      "loss": 0.515,
      "step": 292
    },
    {
      "epoch": 1.3808777429467085,
      "grad_norm": 0.47495776414871216,
      "learning_rate": 4.672839351158856e-06,
      "loss": 0.5002,
      "step": 293
    },
    {
      "epoch": 1.3855799373040751,
      "grad_norm": 0.5374031066894531,
      "learning_rate": 4.669517238205089e-06,
      "loss": 0.4841,
      "step": 294
    },
    {
      "epoch": 1.390282131661442,
      "grad_norm": 0.5107132792472839,
      "learning_rate": 4.666179536642208e-06,
      "loss": 0.4847,
      "step": 295
    },
    {
      "epoch": 1.3949843260188088,
      "grad_norm": 0.48953261971473694,
      "learning_rate": 4.662826270452565e-06,
      "loss": 0.4806,
      "step": 296
    },
    {
      "epoch": 1.3996865203761755,
      "grad_norm": 0.5226054191589355,
      "learning_rate": 4.659457463730347e-06,
      "loss": 0.4908,
      "step": 297
    },
    {
      "epoch": 1.4043887147335423,
      "grad_norm": 0.5019803643226624,
      "learning_rate": 4.6560731406814056e-06,
      "loss": 0.5054,
      "step": 298
    },
    {
      "epoch": 1.4090909090909092,
      "grad_norm": 0.5146315693855286,
      "learning_rate": 4.65267332562308e-06,
      "loss": 0.5076,
      "step": 299
    },
    {
      "epoch": 1.4137931034482758,
      "grad_norm": 0.5293256640434265,
      "learning_rate": 4.649258042984026e-06,
      "loss": 0.506,
      "step": 300
    },
    {
      "epoch": 1.4184952978056427,
      "grad_norm": 0.536259651184082,
      "learning_rate": 4.6458273173040395e-06,
      "loss": 0.4622,
      "step": 301
    },
    {
      "epoch": 1.4231974921630095,
      "grad_norm": 0.5373066067695618,
      "learning_rate": 4.642381173233874e-06,
      "loss": 0.4995,
      "step": 302
    },
    {
      "epoch": 1.4278996865203761,
      "grad_norm": 0.5316248536109924,
      "learning_rate": 4.638919635535073e-06,
      "loss": 0.4555,
      "step": 303
    },
    {
      "epoch": 1.432601880877743,
      "grad_norm": 1.0662360191345215,
      "learning_rate": 4.635442729079788e-06,
      "loss": 0.4831,
      "step": 304
    },
    {
      "epoch": 1.4373040752351098,
      "grad_norm": 0.6498283743858337,
      "learning_rate": 4.6319504788505956e-06,
      "loss": 0.4774,
      "step": 305
    },
    {
      "epoch": 1.4420062695924765,
      "grad_norm": 0.4853154122829437,
      "learning_rate": 4.628442909940325e-06,
      "loss": 0.4897,
      "step": 306
    },
    {
      "epoch": 1.4467084639498433,
      "grad_norm": 0.8411519527435303,
      "learning_rate": 4.624920047551874e-06,
      "loss": 0.5064,
      "step": 307
    },
    {
      "epoch": 1.4514106583072102,
      "grad_norm": 0.547363817691803,
      "learning_rate": 4.621381916998029e-06,
      "loss": 0.4733,
      "step": 308
    },
    {
      "epoch": 1.4561128526645768,
      "grad_norm": 0.5782042145729065,
      "learning_rate": 4.6178285437012806e-06,
      "loss": 0.5085,
      "step": 309
    },
    {
      "epoch": 1.4608150470219436,
      "grad_norm": 0.5078956484794617,
      "learning_rate": 4.6142599531936435e-06,
      "loss": 0.4715,
      "step": 310
    },
    {
      "epoch": 1.4655172413793103,
      "grad_norm": 0.49163371324539185,
      "learning_rate": 4.610676171116475e-06,
      "loss": 0.492,
      "step": 311
    },
    {
      "epoch": 1.4702194357366771,
      "grad_norm": 0.5490444898605347,
      "learning_rate": 4.607077223220286e-06,
      "loss": 0.4954,
      "step": 312
    },
    {
      "epoch": 1.4749216300940438,
      "grad_norm": 0.5161562561988831,
      "learning_rate": 4.603463135364556e-06,
      "loss": 0.4659,
      "step": 313
    },
    {
      "epoch": 1.4796238244514106,
      "grad_norm": 0.4908446669578552,
      "learning_rate": 4.5998339335175555e-06,
      "loss": 0.4884,
      "step": 314
    },
    {
      "epoch": 1.4843260188087775,
      "grad_norm": 0.5703722834587097,
      "learning_rate": 4.596189643756147e-06,
      "loss": 0.4659,
      "step": 315
    },
    {
      "epoch": 1.489028213166144,
      "grad_norm": 0.6954686045646667,
      "learning_rate": 4.592530292265609e-06,
      "loss": 0.4834,
      "step": 316
    },
    {
      "epoch": 1.493730407523511,
      "grad_norm": 0.498858243227005,
      "learning_rate": 4.58885590533944e-06,
      "loss": 0.4925,
      "step": 317
    },
    {
      "epoch": 1.4984326018808778,
      "grad_norm": 0.4805036783218384,
      "learning_rate": 4.585166509379173e-06,
      "loss": 0.5152,
      "step": 318
    },
    {
      "epoch": 1.5031347962382444,
      "grad_norm": 0.5069772005081177,
      "learning_rate": 4.581462130894186e-06,
      "loss": 0.4926,
      "step": 319
    },
    {
      "epoch": 1.5078369905956113,
      "grad_norm": 0.5127174854278564,
      "learning_rate": 4.57774279650151e-06,
      "loss": 0.4839,
      "step": 320
    },
    {
      "epoch": 1.5125391849529781,
      "grad_norm": 0.5314028263092041,
      "learning_rate": 4.574008532925638e-06,
      "loss": 0.5101,
      "step": 321
    },
    {
      "epoch": 1.5172413793103448,
      "grad_norm": 0.4849459230899811,
      "learning_rate": 4.570259366998336e-06,
      "loss": 0.495,
      "step": 322
    },
    {
      "epoch": 1.5219435736677116,
      "grad_norm": 0.5260595679283142,
      "learning_rate": 4.566495325658445e-06,
      "loss": 0.5232,
      "step": 323
    },
    {
      "epoch": 1.5266457680250785,
      "grad_norm": 0.4890941083431244,
      "learning_rate": 4.5627164359516915e-06,
      "loss": 0.5034,
      "step": 324
    },
    {
      "epoch": 1.531347962382445,
      "grad_norm": 0.516516387462616,
      "learning_rate": 4.558922725030491e-06,
      "loss": 0.4773,
      "step": 325
    },
    {
      "epoch": 1.536050156739812,
      "grad_norm": 0.622726321220398,
      "learning_rate": 4.555114220153755e-06,
      "loss": 0.4306,
      "step": 326
    },
    {
      "epoch": 1.5407523510971788,
      "grad_norm": 0.50007164478302,
      "learning_rate": 4.551290948686693e-06,
      "loss": 0.5134,
      "step": 327
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.669569194316864,
      "learning_rate": 4.547452938100615e-06,
      "loss": 0.5173,
      "step": 328
    },
    {
      "epoch": 1.5501567398119123,
      "grad_norm": 0.5498544573783875,
      "learning_rate": 4.54360021597274e-06,
      "loss": 0.4946,
      "step": 329
    },
    {
      "epoch": 1.5548589341692791,
      "grad_norm": 0.5294123888015747,
      "learning_rate": 4.539732809985989e-06,
      "loss": 0.4853,
      "step": 330
    },
    {
      "epoch": 1.5595611285266457,
      "grad_norm": 0.5199066996574402,
      "learning_rate": 4.535850747928796e-06,
      "loss": 0.4989,
      "step": 331
    },
    {
      "epoch": 1.5642633228840124,
      "grad_norm": 0.5348268151283264,
      "learning_rate": 4.531954057694897e-06,
      "loss": 0.4823,
      "step": 332
    },
    {
      "epoch": 1.5689655172413794,
      "grad_norm": 0.47487759590148926,
      "learning_rate": 4.5280427672831414e-06,
      "loss": 0.4894,
      "step": 333
    },
    {
      "epoch": 1.573667711598746,
      "grad_norm": 0.4653705060482025,
      "learning_rate": 4.524116904797281e-06,
      "loss": 0.4656,
      "step": 334
    },
    {
      "epoch": 1.5783699059561127,
      "grad_norm": 0.5206255912780762,
      "learning_rate": 4.520176498445774e-06,
      "loss": 0.4782,
      "step": 335
    },
    {
      "epoch": 1.5830721003134798,
      "grad_norm": 0.7349448800086975,
      "learning_rate": 4.516221576541581e-06,
      "loss": 0.4781,
      "step": 336
    },
    {
      "epoch": 1.5877742946708464,
      "grad_norm": 0.4842517077922821,
      "learning_rate": 4.512252167501959e-06,
      "loss": 0.4781,
      "step": 337
    },
    {
      "epoch": 1.592476489028213,
      "grad_norm": 0.5287013053894043,
      "learning_rate": 4.508268299848262e-06,
      "loss": 0.4841,
      "step": 338
    },
    {
      "epoch": 1.59717868338558,
      "grad_norm": 0.5405049324035645,
      "learning_rate": 4.50427000220573e-06,
      "loss": 0.4986,
      "step": 339
    },
    {
      "epoch": 1.6018808777429467,
      "grad_norm": 0.5042012333869934,
      "learning_rate": 4.50025730330329e-06,
      "loss": 0.4757,
      "step": 340
    },
    {
      "epoch": 1.6065830721003134,
      "grad_norm": 0.5087094902992249,
      "learning_rate": 4.4962302319733445e-06,
      "loss": 0.4927,
      "step": 341
    },
    {
      "epoch": 1.6112852664576802,
      "grad_norm": 0.5267127156257629,
      "learning_rate": 4.492188817151565e-06,
      "loss": 0.5282,
      "step": 342
    },
    {
      "epoch": 1.615987460815047,
      "grad_norm": 0.5424356460571289,
      "learning_rate": 4.488133087876688e-06,
      "loss": 0.4664,
      "step": 343
    },
    {
      "epoch": 1.6206896551724137,
      "grad_norm": 0.7226302027702332,
      "learning_rate": 4.484063073290301e-06,
| "loss": 0.4648, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.6253918495297806, | |
| "grad_norm": 0.5150883197784424, | |
| "learning_rate": 4.479978802636637e-06, | |
| "loss": 0.4995, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.6300940438871474, | |
| "grad_norm": 0.6959285140037537, | |
| "learning_rate": 4.475880305262362e-06, | |
| "loss": 0.5036, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.634796238244514, | |
| "grad_norm": 0.48316431045532227, | |
| "learning_rate": 4.471767610616366e-06, | |
| "loss": 0.4956, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.6394984326018809, | |
| "grad_norm": 0.5120992064476013, | |
| "learning_rate": 4.467640748249549e-06, | |
| "loss": 0.47, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.6442006269592477, | |
| "grad_norm": 0.495301753282547, | |
| "learning_rate": 4.4634997478146125e-06, | |
| "loss": 0.4889, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.6489028213166144, | |
| "grad_norm": 0.5113374590873718, | |
| "learning_rate": 4.459344639065842e-06, | |
| "loss": 0.4814, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.6536050156739812, | |
| "grad_norm": 0.5123418569564819, | |
| "learning_rate": 4.455175451858897e-06, | |
| "loss": 0.4897, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.658307210031348, | |
| "grad_norm": 0.4938543438911438, | |
| "learning_rate": 4.450992216150592e-06, | |
| "loss": 0.4981, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.6630094043887147, | |
| "grad_norm": 0.6186752319335938, | |
| "learning_rate": 4.446794961998689e-06, | |
| "loss": 0.4669, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.6677115987460815, | |
| "grad_norm": 0.46507078409194946, | |
| "learning_rate": 4.442583719561671e-06, | |
| "loss": 0.4927, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.6724137931034484, | |
| "grad_norm": 0.6733965873718262, | |
| "learning_rate": 4.438358519098536e-06, | |
| "loss": 0.5021, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.677115987460815, | |
| "grad_norm": 0.49244073033332825, | |
| "learning_rate": 4.4341193909685685e-06, | |
| "loss": 0.485, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.6818181818181817, | |
| "grad_norm": 0.5992756485939026, | |
| "learning_rate": 4.429866365631134e-06, | |
| "loss": 0.4906, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.6865203761755487, | |
| "grad_norm": 0.5802706480026245, | |
| "learning_rate": 4.425599473645447e-06, | |
| "loss": 0.4813, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.6912225705329154, | |
| "grad_norm": 0.5721625685691833, | |
| "learning_rate": 4.421318745670364e-06, | |
| "loss": 0.4815, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.695924764890282, | |
| "grad_norm": 0.4937971234321594, | |
| "learning_rate": 4.4170242124641524e-06, | |
| "loss": 0.4596, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.700626959247649, | |
| "grad_norm": 0.49360817670822144, | |
| "learning_rate": 4.412715904884277e-06, | |
| "loss": 0.4893, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.7053291536050157, | |
| "grad_norm": 0.5080047845840454, | |
| "learning_rate": 4.4083938538871735e-06, | |
| "loss": 0.4682, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.7100313479623823, | |
| "grad_norm": 0.5415318608283997, | |
| "learning_rate": 4.4040580905280295e-06, | |
| "loss": 0.487, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.7147335423197492, | |
| "grad_norm": 0.607445240020752, | |
| "learning_rate": 4.3997086459605586e-06, | |
| "loss": 0.4837, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.719435736677116, | |
| "grad_norm": 0.47159233689308167, | |
| "learning_rate": 4.395345551436779e-06, | |
| "loss": 0.5081, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.7241379310344827, | |
| "grad_norm": 0.516380786895752, | |
| "learning_rate": 4.390968838306788e-06, | |
| "loss": 0.4634, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.7288401253918495, | |
| "grad_norm": 0.4957869350910187, | |
| "learning_rate": 4.386578538018535e-06, | |
| "loss": 0.4618, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.7335423197492164, | |
| "grad_norm": 0.5057672262191772, | |
| "learning_rate": 4.382174682117598e-06, | |
| "loss": 0.5083, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.738244514106583, | |
| "grad_norm": 0.5154942274093628, | |
| "learning_rate": 4.377757302246956e-06, | |
| "loss": 0.444, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.7429467084639498, | |
| "grad_norm": 0.51052325963974, | |
| "learning_rate": 4.373326430146762e-06, | |
| "loss": 0.5009, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.7476489028213167, | |
| "grad_norm": 0.4869065582752228, | |
| "learning_rate": 4.368882097654113e-06, | |
| "loss": 0.4956, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.7523510971786833, | |
| "grad_norm": 0.46982306241989136, | |
| "learning_rate": 4.364424336702825e-06, | |
| "loss": 0.4701, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.7570532915360502, | |
| "grad_norm": 0.5106102824211121, | |
| "learning_rate": 4.3599531793232e-06, | |
| "loss": 0.4879, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.761755485893417, | |
| "grad_norm": 0.6152806282043457, | |
| "learning_rate": 4.355468657641797e-06, | |
| "loss": 0.4825, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.7664576802507836, | |
| "grad_norm": 0.523330807685852, | |
| "learning_rate": 4.3509708038812035e-06, | |
| "loss": 0.4863, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.7711598746081505, | |
| "grad_norm": 0.505370020866394, | |
| "learning_rate": 4.346459650359798e-06, | |
| "loss": 0.4845, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.7758620689655173, | |
| "grad_norm": 4.284893989562988, | |
| "learning_rate": 4.341935229491525e-06, | |
| "loss": 0.4543, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.780564263322884, | |
| "grad_norm": 0.4744650423526764, | |
| "learning_rate": 4.337397573785659e-06, | |
| "loss": 0.5041, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.7852664576802508, | |
| "grad_norm": 0.8564308285713196, | |
| "learning_rate": 4.332846715846566e-06, | |
| "loss": 0.4704, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.7899686520376177, | |
| "grad_norm": 0.573800265789032, | |
| "learning_rate": 4.328282688373479e-06, | |
| "loss": 0.491, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.7946708463949843, | |
| "grad_norm": 0.5891814827919006, | |
| "learning_rate": 4.323705524160258e-06, | |
| "loss": 0.4892, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.799373040752351, | |
| "grad_norm": 0.5569722056388855, | |
| "learning_rate": 4.319115256095149e-06, | |
| "loss": 0.4657, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.804075235109718, | |
| "grad_norm": 0.4922447204589844, | |
| "learning_rate": 4.314511917160557e-06, | |
| "loss": 0.4669, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.8087774294670846, | |
| "grad_norm": 0.49718669056892395, | |
| "learning_rate": 4.3098955404328045e-06, | |
| "loss": 0.4593, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.8134796238244513, | |
| "grad_norm": 0.544978141784668, | |
| "learning_rate": 4.305266159081895e-06, | |
| "loss": 0.4806, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.8181818181818183, | |
| "grad_norm": 0.5504943132400513, | |
| "learning_rate": 4.3006238063712725e-06, | |
| "loss": 0.4663, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.822884012539185, | |
| "grad_norm": 0.5914635062217712, | |
| "learning_rate": 4.295968515657583e-06, | |
| "loss": 0.5008, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.8275862068965516, | |
| "grad_norm": 0.6797587275505066, | |
| "learning_rate": 4.29130032039044e-06, | |
| "loss": 0.4837, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.8322884012539185, | |
| "grad_norm": 0.49545228481292725, | |
| "learning_rate": 4.2866192541121755e-06, | |
| "loss": 0.4741, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.8369905956112853, | |
| "grad_norm": 0.5231736898422241, | |
| "learning_rate": 4.281925350457606e-06, | |
| "loss": 0.4763, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.841692789968652, | |
| "grad_norm": 0.4704969525337219, | |
| "learning_rate": 4.277218643153787e-06, | |
| "loss": 0.4778, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.8463949843260188, | |
| "grad_norm": 0.5036218762397766, | |
| "learning_rate": 4.272499166019771e-06, | |
| "loss": 0.4778, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.8510971786833856, | |
| "grad_norm": 0.5395018458366394, | |
| "learning_rate": 4.267766952966369e-06, | |
| "loss": 0.4659, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.8557993730407523, | |
| "grad_norm": 0.5380336046218872, | |
| "learning_rate": 4.2630220379959006e-06, | |
| "loss": 0.4433, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.8605015673981191, | |
| "grad_norm": 0.5290525555610657, | |
| "learning_rate": 4.258264455201953e-06, | |
| "loss": 0.4663, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.865203761755486, | |
| "grad_norm": 1.051685094833374, | |
| "learning_rate": 4.2534942387691335e-06, | |
| "loss": 0.4906, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.8699059561128526, | |
| "grad_norm": 0.5118448734283447, | |
| "learning_rate": 4.248711422972829e-06, | |
| "loss": 0.477, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.8746081504702194, | |
| "grad_norm": 0.6070737242698669, | |
| "learning_rate": 4.243916042178954e-06, | |
| "loss": 0.4602, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.8793103448275863, | |
| "grad_norm": 0.827873170375824, | |
| "learning_rate": 4.239108130843709e-06, | |
| "loss": 0.4671, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.884012539184953, | |
| "grad_norm": 0.5535209774971008, | |
| "learning_rate": 4.234287723513326e-06, | |
| "loss": 0.4933, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.8887147335423198, | |
| "grad_norm": 0.5281742811203003, | |
| "learning_rate": 4.229454854823827e-06, | |
| "loss": 0.4904, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.8934169278996866, | |
| "grad_norm": 0.5530508756637573, | |
| "learning_rate": 4.224609559500772e-06, | |
| "loss": 0.5034, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.8981191222570533, | |
| "grad_norm": 0.47475168108940125, | |
| "learning_rate": 4.21975187235901e-06, | |
| "loss": 0.454, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.90282131661442, | |
| "grad_norm": 0.5026959776878357, | |
| "learning_rate": 4.21488182830243e-06, | |
| "loss": 0.4833, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.907523510971787, | |
| "grad_norm": 0.520931601524353, | |
| "learning_rate": 4.209999462323706e-06, | |
| "loss": 0.4587, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.9122257053291536, | |
| "grad_norm": 0.584832489490509, | |
| "learning_rate": 4.20510480950405e-06, | |
| "loss": 0.4924, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.9169278996865202, | |
| "grad_norm": 0.5310719609260559, | |
| "learning_rate": 4.200197905012961e-06, | |
| "loss": 0.4525, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.9216300940438873, | |
| "grad_norm": 0.48304978013038635, | |
| "learning_rate": 4.195278784107965e-06, | |
| "loss": 0.4703, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.926332288401254, | |
| "grad_norm": 0.4856540560722351, | |
| "learning_rate": 4.19034748213437e-06, | |
| "loss": 0.472, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.9310344827586206, | |
| "grad_norm": 0.500950038433075, | |
| "learning_rate": 4.185404034525008e-06, | |
| "loss": 0.4641, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.9357366771159876, | |
| "grad_norm": 0.5088744163513184, | |
| "learning_rate": 4.180448476799981e-06, | |
| "loss": 0.5019, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.9404388714733543, | |
| "grad_norm": 0.5200552940368652, | |
| "learning_rate": 4.175480844566404e-06, | |
| "loss": 0.4744, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.9451410658307209, | |
| "grad_norm": 0.47198665142059326, | |
| "learning_rate": 4.170501173518152e-06, | |
| "loss": 0.4678, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.9498432601880877, | |
| "grad_norm": 0.4798610806465149, | |
| "learning_rate": 4.165509499435604e-06, | |
| "loss": 0.4668, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.9545454545454546, | |
| "grad_norm": 0.48917219042778015, | |
| "learning_rate": 4.16050585818538e-06, | |
| "loss": 0.4811, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.9592476489028212, | |
| "grad_norm": 0.7600466012954712, | |
| "learning_rate": 4.155490285720092e-06, | |
| "loss": 0.504, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.963949843260188, | |
| "grad_norm": 0.48938053846359253, | |
| "learning_rate": 4.150462818078079e-06, | |
| "loss": 0.4918, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.968652037617555, | |
| "grad_norm": 0.6591557264328003, | |
| "learning_rate": 4.145423491383153e-06, | |
| "loss": 0.4864, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.9733542319749215, | |
| "grad_norm": 0.5030936598777771, | |
| "learning_rate": 4.14037234184433e-06, | |
| "loss": 0.5032, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.9780564263322884, | |
| "grad_norm": 0.46507832407951355, | |
| "learning_rate": 4.135309405755583e-06, | |
| "loss": 0.4885, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.9827586206896552, | |
| "grad_norm": 0.6155677437782288, | |
| "learning_rate": 4.130234719495574e-06, | |
| "loss": 0.4721, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.9874608150470219, | |
| "grad_norm": 0.5396779179573059, | |
| "learning_rate": 4.125148319527391e-06, | |
| "loss": 0.4628, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.9921630094043887, | |
| "grad_norm": 0.5095715522766113, | |
| "learning_rate": 4.1200502423982904e-06, | |
| "loss": 0.4831, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.9968652037617556, | |
| "grad_norm": 0.5792081356048584, | |
| "learning_rate": 4.1149405247394295e-06, | |
| "loss": 0.4878, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.0047021943573666, | |
| "grad_norm": 1.0187262296676636, | |
| "learning_rate": 4.10981920326561e-06, | |
| "loss": 0.913, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.0094043887147337, | |
| "grad_norm": 0.4980756938457489, | |
| "learning_rate": 4.104686314775009e-06, | |
| "loss": 0.4405, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.0141065830721003, | |
| "grad_norm": 0.49685895442962646, | |
| "learning_rate": 4.099541896148914e-06, | |
| "loss": 0.4496, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.018808777429467, | |
| "grad_norm": 0.5676024556159973, | |
| "learning_rate": 4.094385984351462e-06, | |
| "loss": 0.444, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.023510971786834, | |
| "grad_norm": 0.9495695233345032, | |
| "learning_rate": 4.0892186164293715e-06, | |
| "loss": 0.4645, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.0282131661442007, | |
| "grad_norm": 0.5299291610717773, | |
| "learning_rate": 4.0840398295116745e-06, | |
| "loss": 0.4527, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.0329153605015673, | |
| "grad_norm": 1.048509955406189, | |
| "learning_rate": 4.078849660809456e-06, | |
| "loss": 0.4487, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.0376175548589344, | |
| "grad_norm": 0.5003053545951843, | |
| "learning_rate": 4.073648147615579e-06, | |
| "loss": 0.4304, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.042319749216301, | |
| "grad_norm": 0.5424538254737854, | |
| "learning_rate": 4.068435327304421e-06, | |
| "loss": 0.457, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.0470219435736676, | |
| "grad_norm": 0.467619925737381, | |
| "learning_rate": 4.063211237331603e-06, | |
| "loss": 0.4549, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.0517241379310347, | |
| "grad_norm": 0.5217760801315308, | |
| "learning_rate": 4.057975915233725e-06, | |
| "loss": 0.4385, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.0564263322884013, | |
| "grad_norm": 0.673935055732727, | |
| "learning_rate": 4.052729398628089e-06, | |
| "loss": 0.4657, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.061128526645768, | |
| "grad_norm": 0.5437114834785461, | |
| "learning_rate": 4.047471725212437e-06, | |
| "loss": 0.4614, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.0658307210031346, | |
| "grad_norm": 0.5773066282272339, | |
| "learning_rate": 4.042202932764673e-06, | |
| "loss": 0.4476, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.0705329153605017, | |
| "grad_norm": 0.4945369362831116, | |
| "learning_rate": 4.036923059142595e-06, | |
| "loss": 0.4487, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.0752351097178683, | |
| "grad_norm": 0.49815094470977783, | |
| "learning_rate": 4.031632142283623e-06, | |
| "loss": 0.4424, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.079937304075235, | |
| "grad_norm": 0.5249453186988831, | |
| "learning_rate": 4.026330220204524e-06, | |
| "loss": 0.451, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.084639498432602, | |
| "grad_norm": 0.4735757112503052, | |
| "learning_rate": 4.021017331001146e-06, | |
| "loss": 0.4413, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.0893416927899686, | |
| "grad_norm": 0.484040766954422, | |
| "learning_rate": 4.015693512848131e-06, | |
| "loss": 0.4487, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.0940438871473352, | |
| "grad_norm": 0.5543732047080994, | |
| "learning_rate": 4.0103588039986556e-06, | |
| "loss": 0.452, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.0987460815047023, | |
| "grad_norm": 0.5293512940406799, | |
| "learning_rate": 4.005013242784146e-06, | |
| "loss": 0.4605, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.103448275862069, | |
| "grad_norm": 0.5147669911384583, | |
| "learning_rate": 3.999656867614006e-06, | |
| "loss": 0.4479, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.1081504702194356, | |
| "grad_norm": 0.47510066628456116, | |
| "learning_rate": 3.994289716975341e-06, | |
| "loss": 0.4629, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.1128526645768027, | |
| "grad_norm": 0.5011350512504578, | |
| "learning_rate": 3.988911829432682e-06, | |
| "loss": 0.4487, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.1175548589341693, | |
| "grad_norm": 0.5059776902198792, | |
| "learning_rate": 3.983523243627706e-06, | |
| "loss": 0.4456, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.122257053291536, | |
| "grad_norm": 0.4974157512187958, | |
| "learning_rate": 3.978123998278962e-06, | |
| "loss": 0.436, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.126959247648903, | |
| "grad_norm": 0.645297646522522, | |
| "learning_rate": 3.97271413218159e-06, | |
| "loss": 0.4653, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.1316614420062696, | |
| "grad_norm": 0.46000078320503235, | |
| "learning_rate": 3.9672936842070425e-06, | |
| "loss": 0.4618, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.1363636363636362, | |
| "grad_norm": 0.54063880443573, | |
| "learning_rate": 3.9618626933028086e-06, | |
| "loss": 0.4662, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.1410658307210033, | |
| "grad_norm": 0.4782627820968628, | |
| "learning_rate": 3.956421198492128e-06, | |
| "loss": 0.448, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.14576802507837, | |
| "grad_norm": 0.6332294344902039, | |
| "learning_rate": 3.950969238873714e-06, | |
| "loss": 0.447, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.1504702194357366, | |
| "grad_norm": 0.49692678451538086, | |
| "learning_rate": 3.9455068536214765e-06, | |
| "loss": 0.4758, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.1551724137931036, | |
| "grad_norm": 0.5347000360488892, | |
| "learning_rate": 3.9400340819842335e-06, | |
| "loss": 0.4565, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.1598746081504703, | |
| "grad_norm": 0.5034692883491516, | |
| "learning_rate": 3.934550963285432e-06, | |
| "loss": 0.4565, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.164576802507837, | |
| "grad_norm": 0.4939899444580078, | |
| "learning_rate": 3.9290575369228664e-06, | |
| "loss": 0.4284, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.169278996865204, | |
| "grad_norm": 0.5838932394981384, | |
| "learning_rate": 3.923553842368396e-06, | |
| "loss": 0.4667, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.1739811912225706, | |
| "grad_norm": 0.4959818124771118, | |
| "learning_rate": 3.918039919167658e-06, | |
| "loss": 0.4335, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.1786833855799372, | |
| "grad_norm": 0.5034835934638977, | |
| "learning_rate": 3.912515806939786e-06, | |
| "loss": 0.448, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.183385579937304, | |
| "grad_norm": 0.526311993598938, | |
| "learning_rate": 3.906981545377124e-06, | |
| "loss": 0.4597, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.188087774294671, | |
| "grad_norm": 0.5168907046318054, | |
| "learning_rate": 3.901437174244943e-06, | |
| "loss": 0.4299, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.1927899686520376, | |
| "grad_norm": 0.5047626495361328, | |
| "learning_rate": 3.895882733381154e-06, | |
| "loss": 0.4477, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.197492163009404, | |
| "grad_norm": 0.541617214679718, | |
| "learning_rate": 3.890318262696023e-06, | |
| "loss": 0.4664, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.2021943573667713, | |
| "grad_norm": 0.49817150831222534, | |
| "learning_rate": 3.8847438021718805e-06, | |
| "loss": 0.4417, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.206896551724138, | |
| "grad_norm": 0.5194390416145325, | |
| "learning_rate": 3.879159391862839e-06, | |
| "loss": 0.4631, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.2115987460815045, | |
| "grad_norm": 0.5216783285140991, | |
| "learning_rate": 3.873565071894503e-06, | |
| "loss": 0.4344, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.2163009404388716, | |
| "grad_norm": 0.5285272002220154, | |
| "learning_rate": 3.86796088246368e-06, | |
| "loss": 0.4318, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.2210031347962382, | |
| "grad_norm": 0.7032628059387207, | |
| "learning_rate": 3.8623468638380905e-06, | |
| "loss": 0.4147, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.225705329153605, | |
| "grad_norm": 0.5304352045059204, | |
| "learning_rate": 3.856723056356085e-06, | |
| "loss": 0.4587, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.230407523510972, | |
| "grad_norm": 0.5279494524002075, | |
| "learning_rate": 3.851089500426346e-06, | |
| "loss": 0.4456, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.2351097178683386, | |
| "grad_norm": 0.5182958841323853, | |
| "learning_rate": 3.845446236527605e-06, | |
| "loss": 0.4449, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.239811912225705, | |
| "grad_norm": 0.5063281059265137, | |
| "learning_rate": 3.8397933052083445e-06, | |
| "loss": 0.4623, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.2445141065830723, | |
| "grad_norm": 0.5559848546981812, | |
| "learning_rate": 3.834130747086512e-06, | |
| "loss": 0.4473, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.249216300940439, | |
| "grad_norm": 0.5276119709014893, | |
| "learning_rate": 3.828458602849226e-06, | |
| "loss": 0.4468, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.2539184952978055, | |
| "grad_norm": 0.5662305951118469, | |
| "learning_rate": 3.822776913252485e-06, | |
| "loss": 0.4504, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.2586206896551726, | |
| "grad_norm": 0.5402324199676514, | |
| "learning_rate": 3.817085719120872e-06, | |
| "loss": 0.4017, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.2633228840125392, | |
| "grad_norm": 0.7308655977249146, | |
| "learning_rate": 3.811385061347263e-06, | |
| "loss": 0.444, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.268025078369906, | |
| "grad_norm": 0.5518504977226257, | |
| "learning_rate": 3.805674980892535e-06, | |
| "loss": 0.4693, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.2727272727272725, | |
| "grad_norm": 0.6556211113929749, | |
| "learning_rate": 3.7999555187852667e-06, | |
| "loss": 0.4567, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.2774294670846396, | |
| "grad_norm": 0.5011089444160461, | |
| "learning_rate": 3.7942267161214497e-06, | |
| "loss": 0.4449, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.282131661442006, | |
| "grad_norm": 0.5480395555496216, | |
| "learning_rate": 3.7884886140641884e-06, | |
| "loss": 0.4678, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.2868338557993733, | |
| "grad_norm": 0.5292133092880249, | |
| "learning_rate": 3.7827412538434062e-06, | |
| "loss": 0.4323, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.29153605015674, | |
| "grad_norm": 0.49213892221450806, | |
| "learning_rate": 3.7769846767555495e-06, | |
| "loss": 0.4591, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.2962382445141065, | |
| "grad_norm": 0.5282753109931946, | |
| "learning_rate": 3.7712189241632898e-06, | |
| "loss": 0.4549, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.300940438871473, | |
| "grad_norm": 0.5287954211235046, | |
| "learning_rate": 3.7654440374952288e-06, | |
| "loss": 0.4419, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.30564263322884, | |
| "grad_norm": 0.48144471645355225, | |
| "learning_rate": 3.7596600582455976e-06, | |
| "loss": 0.4196, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.310344827586207, | |
| "grad_norm": 0.5843647122383118, | |
| "learning_rate": 3.75386702797396e-06, | |
| "loss": 0.4198, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.3150470219435735, | |
| "grad_norm": 0.4764229953289032, | |
| "learning_rate": 3.7480649883049164e-06, | |
| "loss": 0.4254, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.3197492163009406, | |
| "grad_norm": 0.5592864155769348, | |
| "learning_rate": 3.7422539809277993e-06, | |
| "loss": 0.4364, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.324451410658307, | |
| "grad_norm": 0.5710645318031311, | |
| "learning_rate": 3.736434047596379e-06, | |
| "loss": 0.4435, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.329153605015674, | |
| "grad_norm": 0.5412624478340149, | |
| "learning_rate": 3.73060523012856e-06, | |
| "loss": 0.4504, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.333855799373041, | |
| "grad_norm": 0.49665457010269165, | |
| "learning_rate": 3.724767570406082e-06, | |
| "loss": 0.4677, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.3385579937304075, | |
| "grad_norm": 0.5091891884803772, | |
| "learning_rate": 3.7189211103742206e-06, | |
| "loss": 0.4249, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.343260188087774, | |
| "grad_norm": 0.4907149076461792, | |
| "learning_rate": 3.7130658920414818e-06, | |
| "loss": 0.46, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.347962382445141, | |
| "grad_norm": 0.4705374240875244, | |
| "learning_rate": 3.7072019574793034e-06, | |
| "loss": 0.4715, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.352664576802508, | |
| "grad_norm": 0.5139763355255127, | |
| "learning_rate": 3.701329348821752e-06, | |
| "loss": 0.4523, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.3573667711598745, | |
| "grad_norm": 0.5120819211006165, | |
| "learning_rate": 3.695448108265221e-06, | |
| "loss": 0.4373, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.3620689655172415, | |
| "grad_norm": 0.45697054266929626, | |
| "learning_rate": 3.6895582780681254e-06, | |
| "loss": 0.4337, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.366771159874608, | |
| "grad_norm": 0.48620718717575073, | |
| "learning_rate": 3.683659900550598e-06, | |
| "loss": 0.4618, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.371473354231975, | |
| "grad_norm": 0.5233144760131836, | |
| "learning_rate": 3.6777530180941894e-06, | |
| "loss": 0.4441, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.376175548589342, | |
| "grad_norm": 0.5168657898902893, | |
| "learning_rate": 3.671837673141559e-06, | |
| "loss": 0.4309, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.3808777429467085, | |
| "grad_norm": 0.5887295007705688, | |
| "learning_rate": 3.6659139081961707e-06, | |
| "loss": 0.4469, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.385579937304075, | |
| "grad_norm": 0.5009861588478088, | |
| "learning_rate": 3.6599817658219916e-06, | |
| "loss": 0.4487, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.3902821316614418, | |
| "grad_norm": 0.49702706933021545, | |
| "learning_rate": 3.6540412886431796e-06, | |
| "loss": 0.4605, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.394984326018809, | |
| "grad_norm": 0.4844914376735687, | |
| "learning_rate": 3.648092519343783e-06, | |
| "loss": 0.4435, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.3996865203761755, | |
| "grad_norm": 0.5124401450157166, | |
| "learning_rate": 3.642135500667431e-06, | |
| "loss": 0.4312, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.4043887147335425, | |
| "grad_norm": 0.5017268657684326, | |
| "learning_rate": 3.6361702754170247e-06, | |
| "loss": 0.4465, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.409090909090909, | |
| "grad_norm": 0.51381915807724, | |
| "learning_rate": 3.630196886454435e-06, | |
| "loss": 0.4288, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.413793103448276, | |
| "grad_norm": 0.5275219082832336, | |
| "learning_rate": 3.62421537670019e-06, | |
| "loss": 0.4431, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.4184952978056424, | |
| "grad_norm": 0.5186277627944946, | |
| "learning_rate": 3.618225789133167e-06, | |
| "loss": 0.4465, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.4231974921630095, | |
| "grad_norm": 0.7465965151786804, | |
| "learning_rate": 3.612228166790287e-06, | |
| "loss": 0.445, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.427899686520376, | |
| "grad_norm": 0.5389880537986755, | |
| "learning_rate": 3.606222552766201e-06, | |
| "loss": 0.4548, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.4326018808777428, | |
| "grad_norm": 0.8773667216300964, | |
| "learning_rate": 3.6002089902129844e-06, | |
| "loss": 0.4482, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.43730407523511, | |
| "grad_norm": 0.4819961190223694, | |
| "learning_rate": 3.5941875223398225e-06, | |
| "loss": 0.4387, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.4420062695924765, | |
| "grad_norm": 0.5448062419891357, | |
| "learning_rate": 3.588158192412707e-06, | |
| "loss": 0.4347, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.446708463949843, | |
| "grad_norm": 4.079322338104248, | |
| "learning_rate": 3.582121043754116e-06, | |
| "loss": 0.4383, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.45141065830721, | |
| "grad_norm": 0.9761962890625, | |
| "learning_rate": 3.5760761197427097e-06, | |
| "loss": 0.4377, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.456112852664577, | |
| "grad_norm": 0.5383471846580505, | |
| "learning_rate": 3.570023463813017e-06, | |
| "loss": 0.43, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.4608150470219434, | |
| "grad_norm": 0.47819817066192627, | |
| "learning_rate": 3.5639631194551216e-06, | |
| "loss": 0.4526, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.4655172413793105, | |
| "grad_norm": 0.5026260018348694, | |
| "learning_rate": 3.557895130214352e-06, | |
| "loss": 0.4445, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.470219435736677, | |
| "grad_norm": 0.4826788008213043, | |
| "learning_rate": 3.5518195396909653e-06, | |
| "loss": 0.4634, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.4749216300940438, | |
| "grad_norm": 0.5463977456092834, | |
| "learning_rate": 3.5457363915398384e-06, | |
| "loss": 0.4526, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.479623824451411, | |
| "grad_norm": 0.5236705541610718, | |
| "learning_rate": 3.539645729470151e-06, | |
| "loss": 0.4383, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.4843260188087775, | |
| "grad_norm": 0.5190213322639465, | |
| "learning_rate": 3.5335475972450715e-06, | |
| "loss": 0.4378, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.489028213166144, | |
| "grad_norm": 0.7202126383781433, | |
| "learning_rate": 3.5274420386814458e-06, | |
| "loss": 0.4639, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.493730407523511, | |
| "grad_norm": 0.6717228293418884, | |
| "learning_rate": 3.521329097649478e-06, | |
| "loss": 0.4458, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.498432601880878, | |
| "grad_norm": 0.49824005365371704, | |
| "learning_rate": 3.515208818072418e-06, | |
| "loss": 0.4429, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.5031347962382444, | |
| "grad_norm": 0.5616832971572876, | |
| "learning_rate": 3.509081243926247e-06, | |
| "loss": 0.4306, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.507836990595611, | |
| "grad_norm": 0.5087556838989258, | |
| "learning_rate": 3.5029464192393557e-06, | |
| "loss": 0.4629, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.512539184952978, | |
| "grad_norm": 0.5290883183479309, | |
| "learning_rate": 3.4968043880922363e-06, | |
| "loss": 0.414, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.5172413793103448, | |
| "grad_norm": 0.5456948280334473, | |
| "learning_rate": 3.4906551946171603e-06, | |
| "loss": 0.4238, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.521943573667712, | |
| "grad_norm": 0.49592944979667664, | |
| "learning_rate": 3.484498882997861e-06, | |
| "loss": 0.4224, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.5266457680250785, | |
| "grad_norm": 0.4961735010147095, | |
| "learning_rate": 3.478335497469219e-06, | |
| "loss": 0.4507, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.531347962382445, | |
| "grad_norm": 0.5094319581985474, | |
| "learning_rate": 3.472165082316943e-06, | |
| "loss": 0.4495, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.5360501567398117, | |
| "grad_norm": 0.620319664478302, | |
| "learning_rate": 3.465987681877251e-06, | |
| "loss": 0.4421, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.540752351097179, | |
| "grad_norm": 0.5389400720596313, | |
| "learning_rate": 3.4598033405365527e-06, | |
| "loss": 0.4547, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.5454545454545454, | |
| "grad_norm": 0.7147080898284912, | |
| "learning_rate": 3.45361210273113e-06, | |
| "loss": 0.4478, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.5501567398119125, | |
| "grad_norm": 0.5107600092887878, | |
| "learning_rate": 3.447414012946818e-06, | |
| "loss": 0.4349, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.554858934169279, | |
| "grad_norm": 0.5057627558708191, | |
| "learning_rate": 3.4412091157186853e-06, | |
| "loss": 0.4502, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.5595611285266457, | |
| "grad_norm": 0.5278921127319336, | |
| "learning_rate": 3.4349974556307146e-06, | |
| "loss": 0.4415, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.5642633228840124, | |
| "grad_norm": 0.5077342987060547, | |
| "learning_rate": 3.4287790773154807e-06, | |
| "loss": 0.465, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.5689655172413794, | |
| "grad_norm": 0.5148009061813354, | |
| "learning_rate": 3.4225540254538297e-06, | |
| "loss": 0.4613, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.573667711598746, | |
| "grad_norm": 0.6661203503608704, | |
| "learning_rate": 3.416322344774562e-06, | |
| "loss": 0.4412, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.5783699059561127, | |
| "grad_norm": 0.5129944086074829, | |
| "learning_rate": 3.4100840800541055e-06, | |
| "loss": 0.463, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.58307210031348, | |
| "grad_norm": 1.101260781288147, | |
| "learning_rate": 3.4038392761161986e-06, | |
| "loss": 0.4528, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.5877742946708464, | |
| "grad_norm": 0.9880437254905701, | |
| "learning_rate": 3.3975879778315634e-06, | |
| "loss": 0.4229, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.592476489028213, | |
| "grad_norm": 0.4811207354068756, | |
| "learning_rate": 3.391330230117587e-06, | |
| "loss": 0.3961, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.5971786833855797, | |
| "grad_norm": 0.5389580726623535, | |
| "learning_rate": 3.385066077937997e-06, | |
| "loss": 0.4654, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.6018808777429467, | |
| "grad_norm": 0.5345444679260254, | |
| "learning_rate": 3.378795566302541e-06, | |
| "loss": 0.4249, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.6065830721003134, | |
| "grad_norm": 0.5955337285995483, | |
| "learning_rate": 3.372518740266658e-06, | |
| "loss": 0.4436, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.6112852664576804, | |
| "grad_norm": 0.5519671440124512, | |
| "learning_rate": 3.36623564493116e-06, | |
| "loss": 0.4559, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.615987460815047, | |
| "grad_norm": 0.4986394941806793, | |
| "learning_rate": 3.3599463254419047e-06, | |
| "loss": 0.4598, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.6206896551724137, | |
| "grad_norm": 0.5792630314826965, | |
| "learning_rate": 3.3536508269894724e-06, | |
| "loss": 0.4672, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.6253918495297803, | |
| "grad_norm": 0.5608819127082825, | |
| "learning_rate": 3.347349194808842e-06, | |
| "loss": 0.454, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.6300940438871474, | |
| "grad_norm": 0.5010476112365723, | |
| "learning_rate": 3.3410414741790625e-06, | |
| "loss": 0.4288, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.634796238244514, | |
| "grad_norm": 0.5262719392776489, | |
| "learning_rate": 3.3347277104229332e-06, | |
| "loss": 0.4436, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.639498432601881, | |
| "grad_norm": 0.5418802499771118, | |
| "learning_rate": 3.3284079489066728e-06, | |
| "loss": 0.4382, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.6442006269592477, | |
| "grad_norm": 0.5096928477287292, | |
| "learning_rate": 3.3220822350395966e-06, | |
| "loss": 0.43, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.6489028213166144, | |
| "grad_norm": 0.5615900158882141, | |
| "learning_rate": 3.31575061427379e-06, | |
| "loss": 0.4312, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.653605015673981, | |
| "grad_norm": 0.5129327774047852, | |
| "learning_rate": 3.3094131321037783e-06, | |
| "loss": 0.4378, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.658307210031348, | |
| "grad_norm": 0.4975152313709259, | |
| "learning_rate": 3.303069834066206e-06, | |
| "loss": 0.4331, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.6630094043887147, | |
| "grad_norm": 0.4807284474372864, | |
| "learning_rate": 3.2967207657395055e-06, | |
| "loss": 0.4745, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.6677115987460818, | |
| "grad_norm": 0.5497413873672485, | |
| "learning_rate": 3.2903659727435692e-06, | |
| "loss": 0.4618, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.6724137931034484, | |
| "grad_norm": 0.6683759689331055, | |
| "learning_rate": 3.284005500739423e-06, | |
| "loss": 0.4575, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.677115987460815, | |
| "grad_norm": 0.497938334941864, | |
| "learning_rate": 3.2776393954289e-06, | |
| "loss": 0.4293, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.6818181818181817, | |
| "grad_norm": 0.6120790839195251, | |
| "learning_rate": 3.271267702554307e-06, | |
| "loss": 0.4202, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.6865203761755487, | |
| "grad_norm": 0.5267842411994934, | |
| "learning_rate": 3.2648904678981032e-06, | |
| "loss": 0.4643, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.6912225705329154, | |
| "grad_norm": 0.6027296185493469, | |
| "learning_rate": 3.2585077372825636e-06, | |
| "loss": 0.4127, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.695924764890282, | |
| "grad_norm": 0.501907229423523, | |
| "learning_rate": 3.2521195565694543e-06, | |
| "loss": 0.4444, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.700626959247649, | |
| "grad_norm": 0.5379273891448975, | |
| "learning_rate": 3.2457259716597023e-06, | |
| "loss": 0.4458, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.7053291536050157, | |
| "grad_norm": 0.4921974241733551, | |
| "learning_rate": 3.2393270284930658e-06, | |
| "loss": 0.4538, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.7100313479623823, | |
| "grad_norm": 0.5309234261512756, | |
| "learning_rate": 3.2329227730478026e-06, | |
| "loss": 0.4302, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.714733542319749, | |
| "grad_norm": 0.5694313049316406, | |
| "learning_rate": 3.2265132513403415e-06, | |
| "loss": 0.42, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.719435736677116, | |
| "grad_norm": 0.5232118964195251, | |
| "learning_rate": 3.22009850942495e-06, | |
| "loss": 0.4451, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.7241379310344827, | |
| "grad_norm": 0.4894232749938965, | |
| "learning_rate": 3.213678593393405e-06, | |
| "loss": 0.4603, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.7288401253918497, | |
| "grad_norm": 0.5060969591140747, | |
| "learning_rate": 3.207253549374662e-06, | |
| "loss": 0.4565, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.7335423197492164, | |
| "grad_norm": 0.5979425311088562, | |
| "learning_rate": 3.200823423534519e-06, | |
| "loss": 0.4438, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.738244514106583, | |
| "grad_norm": 0.5185328125953674, | |
| "learning_rate": 3.194388262075293e-06, | |
| "loss": 0.4475, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.7429467084639496, | |
| "grad_norm": 0.4780375361442566, | |
| "learning_rate": 3.1879481112354804e-06, | |
| "loss": 0.449, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.7476489028213167, | |
| "grad_norm": 0.5120851993560791, | |
| "learning_rate": 3.181503017289428e-06, | |
| "loss": 0.4108, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.7523510971786833, | |
| "grad_norm": 0.5004064440727234, | |
| "learning_rate": 3.175053026547002e-06, | |
| "loss": 0.4154, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.7570532915360504, | |
| "grad_norm": 0.5117692351341248, | |
| "learning_rate": 3.16859818535325e-06, | |
| "loss": 0.4577, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.761755485893417, | |
| "grad_norm": 0.48328641057014465, | |
| "learning_rate": 3.1621385400880756e-06, | |
| "loss": 0.4523, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.7664576802507836, | |
| "grad_norm": 0.6039829850196838, | |
| "learning_rate": 3.1556741371658984e-06, | |
| "loss": 0.4549, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.7711598746081503, | |
| "grad_norm": 0.5623074769973755, | |
| "learning_rate": 3.1492050230353238e-06, | |
| "loss": 0.4461, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.7758620689655173, | |
| "grad_norm": 0.5452325344085693, | |
| "learning_rate": 3.142731244178809e-06, | |
| "loss": 0.4202, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.780564263322884, | |
| "grad_norm": 0.5552520155906677, | |
| "learning_rate": 3.1362528471123277e-06, | |
| "loss": 0.4033, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.785266457680251, | |
| "grad_norm": 0.566600501537323, | |
| "learning_rate": 3.129769878385039e-06, | |
| "loss": 0.4086, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.7899686520376177, | |
| "grad_norm": 0.4819733798503876, | |
| "learning_rate": 3.1232823845789473e-06, | |
| "loss": 0.4511, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.7946708463949843, | |
| "grad_norm": 0.7382413744926453, | |
| "learning_rate": 3.1167904123085736e-06, | |
| "loss": 0.4563, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.799373040752351, | |
| "grad_norm": 0.5518636703491211, | |
| "learning_rate": 3.110294008220617e-06, | |
| "loss": 0.4437, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.804075235109718, | |
| "grad_norm": 0.48016345500946045, | |
| "learning_rate": 3.1037932189936205e-06, | |
| "loss": 0.4408, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.8087774294670846, | |
| "grad_norm": 0.5128410458564758, | |
| "learning_rate": 3.097288091337635e-06, | |
| "loss": 0.4359, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.8134796238244513, | |
| "grad_norm": 0.5333647727966309, | |
| "learning_rate": 3.0907786719938876e-06, | |
| "loss": 0.4251, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.8181818181818183, | |
| "grad_norm": 0.46841663122177124, | |
| "learning_rate": 3.084265007734436e-06, | |
| "loss": 0.4357, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.822884012539185, | |
| "grad_norm": 0.5592591762542725, | |
| "learning_rate": 3.0777471453618457e-06, | |
| "loss": 0.4596, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.8275862068965516, | |
| "grad_norm": 0.5454399585723877, | |
| "learning_rate": 3.0712251317088426e-06, | |
| "loss": 0.4322, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.8322884012539182, | |
| "grad_norm": 0.5095115303993225, | |
| "learning_rate": 3.064699013637983e-06, | |
| "loss": 0.451, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.8369905956112853, | |
| "grad_norm": 0.56089848279953, | |
| "learning_rate": 3.0581688380413115e-06, | |
| "loss": 0.4352, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.841692789968652, | |
| "grad_norm": 0.5011700391769409, | |
| "learning_rate": 3.0516346518400315e-06, | |
| "loss": 0.4521, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.846394984326019, | |
| "grad_norm": 0.5560495853424072, | |
| "learning_rate": 3.0450965019841593e-06, | |
| "loss": 0.4522, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.8510971786833856, | |
| "grad_norm": 0.5144981145858765, | |
| "learning_rate": 3.0385544354521957e-06, | |
| "loss": 0.4146, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.8557993730407523, | |
| "grad_norm": 0.5166004300117493, | |
| "learning_rate": 3.0320084992507814e-06, | |
| "loss": 0.4423, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.860501567398119, | |
| "grad_norm": 0.5098627805709839, | |
| "learning_rate": 3.0254587404143604e-06, | |
| "loss": 0.4793, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.865203761755486, | |
| "grad_norm": 0.5003807544708252, | |
| "learning_rate": 3.0189052060048464e-06, | |
| "loss": 0.4409, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.8699059561128526, | |
| "grad_norm": 0.5339270830154419, | |
| "learning_rate": 3.01234794311128e-06, | |
| "loss": 0.4376, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.8746081504702197, | |
| "grad_norm": 0.5303454995155334, | |
| "learning_rate": 3.0057869988494925e-06, | |
| "loss": 0.4626, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.8793103448275863, | |
| "grad_norm": 0.5876331329345703, | |
| "learning_rate": 2.999222420361767e-06, | |
| "loss": 0.4532, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.884012539184953, | |
| "grad_norm": 0.5893226861953735, | |
| "learning_rate": 2.9926542548165e-06, | |
| "loss": 0.4654, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.8887147335423196, | |
| "grad_norm": 0.5249570608139038, | |
| "learning_rate": 2.9860825494078605e-06, | |
| "loss": 0.4363, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.8934169278996866, | |
| "grad_norm": 0.5067557692527771, | |
| "learning_rate": 2.979507351355454e-06, | |
| "loss": 0.4532, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.8981191222570533, | |
| "grad_norm": 0.6186209917068481, | |
| "learning_rate": 2.972928707903981e-06, | |
| "loss": 0.4417, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.9028213166144203, | |
| "grad_norm": 0.5927693843841553, | |
| "learning_rate": 2.966346666322898e-06, | |
| "loss": 0.4379, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.907523510971787, | |
| "grad_norm": 0.6109799742698669, | |
| "learning_rate": 2.9597612739060775e-06, | |
| "loss": 0.4181, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.9122257053291536, | |
| "grad_norm": 0.5325103998184204, | |
| "learning_rate": 2.9531725779714713e-06, | |
| "loss": 0.4494, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.91692789968652, | |
| "grad_norm": 0.5224953293800354, | |
| "learning_rate": 2.9465806258607653e-06, | |
| "loss": 0.4558, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.9216300940438873, | |
| "grad_norm": 0.5702230334281921, | |
| "learning_rate": 2.939985464939043e-06, | |
| "loss": 0.4587, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.926332288401254, | |
| "grad_norm": 0.549354612827301, | |
| "learning_rate": 2.9333871425944434e-06, | |
| "loss": 0.4047, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.9310344827586206, | |
| "grad_norm": 0.5000407099723816, | |
| "learning_rate": 2.926785706237822e-06, | |
| "loss": 0.4338, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.9357366771159876, | |
| "grad_norm": 0.4803110361099243, | |
| "learning_rate": 2.920181203302409e-06, | |
| "loss": 0.4252, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.9404388714733543, | |
| "grad_norm": 0.519071102142334, | |
| "learning_rate": 2.91357368124347e-06, | |
| "loss": 0.4226, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.945141065830721, | |
| "grad_norm": 0.5117707252502441, | |
| "learning_rate": 2.906963187537962e-06, | |
| "loss": 0.4343, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.9498432601880875, | |
| "grad_norm": 0.5518343448638916, | |
| "learning_rate": 2.9003497696841955e-06, | |
| "loss": 0.4136, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.9545454545454546, | |
| "grad_norm": 0.4980529546737671, | |
| "learning_rate": 2.8937334752014913e-06, | |
| "loss": 0.4662, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.959247648902821, | |
| "grad_norm": 0.5124077796936035, | |
| "learning_rate": 2.887114351629839e-06, | |
| "loss": 0.4424, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.9639498432601883, | |
| "grad_norm": 0.5710898041725159, | |
| "learning_rate": 2.8804924465295575e-06, | |
| "loss": 0.4652, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.968652037617555, | |
| "grad_norm": 0.5048180818557739, | |
| "learning_rate": 2.873867807480951e-06, | |
| "loss": 0.4725, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.9733542319749215, | |
| "grad_norm": 0.5638993382453918, | |
| "learning_rate": 2.8672404820839676e-06, | |
| "loss": 0.4399, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.978056426332288, | |
| "grad_norm": 0.593982458114624, | |
| "learning_rate": 2.8606105179578584e-06, | |
| "loss": 0.4479, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.9827586206896552, | |
| "grad_norm": 0.5415588617324829, | |
| "learning_rate": 2.8539779627408332e-06, | |
| "loss": 0.4258, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.987460815047022, | |
| "grad_norm": 0.6535062789916992, | |
| "learning_rate": 2.847342864089721e-06, | |
| "loss": 0.4433, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.992163009404389, | |
| "grad_norm": 0.4936605989933014, | |
| "learning_rate": 2.8407052696796255e-06, | |
| "loss": 0.4299, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.9968652037617556, | |
| "grad_norm": 0.5490683913230896, | |
| "learning_rate": 2.834065227203584e-06, | |
| "loss": 0.4488, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 3.0047021943573666, | |
| "grad_norm": 0.5618948936462402, | |
| "learning_rate": 2.8274227843722213e-06, | |
| "loss": 0.866, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 3.0094043887147337, | |
| "grad_norm": 0.5641200542449951, | |
| "learning_rate": 2.820777988913412e-06, | |
| "loss": 0.4171, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 3.0141065830721003, | |
| "grad_norm": 0.5129956603050232, | |
| "learning_rate": 2.8141308885719337e-06, | |
| "loss": 0.4164, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 3.018808777429467, | |
| "grad_norm": 0.587412416934967, | |
| "learning_rate": 2.8074815311091265e-06, | |
| "loss": 0.3903, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 3.023510971786834, | |
| "grad_norm": 0.5251805782318115, | |
| "learning_rate": 2.8008299643025477e-06, | |
| "loss": 0.4326, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 3.0282131661442007, | |
| "grad_norm": 0.5621777772903442, | |
| "learning_rate": 2.7941762359456294e-06, | |
| "loss": 0.4258, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 3.0329153605015673, | |
| "grad_norm": 0.5080258846282959, | |
| "learning_rate": 2.787520393847334e-06, | |
| "loss": 0.4163, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 3.0376175548589344, | |
| "grad_norm": 0.6336125731468201, | |
| "learning_rate": 2.780862485831814e-06, | |
| "loss": 0.4289, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 3.042319749216301, | |
| "grad_norm": 0.5735598802566528, | |
| "learning_rate": 2.7742025597380644e-06, | |
| "loss": 0.4306, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 3.0470219435736676, | |
| "grad_norm": 0.5653077960014343, | |
| "learning_rate": 2.7675406634195824e-06, | |
| "loss": 0.422, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 3.0517241379310347, | |
| "grad_norm": 0.5047882199287415, | |
| "learning_rate": 2.7608768447440193e-06, | |
| "loss": 0.4076, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 3.0564263322884013, | |
| "grad_norm": 1.2697781324386597, | |
| "learning_rate": 2.754211151592841e-06, | |
| "loss": 0.4115, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 3.061128526645768, | |
| "grad_norm": 0.6861006617546082, | |
| "learning_rate": 2.7475436318609827e-06, | |
| "loss": 0.4309, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 3.0658307210031346, | |
| "grad_norm": 0.5912951827049255, | |
| "learning_rate": 2.7408743334565006e-06, | |
| "loss": 0.4071, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 3.0705329153605017, | |
| "grad_norm": 0.5096245408058167, | |
| "learning_rate": 2.734203304300235e-06, | |
| "loss": 0.4001, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 3.0752351097178683, | |
| "grad_norm": 0.5158068537712097, | |
| "learning_rate": 2.7275305923254607e-06, | |
| "loss": 0.4115, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 3.079937304075235, | |
| "grad_norm": 0.5201400518417358, | |
| "learning_rate": 2.720856245477544e-06, | |
| "loss": 0.4024, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 3.084639498432602, | |
| "grad_norm": 0.5423036813735962, | |
| "learning_rate": 2.7141803117135978e-06, | |
| "loss": 0.3963, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 3.0893416927899686, | |
| "grad_norm": 0.6557552218437195, | |
| "learning_rate": 2.7075028390021385e-06, | |
| "loss": 0.4, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 3.0940438871473352, | |
| "grad_norm": 0.5141010880470276, | |
| "learning_rate": 2.7008238753227385e-06, | |
| "loss": 0.4056, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 3.0987460815047023, | |
| "grad_norm": 0.504351794719696, | |
| "learning_rate": 2.694143468665685e-06, | |
| "loss": 0.4271, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 3.103448275862069, | |
| "grad_norm": 0.5583532452583313, | |
| "learning_rate": 2.6874616670316338e-06, | |
| "loss": 0.3936, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 3.1081504702194356, | |
| "grad_norm": 0.5661462545394897, | |
| "learning_rate": 2.6807785184312618e-06, | |
| "loss": 0.4136, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 3.1128526645768027, | |
| "grad_norm": 0.5078592300415039, | |
| "learning_rate": 2.674094070884926e-06, | |
| "loss": 0.4138, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 3.1175548589341693, | |
| "grad_norm": 0.579448401927948, | |
| "learning_rate": 2.6674083724223166e-06, | |
| "loss": 0.4333, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 3.122257053291536, | |
| "grad_norm": 0.5355094075202942, | |
| "learning_rate": 2.6607214710821112e-06, | |
| "loss": 0.4041, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 3.126959247648903, | |
| "grad_norm": 0.5290988683700562, | |
| "learning_rate": 2.6540334149116304e-06, | |
| "loss": 0.4181, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 3.1316614420062696, | |
| "grad_norm": 0.5365299582481384, | |
| "learning_rate": 2.647344251966493e-06, | |
| "loss": 0.4152, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 3.1363636363636362, | |
| "grad_norm": 0.5370200872421265, | |
| "learning_rate": 2.6406540303102714e-06, | |
| "loss": 0.4149, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 3.1410658307210033, | |
| "grad_norm": 0.5290679335594177, | |
| "learning_rate": 2.6339627980141425e-06, | |
| "loss": 0.4172, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 3.14576802507837, | |
| "grad_norm": 0.5143046379089355, | |
| "learning_rate": 2.6272706031565482e-06, | |
| "loss": 0.4021, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 3.1504702194357366, | |
| "grad_norm": 0.6136928796768188, | |
| "learning_rate": 2.6205774938228433e-06, | |
| "loss": 0.3976, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 3.1551724137931036, | |
| "grad_norm": 0.5212892889976501, | |
| "learning_rate": 2.6138835181049556e-06, | |
| "loss": 0.4225, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 3.1598746081504703, | |
| "grad_norm": 0.5384306907653809, | |
| "learning_rate": 2.6071887241010374e-06, | |
| "loss": 0.405, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 3.164576802507837, | |
| "grad_norm": 0.56938236951828, | |
| "learning_rate": 2.6004931599151223e-06, | |
| "loss": 0.3928, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 3.169278996865204, | |
| "grad_norm": 0.5221254825592041, | |
| "learning_rate": 2.593796873656775e-06, | |
| "loss": 0.4348, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 3.1739811912225706, | |
| "grad_norm": 0.5515516996383667, | |
| "learning_rate": 2.587099913440749e-06, | |
| "loss": 0.4141, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 3.1786833855799372, | |
| "grad_norm": 0.5437397360801697, | |
| "learning_rate": 2.580402327386643e-06, | |
| "loss": 0.4026, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 3.183385579937304, | |
| "grad_norm": 0.637416660785675, | |
| "learning_rate": 2.5737041636185496e-06, | |
| "loss": 0.4088, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 3.188087774294671, | |
| "grad_norm": 0.5309600830078125, | |
| "learning_rate": 2.5670054702647146e-06, | |
| "loss": 0.3933, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 3.1927899686520376, | |
| "grad_norm": 0.5138253569602966, | |
| "learning_rate": 2.5603062954571872e-06, | |
| "loss": 0.4172, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 3.197492163009404, | |
| "grad_norm": 0.5631712675094604, | |
| "learning_rate": 2.553606687331477e-06, | |
| "loss": 0.4376, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 3.2021943573667713, | |
| "grad_norm": 1.246191382408142, | |
| "learning_rate": 2.5469066940262073e-06, | |
| "loss": 0.4072, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 3.206896551724138, | |
| "grad_norm": 0.5201125741004944, | |
| "learning_rate": 2.540206363682768e-06, | |
| "loss": 0.4004, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 3.2115987460815045, | |
| "grad_norm": 0.49803441762924194, | |
| "learning_rate": 2.533505744444972e-06, | |
| "loss": 0.4178, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 3.2163009404388716, | |
| "grad_norm": 0.49885454773902893, | |
| "learning_rate": 2.526804884458707e-06, | |
| "loss": 0.4089, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 3.2210031347962382, | |
| "grad_norm": 0.527959942817688, | |
| "learning_rate": 2.520103831871591e-06, | |
| "loss": 0.4156, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 3.225705329153605, | |
| "grad_norm": 0.5077382922172546, | |
| "learning_rate": 2.513402634832627e-06, | |
| "loss": 0.3946, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 3.230407523510972, | |
| "grad_norm": 0.602326512336731, | |
| "learning_rate": 2.5067013414918523e-06, | |
| "loss": 0.4002, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 3.2351097178683386, | |
| "grad_norm": 1.0339138507843018, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.4133, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 3.239811912225705, | |
| "grad_norm": 0.5707440376281738, | |
| "learning_rate": 2.493298658508149e-06, | |
| "loss": 0.395, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 3.2445141065830723, | |
| "grad_norm": 0.7559914588928223, | |
| "learning_rate": 2.4865973651673743e-06, | |
| "loss": 0.4128, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 3.249216300940439, | |
| "grad_norm": 0.5365283489227295, | |
| "learning_rate": 2.4798961681284096e-06, | |
| "loss": 0.4371, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 3.2539184952978055, | |
| "grad_norm": 0.5633559823036194, | |
| "learning_rate": 2.473195115541293e-06, | |
| "loss": 0.3945, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 3.2586206896551726, | |
| "grad_norm": 0.502936840057373, | |
| "learning_rate": 2.466494255555029e-06, | |
| "loss": 0.4411, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 3.2633228840125392, | |
| "grad_norm": 0.5298876166343689, | |
| "learning_rate": 2.459793636317233e-06, | |
| "loss": 0.4046, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 3.268025078369906, | |
| "grad_norm": 0.6075465083122253, | |
| "learning_rate": 2.4530933059737936e-06, | |
| "loss": 0.4411, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 3.2727272727272725, | |
| "grad_norm": 0.5472120046615601, | |
| "learning_rate": 2.4463933126685236e-06, | |
| "loss": 0.4055, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 3.2774294670846396, | |
| "grad_norm": 0.5046771764755249, | |
| "learning_rate": 2.439693704542814e-06, | |
| "loss": 0.4015, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 3.282131661442006, | |
| "grad_norm": 0.5108353495597839, | |
| "learning_rate": 2.432994529735286e-06, | |
| "loss": 0.4083, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 3.2868338557993733, | |
| "grad_norm": 0.4968450963497162, | |
| "learning_rate": 2.4262958363814512e-06, | |
| "loss": 0.4185, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 3.29153605015674, | |
| "grad_norm": 0.541350245475769, | |
| "learning_rate": 2.4195976726133574e-06, | |
| "loss": 0.4062, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 3.2962382445141065, | |
| "grad_norm": 0.6516889929771423, | |
| "learning_rate": 2.4129000865592517e-06, | |
| "loss": 0.4059, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 3.300940438871473, | |
| "grad_norm": 0.7150237560272217, | |
| "learning_rate": 2.4062031263432267e-06, | |
| "loss": 0.4259, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 3.30564263322884, | |
| "grad_norm": 0.4952857792377472, | |
| "learning_rate": 2.3995068400848785e-06, | |
| "loss": 0.403, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 3.310344827586207, | |
| "grad_norm": 0.6034581661224365, | |
| "learning_rate": 2.392811275898963e-06, | |
| "loss": 0.4192, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 3.3150470219435735, | |
| "grad_norm": 0.7284024953842163, | |
| "learning_rate": 2.3861164818950448e-06, | |
| "loss": 0.3871, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 3.3197492163009406, | |
| "grad_norm": 0.5211794972419739, | |
| "learning_rate": 2.379422506177157e-06, | |
| "loss": 0.4118, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 3.324451410658307, | |
| "grad_norm": 0.5211071372032166, | |
| "learning_rate": 2.372729396843453e-06, | |
| "loss": 0.4139, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 3.329153605015674, | |
| "grad_norm": 0.53047114610672, | |
| "learning_rate": 2.366037201985858e-06, | |
| "loss": 0.4169, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 3.333855799373041, | |
| "grad_norm": 0.5385318994522095, | |
| "learning_rate": 2.3593459696897294e-06, | |
| "loss": 0.3949, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 3.3385579937304075, | |
| "grad_norm": 0.5405430793762207, | |
| "learning_rate": 2.352655748033508e-06, | |
| "loss": 0.4126, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 3.343260188087774, | |
| "grad_norm": 0.5243842005729675, | |
| "learning_rate": 2.3459665850883704e-06, | |
| "loss": 0.4342, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 3.347962382445141, | |
| "grad_norm": 0.5552899241447449, | |
| "learning_rate": 2.33927852891789e-06, | |
| "loss": 0.4183, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 3.352664576802508, | |
| "grad_norm": 0.7859904170036316, | |
| "learning_rate": 2.3325916275776834e-06, | |
| "loss": 0.4394, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 3.3573667711598745, | |
| "grad_norm": 0.5205579400062561, | |
| "learning_rate": 2.3259059291150744e-06, | |
| "loss": 0.4004, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 3.3620689655172415, | |
| "grad_norm": 0.5282108783721924, | |
| "learning_rate": 2.319221481568739e-06, | |
| "loss": 0.4205, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 3.366771159874608, | |
| "grad_norm": 0.6452942490577698, | |
| "learning_rate": 2.3125383329683666e-06, | |
| "loss": 0.422, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 3.371473354231975, | |
| "grad_norm": 0.5008051991462708, | |
| "learning_rate": 2.3058565313343152e-06, | |
| "loss": 0.3893, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 3.376175548589342, | |
| "grad_norm": 0.6496668457984924, | |
| "learning_rate": 2.2991761246772623e-06, | |
| "loss": 0.4045, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 3.3808777429467085, | |
| "grad_norm": 0.5258035659790039, | |
| "learning_rate": 2.2924971609978623e-06, | |
| "loss": 0.3973, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 3.385579937304075, | |
| "grad_norm": 0.5373122096061707, | |
| "learning_rate": 2.285819688286403e-06, | |
| "loss": 0.3862, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 3.3902821316614418, | |
| "grad_norm": 0.5864964723587036, | |
| "learning_rate": 2.2791437545224563e-06, | |
| "loss": 0.433, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 3.394984326018809, | |
| "grad_norm": 0.5025304555892944, | |
| "learning_rate": 2.2724694076745397e-06, | |
| "loss": 0.4115, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 3.3996865203761755, | |
| "grad_norm": 0.76083904504776, | |
| "learning_rate": 2.265796695699766e-06, | |
| "loss": 0.4155, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 3.4043887147335425, | |
| "grad_norm": 0.5206389427185059, | |
| "learning_rate": 2.2591256665434998e-06, | |
| "loss": 0.4176, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 3.409090909090909, | |
| "grad_norm": 0.559341311454773, | |
| "learning_rate": 2.252456368139019e-06, | |
| "loss": 0.414, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 3.413793103448276, | |
| "grad_norm": 0.5281516313552856, | |
| "learning_rate": 2.245788848407159e-06, | |
| "loss": 0.4217, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 3.4184952978056424, | |
| "grad_norm": 1.5208245515823364, | |
| "learning_rate": 2.2391231552559815e-06, | |
| "loss": 0.4209, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 3.4231974921630095, | |
| "grad_norm": 0.5414798855781555, | |
| "learning_rate": 2.2324593365804184e-06, | |
| "loss": 0.3908, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 3.427899686520376, | |
| "grad_norm": 0.587730348110199, | |
| "learning_rate": 2.225797440261936e-06, | |
| "loss": 0.4319, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 3.4326018808777428, | |
| "grad_norm": 0.5572887659072876, | |
| "learning_rate": 2.219137514168187e-06, | |
| "loss": 0.3952, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 3.43730407523511, | |
| "grad_norm": 0.589335560798645, | |
| "learning_rate": 2.212479606152667e-06, | |
| "loss": 0.4251, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 3.4420062695924765, | |
| "grad_norm": 0.9469087719917297, | |
| "learning_rate": 2.205823764054372e-06, | |
| "loss": 0.4089, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 3.446708463949843, | |
| "grad_norm": 0.5122615694999695, | |
| "learning_rate": 2.199170035697453e-06, | |
| "loss": 0.4083, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 3.45141065830721, | |
| "grad_norm": 0.5385236144065857, | |
| "learning_rate": 2.1925184688908735e-06, | |
| "loss": 0.4127, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 3.456112852664577, | |
| "grad_norm": 0.5631992220878601, | |
| "learning_rate": 2.185869111428067e-06, | |
| "loss": 0.4358, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 3.4608150470219434, | |
| "grad_norm": 0.599616289138794, | |
| "learning_rate": 2.1792220110865885e-06, | |
| "loss": 0.4224, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 3.4655172413793105, | |
| "grad_norm": 0.5281793475151062, | |
| "learning_rate": 2.1725772156277795e-06, | |
| "loss": 0.4028, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 3.470219435736677, | |
| "grad_norm": 0.5679891705513, | |
| "learning_rate": 2.165934772796417e-06, | |
| "loss": 0.4127, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 3.4749216300940438, | |
| "grad_norm": 0.58733731508255, | |
| "learning_rate": 2.159294730320374e-06, | |
| "loss": 0.4261, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 3.479623824451411, | |
| "grad_norm": 0.5951966643333435, | |
| "learning_rate": 2.15265713591028e-06, | |
| "loss": 0.4132, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 3.4843260188087775, | |
| "grad_norm": 0.5945015549659729, | |
| "learning_rate": 2.1460220372591676e-06, | |
| "loss": 0.4327, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 3.489028213166144, | |
| "grad_norm": 0.5072157979011536, | |
| "learning_rate": 2.139389482042142e-06, | |
| "loss": 0.3988, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 3.493730407523511, | |
| "grad_norm": 0.5170702338218689, | |
| "learning_rate": 2.1327595179160332e-06, | |
| "loss": 0.4157, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 3.498432601880878, | |
| "grad_norm": 0.5536943674087524, | |
| "learning_rate": 2.1261321925190492e-06, | |
| "loss": 0.4061, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 3.5031347962382444, | |
| "grad_norm": 0.5288745164871216, | |
| "learning_rate": 2.1195075534704433e-06, | |
| "loss": 0.383, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 3.507836990595611, | |
| "grad_norm": 0.5845092535018921, | |
| "learning_rate": 2.1128856483701625e-06, | |
| "loss": 0.411, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 3.512539184952978, | |
| "grad_norm": 0.5369064211845398, | |
| "learning_rate": 2.10626652479851e-06, | |
| "loss": 0.3936, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 3.5172413793103448, | |
| "grad_norm": 0.5584667325019836, | |
| "learning_rate": 2.0996502303158057e-06, | |
| "loss": 0.4081, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 3.521943573667712, | |
| "grad_norm": 0.537060022354126, | |
| "learning_rate": 2.0930368124620385e-06, | |
| "loss": 0.4087, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 3.5266457680250785, | |
| "grad_norm": 0.5309715270996094, | |
| "learning_rate": 2.086426318756531e-06, | |
| "loss": 0.4255, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 3.531347962382445, | |
| "grad_norm": 0.5657020807266235, | |
| "learning_rate": 2.0798187966975917e-06, | |
| "loss": 0.4317, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 3.5360501567398117, | |
| "grad_norm": 0.5245983600616455, | |
| "learning_rate": 2.073214293762179e-06, | |
| "loss": 0.4203, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 3.540752351097179, | |
| "grad_norm": 0.5540620684623718, | |
| "learning_rate": 2.0666128574055575e-06, | |
| "loss": 0.4178, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 3.5454545454545454, | |
| "grad_norm": 0.5255053639411926, | |
| "learning_rate": 2.0600145350609585e-06, | |
| "loss": 0.4183, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 3.5501567398119125, | |
| "grad_norm": 0.5604968070983887, | |
| "learning_rate": 2.053419374139235e-06, | |
| "loss": 0.4157, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 3.554858934169279, | |
| "grad_norm": 0.5198661684989929, | |
| "learning_rate": 2.0468274220285295e-06, | |
| "loss": 0.4145, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 3.5595611285266457, | |
| "grad_norm": 0.6241534352302551, | |
| "learning_rate": 2.0402387260939224e-06, | |
| "loss": 0.4101, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 3.5642633228840124, | |
| "grad_norm": 0.5478257536888123, | |
| "learning_rate": 2.033653333677103e-06, | |
| "loss": 0.4211, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 3.5689655172413794, | |
| "grad_norm": 0.544718861579895, | |
| "learning_rate": 2.02707129209602e-06, | |
| "loss": 0.403, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 3.573667711598746, | |
| "grad_norm": 0.567952036857605, | |
| "learning_rate": 2.0204926486445463e-06, | |
| "loss": 0.4224, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 3.5783699059561127, | |
| "grad_norm": 0.576141357421875, | |
| "learning_rate": 2.0139174505921403e-06, | |
| "loss": 0.4424, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 3.58307210031348, | |
| "grad_norm": 0.5517438650131226, | |
| "learning_rate": 2.0073457451835e-06, | |
| "loss": 0.3991, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 3.5877742946708464, | |
| "grad_norm": 0.49781471490859985, | |
| "learning_rate": 2.0007775796382335e-06, | |
| "loss": 0.4252, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 3.592476489028213, | |
| "grad_norm": 0.5482198596000671, | |
| "learning_rate": 1.994213001150508e-06, | |
| "loss": 0.3928, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 3.5971786833855797, | |
| "grad_norm": 0.5472637414932251, | |
| "learning_rate": 1.9876520568887207e-06, | |
| "loss": 0.4193, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 3.6018808777429467, | |
| "grad_norm": 0.6185949444770813, | |
| "learning_rate": 1.981094793995155e-06, | |
| "loss": 0.4034, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 3.6065830721003134, | |
| "grad_norm": 0.5806577801704407, | |
| "learning_rate": 1.974541259585641e-06, | |
| "loss": 0.4316, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 3.6112852664576804, | |
| "grad_norm": 0.5567718744277954, | |
| "learning_rate": 1.9679915007492194e-06, | |
| "loss": 0.412, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 3.615987460815047, | |
| "grad_norm": 0.7912635207176208, | |
| "learning_rate": 1.9614455645478047e-06, | |
| "loss": 0.4014, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 3.6206896551724137, | |
| "grad_norm": 0.5250074863433838, | |
| "learning_rate": 1.9549034980158403e-06, | |
| "loss": 0.4146, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 3.6253918495297803, | |
| "grad_norm": 0.5616881847381592, | |
| "learning_rate": 1.9483653481599697e-06, | |
| "loss": 0.4197, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 3.6300940438871474, | |
| "grad_norm": 0.558824896812439, | |
| "learning_rate": 1.9418311619586897e-06, | |
| "loss": 0.4258, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 3.634796238244514, | |
| "grad_norm": 0.5861754417419434, | |
| "learning_rate": 1.935300986362018e-06, | |
| "loss": 0.3986, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 3.639498432601881, | |
| "grad_norm": 0.9162700176239014, | |
| "learning_rate": 1.9287748682911582e-06, | |
| "loss": 0.4302, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 3.6442006269592477, | |
| "grad_norm": 0.9436094760894775, | |
| "learning_rate": 1.9222528546381543e-06, | |
| "loss": 0.4216, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 3.6489028213166144, | |
| "grad_norm": 0.8662758469581604, | |
| "learning_rate": 1.9157349922655648e-06, | |
| "loss": 0.3978, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 3.653605015673981, | |
| "grad_norm": 0.5423682332038879, | |
| "learning_rate": 1.909221328006114e-06, | |
| "loss": 0.409, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 3.658307210031348, | |
| "grad_norm": 0.5691127777099609, | |
| "learning_rate": 1.9027119086623647e-06, | |
| "loss": 0.4215, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 3.6630094043887147, | |
| "grad_norm": 0.5373672246932983, | |
| "learning_rate": 1.8962067810063806e-06, | |
| "loss": 0.4367, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 3.6677115987460818, | |
| "grad_norm": 0.5069293975830078, | |
| "learning_rate": 1.8897059917793844e-06, | |
| "loss": 0.3924, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 3.6724137931034484, | |
| "grad_norm": 0.5330380797386169, | |
| "learning_rate": 1.8832095876914268e-06, | |
| "loss": 0.4103, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 3.677115987460815, | |
| "grad_norm": 0.5398195385932922, | |
| "learning_rate": 1.8767176154210537e-06, | |
| "loss": 0.4176, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 3.6818181818181817, | |
| "grad_norm": 0.5592144727706909, | |
| "learning_rate": 1.8702301216149616e-06, | |
| "loss": 0.4074, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 3.6865203761755487, | |
| "grad_norm": 0.5743839144706726, | |
| "learning_rate": 1.8637471528876727e-06, | |
| "loss": 0.4076, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 3.6912225705329154, | |
| "grad_norm": 0.5384632349014282, | |
| "learning_rate": 1.8572687558211923e-06, | |
| "loss": 0.4189, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 3.695924764890282, | |
| "grad_norm": 0.5259303450584412, | |
| "learning_rate": 1.850794976964677e-06, | |
| "loss": 0.3814, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 3.700626959247649, | |
| "grad_norm": 0.5677003860473633, | |
| "learning_rate": 1.8443258628341026e-06, | |
| "loss": 0.4149, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 3.7053291536050157, | |
| "grad_norm": 0.6973782181739807, | |
| "learning_rate": 1.837861459911925e-06, | |
| "loss": 0.4248, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 3.7100313479623823, | |
| "grad_norm": 0.5952889323234558, | |
| "learning_rate": 1.8314018146467505e-06, | |
| "loss": 0.4051, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 3.714733542319749, | |
| "grad_norm": 0.5266870856285095, | |
| "learning_rate": 1.8249469734529995e-06, | |
| "loss": 0.3879, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 3.719435736677116, | |
| "grad_norm": 0.5926518440246582, | |
| "learning_rate": 1.818496982710572e-06, | |
| "loss": 0.4233, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 3.7241379310344827, | |
| "grad_norm": 0.6534157395362854, | |
| "learning_rate": 1.81205188876452e-06, | |
| "loss": 0.4239, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 3.7288401253918497, | |
| "grad_norm": 0.5505423545837402, | |
| "learning_rate": 1.8056117379247078e-06, | |
| "loss": 0.4267, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 3.7335423197492164, | |
| "grad_norm": 0.626750111579895, | |
| "learning_rate": 1.7991765764654813e-06, | |
| "loss": 0.415, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 3.738244514106583, | |
| "grad_norm": 0.5631802082061768, | |
| "learning_rate": 1.7927464506253394e-06, | |
| "loss": 0.4237, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 3.7429467084639496, | |
| "grad_norm": 0.5428839325904846, | |
| "learning_rate": 1.7863214066065951e-06, | |
| "loss": 0.391, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 3.7476489028213167, | |
| "grad_norm": 0.5595945119857788, | |
| "learning_rate": 1.779901490575051e-06, | |
| "loss": 0.4186, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 3.7523510971786833, | |
| "grad_norm": 0.5219384431838989, | |
| "learning_rate": 1.7734867486596596e-06, | |
| "loss": 0.4204, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 3.7570532915360504, | |
| "grad_norm": 0.5223650932312012, | |
| "learning_rate": 1.767077226952198e-06, | |
| "loss": 0.4267, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 3.761755485893417, | |
| "grad_norm": 0.5161250829696655, | |
| "learning_rate": 1.7606729715069349e-06, | |
| "loss": 0.4146, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 3.7664576802507836, | |
| "grad_norm": 0.5676782727241516, | |
| "learning_rate": 1.7542740283402981e-06, | |
| "loss": 0.4177, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 3.7711598746081503, | |
| "grad_norm": 0.5766341090202332, | |
| "learning_rate": 1.7478804434305466e-06, | |
| "loss": 0.4, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 3.7758620689655173, | |
| "grad_norm": 0.5547144412994385, | |
| "learning_rate": 1.741492262717438e-06, | |
| "loss": 0.4182, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 3.780564263322884, | |
| "grad_norm": 0.5687820315361023, | |
| "learning_rate": 1.7351095321018974e-06, | |
| "loss": 0.3857, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 3.785266457680251, | |
| "grad_norm": 0.5266862511634827, | |
| "learning_rate": 1.7287322974456933e-06, | |
| "loss": 0.3928, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 3.7899686520376177, | |
| "grad_norm": 0.5501623153686523, | |
| "learning_rate": 1.7223606045711006e-06, | |
| "loss": 0.4165, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 3.7946708463949843, | |
| "grad_norm": 0.5451024770736694, | |
| "learning_rate": 1.7159944992605774e-06, | |
| "loss": 0.4204, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 3.799373040752351, | |
| "grad_norm": 0.5108196139335632, | |
| "learning_rate": 1.7096340272564318e-06, | |
| "loss": 0.409, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 3.804075235109718, | |
| "grad_norm": 0.5510005950927734, | |
| "learning_rate": 1.7032792342604947e-06, | |
| "loss": 0.4005, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 3.8087774294670846, | |
| "grad_norm": 0.5405788421630859, | |
| "learning_rate": 1.6969301659337944e-06, | |
| "loss": 0.4155, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 3.8134796238244513, | |
| "grad_norm": 0.6032125353813171, | |
| "learning_rate": 1.6905868678962225e-06, | |
| "loss": 0.42, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 3.8181818181818183, | |
| "grad_norm": 0.9348533749580383, | |
| "learning_rate": 1.684249385726211e-06, | |
| "loss": 0.4113, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 3.822884012539185, | |
| "grad_norm": 0.5582144856452942, | |
| "learning_rate": 1.677917764960404e-06, | |
| "loss": 0.4012, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 3.8275862068965516, | |
| "grad_norm": 0.5778011679649353, | |
| "learning_rate": 1.6715920510933277e-06, | |
| "loss": 0.4314, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 3.8322884012539182, | |
| "grad_norm": 1.647594928741455, | |
| "learning_rate": 1.6652722895770676e-06, | |
| "loss": 0.4274, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 3.8369905956112853, | |
| "grad_norm": 0.5681491494178772, | |
| "learning_rate": 1.6589585258209383e-06, | |
| "loss": 0.3784, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 3.841692789968652, | |
| "grad_norm": 0.5122655630111694, | |
| "learning_rate": 1.6526508051911588e-06, | |
| "loss": 0.4186, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 3.846394984326019, | |
| "grad_norm": 0.6520569920539856, | |
| "learning_rate": 1.6463491730105282e-06, | |
| "loss": 0.4057, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 3.8510971786833856, | |
| "grad_norm": 0.5363179445266724, | |
| "learning_rate": 1.6400536745580955e-06, | |
| "loss": 0.3887, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 3.8557993730407523, | |
| "grad_norm": 0.5956358909606934, | |
| "learning_rate": 1.6337643550688408e-06, | |
| "loss": 0.4171, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 3.860501567398119, | |
| "grad_norm": 0.5232042670249939, | |
| "learning_rate": 1.627481259733343e-06, | |
| "loss": 0.3965, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 3.865203761755486, | |
| "grad_norm": 0.5099393129348755, | |
| "learning_rate": 1.6212044336974598e-06, | |
| "loss": 0.3954, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 3.8699059561128526, | |
| "grad_norm": 0.5676040053367615, | |
| "learning_rate": 1.614933922062003e-06, | |
| "loss": 0.4064, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 3.8746081504702197, | |
| "grad_norm": 0.5794877409934998, | |
| "learning_rate": 1.6086697698824144e-06, | |
| "loss": 0.4017, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 3.8793103448275863, | |
| "grad_norm": 0.572999119758606, | |
| "learning_rate": 1.6024120221684373e-06, | |
| "loss": 0.4002, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 3.884012539184953, | |
| "grad_norm": 0.5625258088111877, | |
| "learning_rate": 1.5961607238838022e-06, | |
| "loss": 0.4086, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 3.8887147335423196, | |
| "grad_norm": 0.5278126001358032, | |
| "learning_rate": 1.589915919945894e-06, | |
| "loss": 0.418, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 3.8934169278996866, | |
| "grad_norm": 0.6338787078857422, | |
| "learning_rate": 1.5836776552254386e-06, | |
| "loss": 0.4353, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 3.8981191222570533, | |
| "grad_norm": 0.6143558621406555, | |
| "learning_rate": 1.5774459745461711e-06, | |
| "loss": 0.4052, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 3.9028213166144203, | |
| "grad_norm": 0.5196287631988525, | |
| "learning_rate": 1.5712209226845201e-06, | |
| "loss": 0.3821, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 3.907523510971787, | |
| "grad_norm": 0.5631325840950012, | |
| "learning_rate": 1.565002544369286e-06, | |
| "loss": 0.4154, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 3.9122257053291536, | |
| "grad_norm": 0.6087117195129395, | |
| "learning_rate": 1.5587908842813142e-06, | |
| "loss": 0.4056, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 3.91692789968652, | |
| "grad_norm": 0.5318475365638733, | |
| "learning_rate": 1.5525859870531823e-06, | |
| "loss": 0.4212, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 3.9216300940438873, | |
| "grad_norm": 0.5385299324989319, | |
| "learning_rate": 1.5463878972688707e-06, | |
| "loss": 0.4081, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 3.926332288401254, | |
| "grad_norm": 0.5263051986694336, | |
| "learning_rate": 1.5401966594634483e-06, | |
| "loss": 0.4351, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 3.9310344827586206, | |
| "grad_norm": 0.5557870864868164, | |
| "learning_rate": 1.5340123181227495e-06, | |
| "loss": 0.4235, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 3.9357366771159876, | |
| "grad_norm": 0.5445315837860107, | |
| "learning_rate": 1.527834917683058e-06, | |
| "loss": 0.391, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 3.9404388714733543, | |
| "grad_norm": 0.5401822924613953, | |
| "learning_rate": 1.5216645025307813e-06, | |
| "loss": 0.4057, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 3.945141065830721, | |
| "grad_norm": 0.5524025559425354, | |
| "learning_rate": 1.5155011170021399e-06, | |
| "loss": 0.4134, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 3.9498432601880875, | |
| "grad_norm": 0.5604265332221985, | |
| "learning_rate": 1.5093448053828402e-06, | |
| "loss": 0.4231, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 3.9545454545454546, | |
| "grad_norm": 0.5822537541389465, | |
| "learning_rate": 1.503195611907764e-06, | |
| "loss": 0.4238, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 3.959247648902821, | |
| "grad_norm": 0.5396215319633484, | |
| "learning_rate": 1.4970535807606453e-06, | |
| "loss": 0.3825, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 3.9639498432601883, | |
| "grad_norm": 0.5705523490905762, | |
| "learning_rate": 1.4909187560737542e-06, | |
| "loss": 0.3951, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 3.968652037617555, | |
| "grad_norm": 0.5342106819152832, | |
| "learning_rate": 1.4847911819275829e-06, | |
| "loss": 0.4068, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 3.9733542319749215, | |
| "grad_norm": 0.5606650710105896, | |
| "learning_rate": 1.4786709023505224e-06, | |
| "loss": 0.397, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 3.978056426332288, | |
| "grad_norm": 0.6344854235649109, | |
| "learning_rate": 1.4725579613185549e-06, | |
| "loss": 0.4226, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 3.9827586206896552, | |
| "grad_norm": 0.6624012589454651, | |
| "learning_rate": 1.4664524027549291e-06, | |
| "loss": 0.4096, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 3.987460815047022, | |
| "grad_norm": 0.5686039328575134, | |
| "learning_rate": 1.4603542705298493e-06, | |
| "loss": 0.395, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 3.992163009404389, | |
| "grad_norm": 0.5838780999183655, | |
| "learning_rate": 1.4542636084601624e-06, | |
| "loss": 0.3682, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 3.9968652037617556, | |
| "grad_norm": 0.555031418800354, | |
| "learning_rate": 1.4481804603090358e-06, | |
| "loss": 0.4111, | |
| "step": 848 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1272, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 6, | |
| "save_steps": 212, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.461772594205793e+19, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
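
The state above is a Hugging Face Transformers `trainer_state.json` checkpoint log: `log_history` holds one record per optimizer step (here `logging_steps: 1`), and the trailer records the run configuration (`max_steps: 1272`, `num_train_epochs: 6`, `save_steps: 212`, `train_batch_size: 4`). Below is a minimal sketch of how such a file can be inspected after training; the filename `trainer_state.json` in the working directory is an assumption (in practice it sits inside a checkpoint folder, e.g. `checkpoint-848/trainer_state.json`), and only the keys visible in the log above are relied on.

```python
# Minimal sketch: summarize a Transformers trainer_state.json log.
# Assumption: the file is in the current directory; point the path at
# your own checkpoint folder (e.g. checkpoint-848/trainer_state.json).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry carries epoch, grad_norm, learning_rate, loss, step.
records = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in records]
losses = [e["loss"] for e in records]
lrs = [e["learning_rate"] for e in records]

print(f"logged steps : {len(records)} of max_steps={state['max_steps']}")
print(f"final loss   : {losses[-1]:.4f} at step {steps[-1]}")
print(f"final lr     : {lrs[-1]:.3e}")

# Locate the lowest recorded training loss (a rough signal only; this log
# has no eval metrics, so it says nothing about held-out performance).
best = min(range(len(losses)), key=losses.__getitem__)
print(f"min loss     : {losses[best]:.4f} at step {steps[best]}")
```

Run against the log above, this would report the last recorded step (848 of 1272, i.e. just under 4 of the 6 configured epochs) together with the closing loss and learning rate; plotting `losses` and `lrs` against `steps` is the usual next step for eyeballing the schedule and spotting the brief loss spike at each epoch boundary.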