{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.996865203761756,
  "eval_steps": 500,
  "global_step": 1272,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004702194357366771,
      "grad_norm": 3.308954954147339,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 0.9758,
      "step": 1
    },
    {
      "epoch": 0.009404388714733543,
      "grad_norm": 3.0945961475372314,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 0.9784,
      "step": 2
    },
    {
      "epoch": 0.014106583072100314,
      "grad_norm": 2.6266396045684814,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 0.9407,
      "step": 3
    },
    {
      "epoch": 0.018808777429467086,
      "grad_norm": 2.781346321105957,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.983,
      "step": 4
    },
    {
      "epoch": 0.023510971786833857,
      "grad_norm": 2.8360986709594727,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 0.941,
      "step": 5
    },
    {
      "epoch": 0.02821316614420063,
      "grad_norm": 2.814222812652588,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 0.9675,
      "step": 6
    },
    {
      "epoch": 0.032915360501567396,
      "grad_norm": 2.821528196334839,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 0.8856,
      "step": 7
    },
    {
      "epoch": 0.03761755485893417,
      "grad_norm": 2.7562029361724854,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.95,
      "step": 8
    },
    {
      "epoch": 0.04231974921630094,
      "grad_norm": 2.9743621349334717,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 0.9654,
      "step": 9
    },
    {
      "epoch": 0.047021943573667714,
      "grad_norm": 3.0444681644439697,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.9133,
      "step": 10
    },
    {
      "epoch": 0.05172413793103448,
      "grad_norm": 2.9696431159973145,
      "learning_rate": 5.5e-07,
      "loss": 0.9425,
      "step": 11
    },
    {
      "epoch": 0.05642633228840126,
      "grad_norm": 2.8478922843933105,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.9743,
      "step": 12
    },
    {
      "epoch": 0.061128526645768025,
      "grad_norm": 2.664818525314331,
      "learning_rate": 6.5e-07,
      "loss": 0.9276,
      "step": 13
    },
    {
      "epoch": 0.06583072100313479,
      "grad_norm": 2.727060556411743,
      "learning_rate": 7.000000000000001e-07,
      "loss": 0.9221,
      "step": 14
    },
    {
      "epoch": 0.07053291536050156,
      "grad_norm": 2.8166537284851074,
      "learning_rate": 7.5e-07,
      "loss": 0.9406,
      "step": 15
    },
    {
      "epoch": 0.07523510971786834,
      "grad_norm": 2.8412551879882812,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.901,
      "step": 16
    },
    {
      "epoch": 0.07993730407523511,
      "grad_norm": 2.6464271545410156,
      "learning_rate": 8.500000000000001e-07,
      "loss": 0.9586,
      "step": 17
    },
    {
      "epoch": 0.08463949843260188,
      "grad_norm": 2.506002187728882,
      "learning_rate": 9.000000000000001e-07,
      "loss": 0.8895,
      "step": 18
    },
    {
      "epoch": 0.08934169278996865,
      "grad_norm": 2.663196086883545,
      "learning_rate": 9.500000000000001e-07,
      "loss": 0.8833,
      "step": 19
    },
    {
      "epoch": 0.09404388714733543,
      "grad_norm": 2.5434982776641846,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.8737,
      "step": 20
    },
    {
      "epoch": 0.0987460815047022,
      "grad_norm": 2.2520358562469482,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 0.8895,
      "step": 21
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 2.3946690559387207,
      "learning_rate": 1.1e-06,
      "loss": 0.8405,
      "step": 22
    },
    {
      "epoch": 0.10815047021943573,
      "grad_norm": 2.209516763687134,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 0.8441,
      "step": 23
    },
    {
      "epoch": 0.11285266457680251,
      "grad_norm": 2.3084263801574707,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.8103,
      "step": 24
    },
    {
      "epoch": 0.11755485893416928,
      "grad_norm": 2.4108471870422363,
      "learning_rate": 1.25e-06,
      "loss": 0.808,
      "step": 25
    },
    {
      "epoch": 0.12225705329153605,
      "grad_norm": 2.116116762161255,
      "learning_rate": 1.3e-06,
      "loss": 0.7901,
      "step": 26
    },
    {
      "epoch": 0.12695924764890282,
      "grad_norm": 2.2457115650177,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.7737,
      "step": 27
    },
    {
      "epoch": 0.13166144200626959,
      "grad_norm": 1.9436711072921753,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.7465,
      "step": 28
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 2.1678073406219482,
      "learning_rate": 1.45e-06,
      "loss": 0.7819,
      "step": 29
    },
    {
      "epoch": 0.14106583072100312,
      "grad_norm": 1.8869282007217407,
      "learning_rate": 1.5e-06,
      "loss": 0.7653,
      "step": 30
    },
    {
      "epoch": 0.14576802507836992,
      "grad_norm": 1.6957731246948242,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.7659,
      "step": 31
    },
    {
      "epoch": 0.15047021943573669,
      "grad_norm": 1.4701424837112427,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.7553,
      "step": 32
    },
    {
      "epoch": 0.15517241379310345,
      "grad_norm": 1.3234986066818237,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.6875,
      "step": 33
    },
    {
      "epoch": 0.15987460815047022,
      "grad_norm": 1.3325831890106201,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.6681,
      "step": 34
    },
    {
      "epoch": 0.164576802507837,
      "grad_norm": 1.3700138330459595,
      "learning_rate": 1.75e-06,
      "loss": 0.7635,
      "step": 35
    },
    {
      "epoch": 0.16927899686520376,
      "grad_norm": 1.2316628694534302,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.6776,
      "step": 36
    },
    {
      "epoch": 0.17398119122257052,
      "grad_norm": 1.170616865158081,
      "learning_rate": 1.85e-06,
      "loss": 0.7073,
      "step": 37
    },
    {
      "epoch": 0.1786833855799373,
      "grad_norm": 1.1797659397125244,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.7572,
      "step": 38
    },
    {
      "epoch": 0.1833855799373041,
      "grad_norm": 1.0123047828674316,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.6798,
      "step": 39
    },
    {
      "epoch": 0.18808777429467086,
      "grad_norm": 0.975169837474823,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.7088,
      "step": 40
    },
    {
      "epoch": 0.19278996865203762,
      "grad_norm": 0.891377866268158,
      "learning_rate": 2.05e-06,
      "loss": 0.6696,
      "step": 41
    },
    {
      "epoch": 0.1974921630094044,
      "grad_norm": 0.8997182250022888,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.7135,
      "step": 42
    },
    {
      "epoch": 0.20219435736677116,
      "grad_norm": 0.9378617405891418,
      "learning_rate": 2.15e-06,
      "loss": 0.6827,
      "step": 43
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 0.8060838580131531,
      "learning_rate": 2.2e-06,
      "loss": 0.6704,
      "step": 44
    },
    {
      "epoch": 0.2115987460815047,
      "grad_norm": 0.7708892822265625,
      "learning_rate": 2.25e-06,
      "loss": 0.6499,
      "step": 45
    },
    {
      "epoch": 0.21630094043887146,
      "grad_norm": 0.7236770987510681,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.657,
      "step": 46
    },
    {
      "epoch": 0.22100313479623823,
      "grad_norm": 0.6666117906570435,
      "learning_rate": 2.35e-06,
      "loss": 0.634,
      "step": 47
    },
    {
      "epoch": 0.22570532915360503,
      "grad_norm": 0.6475918292999268,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.6237,
      "step": 48
    },
    {
      "epoch": 0.2304075235109718,
      "grad_norm": 0.7277692556381226,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.6707,
      "step": 49
    },
    {
      "epoch": 0.23510971786833856,
      "grad_norm": 0.6572731733322144,
      "learning_rate": 2.5e-06,
      "loss": 0.6812,
      "step": 50
    },
    {
      "epoch": 0.23981191222570533,
      "grad_norm": 0.6716486811637878,
      "learning_rate": 2.55e-06,
      "loss": 0.6803,
      "step": 51
    },
    {
      "epoch": 0.2445141065830721,
      "grad_norm": 0.6065218448638916,
      "learning_rate": 2.6e-06,
      "loss": 0.6353,
      "step": 52
    },
    {
      "epoch": 0.24921630094043887,
      "grad_norm": 0.6749820709228516,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.6393,
      "step": 53
    },
    {
      "epoch": 0.25391849529780564,
      "grad_norm": 0.6187207102775574,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.6318,
      "step": 54
    },
    {
      "epoch": 0.25862068965517243,
      "grad_norm": 0.6700596809387207,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.6762,
      "step": 55
    },
    {
      "epoch": 0.26332288401253917,
      "grad_norm": 0.6460264921188354,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.6388,
      "step": 56
    },
    {
      "epoch": 0.26802507836990597,
      "grad_norm": 0.7263085842132568,
      "learning_rate": 2.85e-06,
      "loss": 0.6413,
      "step": 57
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 0.674784779548645,
      "learning_rate": 2.9e-06,
      "loss": 0.6134,
      "step": 58
    },
    {
      "epoch": 0.2774294670846395,
      "grad_norm": 0.6552851796150208,
      "learning_rate": 2.95e-06,
      "loss": 0.6508,
      "step": 59
    },
    {
      "epoch": 0.28213166144200624,
      "grad_norm": 0.7160854339599609,
      "learning_rate": 3e-06,
      "loss": 0.677,
      "step": 60
    },
    {
      "epoch": 0.28683385579937304,
      "grad_norm": 0.682802677154541,
      "learning_rate": 3.05e-06,
      "loss": 0.5983,
      "step": 61
    },
    {
      "epoch": 0.29153605015673983,
      "grad_norm": 0.5360945463180542,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.5928,
      "step": 62
    },
    {
      "epoch": 0.2962382445141066,
      "grad_norm": 0.6875145435333252,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.6366,
      "step": 63
    },
    {
      "epoch": 0.30094043887147337,
      "grad_norm": 0.5564191937446594,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.6466,
      "step": 64
    },
    {
      "epoch": 0.3056426332288401,
      "grad_norm": 0.8959600925445557,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.5875,
      "step": 65
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 0.5513110160827637,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.6042,
      "step": 66
    },
    {
      "epoch": 0.31504702194357365,
      "grad_norm": 0.5638250112533569,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.6343,
      "step": 67
    },
    {
      "epoch": 0.31974921630094044,
      "grad_norm": 0.5723473429679871,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.6007,
      "step": 68
    },
    {
      "epoch": 0.32445141065830724,
      "grad_norm": 0.5193734765052795,
      "learning_rate": 3.45e-06,
      "loss": 0.5877,
      "step": 69
    },
    {
      "epoch": 0.329153605015674,
      "grad_norm": 0.7437443733215332,
      "learning_rate": 3.5e-06,
      "loss": 0.6579,
      "step": 70
    },
    {
      "epoch": 0.3338557993730408,
      "grad_norm": 0.5810403823852539,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.625,
      "step": 71
    },
    {
      "epoch": 0.3385579937304075,
      "grad_norm": 0.5594515204429626,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.6039,
      "step": 72
    },
    {
      "epoch": 0.3432601880877743,
      "grad_norm": 0.5781659483909607,
      "learning_rate": 3.65e-06,
      "loss": 0.5936,
      "step": 73
    },
    {
      "epoch": 0.34796238244514105,
      "grad_norm": 0.5736168622970581,
      "learning_rate": 3.7e-06,
      "loss": 0.61,
      "step": 74
    },
    {
      "epoch": 0.35266457680250785,
      "grad_norm": 0.5082181096076965,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.6031,
      "step": 75
    },
    {
      "epoch": 0.3573667711598746,
      "grad_norm": 0.5188243985176086,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.6048,
      "step": 76
    },
    {
      "epoch": 0.3620689655172414,
      "grad_norm": 0.678931713104248,
      "learning_rate": 3.85e-06,
      "loss": 0.615,
      "step": 77
    },
    {
      "epoch": 0.3667711598746082,
      "grad_norm": 0.5580578446388245,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.5622,
      "step": 78
    },
    {
      "epoch": 0.3714733542319749,
      "grad_norm": 0.5222985744476318,
      "learning_rate": 3.95e-06,
      "loss": 0.6233,
      "step": 79
    },
    {
      "epoch": 0.3761755485893417,
      "grad_norm": 0.6105766892433167,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.5448,
      "step": 80
    },
    {
      "epoch": 0.38087774294670845,
      "grad_norm": 0.4837906062602997,
      "learning_rate": 4.05e-06,
      "loss": 0.6058,
      "step": 81
    },
    {
      "epoch": 0.38557993730407525,
      "grad_norm": 0.5260081887245178,
      "learning_rate": 4.1e-06,
      "loss": 0.5753,
      "step": 82
    },
    {
      "epoch": 0.390282131661442,
      "grad_norm": 0.5424107909202576,
      "learning_rate": 4.15e-06,
      "loss": 0.5836,
      "step": 83
    },
    {
      "epoch": 0.3949843260188088,
      "grad_norm": 0.5307353734970093,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.5891,
      "step": 84
    },
    {
      "epoch": 0.3996865203761755,
      "grad_norm": 0.5606550574302673,
      "learning_rate": 4.25e-06,
      "loss": 0.6097,
      "step": 85
    },
    {
      "epoch": 0.4043887147335423,
      "grad_norm": 0.5380442142486572,
      "learning_rate": 4.3e-06,
      "loss": 0.6161,
      "step": 86
    },
    {
      "epoch": 0.4090909090909091,
      "grad_norm": 0.7110352516174316,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.5681,
      "step": 87
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 0.525680422782898,
      "learning_rate": 4.4e-06,
      "loss": 0.5942,
      "step": 88
    },
    {
      "epoch": 0.41849529780564265,
      "grad_norm": 0.5567077398300171,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.5929,
      "step": 89
    },
    {
      "epoch": 0.4231974921630094,
      "grad_norm": 0.5358735918998718,
      "learning_rate": 4.5e-06,
      "loss": 0.5717,
      "step": 90
    },
    {
      "epoch": 0.4278996865203762,
      "grad_norm": 0.48081454634666443,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.5984,
      "step": 91
    },
    {
      "epoch": 0.43260188087774293,
      "grad_norm": 0.4900401830673218,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.6017,
      "step": 92
    },
    {
      "epoch": 0.4373040752351097,
      "grad_norm": 0.48369699716567993,
      "learning_rate": 4.65e-06,
      "loss": 0.5295,
      "step": 93
    },
    {
      "epoch": 0.44200626959247646,
      "grad_norm": 0.4992128312587738,
      "learning_rate": 4.7e-06,
      "loss": 0.5495,
      "step": 94
    },
    {
      "epoch": 0.44670846394984326,
      "grad_norm": 0.5139058828353882,
      "learning_rate": 4.75e-06,
      "loss": 0.5528,
      "step": 95
    },
    {
      "epoch": 0.45141065830721006,
      "grad_norm": 0.485097199678421,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.5528,
      "step": 96
    },
    {
      "epoch": 0.4561128526645768,
      "grad_norm": 0.5944838523864746,
      "learning_rate": 4.85e-06,
      "loss": 0.5577,
      "step": 97
    },
    {
      "epoch": 0.4608150470219436,
      "grad_norm": 0.5313893556594849,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.5744,
      "step": 98
    },
    {
      "epoch": 0.46551724137931033,
      "grad_norm": 0.49062076210975647,
      "learning_rate": 4.95e-06,
      "loss": 0.5563,
      "step": 99
    },
    {
      "epoch": 0.4702194357366771,
      "grad_norm": 0.5196151733398438,
      "learning_rate": 5e-06,
      "loss": 0.5817,
      "step": 100
    },
    {
      "epoch": 0.47492163009404387,
      "grad_norm": 0.507022500038147,
      "learning_rate": 4.9999910183883085e-06,
      "loss": 0.5608,
      "step": 101
    },
    {
      "epoch": 0.47962382445141066,
      "grad_norm": 0.494018018245697,
      "learning_rate": 4.999964073617768e-06,
      "loss": 0.57,
      "step": 102
    },
    {
      "epoch": 0.4843260188087774,
      "grad_norm": 0.48901236057281494,
      "learning_rate": 4.999919165881985e-06,
      "loss": 0.5568,
      "step": 103
    },
    {
      "epoch": 0.4890282131661442,
      "grad_norm": 0.5166047811508179,
      "learning_rate": 4.999856295503635e-06,
      "loss": 0.5676,
      "step": 104
    },
    {
      "epoch": 0.493730407523511,
      "grad_norm": 0.4952854812145233,
      "learning_rate": 4.9997754629344596e-06,
      "loss": 0.5454,
      "step": 105
    },
    {
      "epoch": 0.49843260188087773,
      "grad_norm": 0.48603224754333496,
      "learning_rate": 4.999676668755263e-06,
      "loss": 0.5351,
      "step": 106
    },
    {
      "epoch": 0.5031347962382445,
      "grad_norm": 0.4922390878200531,
      "learning_rate": 4.999559913675912e-06,
      "loss": 0.5672,
      "step": 107
    },
    {
      "epoch": 0.5078369905956113,
      "grad_norm": 0.4846023619174957,
      "learning_rate": 4.999425198535325e-06,
      "loss": 0.5472,
      "step": 108
    },
    {
      "epoch": 0.512539184952978,
      "grad_norm": 0.4974515736103058,
      "learning_rate": 4.999272524301469e-06,
      "loss": 0.5507,
      "step": 109
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 0.5394945740699768,
      "learning_rate": 4.9991018920713505e-06,
      "loss": 0.5667,
      "step": 110
    },
    {
      "epoch": 0.5219435736677116,
      "grad_norm": 0.5393733978271484,
      "learning_rate": 4.9989133030710154e-06,
      "loss": 0.5505,
      "step": 111
    },
    {
      "epoch": 0.5266457680250783,
      "grad_norm": 0.7595535516738892,
      "learning_rate": 4.9987067586555275e-06,
      "loss": 0.5484,
      "step": 112
    },
    {
      "epoch": 0.5313479623824452,
      "grad_norm": 0.5122578144073486,
      "learning_rate": 4.998482260308969e-06,
      "loss": 0.5669,
      "step": 113
    },
    {
      "epoch": 0.5360501567398119,
      "grad_norm": 0.5233834981918335,
      "learning_rate": 4.998239809644427e-06,
      "loss": 0.5613,
      "step": 114
    },
    {
      "epoch": 0.5407523510971787,
      "grad_norm": 3.08868408203125,
      "learning_rate": 4.9979794084039755e-06,
      "loss": 0.5746,
      "step": 115
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.49716249108314514,
      "learning_rate": 4.997701058458677e-06,
      "loss": 0.5349,
      "step": 116
    },
    {
      "epoch": 0.5501567398119123,
      "grad_norm": 0.5552968978881836,
      "learning_rate": 4.997404761808554e-06,
      "loss": 0.569,
      "step": 117
    },
    {
      "epoch": 0.554858934169279,
      "grad_norm": 0.5029860734939575,
      "learning_rate": 4.9970905205825845e-06,
      "loss": 0.5755,
      "step": 118
    },
    {
      "epoch": 0.5595611285266457,
      "grad_norm": 0.5006077885627747,
      "learning_rate": 4.996758337038683e-06,
      "loss": 0.5654,
      "step": 119
    },
    {
      "epoch": 0.5642633228840125,
      "grad_norm": 0.4900127053260803,
      "learning_rate": 4.996408213563684e-06,
      "loss": 0.5575,
      "step": 120
    },
    {
      "epoch": 0.5689655172413793,
      "grad_norm": 0.5142834186553955,
      "learning_rate": 4.996040152673326e-06,
      "loss": 0.5247,
      "step": 121
    },
    {
      "epoch": 0.5736677115987461,
      "grad_norm": 0.5097452998161316,
      "learning_rate": 4.995654157012233e-06,
      "loss": 0.538,
      "step": 122
    },
    {
      "epoch": 0.5783699059561128,
      "grad_norm": 0.5581697225570679,
      "learning_rate": 4.995250229353895e-06,
      "loss": 0.5324,
      "step": 123
    },
    {
      "epoch": 0.5830721003134797,
      "grad_norm": 0.5943841934204102,
      "learning_rate": 4.99482837260065e-06,
      "loss": 0.547,
      "step": 124
    },
    {
      "epoch": 0.5877742946708464,
      "grad_norm": 0.541631281375885,
      "learning_rate": 4.99438858978366e-06,
      "loss": 0.577,
      "step": 125
    },
    {
      "epoch": 0.5924764890282131,
      "grad_norm": 0.6008974313735962,
      "learning_rate": 4.993930884062892e-06,
      "loss": 0.5579,
      "step": 126
    },
    {
      "epoch": 0.5971786833855799,
      "grad_norm": 0.7009158730506897,
      "learning_rate": 4.993455258727094e-06,
      "loss": 0.5579,
      "step": 127
    },
    {
      "epoch": 0.6018808777429467,
      "grad_norm": 0.5942498445510864,
      "learning_rate": 4.992961717193773e-06,
      "loss": 0.5599,
      "step": 128
    },
    {
      "epoch": 0.6065830721003135,
      "grad_norm": 0.4936768710613251,
      "learning_rate": 4.9924502630091655e-06,
      "loss": 0.5676,
      "step": 129
    },
    {
      "epoch": 0.6112852664576802,
      "grad_norm": 0.5035172700881958,
      "learning_rate": 4.99192089984822e-06,
      "loss": 0.5519,
      "step": 130
    },
    {
      "epoch": 0.6159874608150471,
      "grad_norm": 0.8178919553756714,
      "learning_rate": 4.9913736315145614e-06,
      "loss": 0.5594,
      "step": 131
    },
    {
      "epoch": 0.6206896551724138,
      "grad_norm": 0.4977147877216339,
      "learning_rate": 4.990808461940474e-06,
      "loss": 0.54,
      "step": 132
    },
    {
      "epoch": 0.6253918495297806,
      "grad_norm": 0.7395517826080322,
      "learning_rate": 4.990225395186862e-06,
      "loss": 0.5467,
      "step": 133
    },
    {
      "epoch": 0.6300940438871473,
      "grad_norm": 1.1424250602722168,
      "learning_rate": 4.9896244354432314e-06,
      "loss": 0.5432,
      "step": 134
    },
    {
      "epoch": 0.6347962382445141,
      "grad_norm": 0.5002970695495605,
      "learning_rate": 4.98900558702765e-06,
      "loss": 0.5526,
      "step": 135
    },
    {
      "epoch": 0.6394984326018809,
      "grad_norm": 0.49464455246925354,
      "learning_rate": 4.9883688543867225e-06,
      "loss": 0.563,
      "step": 136
    },
    {
      "epoch": 0.6442006269592476,
      "grad_norm": 0.48660922050476074,
      "learning_rate": 4.987714242095558e-06,
      "loss": 0.5328,
      "step": 137
    },
    {
      "epoch": 0.6489028213166145,
      "grad_norm": 0.5086668133735657,
      "learning_rate": 4.9870417548577355e-06,
      "loss": 0.5373,
      "step": 138
    },
    {
      "epoch": 0.6536050156739812,
      "grad_norm": 0.4638510048389435,
      "learning_rate": 4.9863513975052696e-06,
      "loss": 0.5459,
      "step": 139
    },
    {
      "epoch": 0.658307210031348,
      "grad_norm": 0.5742124915122986,
      "learning_rate": 4.985643174998578e-06,
      "loss": 0.5681,
      "step": 140
    },
    {
      "epoch": 0.6630094043887147,
      "grad_norm": 0.5262806415557861,
      "learning_rate": 4.984917092426445e-06,
      "loss": 0.5549,
      "step": 141
    },
    {
      "epoch": 0.6677115987460815,
      "grad_norm": 0.4765531122684479,
      "learning_rate": 4.984173155005982e-06,
      "loss": 0.5435,
      "step": 142
    },
    {
      "epoch": 0.6724137931034483,
      "grad_norm": 0.4731754660606384,
      "learning_rate": 4.983411368082597e-06,
      "loss": 0.5484,
      "step": 143
    },
    {
      "epoch": 0.677115987460815,
      "grad_norm": 0.5015071630477905,
      "learning_rate": 4.982631737129948e-06,
      "loss": 0.5314,
      "step": 144
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.5013841986656189,
      "learning_rate": 4.98183426774991e-06,
      "loss": 0.5705,
      "step": 145
    },
    {
      "epoch": 0.6865203761755486,
      "grad_norm": 0.5398444533348083,
      "learning_rate": 4.981018965672529e-06,
      "loss": 0.5713,
      "step": 146
    },
    {
      "epoch": 0.6912225705329154,
      "grad_norm": 0.5247324109077454,
      "learning_rate": 4.98018583675599e-06,
      "loss": 0.5512,
      "step": 147
    },
    {
      "epoch": 0.6959247648902821,
      "grad_norm": 0.488935649394989,
      "learning_rate": 4.979334886986562e-06,
      "loss": 0.5532,
      "step": 148
    },
    {
      "epoch": 0.700626959247649,
      "grad_norm": 0.5056779384613037,
      "learning_rate": 4.978466122478567e-06,
      "loss": 0.5668,
      "step": 149
    },
    {
      "epoch": 0.7053291536050157,
      "grad_norm": 0.5102982521057129,
      "learning_rate": 4.97757954947433e-06,
      "loss": 0.5346,
      "step": 150
    },
    {
      "epoch": 0.7100313479623824,
      "grad_norm": 0.4989202320575714,
      "learning_rate": 4.976675174344132e-06,
      "loss": 0.5488,
      "step": 151
    },
    {
      "epoch": 0.7147335423197492,
      "grad_norm": 0.4996865689754486,
      "learning_rate": 4.975753003586172e-06,
      "loss": 0.528,
      "step": 152
    },
    {
      "epoch": 0.719435736677116,
      "grad_norm": 0.5011430978775024,
      "learning_rate": 4.974813043826513e-06,
      "loss": 0.5221,
      "step": 153
    },
    {
      "epoch": 0.7241379310344828,
      "grad_norm": 0.530082643032074,
      "learning_rate": 4.973855301819039e-06,
      "loss": 0.5398,
      "step": 154
    },
    {
      "epoch": 0.7288401253918495,
      "grad_norm": 0.5243843197822571,
      "learning_rate": 4.972879784445402e-06,
      "loss": 0.5215,
      "step": 155
    },
    {
      "epoch": 0.7335423197492164,
      "grad_norm": 0.5321754217147827,
      "learning_rate": 4.971886498714978e-06,
      "loss": 0.5432,
      "step": 156
    },
    {
      "epoch": 0.7382445141065831,
      "grad_norm": 0.5054258704185486,
      "learning_rate": 4.97087545176481e-06,
      "loss": 0.5197,
      "step": 157
    },
    {
      "epoch": 0.7429467084639498,
      "grad_norm": 0.5209529995918274,
      "learning_rate": 4.9698466508595655e-06,
      "loss": 0.5486,
      "step": 158
    },
    {
      "epoch": 0.7476489028213166,
      "grad_norm": 0.47057369351387024,
      "learning_rate": 4.9688001033914756e-06,
      "loss": 0.5358,
      "step": 159
    },
    {
      "epoch": 0.7523510971786834,
      "grad_norm": 0.5286029577255249,
      "learning_rate": 4.967735816880286e-06,
      "loss": 0.5471,
      "step": 160
    },
    {
      "epoch": 0.7570532915360502,
      "grad_norm": 0.7403479218482971,
      "learning_rate": 4.966653798973205e-06,
      "loss": 0.5264,
      "step": 161
    },
    {
      "epoch": 0.7617554858934169,
      "grad_norm": 0.5945819020271301,
      "learning_rate": 4.965554057444842e-06,
      "loss": 0.5194,
      "step": 162
    },
    {
      "epoch": 0.7664576802507836,
      "grad_norm": 0.48147913813591003,
      "learning_rate": 4.964436600197161e-06,
      "loss": 0.5425,
      "step": 163
    },
    {
      "epoch": 0.7711598746081505,
      "grad_norm": 0.5892655849456787,
      "learning_rate": 4.963301435259413e-06,
      "loss": 0.5134,
      "step": 164
    },
    {
      "epoch": 0.7758620689655172,
      "grad_norm": 0.546521782875061,
      "learning_rate": 4.962148570788088e-06,
      "loss": 0.5334,
      "step": 165
    },
    {
      "epoch": 0.780564263322884,
      "grad_norm": 0.5432559251785278,
      "learning_rate": 4.96097801506685e-06,
      "loss": 0.5214,
      "step": 166
    },
    {
      "epoch": 0.7852664576802508,
      "grad_norm": 0.4892060458660126,
      "learning_rate": 4.959789776506482e-06,
      "loss": 0.5307,
      "step": 167
    },
    {
      "epoch": 0.7899686520376176,
      "grad_norm": 0.5296177268028259,
      "learning_rate": 4.958583863644821e-06,
      "loss": 0.5539,
      "step": 168
    },
    {
      "epoch": 0.7946708463949843,
      "grad_norm": 0.4970422685146332,
      "learning_rate": 4.9573602851466985e-06,
      "loss": 0.5193,
      "step": 169
    },
    {
      "epoch": 0.799373040752351,
      "grad_norm": 0.5201449990272522,
      "learning_rate": 4.9561190498038815e-06,
      "loss": 0.542,
      "step": 170
    },
    {
      "epoch": 0.8040752351097179,
      "grad_norm": 0.6067378520965576,
      "learning_rate": 4.954860166535005e-06,
      "loss": 0.5319,
      "step": 171
    },
    {
      "epoch": 0.8087774294670846,
      "grad_norm": 0.4754677712917328,
      "learning_rate": 4.95358364438551e-06,
      "loss": 0.5366,
      "step": 172
    },
    {
      "epoch": 0.8134796238244514,
      "grad_norm": 0.4970039129257202,
      "learning_rate": 4.952289492527576e-06,
      "loss": 0.5626,
      "step": 173
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.5602061748504639,
      "learning_rate": 4.9509777202600605e-06,
      "loss": 0.4951,
      "step": 174
    },
    {
      "epoch": 0.822884012539185,
      "grad_norm": 0.5069930553436279,
      "learning_rate": 4.949648337008425e-06,
      "loss": 0.5419,
      "step": 175
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 2.1186537742614746,
      "learning_rate": 4.948301352324674e-06,
      "loss": 0.5361,
      "step": 176
    },
    {
      "epoch": 0.8322884012539185,
      "grad_norm": 0.4984792470932007,
      "learning_rate": 4.946936775887281e-06,
      "loss": 0.5315,
      "step": 177
    },
    {
      "epoch": 0.8369905956112853,
      "grad_norm": 0.49590349197387695,
      "learning_rate": 4.945554617501124e-06,
      "loss": 0.5258,
      "step": 178
    },
    {
      "epoch": 0.841692789968652,
      "grad_norm": 0.48108625411987305,
      "learning_rate": 4.944154887097411e-06,
      "loss": 0.5566,
      "step": 179
    },
    {
      "epoch": 0.8463949843260188,
      "grad_norm": 0.5167516469955444,
      "learning_rate": 4.942737594733608e-06,
      "loss": 0.5285,
      "step": 180
    },
    {
      "epoch": 0.8510971786833855,
      "grad_norm": 0.4649943709373474,
      "learning_rate": 4.941302750593373e-06,
      "loss": 0.5443,
      "step": 181
    },
    {
      "epoch": 0.8557993730407524,
      "grad_norm": 0.5184140205383301,
      "learning_rate": 4.939850364986475e-06,
      "loss": 0.4859,
      "step": 182
    },
    {
      "epoch": 0.8605015673981191,
      "grad_norm": 0.6746440529823303,
      "learning_rate": 4.938380448348725e-06,
      "loss": 0.4947,
      "step": 183
    },
    {
      "epoch": 0.8652037617554859,
      "grad_norm": 0.5150278806686401,
      "learning_rate": 4.9368930112419e-06,
      "loss": 0.5351,
      "step": 184
    },
    {
      "epoch": 0.8699059561128527,
      "grad_norm": 0.47614121437072754,
      "learning_rate": 4.935388064353665e-06,
      "loss": 0.5362,
      "step": 185
    },
    {
      "epoch": 0.8746081504702194,
      "grad_norm": 0.5044179558753967,
      "learning_rate": 4.9338656184975e-06,
      "loss": 0.532,
      "step": 186
    },
    {
      "epoch": 0.8793103448275862,
      "grad_norm": 0.4887774884700775,
      "learning_rate": 4.932325684612618e-06,
      "loss": 0.5448,
      "step": 187
    },
    {
      "epoch": 0.8840125391849529,
      "grad_norm": 0.49574583768844604,
      "learning_rate": 4.93076827376389e-06,
      "loss": 0.5485,
      "step": 188
    },
    {
      "epoch": 0.8887147335423198,
      "grad_norm": 0.5294933915138245,
      "learning_rate": 4.9291933971417635e-06,
      "loss": 0.5441,
      "step": 189
    },
    {
      "epoch": 0.8934169278996865,
      "grad_norm": 0.4594694972038269,
      "learning_rate": 4.9276010660621835e-06,
      "loss": 0.5322,
      "step": 190
    },
    {
      "epoch": 0.8981191222570533,
      "grad_norm": 0.47507232427597046,
      "learning_rate": 4.925991291966508e-06,
      "loss": 0.5103,
      "step": 191
    },
    {
      "epoch": 0.9028213166144201,
      "grad_norm": 0.4808727204799652,
      "learning_rate": 4.92436408642143e-06,
      "loss": 0.5479,
      "step": 192
    },
    {
      "epoch": 0.9075235109717869,
      "grad_norm": 0.481921523809433,
      "learning_rate": 4.9227194611188934e-06,
      "loss": 0.5242,
      "step": 193
    },
    {
      "epoch": 0.9122257053291536,
      "grad_norm": 0.6387396454811096,
      "learning_rate": 4.921057427876007e-06,
      "loss": 0.4937,
      "step": 194
    },
    {
      "epoch": 0.9169278996865203,
      "grad_norm": 0.5185182094573975,
      "learning_rate": 4.919377998634959e-06,
      "loss": 0.5515,
      "step": 195
    },
    {
      "epoch": 0.9216300940438872,
      "grad_norm": 0.5029557943344116,
      "learning_rate": 4.917681185462934e-06,
      "loss": 0.5371,
      "step": 196
    },
    {
      "epoch": 0.9263322884012539,
      "grad_norm": 0.5311322808265686,
      "learning_rate": 4.915967000552028e-06,
      "loss": 0.5275,
      "step": 197
    },
    {
      "epoch": 0.9310344827586207,
      "grad_norm": 0.46653756499290466,
      "learning_rate": 4.914235456219154e-06,
      "loss": 0.5208,
      "step": 198
    },
    {
      "epoch": 0.9357366771159875,
      "grad_norm": 0.4970494508743286,
      "learning_rate": 4.912486564905959e-06,
      "loss": 0.549,
      "step": 199
    },
    {
      "epoch": 0.9404388714733543,
      "grad_norm": 0.5315521359443665,
      "learning_rate": 4.910720339178735e-06,
      "loss": 0.5314,
      "step": 200
    },
    {
      "epoch": 0.945141065830721,
      "grad_norm": 0.5144534707069397,
      "learning_rate": 4.908936791728323e-06,
      "loss": 0.5372,
      "step": 201
    },
    {
      "epoch": 0.9498432601880877,
      "grad_norm": 0.6076200008392334,
      "learning_rate": 4.907135935370027e-06,
      "loss": 0.5353,
      "step": 202
    },
    {
      "epoch": 0.9545454545454546,
      "grad_norm": 0.5259833931922913,
      "learning_rate": 4.905317783043523e-06,
      "loss": 0.5442,
      "step": 203
    },
    {
      "epoch": 0.9592476489028213,
      "grad_norm": 0.4931281507015228,
      "learning_rate": 4.9034823478127605e-06,
      "loss": 0.5246,
      "step": 204
    },
    {
      "epoch": 0.9639498432601881,
      "grad_norm": 0.4909270405769348,
      "learning_rate": 4.901629642865872e-06,
      "loss": 0.5011,
      "step": 205
    },
    {
      "epoch": 0.9686520376175548,
      "grad_norm": 0.581427276134491,
      "learning_rate": 4.89975968151508e-06,
      "loss": 0.5253,
      "step": 206
    },
    {
      "epoch": 0.9733542319749217,
      "grad_norm": 0.5055932998657227,
      "learning_rate": 4.8978724771965965e-06,
      "loss": 0.5152,
      "step": 207
    },
    {
      "epoch": 0.9780564263322884,
      "grad_norm": 0.5337440371513367,
      "learning_rate": 4.895968043470532e-06,
      "loss": 0.5321,
      "step": 208
    },
    {
      "epoch": 0.9827586206896551,
      "grad_norm": 0.5171258449554443,
      "learning_rate": 4.894046394020794e-06,
      "loss": 0.4973,
      "step": 209
    },
    {
      "epoch": 0.987460815047022,
      "grad_norm": 0.4880523085594177,
      "learning_rate": 4.892107542654988e-06,
      "loss": 0.529,
      "step": 210
    },
    {
      "epoch": 0.9921630094043887,
      "grad_norm": 0.4594219923019409,
      "learning_rate": 4.890151503304325e-06,
      "loss": 0.5502,
      "step": 211
    },
    {
      "epoch": 0.9968652037617555,
      "grad_norm": 0.5071013569831848,
      "learning_rate": 4.88817829002351e-06,
      "loss": 0.5258,
      "step": 212
    },
    {
      "epoch": 1.0047021943573669,
      "grad_norm": 0.953562319278717,
      "learning_rate": 4.886187916990653e-06,
      "loss": 1.0585,
      "step": 213
    },
    {
      "epoch": 1.0094043887147335,
      "grad_norm": 0.46260955929756165,
      "learning_rate": 4.884180398507163e-06,
      "loss": 0.5036,
      "step": 214
    },
    {
      "epoch": 1.0141065830721003,
      "grad_norm": 0.46361738443374634,
      "learning_rate": 4.882155748997636e-06,
      "loss": 0.4975,
      "step": 215
    },
    {
      "epoch": 1.0188087774294672,
      "grad_norm": 0.47986194491386414,
      "learning_rate": 4.8801139830097685e-06,
      "loss": 0.5039,
      "step": 216
    },
    {
      "epoch": 1.0235109717868338,
      "grad_norm": 0.49743711948394775,
      "learning_rate": 4.878055115214238e-06,
      "loss": 0.5139,
      "step": 217
    },
    {
      "epoch": 1.0282131661442007,
      "grad_norm": 0.5503370761871338,
      "learning_rate": 4.875979160404607e-06,
      "loss": 0.5117,
      "step": 218
    },
    {
      "epoch": 1.0329153605015673,
      "grad_norm": 0.47498467564582825,
      "learning_rate": 4.873886133497209e-06,
      "loss": 0.5195,
      "step": 219
    },
    {
      "epoch": 1.0376175548589341,
      "grad_norm": 0.4586140513420105,
      "learning_rate": 4.87177604953105e-06,
      "loss": 0.5168,
      "step": 220
    },
    {
      "epoch": 1.042319749216301,
      "grad_norm": 0.5042305588722229,
      "learning_rate": 4.869648923667694e-06,
      "loss": 0.4693,
      "step": 221
    },
    {
      "epoch": 1.0470219435736676,
      "grad_norm": 0.6340722441673279,
      "learning_rate": 4.867504771191154e-06,
      "loss": 0.4945,
      "step": 222
    },
    {
      "epoch": 1.0517241379310345,
      "grad_norm": 0.4741908311843872,
      "learning_rate": 4.865343607507788e-06,
      "loss": 0.5027,
      "step": 223
    },
    {
      "epoch": 1.0564263322884013,
      "grad_norm": 0.6638283133506775,
      "learning_rate": 4.86316544814618e-06,
      "loss": 0.5191,
      "step": 224
    },
    {
      "epoch": 1.061128526645768,
      "grad_norm": 0.7720419764518738,
      "learning_rate": 4.860970308757038e-06,
      "loss": 0.4634,
      "step": 225
    },
    {
      "epoch": 1.0658307210031348,
      "grad_norm": 0.5258729457855225,
      "learning_rate": 4.858758205113072e-06,
      "loss": 0.4924,
      "step": 226
    },
    {
      "epoch": 1.0705329153605017,
      "grad_norm": 0.5001716017723083,
      "learning_rate": 4.856529153108888e-06,
      "loss": 0.5241,
      "step": 227
    },
    {
      "epoch": 1.0752351097178683,
      "grad_norm": 0.5368038415908813,
      "learning_rate": 4.854283168760868e-06,
      "loss": 0.5031,
      "step": 228
    },
    {
      "epoch": 1.0799373040752351,
      "grad_norm": 0.5357080101966858,
      "learning_rate": 4.85202026820706e-06,
      "loss": 0.5003,
      "step": 229
    },
    {
      "epoch": 1.084639498432602,
      "grad_norm": 0.5052481293678284,
      "learning_rate": 4.84974046770706e-06,
      "loss": 0.5375,
      "step": 230
    },
    {
      "epoch": 1.0893416927899686,
      "grad_norm": 0.5544849634170532,
      "learning_rate": 4.847443783641893e-06,
      "loss": 0.4472,
      "step": 231
    },
    {
      "epoch": 1.0940438871473355,
      "grad_norm": 0.49668335914611816,
      "learning_rate": 4.845130232513901e-06,
      "loss": 0.4934,
      "step": 232
    },
    {
      "epoch": 1.098746081504702,
      "grad_norm": 0.5318028330802917,
      "learning_rate": 4.842799830946615e-06,
      "loss": 0.4903,
      "step": 233
    },
    {
      "epoch": 1.103448275862069,
      "grad_norm": 0.48978278040885925,
      "learning_rate": 4.840452595684646e-06,
      "loss": 0.4864,
      "step": 234
    },
    {
      "epoch": 1.1081504702194358,
      "grad_norm": 0.47105321288108826,
      "learning_rate": 4.83808854359356e-06,
      "loss": 0.4805,
      "step": 235
    },
    {
      "epoch": 1.1128526645768024,
      "grad_norm": 0.6040552854537964,
      "learning_rate": 4.835707691659753e-06,
      "loss": 0.4839,
      "step": 236
    },
    {
      "epoch": 1.1175548589341693,
      "grad_norm": 0.501312255859375,
      "learning_rate": 4.8333100569903365e-06,
      "loss": 0.495,
      "step": 237
    },
    {
      "epoch": 1.1222570532915361,
      "grad_norm": 0.48165130615234375,
      "learning_rate": 4.8308956568130094e-06,
      "loss": 0.5144,
      "step": 238
    },
    {
      "epoch": 1.1269592476489028,
      "grad_norm": 0.5097485780715942,
      "learning_rate": 4.828464508475934e-06,
      "loss": 0.5081,
      "step": 239
    },
    {
      "epoch": 1.1316614420062696,
      "grad_norm": 0.49503275752067566,
      "learning_rate": 4.826016629447616e-06,
      "loss": 0.5103,
      "step": 240
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 0.5472146272659302,
      "learning_rate": 4.823552037316775e-06,
      "loss": 0.4857,
      "step": 241
    },
    {
      "epoch": 1.141065830721003,
      "grad_norm": 0.5358877182006836,
      "learning_rate": 4.821070749792218e-06,
      "loss": 0.5418,
      "step": 242
    },
    {
      "epoch": 1.14576802507837,
      "grad_norm": 0.4832223951816559,
      "learning_rate": 4.818572784702713e-06,
      "loss": 0.5121,
      "step": 243
    },
    {
      "epoch": 1.1504702194357366,
      "grad_norm": 0.928979218006134,
      "learning_rate": 4.816058159996863e-06,
      "loss": 0.5218,
      "step": 244
    },
    {
      "epoch": 1.1551724137931034,
      "grad_norm": 0.46563178300857544,
      "learning_rate": 4.813526893742972e-06,
      "loss": 0.5045,
      "step": 245
    },
    {
      "epoch": 1.1598746081504703,
      "grad_norm": 0.5262567400932312,
      "learning_rate": 4.810979004128924e-06,
      "loss": 0.4984,
      "step": 246
    },
    {
      "epoch": 1.164576802507837,
      "grad_norm": 0.4928685426712036,
      "learning_rate": 4.808414509462042e-06,
      "loss": 0.5183,
      "step": 247
    },
    {
      "epoch": 1.1692789968652038,
      "grad_norm": 0.8799586892127991,
      "learning_rate": 4.80583342816896e-06,
      "loss": 0.4851,
      "step": 248
    },
    {
      "epoch": 1.1739811912225706,
      "grad_norm": 0.5468968749046326,
      "learning_rate": 4.803235778795496e-06,
      "loss": 0.5242,
      "step": 249
    },
    {
      "epoch": 1.1786833855799372,
      "grad_norm": 0.49233874678611755,
      "learning_rate": 4.800621580006511e-06,
      "loss": 0.4687,
      "step": 250
    },
    {
      "epoch": 1.183385579937304,
      "grad_norm": 0.4705159366130829,
      "learning_rate": 4.797990850585782e-06,
      "loss": 0.5166,
      "step": 251
    },
    {
      "epoch": 1.188087774294671,
      "grad_norm": 0.49235984683036804,
      "learning_rate": 4.79534360943586e-06,
      "loss": 0.4964,
      "step": 252
    },
    {
      "epoch": 1.1927899686520376,
      "grad_norm": 0.5998479127883911,
      "learning_rate": 4.792679875577937e-06,
      "loss": 0.4816,
      "step": 253
    },
    {
      "epoch": 1.1974921630094044,
      "grad_norm": 0.6658573746681213,
      "learning_rate": 4.789999668151714e-06,
      "loss": 0.5157,
      "step": 254
    },
    {
      "epoch": 1.2021943573667713,
      "grad_norm": 0.47048208117485046,
      "learning_rate": 4.7873030064152545e-06,
      "loss": 0.4957,
      "step": 255
    },
    {
      "epoch": 1.206896551724138,
      "grad_norm": 0.47910359501838684,
      "learning_rate": 4.784589909744856e-06,
      "loss": 0.4906,
      "step": 256
    },
    {
      "epoch": 1.2115987460815048,
      "grad_norm": 0.6204367876052856,
      "learning_rate": 4.7818603976349005e-06,
      "loss": 0.5032,
      "step": 257
    },
    {
      "epoch": 1.2163009404388714,
      "grad_norm": 0.48487767577171326,
      "learning_rate": 4.779114489697724e-06,
      "loss": 0.4986,
      "step": 258
    },
    {
      "epoch": 1.2210031347962382,
      "grad_norm": 0.5075157284736633,
      "learning_rate": 4.776352205663469e-06,
      "loss": 0.5021,
      "step": 259
    },
    {
      "epoch": 1.225705329153605,
      "grad_norm": 0.4977814257144928,
      "learning_rate": 4.773573565379947e-06,
      "loss": 0.5132,
      "step": 260
    },
    {
      "epoch": 1.2304075235109717,
      "grad_norm": 0.515428900718689,
      "learning_rate": 4.770778588812489e-06,
      "loss": 0.4783,
      "step": 261
    },
    {
      "epoch": 1.2351097178683386,
      "grad_norm": 0.5740492939949036,
      "learning_rate": 4.7679672960438135e-06,
      "loss": 0.505,
      "step": 262
    },
    {
      "epoch": 1.2398119122257054,
      "grad_norm": 0.46476778388023376,
      "learning_rate": 4.765139707273872e-06,
      "loss": 0.4925,
      "step": 263
    },
    {
      "epoch": 1.244514106583072,
      "grad_norm": 0.4873107373714447,
      "learning_rate": 4.762295842819707e-06,
      "loss": 0.5057,
      "step": 264
    },
    {
      "epoch": 1.249216300940439,
      "grad_norm": 0.5337681174278259,
      "learning_rate": 4.759435723115308e-06,
      "loss": 0.4709,
      "step": 265
    },
    {
      "epoch": 1.2539184952978055,
      "grad_norm": 0.5014109015464783,
      "learning_rate": 4.756559368711463e-06,
      "loss": 0.5074,
      "step": 266
    },
    {
      "epoch": 1.2586206896551724,
      "grad_norm": 0.4641067385673523,
      "learning_rate": 4.75366680027561e-06,
      "loss": 0.4882,
      "step": 267
    },
    {
      "epoch": 1.2633228840125392,
      "grad_norm": 0.5014533996582031,
      "learning_rate": 4.7507580385916906e-06,
      "loss": 0.4911,
      "step": 268
    },
    {
      "epoch": 1.2680250783699059,
      "grad_norm": 0.7282389402389526,
      "learning_rate": 4.747833104559999e-06,
      "loss": 0.4674,
      "step": 269
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 0.44998112320899963,
      "learning_rate": 4.744892019197033e-06,
      "loss": 0.4818,
      "step": 270
    },
    {
      "epoch": 1.2774294670846396,
      "grad_norm": 0.48868605494499207,
      "learning_rate": 4.74193480363534e-06,
      "loss": 0.4891,
      "step": 271
    },
    {
      "epoch": 1.2821316614420062,
      "grad_norm": 0.49108532071113586,
      "learning_rate": 4.738961479123373e-06,
      "loss": 0.497,
      "step": 272
    },
    {
      "epoch": 1.286833855799373,
      "grad_norm": 0.49707749485969543,
      "learning_rate": 4.735972067025326e-06,
      "loss": 0.5044,
      "step": 273
    },
    {
      "epoch": 1.29153605015674,
      "grad_norm": 0.5466123223304749,
      "learning_rate": 4.732966588820991e-06,
      "loss": 0.4966,
      "step": 274
    },
    {
      "epoch": 1.2962382445141065,
      "grad_norm": 0.4684564769268036,
      "learning_rate": 4.729945066105599e-06,
      "loss": 0.475,
      "step": 275
    },
    {
      "epoch": 1.3009404388714734,
      "grad_norm": 0.5143007636070251,
      "learning_rate": 4.726907520589664e-06,
      "loss": 0.4669,
      "step": 276
    },
    {
      "epoch": 1.3056426332288402,
      "grad_norm": 0.5112866163253784,
      "learning_rate": 4.72385397409883e-06,
      "loss": 0.5085,
      "step": 277
    },
    {
      "epoch": 1.3103448275862069,
      "grad_norm": 0.5432227849960327,
      "learning_rate": 4.720784448573712e-06,
      "loss": 0.5003,
      "step": 278
    },
    {
      "epoch": 1.3150470219435737,
      "grad_norm": 0.5111671686172485,
      "learning_rate": 4.717698966069739e-06,
      "loss": 0.5305,
      "step": 279
    },
    {
      "epoch": 1.3197492163009406,
      "grad_norm": 0.5037922859191895,
      "learning_rate": 4.7145975487569965e-06,
      "loss": 0.5081,
      "step": 280
    },
    {
      "epoch": 1.3244514106583072,
      "grad_norm": 0.7806075215339661,
      "learning_rate": 4.711480218920064e-06,
      "loss": 0.4698,
      "step": 281
    },
    {
      "epoch": 1.329153605015674,
      "grad_norm": 0.5198766589164734,
      "learning_rate": 4.708346998957859e-06,
      "loss": 0.5188,
      "step": 282
    },
    {
      "epoch": 1.3338557993730409,
      "grad_norm": 0.7277726531028748,
      "learning_rate": 4.705197911383473e-06,
      "loss": 0.4893,
      "step": 283
    },
    {
      "epoch": 1.3385579937304075,
      "grad_norm": 0.4980437457561493,
      "learning_rate": 4.7020329788240115e-06,
      "loss": 0.476,
      "step": 284
    },
    {
      "epoch": 1.3432601880877744,
      "grad_norm": 0.5318494439125061,
      "learning_rate": 4.6988522240204325e-06,
      "loss": 0.4853,
      "step": 285
    },
    {
      "epoch": 1.347962382445141,
      "grad_norm": 0.5113736391067505,
      "learning_rate": 4.695655669827377e-06,
      "loss": 0.5005,
      "step": 286
    },
    {
      "epoch": 1.3526645768025078,
      "grad_norm": 0.7522029876708984,
      "learning_rate": 4.6924433392130135e-06,
      "loss": 0.4924,
      "step": 287
    },
    {
      "epoch": 1.3573667711598745,
      "grad_norm": 0.4861883521080017,
      "learning_rate": 4.689215255258866e-06,
      "loss": 0.5099,
      "step": 288
    },
    {
      "epoch": 1.3620689655172413,
      "grad_norm": 0.46011000871658325,
      "learning_rate": 4.685971441159653e-06,
      "loss": 0.4792,
      "step": 289
    },
    {
      "epoch": 1.3667711598746082,
      "grad_norm": 0.5154278874397278,
      "learning_rate": 4.682711920223115e-06,
      "loss": 0.4781,
      "step": 290
    },
    {
      "epoch": 1.3714733542319748,
      "grad_norm": 0.45991218090057373,
      "learning_rate": 4.679436715869856e-06,
      "loss": 0.4989,
      "step": 291
    },
    {
      "epoch": 1.3761755485893417,
      "grad_norm": 0.49241772294044495,
      "learning_rate": 4.676145851633166e-06,
      "loss": 0.5159,
      "step": 292
    },
    {
      "epoch": 1.3808777429467085,
      "grad_norm": 0.523045003414154,
      "learning_rate": 4.672839351158856e-06,
      "loss": 0.5012,
      "step": 293
    },
    {
      "epoch": 1.3855799373040751,
      "grad_norm": 0.5598923563957214,
      "learning_rate": 4.669517238205089e-06,
      "loss": 0.4855,
      "step": 294
    },
    {
      "epoch": 1.390282131661442,
      "grad_norm": 0.507128894329071,
      "learning_rate": 4.666179536642208e-06,
      "loss": 0.4845,
      "step": 295
    },
    {
      "epoch": 1.3949843260188088,
      "grad_norm": 0.721315860748291,
      "learning_rate": 4.662826270452565e-06,
      "loss": 0.4817,
      "step": 296
    },
    {
      "epoch": 1.3996865203761755,
      "grad_norm": 0.560897171497345,
      "learning_rate": 4.659457463730347e-06,
      "loss": 0.4912,
      "step": 297
    },
    {
      "epoch": 1.4043887147335423,
      "grad_norm": 0.5121816396713257,
      "learning_rate": 4.6560731406814056e-06,
      "loss": 0.5061,
      "step": 298
    },
    {
      "epoch": 1.4090909090909092,
      "grad_norm": 0.4652915596961975,
      "learning_rate": 4.65267332562308e-06,
      "loss": 0.5088,
      "step": 299
    },
    {
      "epoch": 1.4137931034482758,
      "grad_norm": 0.5149025321006775,
      "learning_rate": 4.649258042984026e-06,
      "loss": 0.5071,
      "step": 300
    },
    {
      "epoch": 1.4184952978056427,
      "grad_norm": 0.4650140106678009,
      "learning_rate": 4.6458273173040395e-06,
      "loss": 0.4631,
      "step": 301
    },
    {
      "epoch": 1.4231974921630095,
      "grad_norm": 0.9061777591705322,
      "learning_rate": 4.642381173233874e-06,
      "loss": 0.5008,
      "step": 302
    },
    {
      "epoch": 1.4278996865203761,
      "grad_norm": 0.484729140996933,
      "learning_rate": 4.638919635535073e-06,
      "loss": 0.4564,
      "step": 303
    },
    {
      "epoch": 1.432601880877743,
      "grad_norm": 1.1061334609985352,
      "learning_rate": 4.635442729079788e-06,
      "loss": 0.4843,
      "step": 304
    },
    {
      "epoch": 1.4373040752351098,
      "grad_norm": 0.5028228163719177,
      "learning_rate": 4.6319504788505956e-06,
      "loss": 0.4794,
      "step": 305
    },
    {
      "epoch": 1.4420062695924765,
      "grad_norm": 0.49323561787605286,
      "learning_rate": 4.628442909940325e-06,
      "loss": 0.4901,
      "step": 306
    },
    {
      "epoch": 1.4467084639498433,
      "grad_norm": 0.5237039923667908,
      "learning_rate": 4.624920047551874e-06,
      "loss": 0.5079,
      "step": 307
    },
    {
      "epoch": 1.4514106583072102,
      "grad_norm": 0.5407230854034424,
      "learning_rate": 4.621381916998029e-06,
      "loss": 0.4742,
      "step": 308
    },
    {
      "epoch": 1.4561128526645768,
      "grad_norm": 0.4688400328159332,
      "learning_rate": 4.6178285437012806e-06,
      "loss": 0.5096,
      "step": 309
    },
    {
      "epoch": 1.4608150470219436,
      "grad_norm": 0.6071098446846008,
      "learning_rate": 4.6142599531936435e-06,
      "loss": 0.4721,
      "step": 310
    },
    {
      "epoch": 1.4655172413793103,
      "grad_norm": 0.47543954849243164,
      "learning_rate": 4.610676171116475e-06,
      "loss": 0.4924,
      "step": 311
    },
    {
      "epoch": 1.4702194357366771,
      "grad_norm": 0.47207143902778625,
      "learning_rate": 4.607077223220286e-06,
      "loss": 0.4965,
      "step": 312
    },
    {
      "epoch": 1.4749216300940438,
      "grad_norm": 0.5051419734954834,
      "learning_rate": 4.603463135364556e-06,
      "loss": 0.4671,
      "step": 313
    },
    {
      "epoch": 1.4796238244514106,
      "grad_norm": 0.5427581667900085,
      "learning_rate": 4.5998339335175555e-06,
      "loss": 0.4896,
      "step": 314
    },
    {
      "epoch": 1.4843260188087775,
      "grad_norm": 0.8648138642311096,
      "learning_rate": 4.596189643756147e-06,
      "loss": 0.4666,
      "step": 315
    },
    {
      "epoch": 1.489028213166144,
      "grad_norm": 0.5034400224685669,
      "learning_rate": 4.592530292265609e-06,
      "loss": 0.4849,
      "step": 316
    },
    {
      "epoch": 1.493730407523511,
      "grad_norm": 0.5227445960044861,
      "learning_rate": 4.58885590533944e-06,
      "loss": 0.4942,
      "step": 317
    },
    {
      "epoch": 1.4984326018808778,
      "grad_norm": 0.4640982151031494,
      "learning_rate": 4.585166509379173e-06,
      "loss": 0.5165,
      "step": 318
    },
    {
      "epoch": 1.5031347962382444,
      "grad_norm": 0.5094525814056396,
      "learning_rate": 4.581462130894186e-06,
      "loss": 0.4934,
      "step": 319
    },
    {
      "epoch": 1.5078369905956113,
      "grad_norm": 0.5064478516578674,
      "learning_rate": 4.57774279650151e-06,
      "loss": 0.4847,
      "step": 320
    },
    {
      "epoch": 1.5125391849529781,
      "grad_norm": 0.5131570100784302,
      "learning_rate": 4.574008532925638e-06,
      "loss": 0.5111,
      "step": 321
    },
    {
      "epoch": 1.5172413793103448,
      "grad_norm": 0.5142849087715149,
      "learning_rate": 4.570259366998336e-06,
      "loss": 0.4953,
      "step": 322
    },
    {
      "epoch": 1.5219435736677116,
      "grad_norm": 0.4943976402282715,
      "learning_rate": 4.566495325658445e-06,
      "loss": 0.5235,
      "step": 323
    },
    {
      "epoch": 1.5266457680250785,
      "grad_norm": 0.46294504404067993,
      "learning_rate": 4.5627164359516915e-06,
      "loss": 0.505,
      "step": 324
    },
    {
      "epoch": 1.531347962382445,
      "grad_norm": 0.6234234571456909,
      "learning_rate": 4.558922725030491e-06,
      "loss": 0.4776,
      "step": 325
    },
    {
      "epoch": 1.536050156739812,
      "grad_norm": 0.5395891666412354,
      "learning_rate": 4.555114220153755e-06,
      "loss": 0.4313,
      "step": 326
    },
    {
      "epoch": 1.5407523510971788,
      "grad_norm": 0.8332454562187195,
      "learning_rate": 4.551290948686693e-06,
      "loss": 0.5141,
      "step": 327
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.4640675485134125,
      "learning_rate": 4.547452938100615e-06,
      "loss": 0.5178,
      "step": 328
    },
    {
      "epoch": 1.5501567398119123,
      "grad_norm": 0.48269912600517273,
      "learning_rate": 4.54360021597274e-06,
      "loss": 0.4955,
      "step": 329
    },
    {
      "epoch": 1.5548589341692791,
      "grad_norm": 0.5193467140197754,
      "learning_rate": 4.539732809985989e-06,
      "loss": 0.4864,
      "step": 330
    },
    {
      "epoch": 1.5595611285266457,
      "grad_norm": 0.4845351576805115,
      "learning_rate": 4.535850747928796e-06,
      "loss": 0.5003,
      "step": 331
    },
    {
      "epoch": 1.5642633228840124,
      "grad_norm": 0.5264540314674377,
      "learning_rate": 4.531954057694897e-06,
      "loss": 0.4828,
      "step": 332
    },
    {
      "epoch": 1.5689655172413794,
      "grad_norm": 0.4795820415019989,
      "learning_rate": 4.5280427672831414e-06,
      "loss": 0.4904,
      "step": 333
    },
    {
      "epoch": 1.573667711598746,
      "grad_norm": 0.5169357061386108,
      "learning_rate": 4.524116904797281e-06,
      "loss": 0.4661,
      "step": 334
    },
    {
      "epoch": 1.5783699059561127,
      "grad_norm": 0.4765758514404297,
      "learning_rate": 4.520176498445774e-06,
      "loss": 0.4793,
      "step": 335
    },
    {
      "epoch": 1.5830721003134798,
      "grad_norm": 0.5529272556304932,
      "learning_rate": 4.516221576541581e-06,
      "loss": 0.4793,
      "step": 336
    },
    {
      "epoch": 1.5877742946708464,
      "grad_norm": 0.594944179058075,
      "learning_rate": 4.512252167501959e-06,
      "loss": 0.4786,
      "step": 337
    },
    {
      "epoch": 1.592476489028213,
      "grad_norm": 0.4921863079071045,
      "learning_rate": 4.508268299848262e-06,
      "loss": 0.4854,
      "step": 338
    },
    {
      "epoch": 1.59717868338558,
      "grad_norm": 0.4983494281768799,
      "learning_rate": 4.50427000220573e-06,
      "loss": 0.4999,
      "step": 339
    },
    {
      "epoch": 1.6018808777429467,
      "grad_norm": 0.48530513048171997,
      "learning_rate": 4.50025730330329e-06,
      "loss": 0.477,
      "step": 340
    },
    {
      "epoch": 1.6065830721003134,
      "grad_norm": 0.47819897532463074,
      "learning_rate": 4.4962302319733445e-06,
      "loss": 0.4933,
      "step": 341
    },
    {
      "epoch": 1.6112852664576802,
      "grad_norm": 0.608711838722229,
      "learning_rate": 4.492188817151565e-06,
      "loss": 0.5285,
      "step": 342
    },
    {
      "epoch": 1.615987460815047,
      "grad_norm": 0.5040555000305176,
      "learning_rate": 4.488133087876688e-06,
      "loss": 0.467,
      "step": 343
    },
    {
      "epoch": 1.6206896551724137,
      "grad_norm": 0.4925503432750702,
| "learning_rate": 4.484063073290301e-06, | |
| "loss": 0.4651, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.6253918495297806, | |
| "grad_norm": 0.5364562273025513, | |
| "learning_rate": 4.479978802636637e-06, | |
| "loss": 0.5002, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.6300940438871474, | |
| "grad_norm": 0.5685595870018005, | |
| "learning_rate": 4.475880305262362e-06, | |
| "loss": 0.504, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.634796238244514, | |
| "grad_norm": 0.4894132912158966, | |
| "learning_rate": 4.471767610616366e-06, | |
| "loss": 0.4961, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.6394984326018809, | |
| "grad_norm": 0.5791006088256836, | |
| "learning_rate": 4.467640748249549e-06, | |
| "loss": 0.4706, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.6442006269592477, | |
| "grad_norm": 0.4814227521419525, | |
| "learning_rate": 4.4634997478146125e-06, | |
| "loss": 0.4895, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.6489028213166144, | |
| "grad_norm": 0.4842629134654999, | |
| "learning_rate": 4.459344639065842e-06, | |
| "loss": 0.482, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.6536050156739812, | |
| "grad_norm": 0.44065847992897034, | |
| "learning_rate": 4.455175451858897e-06, | |
| "loss": 0.4912, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.658307210031348, | |
| "grad_norm": 0.5212785601615906, | |
| "learning_rate": 4.450992216150592e-06, | |
| "loss": 0.4995, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.6630094043887147, | |
| "grad_norm": 0.6393409967422485, | |
| "learning_rate": 4.446794961998689e-06, | |
| "loss": 0.4675, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.6677115987460815, | |
| "grad_norm": 0.4786463975906372, | |
| "learning_rate": 4.442583719561671e-06, | |
| "loss": 0.4938, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.6724137931034484, | |
| "grad_norm": 0.5183210968971252, | |
| "learning_rate": 4.438358519098536e-06, | |
| "loss": 0.5032, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.677115987460815, | |
| "grad_norm": 0.7055342197418213, | |
| "learning_rate": 4.4341193909685685e-06, | |
| "loss": 0.4861, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.6818181818181817, | |
| "grad_norm": 0.4798656105995178, | |
| "learning_rate": 4.429866365631134e-06, | |
| "loss": 0.4917, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.6865203761755487, | |
| "grad_norm": 0.5035173296928406, | |
| "learning_rate": 4.425599473645447e-06, | |
| "loss": 0.4834, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.6912225705329154, | |
| "grad_norm": 0.5660416483879089, | |
| "learning_rate": 4.421318745670364e-06, | |
| "loss": 0.4829, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.695924764890282, | |
| "grad_norm": 0.7084828019142151, | |
| "learning_rate": 4.4170242124641524e-06, | |
| "loss": 0.4603, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.700626959247649, | |
| "grad_norm": 0.4513323903083801, | |
| "learning_rate": 4.412715904884277e-06, | |
| "loss": 0.4896, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.7053291536050157, | |
| "grad_norm": 0.48268529772758484, | |
| "learning_rate": 4.4083938538871735e-06, | |
| "loss": 0.4684, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.7100313479623823, | |
| "grad_norm": 0.4861423671245575, | |
| "learning_rate": 4.4040580905280295e-06, | |
| "loss": 0.4878, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.7147335423197492, | |
| "grad_norm": 0.48606055974960327, | |
| "learning_rate": 4.3997086459605586e-06, | |
| "loss": 0.4849, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.719435736677116, | |
| "grad_norm": 0.4912388026714325, | |
| "learning_rate": 4.395345551436779e-06, | |
| "loss": 0.509, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.7241379310344827, | |
| "grad_norm": 0.47986406087875366, | |
| "learning_rate": 4.390968838306788e-06, | |
| "loss": 0.4635, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.7288401253918495, | |
| "grad_norm": 0.5145572423934937, | |
| "learning_rate": 4.386578538018535e-06, | |
| "loss": 0.4631, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.7335423197492164, | |
| "grad_norm": 0.5538941621780396, | |
| "learning_rate": 4.382174682117598e-06, | |
| "loss": 0.5091, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.738244514106583, | |
| "grad_norm": 0.49918535351753235, | |
| "learning_rate": 4.377757302246956e-06, | |
| "loss": 0.4454, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.7429467084639498, | |
| "grad_norm": 0.48130425810813904, | |
| "learning_rate": 4.373326430146762e-06, | |
| "loss": 0.5011, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.7476489028213167, | |
| "grad_norm": 0.8833332657814026, | |
| "learning_rate": 4.368882097654113e-06, | |
| "loss": 0.4961, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.7523510971786833, | |
| "grad_norm": 0.45491841435432434, | |
| "learning_rate": 4.364424336702825e-06, | |
| "loss": 0.4708, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.7570532915360502, | |
| "grad_norm": 0.6433991193771362, | |
| "learning_rate": 4.3599531793232e-06, | |
| "loss": 0.488, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.761755485893417, | |
| "grad_norm": 0.49767547845840454, | |
| "learning_rate": 4.355468657641797e-06, | |
| "loss": 0.4836, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.7664576802507836, | |
| "grad_norm": 0.5577245950698853, | |
| "learning_rate": 4.3509708038812035e-06, | |
| "loss": 0.4879, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.7711598746081505, | |
| "grad_norm": 0.4875679314136505, | |
| "learning_rate": 4.346459650359798e-06, | |
| "loss": 0.4858, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.7758620689655173, | |
| "grad_norm": 0.47319531440734863, | |
| "learning_rate": 4.341935229491525e-06, | |
| "loss": 0.4554, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.780564263322884, | |
| "grad_norm": 0.7505528330802917, | |
| "learning_rate": 4.337397573785659e-06, | |
| "loss": 0.5043, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.7852664576802508, | |
| "grad_norm": 0.49199047684669495, | |
| "learning_rate": 4.332846715846566e-06, | |
| "loss": 0.4712, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.7899686520376177, | |
| "grad_norm": 0.5161461234092712, | |
| "learning_rate": 4.328282688373479e-06, | |
| "loss": 0.4927, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.7946708463949843, | |
| "grad_norm": 0.5262524485588074, | |
| "learning_rate": 4.323705524160258e-06, | |
| "loss": 0.4904, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.799373040752351, | |
| "grad_norm": 0.5441488027572632, | |
| "learning_rate": 4.319115256095149e-06, | |
| "loss": 0.4662, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.804075235109718, | |
| "grad_norm": 0.6795123815536499, | |
| "learning_rate": 4.314511917160557e-06, | |
| "loss": 0.4681, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.8087774294670846, | |
| "grad_norm": 0.47761398553848267, | |
| "learning_rate": 4.3098955404328045e-06, | |
| "loss": 0.4611, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.8134796238244513, | |
| "grad_norm": 0.7907284498214722, | |
| "learning_rate": 4.305266159081895e-06, | |
| "loss": 0.4826, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.8181818181818183, | |
| "grad_norm": 0.52130126953125, | |
| "learning_rate": 4.3006238063712725e-06, | |
| "loss": 0.4671, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.822884012539185, | |
| "grad_norm": 0.8447866439819336, | |
| "learning_rate": 4.295968515657583e-06, | |
| "loss": 0.5014, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.8275862068965516, | |
| "grad_norm": 0.4711958169937134, | |
| "learning_rate": 4.29130032039044e-06, | |
| "loss": 0.4844, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.8322884012539185, | |
| "grad_norm": 0.5012089610099792, | |
| "learning_rate": 4.2866192541121755e-06, | |
| "loss": 0.4747, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.8369905956112853, | |
| "grad_norm": 0.5260939598083496, | |
| "learning_rate": 4.281925350457606e-06, | |
| "loss": 0.4778, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.841692789968652, | |
| "grad_norm": 0.4744386672973633, | |
| "learning_rate": 4.277218643153787e-06, | |
| "loss": 0.4795, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.8463949843260188, | |
| "grad_norm": 0.4848107695579529, | |
| "learning_rate": 4.272499166019771e-06, | |
| "loss": 0.4783, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.8510971786833856, | |
| "grad_norm": 0.48980602622032166, | |
| "learning_rate": 4.267766952966369e-06, | |
| "loss": 0.4665, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.8557993730407523, | |
| "grad_norm": 0.49879398941993713, | |
| "learning_rate": 4.2630220379959006e-06, | |
| "loss": 0.444, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.8605015673981191, | |
| "grad_norm": 0.5382682681083679, | |
| "learning_rate": 4.258264455201953e-06, | |
| "loss": 0.466, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.865203761755486, | |
| "grad_norm": 0.47293514013290405, | |
| "learning_rate": 4.2534942387691335e-06, | |
| "loss": 0.4915, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.8699059561128526, | |
| "grad_norm": 0.4993828237056732, | |
| "learning_rate": 4.248711422972829e-06, | |
| "loss": 0.4771, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.8746081504702194, | |
| "grad_norm": 0.4950089156627655, | |
| "learning_rate": 4.243916042178954e-06, | |
| "loss": 0.4614, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.8793103448275863, | |
| "grad_norm": 0.48008930683135986, | |
| "learning_rate": 4.239108130843709e-06, | |
| "loss": 0.4684, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.884012539184953, | |
| "grad_norm": 0.5422408580780029, | |
| "learning_rate": 4.234287723513326e-06, | |
| "loss": 0.4933, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.8887147335423198, | |
| "grad_norm": 0.5405403971672058, | |
| "learning_rate": 4.229454854823827e-06, | |
| "loss": 0.4912, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.8934169278996866, | |
| "grad_norm": 0.6024321913719177, | |
| "learning_rate": 4.224609559500772e-06, | |
| "loss": 0.504, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.8981191222570533, | |
| "grad_norm": 0.4791870415210724, | |
| "learning_rate": 4.21975187235901e-06, | |
| "loss": 0.4547, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.90282131661442, | |
| "grad_norm": 0.9783840775489807, | |
| "learning_rate": 4.21488182830243e-06, | |
| "loss": 0.483, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.907523510971787, | |
| "grad_norm": 1.0899996757507324, | |
| "learning_rate": 4.209999462323706e-06, | |
| "loss": 0.4603, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.9122257053291536, | |
| "grad_norm": 0.6221780180931091, | |
| "learning_rate": 4.20510480950405e-06, | |
| "loss": 0.4926, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.9169278996865202, | |
| "grad_norm": 0.671617329120636, | |
| "learning_rate": 4.200197905012961e-06, | |
| "loss": 0.4536, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.9216300940438873, | |
| "grad_norm": 0.48824113607406616, | |
| "learning_rate": 4.195278784107965e-06, | |
| "loss": 0.4709, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.926332288401254, | |
| "grad_norm": 0.4700049161911011, | |
| "learning_rate": 4.19034748213437e-06, | |
| "loss": 0.4721, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.9310344827586206, | |
| "grad_norm": 0.5228457450866699, | |
| "learning_rate": 4.185404034525008e-06, | |
| "loss": 0.4644, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.9357366771159876, | |
| "grad_norm": 0.5192087888717651, | |
| "learning_rate": 4.180448476799981e-06, | |
| "loss": 0.5018, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.9404388714733543, | |
| "grad_norm": 0.49069511890411377, | |
| "learning_rate": 4.175480844566404e-06, | |
| "loss": 0.4747, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.9451410658307209, | |
| "grad_norm": 0.45442187786102295, | |
| "learning_rate": 4.170501173518152e-06, | |
| "loss": 0.4683, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.9498432601880877, | |
| "grad_norm": 0.5145336985588074, | |
| "learning_rate": 4.165509499435604e-06, | |
| "loss": 0.4677, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.9545454545454546, | |
| "grad_norm": 0.514231264591217, | |
| "learning_rate": 4.16050585818538e-06, | |
| "loss": 0.4812, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.9592476489028212, | |
| "grad_norm": 0.5052164793014526, | |
| "learning_rate": 4.155490285720092e-06, | |
| "loss": 0.5056, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.963949843260188, | |
| "grad_norm": 0.5336954593658447, | |
| "learning_rate": 4.150462818078079e-06, | |
| "loss": 0.4916, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.968652037617555, | |
| "grad_norm": 0.5100321769714355, | |
| "learning_rate": 4.145423491383153e-06, | |
| "loss": 0.4869, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.9733542319749215, | |
| "grad_norm": 0.5358601212501526, | |
| "learning_rate": 4.14037234184433e-06, | |
| "loss": 0.5041, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.9780564263322884, | |
| "grad_norm": 0.4950590133666992, | |
| "learning_rate": 4.135309405755583e-06, | |
| "loss": 0.4889, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.9827586206896552, | |
| "grad_norm": 0.5121294856071472, | |
| "learning_rate": 4.130234719495574e-06, | |
| "loss": 0.4736, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.9874608150470219, | |
| "grad_norm": 0.5014020800590515, | |
| "learning_rate": 4.125148319527391e-06, | |
| "loss": 0.4635, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.9921630094043887, | |
| "grad_norm": 0.5898067355155945, | |
| "learning_rate": 4.1200502423982904e-06, | |
| "loss": 0.4842, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.9968652037617556, | |
| "grad_norm": 0.535094141960144, | |
| "learning_rate": 4.1149405247394295e-06, | |
| "loss": 0.4885, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.0047021943573666, | |
| "grad_norm": 1.0523930788040161, | |
| "learning_rate": 4.10981920326561e-06, | |
| "loss": 0.9153, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.0094043887147337, | |
| "grad_norm": 0.46556714177131653, | |
| "learning_rate": 4.104686314775009e-06, | |
| "loss": 0.4418, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.0141065830721003, | |
| "grad_norm": 0.6543925404548645, | |
| "learning_rate": 4.099541896148914e-06, | |
| "loss": 0.4504, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.018808777429467, | |
| "grad_norm": 0.5143531560897827, | |
| "learning_rate": 4.094385984351462e-06, | |
| "loss": 0.4452, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.023510971786834, | |
| "grad_norm": 0.5021682977676392, | |
| "learning_rate": 4.0892186164293715e-06, | |
| "loss": 0.4655, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.0282131661442007, | |
| "grad_norm": 0.5309224128723145, | |
| "learning_rate": 4.0840398295116745e-06, | |
| "loss": 0.4531, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.0329153605015673, | |
| "grad_norm": 0.48278284072875977, | |
| "learning_rate": 4.078849660809456e-06, | |
| "loss": 0.4487, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.0376175548589344, | |
| "grad_norm": 0.6067396998405457, | |
| "learning_rate": 4.073648147615579e-06, | |
| "loss": 0.4315, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.042319749216301, | |
| "grad_norm": 0.48894888162612915, | |
| "learning_rate": 4.068435327304421e-06, | |
| "loss": 0.4573, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.0470219435736676, | |
| "grad_norm": 0.5110018253326416, | |
| "learning_rate": 4.063211237331603e-06, | |
| "loss": 0.4555, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.0517241379310347, | |
| "grad_norm": 0.6234620809555054, | |
| "learning_rate": 4.057975915233725e-06, | |
| "loss": 0.4391, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.0564263322884013, | |
| "grad_norm": 0.4931652843952179, | |
| "learning_rate": 4.052729398628089e-06, | |
| "loss": 0.4654, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.061128526645768, | |
| "grad_norm": 0.4912821650505066, | |
| "learning_rate": 4.047471725212437e-06, | |
| "loss": 0.4622, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.0658307210031346, | |
| "grad_norm": 0.575827956199646, | |
| "learning_rate": 4.042202932764673e-06, | |
| "loss": 0.4466, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.0705329153605017, | |
| "grad_norm": 0.4869770109653473, | |
| "learning_rate": 4.036923059142595e-06, | |
| "loss": 0.4487, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.0752351097178683, | |
| "grad_norm": 0.4875548779964447, | |
| "learning_rate": 4.031632142283623e-06, | |
| "loss": 0.4431, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.079937304075235, | |
| "grad_norm": 0.5250763893127441, | |
| "learning_rate": 4.026330220204524e-06, | |
| "loss": 0.4508, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.084639498432602, | |
| "grad_norm": 0.49760839343070984, | |
| "learning_rate": 4.021017331001146e-06, | |
| "loss": 0.4418, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.0893416927899686, | |
| "grad_norm": 0.5596722364425659, | |
| "learning_rate": 4.015693512848131e-06, | |
| "loss": 0.4491, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.0940438871473352, | |
| "grad_norm": 0.5050379037857056, | |
| "learning_rate": 4.0103588039986556e-06, | |
| "loss": 0.452, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.0987460815047023, | |
| "grad_norm": 0.5045352578163147, | |
| "learning_rate": 4.005013242784146e-06, | |
| "loss": 0.4606, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.103448275862069, | |
| "grad_norm": 0.568103015422821, | |
| "learning_rate": 3.999656867614006e-06, | |
| "loss": 0.4492, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.1081504702194356, | |
| "grad_norm": 0.48106372356414795, | |
| "learning_rate": 3.994289716975341e-06, | |
| "loss": 0.4634, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.1128526645768027, | |
| "grad_norm": 0.6244045495986938, | |
| "learning_rate": 3.988911829432682e-06, | |
| "loss": 0.4492, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.1175548589341693, | |
| "grad_norm": 0.514492392539978, | |
| "learning_rate": 3.983523243627706e-06, | |
| "loss": 0.4464, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.122257053291536, | |
| "grad_norm": 0.53345787525177, | |
| "learning_rate": 3.978123998278962e-06, | |
| "loss": 0.4363, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.126959247648903, | |
| "grad_norm": 0.675532877445221, | |
| "learning_rate": 3.97271413218159e-06, | |
| "loss": 0.4651, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.1316614420062696, | |
| "grad_norm": 0.473951518535614, | |
| "learning_rate": 3.9672936842070425e-06, | |
| "loss": 0.4627, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.1363636363636362, | |
| "grad_norm": 0.4946623742580414, | |
| "learning_rate": 3.9618626933028086e-06, | |
| "loss": 0.4664, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.1410658307210033, | |
| "grad_norm": 0.574018120765686, | |
| "learning_rate": 3.956421198492128e-06, | |
| "loss": 0.4482, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.14576802507837, | |
| "grad_norm": 0.5015276074409485, | |
| "learning_rate": 3.950969238873714e-06, | |
| "loss": 0.4473, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.1504702194357366, | |
| "grad_norm": 0.4798969030380249, | |
| "learning_rate": 3.9455068536214765e-06, | |
| "loss": 0.4763, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.1551724137931036, | |
| "grad_norm": 0.5117236375808716, | |
| "learning_rate": 3.9400340819842335e-06, | |
| "loss": 0.4562, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.1598746081504703, | |
| "grad_norm": 0.6655581593513489, | |
| "learning_rate": 3.934550963285432e-06, | |
| "loss": 0.4569, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.164576802507837, | |
| "grad_norm": 0.5741562843322754, | |
| "learning_rate": 3.9290575369228664e-06, | |
| "loss": 0.4292, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.169278996865204, | |
| "grad_norm": 0.5728350281715393, | |
| "learning_rate": 3.923553842368396e-06, | |
| "loss": 0.4686, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.1739811912225706, | |
| "grad_norm": 0.47696346044540405, | |
| "learning_rate": 3.918039919167658e-06, | |
| "loss": 0.4344, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.1786833855799372, | |
| "grad_norm": 0.5124625563621521, | |
| "learning_rate": 3.912515806939786e-06, | |
| "loss": 0.4483, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.183385579937304, | |
| "grad_norm": 1.2648731470108032, | |
| "learning_rate": 3.906981545377124e-06, | |
| "loss": 0.4613, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.188087774294671, | |
| "grad_norm": 0.5044319033622742, | |
| "learning_rate": 3.901437174244943e-06, | |
| "loss": 0.4298, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.1927899686520376, | |
| "grad_norm": 0.6312819123268127, | |
| "learning_rate": 3.895882733381154e-06, | |
| "loss": 0.4471, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.197492163009404, | |
| "grad_norm": 0.5056606531143188, | |
| "learning_rate": 3.890318262696023e-06, | |
| "loss": 0.4663, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.2021943573667713, | |
| "grad_norm": 0.4930380582809448, | |
| "learning_rate": 3.8847438021718805e-06, | |
| "loss": 0.4419, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.206896551724138, | |
| "grad_norm": 0.5264694094657898, | |
| "learning_rate": 3.879159391862839e-06, | |
| "loss": 0.4636, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.2115987460815045, | |
| "grad_norm": 0.517235279083252, | |
| "learning_rate": 3.873565071894503e-06, | |
| "loss": 0.4354, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.2163009404388716, | |
| "grad_norm": 0.5416566729545593, | |
| "learning_rate": 3.86796088246368e-06, | |
| "loss": 0.4317, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.2210031347962382, | |
| "grad_norm": 0.5619626045227051, | |
| "learning_rate": 3.8623468638380905e-06, | |
| "loss": 0.4155, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.225705329153605, | |
| "grad_norm": 0.5955201983451843, | |
| "learning_rate": 3.856723056356085e-06, | |
| "loss": 0.4586, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.230407523510972, | |
| "grad_norm": 0.6263668537139893, | |
| "learning_rate": 3.851089500426346e-06, | |
| "loss": 0.4459, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.2351097178683386, | |
| "grad_norm": 0.525097131729126, | |
| "learning_rate": 3.845446236527605e-06, | |
| "loss": 0.4451, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.239811912225705, | |
| "grad_norm": 0.48124319314956665, | |
| "learning_rate": 3.8397933052083445e-06, | |
| "loss": 0.4626, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.2445141065830723, | |
| "grad_norm": 0.5204625129699707, | |
| "learning_rate": 3.834130747086512e-06, | |
| "loss": 0.4481, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.249216300940439, | |
| "grad_norm": 0.5445339679718018, | |
| "learning_rate": 3.828458602849226e-06, | |
| "loss": 0.4484, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.2539184952978055, | |
| "grad_norm": 0.550906240940094, | |
| "learning_rate": 3.822776913252485e-06, | |
| "loss": 0.4505, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.2586206896551726, | |
| "grad_norm": 2.449854850769043, | |
| "learning_rate": 3.817085719120872e-06, | |
| "loss": 0.402, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.2633228840125392, | |
| "grad_norm": 0.5691458582878113, | |
| "learning_rate": 3.811385061347263e-06, | |
| "loss": 0.4457, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.268025078369906, | |
| "grad_norm": 0.6042170524597168, | |
| "learning_rate": 3.805674980892535e-06, | |
| "loss": 0.4703, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.2727272727272725, | |
| "grad_norm": 0.577621579170227, | |
| "learning_rate": 3.7999555187852667e-06, | |
| "loss": 0.458, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.2774294670846396, | |
| "grad_norm": 0.4807332754135132, | |
| "learning_rate": 3.7942267161214497e-06, | |
| "loss": 0.4454, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.282131661442006, | |
| "grad_norm": 0.5056536793708801, | |
| "learning_rate": 3.7884886140641884e-06, | |
| "loss": 0.4684, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.2868338557993733, | |
| "grad_norm": 0.9068124294281006, | |
| "learning_rate": 3.7827412538434062e-06, | |
| "loss": 0.4322, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.29153605015674, | |
| "grad_norm": 0.49843674898147583, | |
| "learning_rate": 3.7769846767555495e-06, | |
| "loss": 0.4594, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.2962382445141065, | |
| "grad_norm": 0.5128474235534668, | |
| "learning_rate": 3.7712189241632898e-06, | |
| "loss": 0.4549, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.300940438871473, | |
| "grad_norm": 0.5473759770393372, | |
| "learning_rate": 3.7654440374952288e-06, | |
| "loss": 0.4421, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.30564263322884, | |
| "grad_norm": 0.5089896321296692, | |
| "learning_rate": 3.7596600582455976e-06, | |
| "loss": 0.4191, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.310344827586207, | |
| "grad_norm": 0.5044212937355042, | |
| "learning_rate": 3.75386702797396e-06, | |
| "loss": 0.4206, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.3150470219435735, | |
| "grad_norm": 0.46690526604652405, | |
| "learning_rate": 3.7480649883049164e-06, | |
| "loss": 0.4257, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.3197492163009406, | |
| "grad_norm": 0.514756441116333, | |
| "learning_rate": 3.7422539809277993e-06, | |
| "loss": 0.4363, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.324451410658307, | |
| "grad_norm": 0.5000742077827454, | |
| "learning_rate": 3.736434047596379e-06, | |
| "loss": 0.4438, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.329153605015674, | |
| "grad_norm": 0.5082759857177734, | |
| "learning_rate": 3.73060523012856e-06, | |
| "loss": 0.4504, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.333855799373041, | |
| "grad_norm": 0.538390576839447, | |
| "learning_rate": 3.724767570406082e-06, | |
| "loss": 0.4685, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.3385579937304075, | |
| "grad_norm": 0.4855618476867676, | |
| "learning_rate": 3.7189211103742206e-06, | |
| "loss": 0.4257, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.343260188087774, | |
| "grad_norm": 0.5083416104316711, | |
| "learning_rate": 3.7130658920414818e-06, | |
| "loss": 0.4608, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.347962382445141, | |
| "grad_norm": 0.47180086374282837, | |
| "learning_rate": 3.7072019574793034e-06, | |
| "loss": 0.4726, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.352664576802508, | |
| "grad_norm": 0.5352308750152588, | |
| "learning_rate": 3.701329348821752e-06, | |
| "loss": 0.4525, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.3573667711598745, | |
| "grad_norm": 0.49689581990242004, | |
| "learning_rate": 3.695448108265221e-06, | |
| "loss": 0.4378, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.3620689655172415, | |
| "grad_norm": 0.4953297972679138, | |
| "learning_rate": 3.6895582780681254e-06, | |
| "loss": 0.4344, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.366771159874608, | |
| "grad_norm": 0.5224218368530273, | |
| "learning_rate": 3.683659900550598e-06, | |
| "loss": 0.4632, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.371473354231975, | |
| "grad_norm": 0.5154619812965393, | |
| "learning_rate": 3.6777530180941894e-06, | |
| "loss": 0.4449, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.376175548589342, | |
| "grad_norm": 0.48938247561454773, | |
| "learning_rate": 3.671837673141559e-06, | |
| "loss": 0.4314, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.3808777429467085, | |
| "grad_norm": 0.5059546828269958, | |
| "learning_rate": 3.6659139081961707e-06, | |
| "loss": 0.4475, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.385579937304075, | |
| "grad_norm": 0.6195068359375, | |
| "learning_rate": 3.6599817658219916e-06, | |
| "loss": 0.4492, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.3902821316614418, | |
| "grad_norm": 0.5169727206230164, | |
| "learning_rate": 3.6540412886431796e-06, | |
| "loss": 0.4612, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.394984326018809, | |
| "grad_norm": 1.0481178760528564, | |
| "learning_rate": 3.648092519343783e-06, | |
| "loss": 0.4446, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.3996865203761755, | |
| "grad_norm": 0.49299949407577515, | |
| "learning_rate": 3.642135500667431e-06, | |
| "loss": 0.4318, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.4043887147335425, | |
| "grad_norm": 0.508884072303772, | |
| "learning_rate": 3.6361702754170247e-06, | |
| "loss": 0.4476, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.409090909090909, | |
| "grad_norm": 0.4841139018535614, | |
| "learning_rate": 3.630196886454435e-06, | |
| "loss": 0.4294, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.413793103448276, | |
| "grad_norm": 0.49162107706069946, | |
| "learning_rate": 3.62421537670019e-06, | |
| "loss": 0.4449, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.4184952978056424, | |
| "grad_norm": 0.5381420254707336, | |
| "learning_rate": 3.618225789133167e-06, | |
| "loss": 0.4472, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.4231974921630095, | |
| "grad_norm": 0.4767305552959442, | |
| "learning_rate": 3.612228166790287e-06, | |
| "loss": 0.4464, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.427899686520376, | |
| "grad_norm": 0.5070378184318542, | |
| "learning_rate": 3.606222552766201e-06, | |
| "loss": 0.4555, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.4326018808777428, | |
| "grad_norm": 0.503158688545227, | |
| "learning_rate": 3.6002089902129844e-06, | |
| "loss": 0.4484, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.43730407523511, | |
| "grad_norm": 0.7062323093414307, | |
| "learning_rate": 3.5941875223398225e-06, | |
| "loss": 0.4394, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.4420062695924765, | |
| "grad_norm": 0.48649832606315613, | |
| "learning_rate": 3.588158192412707e-06, | |
| "loss": 0.4354, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.446708463949843, | |
| "grad_norm": 0.49017950892448425, | |
| "learning_rate": 3.582121043754116e-06, | |
| "loss": 0.4387, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.45141065830721, | |
| "grad_norm": 0.4829757809638977, | |
| "learning_rate": 3.5760761197427097e-06, | |
| "loss": 0.4377, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.456112852664577, | |
| "grad_norm": 0.482098251581192, | |
| "learning_rate": 3.570023463813017e-06, | |
| "loss": 0.4297, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.4608150470219434, | |
| "grad_norm": 0.678365170955658, | |
| "learning_rate": 3.5639631194551216e-06, | |
| "loss": 0.4537, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.4655172413793105, | |
| "grad_norm": 0.5783978700637817, | |
| "learning_rate": 3.557895130214352e-06, | |
| "loss": 0.4443, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.470219435736677, | |
| "grad_norm": 0.47321340441703796, | |
| "learning_rate": 3.5518195396909653e-06, | |
| "loss": 0.4633, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.4749216300940438, | |
| "grad_norm": 0.5123101472854614, | |
| "learning_rate": 3.5457363915398384e-06, | |
| "loss": 0.4529, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.479623824451411, | |
| "grad_norm": 0.4978135824203491, | |
| "learning_rate": 3.539645729470151e-06, | |
| "loss": 0.4389, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.4843260188087775, | |
| "grad_norm": 0.5567356944084167, | |
| "learning_rate": 3.5335475972450715e-06, | |
| "loss": 0.4381, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.489028213166144, | |
| "grad_norm": 1.5688557624816895, | |
| "learning_rate": 3.5274420386814458e-06, | |
| "loss": 0.4652, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.493730407523511, | |
| "grad_norm": 0.6167457103729248, | |
| "learning_rate": 3.521329097649478e-06, | |
| "loss": 0.446, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.498432601880878, | |
| "grad_norm": 0.5396067500114441, | |
| "learning_rate": 3.515208818072418e-06, | |
| "loss": 0.443, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.5031347962382444, | |
| "grad_norm": 0.5053077936172485, | |
| "learning_rate": 3.509081243926247e-06, | |
| "loss": 0.4313, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.507836990595611, | |
| "grad_norm": 0.655543863773346, | |
| "learning_rate": 3.5029464192393557e-06, | |
| "loss": 0.4622, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.512539184952978, | |
| "grad_norm": 0.6995306611061096, | |
| "learning_rate": 3.4968043880922363e-06, | |
| "loss": 0.4152, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.5172413793103448, | |
| "grad_norm": 0.5307276248931885, | |
| "learning_rate": 3.4906551946171603e-06, | |
| "loss": 0.424, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.521943573667712, | |
| "grad_norm": 0.5830240249633789, | |
| "learning_rate": 3.484498882997861e-06, | |
| "loss": 0.4226, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.5266457680250785, | |
| "grad_norm": 0.7151938080787659, | |
| "learning_rate": 3.478335497469219e-06, | |
| "loss": 0.4514, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.531347962382445, | |
| "grad_norm": 0.5270131230354309, | |
| "learning_rate": 3.472165082316943e-06, | |
| "loss": 0.4493, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.5360501567398117, | |
| "grad_norm": 0.48484402894973755, | |
| "learning_rate": 3.465987681877251e-06, | |
| "loss": 0.4431, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.540752351097179, | |
| "grad_norm": 0.5078386664390564, | |
| "learning_rate": 3.4598033405365527e-06, | |
| "loss": 0.4559, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.5454545454545454, | |
| "grad_norm": 0.5258911848068237, | |
| "learning_rate": 3.45361210273113e-06, | |
| "loss": 0.448, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.5501567398119125, | |
| "grad_norm": 0.5508100986480713, | |
| "learning_rate": 3.447414012946818e-06, | |
| "loss": 0.4357, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.554858934169279, | |
| "grad_norm": 0.5883537530899048, | |
| "learning_rate": 3.4412091157186853e-06, | |
| "loss": 0.4504, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.5595611285266457, | |
| "grad_norm": 0.5134791731834412, | |
| "learning_rate": 3.4349974556307146e-06, | |
| "loss": 0.4409, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.5642633228840124, | |
| "grad_norm": 0.6710099577903748, | |
| "learning_rate": 3.4287790773154807e-06, | |
| "loss": 0.4655, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.5689655172413794, | |
| "grad_norm": 0.5207563638687134, | |
| "learning_rate": 3.4225540254538297e-06, | |
| "loss": 0.4626, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.573667711598746, | |
| "grad_norm": 0.5239883065223694, | |
| "learning_rate": 3.416322344774562e-06, | |
| "loss": 0.4413, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.5783699059561127, | |
| "grad_norm": 0.5160831809043884, | |
| "learning_rate": 3.4100840800541055e-06, | |
| "loss": 0.4632, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.58307210031348, | |
| "grad_norm": 0.4692513346672058, | |
| "learning_rate": 3.4038392761161986e-06, | |
| "loss": 0.4535, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.5877742946708464, | |
| "grad_norm": 0.5935313701629639, | |
| "learning_rate": 3.3975879778315634e-06, | |
| "loss": 0.423, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.592476489028213, | |
| "grad_norm": 0.868563711643219, | |
| "learning_rate": 3.391330230117587e-06, | |
| "loss": 0.3958, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.5971786833855797, | |
| "grad_norm": 0.49354490637779236, | |
| "learning_rate": 3.385066077937997e-06, | |
| "loss": 0.4658, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.6018808777429467, | |
| "grad_norm": 0.5423957109451294, | |
| "learning_rate": 3.378795566302541e-06, | |
| "loss": 0.425, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.6065830721003134, | |
| "grad_norm": 0.5455307364463806, | |
| "learning_rate": 3.372518740266658e-06, | |
| "loss": 0.4448, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.6112852664576804, | |
| "grad_norm": 0.5334805846214294, | |
| "learning_rate": 3.36623564493116e-06, | |
| "loss": 0.4563, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.615987460815047, | |
| "grad_norm": 0.49819517135620117, | |
| "learning_rate": 3.3599463254419047e-06, | |
| "loss": 0.4602, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.6206896551724137, | |
| "grad_norm": 0.5382740497589111, | |
| "learning_rate": 3.3536508269894724e-06, | |
| "loss": 0.4681, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.6253918495297803, | |
| "grad_norm": 0.4976508319377899, | |
| "learning_rate": 3.347349194808842e-06, | |
| "loss": 0.4549, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.6300940438871474, | |
| "grad_norm": 0.505546510219574, | |
| "learning_rate": 3.3410414741790625e-06, | |
| "loss": 0.4288, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.634796238244514, | |
| "grad_norm": 0.50031578540802, | |
| "learning_rate": 3.3347277104229332e-06, | |
| "loss": 0.4439, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.639498432601881, | |
| "grad_norm": 0.535987377166748, | |
| "learning_rate": 3.3284079489066728e-06, | |
| "loss": 0.4375, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.6442006269592477, | |
| "grad_norm": 0.5403039455413818, | |
| "learning_rate": 3.3220822350395966e-06, | |
| "loss": 0.431, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.6489028213166144, | |
| "grad_norm": 0.49778786301612854, | |
| "learning_rate": 3.31575061427379e-06, | |
| "loss": 0.4312, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.653605015673981, | |
| "grad_norm": 0.5499002933502197, | |
| "learning_rate": 3.3094131321037783e-06, | |
| "loss": 0.4388, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.658307210031348, | |
| "grad_norm": 0.4913281202316284, | |
| "learning_rate": 3.303069834066206e-06, | |
| "loss": 0.4334, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.6630094043887147, | |
| "grad_norm": 0.513525664806366, | |
| "learning_rate": 3.2967207657395055e-06, | |
| "loss": 0.475, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.6677115987460818, | |
| "grad_norm": 0.5432775020599365, | |
| "learning_rate": 3.2903659727435692e-06, | |
| "loss": 0.4629, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.6724137931034484, | |
| "grad_norm": 0.47809165716171265, | |
| "learning_rate": 3.284005500739423e-06, | |
| "loss": 0.4577, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.677115987460815, | |
| "grad_norm": 0.5284624695777893, | |
| "learning_rate": 3.2776393954289e-06, | |
| "loss": 0.4294, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.6818181818181817, | |
| "grad_norm": 0.49356934428215027, | |
| "learning_rate": 3.271267702554307e-06, | |
| "loss": 0.4208, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.6865203761755487, | |
| "grad_norm": 0.5608382821083069, | |
| "learning_rate": 3.2648904678981032e-06, | |
| "loss": 0.4646, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.6912225705329154, | |
| "grad_norm": 0.5191068649291992, | |
| "learning_rate": 3.2585077372825636e-06, | |
| "loss": 0.4132, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.695924764890282, | |
| "grad_norm": 0.5745784640312195, | |
| "learning_rate": 3.2521195565694543e-06, | |
| "loss": 0.4447, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.700626959247649, | |
| "grad_norm": 0.5302786827087402, | |
| "learning_rate": 3.2457259716597023e-06, | |
| "loss": 0.4459, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.7053291536050157, | |
| "grad_norm": 0.47739923000335693, | |
| "learning_rate": 3.2393270284930658e-06, | |
| "loss": 0.4542, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.7100313479623823, | |
| "grad_norm": 0.5023962259292603, | |
| "learning_rate": 3.2329227730478026e-06, | |
| "loss": 0.4305, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.714733542319749, | |
| "grad_norm": 0.48917847871780396, | |
| "learning_rate": 3.2265132513403415e-06, | |
| "loss": 0.4189, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.719435736677116, | |
| "grad_norm": 0.4881870448589325, | |
| "learning_rate": 3.22009850942495e-06, | |
| "loss": 0.4451, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.7241379310344827, | |
| "grad_norm": 0.550719141960144, | |
| "learning_rate": 3.213678593393405e-06, | |
| "loss": 0.4606, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.7288401253918497, | |
| "grad_norm": 0.5335834622383118, | |
| "learning_rate": 3.207253549374662e-06, | |
| "loss": 0.4567, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.7335423197492164, | |
| "grad_norm": 0.5054845809936523, | |
| "learning_rate": 3.200823423534519e-06, | |
| "loss": 0.4433, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.738244514106583, | |
| "grad_norm": 0.52922523021698, | |
| "learning_rate": 3.194388262075293e-06, | |
| "loss": 0.4485, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.7429467084639496, | |
| "grad_norm": 0.7209565043449402, | |
| "learning_rate": 3.1879481112354804e-06, | |
| "loss": 0.45, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.7476489028213167, | |
| "grad_norm": 0.5428304076194763, | |
| "learning_rate": 3.181503017289428e-06, | |
| "loss": 0.4109, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.7523510971786833, | |
| "grad_norm": 0.4802536964416504, | |
| "learning_rate": 3.175053026547002e-06, | |
| "loss": 0.4153, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.7570532915360504, | |
| "grad_norm": 0.4878472685813904, | |
| "learning_rate": 3.16859818535325e-06, | |
| "loss": 0.4581, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.761755485893417, | |
| "grad_norm": 0.48334580659866333, | |
| "learning_rate": 3.1621385400880756e-06, | |
| "loss": 0.4521, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.7664576802507836, | |
| "grad_norm": 0.5261298418045044, | |
| "learning_rate": 3.1556741371658984e-06, | |
| "loss": 0.4554, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.7711598746081503, | |
| "grad_norm": 0.5182640552520752, | |
| "learning_rate": 3.1492050230353238e-06, | |
| "loss": 0.447, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.7758620689655173, | |
| "grad_norm": 0.545038640499115, | |
| "learning_rate": 3.142731244178809e-06, | |
| "loss": 0.4206, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.780564263322884, | |
| "grad_norm": 0.5139180421829224, | |
| "learning_rate": 3.1362528471123277e-06, | |
| "loss": 0.4037, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.785266457680251, | |
| "grad_norm": 0.54038006067276, | |
| "learning_rate": 3.129769878385039e-06, | |
| "loss": 0.41, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.7899686520376177, | |
| "grad_norm": 0.4779907166957855, | |
| "learning_rate": 3.1232823845789473e-06, | |
| "loss": 0.451, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.7946708463949843, | |
| "grad_norm": 0.8652167320251465, | |
| "learning_rate": 3.1167904123085736e-06, | |
| "loss": 0.4557, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.799373040752351, | |
| "grad_norm": 0.5036542415618896, | |
| "learning_rate": 3.110294008220617e-06, | |
| "loss": 0.4442, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.804075235109718, | |
| "grad_norm": 0.5064799785614014, | |
| "learning_rate": 3.1037932189936205e-06, | |
| "loss": 0.4408, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.8087774294670846, | |
| "grad_norm": 0.5548039674758911, | |
| "learning_rate": 3.097288091337635e-06, | |
| "loss": 0.4359, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.8134796238244513, | |
| "grad_norm": 0.5237991809844971, | |
| "learning_rate": 3.0907786719938876e-06, | |
| "loss": 0.4258, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.8181818181818183, | |
| "grad_norm": 0.47527915239334106, | |
| "learning_rate": 3.084265007734436e-06, | |
| "loss": 0.4358, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.822884012539185, | |
| "grad_norm": 3.827120304107666, | |
| "learning_rate": 3.0777471453618457e-06, | |
| "loss": 0.4607, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.8275862068965516, | |
| "grad_norm": 0.44980505108833313, | |
| "learning_rate": 3.0712251317088426e-06, | |
| "loss": 0.4333, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.8322884012539182, | |
| "grad_norm": 0.5296145081520081, | |
| "learning_rate": 3.064699013637983e-06, | |
| "loss": 0.4517, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.8369905956112853, | |
| "grad_norm": 0.5016323924064636, | |
| "learning_rate": 3.0581688380413115e-06, | |
| "loss": 0.4365, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.841692789968652, | |
| "grad_norm": 0.5290690064430237, | |
| "learning_rate": 3.0516346518400315e-06, | |
| "loss": 0.4521, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.846394984326019, | |
| "grad_norm": 0.5085683465003967, | |
| "learning_rate": 3.0450965019841593e-06, | |
| "loss": 0.4518, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.8510971786833856, | |
| "grad_norm": 0.5380619764328003, | |
| "learning_rate": 3.0385544354521957e-06, | |
| "loss": 0.416, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.8557993730407523, | |
| "grad_norm": 0.48775514960289, | |
| "learning_rate": 3.0320084992507814e-06, | |
| "loss": 0.4421, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.860501567398119, | |
| "grad_norm": 0.5242875814437866, | |
| "learning_rate": 3.0254587404143604e-06, | |
| "loss": 0.479, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.865203761755486, | |
| "grad_norm": 0.4760294556617737, | |
| "learning_rate": 3.0189052060048464e-06, | |
| "loss": 0.4411, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.8699059561128526, | |
| "grad_norm": 0.5514405965805054, | |
| "learning_rate": 3.01234794311128e-06, | |
| "loss": 0.4384, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.8746081504702197, | |
| "grad_norm": 0.5242321491241455, | |
| "learning_rate": 3.0057869988494925e-06, | |
| "loss": 0.4629, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.8793103448275863, | |
| "grad_norm": 0.5526494383811951, | |
| "learning_rate": 2.999222420361767e-06, | |
| "loss": 0.4525, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.884012539184953, | |
| "grad_norm": 0.4866560101509094, | |
| "learning_rate": 2.9926542548165e-06, | |
| "loss": 0.4652, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.8887147335423196, | |
| "grad_norm": 0.5438888669013977, | |
| "learning_rate": 2.9860825494078605e-06, | |
| "loss": 0.4369, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.8934169278996866, | |
| "grad_norm": 0.5022105574607849, | |
| "learning_rate": 2.979507351355454e-06, | |
| "loss": 0.4529, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.8981191222570533, | |
| "grad_norm": 0.5202184915542603, | |
| "learning_rate": 2.972928707903981e-06, | |
| "loss": 0.4416, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.9028213166144203, | |
| "grad_norm": 0.5267678499221802, | |
| "learning_rate": 2.966346666322898e-06, | |
| "loss": 0.4392, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.907523510971787, | |
| "grad_norm": 0.6735110282897949, | |
| "learning_rate": 2.9597612739060775e-06, | |
| "loss": 0.4184, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.9122257053291536, | |
| "grad_norm": 1.0363622903823853, | |
| "learning_rate": 2.9531725779714713e-06, | |
| "loss": 0.4506, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.91692789968652, | |
| "grad_norm": 0.5508633852005005, | |
| "learning_rate": 2.9465806258607653e-06, | |
| "loss": 0.4562, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.9216300940438873, | |
| "grad_norm": 0.5185503363609314, | |
| "learning_rate": 2.939985464939043e-06, | |
| "loss": 0.459, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.926332288401254, | |
| "grad_norm": 0.557746410369873, | |
| "learning_rate": 2.9333871425944434e-06, | |
| "loss": 0.405, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.9310344827586206, | |
| "grad_norm": 0.7767590284347534, | |
| "learning_rate": 2.926785706237822e-06, | |
| "loss": 0.4344, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.9357366771159876, | |
| "grad_norm": 0.48968929052352905, | |
| "learning_rate": 2.920181203302409e-06, | |
| "loss": 0.4261, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.9404388714733543, | |
| "grad_norm": 0.521247148513794, | |
| "learning_rate": 2.91357368124347e-06, | |
| "loss": 0.4236, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.945141065830721, | |
| "grad_norm": 1.0396796464920044, | |
| "learning_rate": 2.906963187537962e-06, | |
| "loss": 0.4344, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.9498432601880875, | |
| "grad_norm": 0.6208511590957642, | |
| "learning_rate": 2.9003497696841955e-06, | |
| "loss": 0.4145, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.9545454545454546, | |
| "grad_norm": 0.5897473096847534, | |
| "learning_rate": 2.8937334752014913e-06, | |
| "loss": 0.466, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.959247648902821, | |
| "grad_norm": 0.5206231474876404, | |
| "learning_rate": 2.887114351629839e-06, | |
| "loss": 0.4431, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.9639498432601883, | |
| "grad_norm": 0.7496956586837769, | |
| "learning_rate": 2.8804924465295575e-06, | |
| "loss": 0.4658, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.968652037617555, | |
| "grad_norm": 0.5203924179077148, | |
| "learning_rate": 2.873867807480951e-06, | |
| "loss": 0.4734, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.9733542319749215, | |
| "grad_norm": 0.5175911784172058, | |
| "learning_rate": 2.8672404820839676e-06, | |
| "loss": 0.44, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.978056426332288, | |
| "grad_norm": 0.5651830434799194, | |
| "learning_rate": 2.8606105179578584e-06, | |
| "loss": 0.4466, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.9827586206896552, | |
| "grad_norm": 0.5229518413543701, | |
| "learning_rate": 2.8539779627408332e-06, | |
| "loss": 0.4259, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.987460815047022, | |
| "grad_norm": 0.5007745027542114, | |
| "learning_rate": 2.847342864089721e-06, | |
| "loss": 0.4436, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.992163009404389, | |
| "grad_norm": 0.5250042080879211, | |
| "learning_rate": 2.8407052696796255e-06, | |
| "loss": 0.4308, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.9968652037617556, | |
| "grad_norm": 0.5170105695724487, | |
| "learning_rate": 2.834065227203584e-06, | |
| "loss": 0.449, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 3.0047021943573666, | |
| "grad_norm": 0.49444133043289185, | |
| "learning_rate": 2.8274227843722213e-06, | |
| "loss": 0.8674, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 3.0094043887147337, | |
| "grad_norm": 0.5178474187850952, | |
| "learning_rate": 2.820777988913412e-06, | |
| "loss": 0.4192, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 3.0141065830721003, | |
| "grad_norm": 0.5056724548339844, | |
| "learning_rate": 2.8141308885719337e-06, | |
| "loss": 0.4166, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 3.018808777429467, | |
| "grad_norm": 0.48542484641075134, | |
| "learning_rate": 2.8074815311091265e-06, | |
| "loss": 0.3908, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 3.023510971786834, | |
| "grad_norm": 0.5061275362968445, | |
| "learning_rate": 2.8008299643025477e-06, | |
| "loss": 0.4326, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 3.0282131661442007, | |
| "grad_norm": 0.49305713176727295, | |
| "learning_rate": 2.7941762359456294e-06, | |
| "loss": 0.4259, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 3.0329153605015673, | |
| "grad_norm": 0.5171103477478027, | |
| "learning_rate": 2.787520393847334e-06, | |
| "loss": 0.4167, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 3.0376175548589344, | |
| "grad_norm": 0.5052148103713989, | |
| "learning_rate": 2.780862485831814e-06, | |
| "loss": 0.4282, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 3.042319749216301, | |
| "grad_norm": 0.5092865824699402, | |
| "learning_rate": 2.7742025597380644e-06, | |
| "loss": 0.4306, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 3.0470219435736676, | |
| "grad_norm": 0.5179376602172852, | |
| "learning_rate": 2.7675406634195824e-06, | |
| "loss": 0.423, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 3.0517241379310347, | |
| "grad_norm": 0.6232285499572754, | |
| "learning_rate": 2.7608768447440193e-06, | |
| "loss": 0.408, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 3.0564263322884013, | |
| "grad_norm": 0.5209566950798035, | |
| "learning_rate": 2.754211151592841e-06, | |
| "loss": 0.4124, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 3.061128526645768, | |
| "grad_norm": 0.5083103179931641, | |
| "learning_rate": 2.7475436318609827e-06, | |
| "loss": 0.4326, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 3.0658307210031346, | |
| "grad_norm": 0.5016148090362549, | |
| "learning_rate": 2.7408743334565006e-06, | |
| "loss": 0.4077, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 3.0705329153605017, | |
| "grad_norm": 0.5021759271621704, | |
| "learning_rate": 2.734203304300235e-06, | |
| "loss": 0.4003, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 3.0752351097178683, | |
| "grad_norm": 0.5263209939002991, | |
| "learning_rate": 2.7275305923254607e-06, | |
| "loss": 0.4127, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 3.079937304075235, | |
| "grad_norm": 0.5125823020935059, | |
| "learning_rate": 2.720856245477544e-06, | |
| "loss": 0.4028, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 3.084639498432602, | |
| "grad_norm": 0.5266130566596985, | |
| "learning_rate": 2.7141803117135978e-06, | |
| "loss": 0.397, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 3.0893416927899686, | |
| "grad_norm": 0.49692511558532715, | |
| "learning_rate": 2.7075028390021385e-06, | |
| "loss": 0.4, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 3.0940438871473352, | |
| "grad_norm": 0.5223091840744019, | |
| "learning_rate": 2.7008238753227385e-06, | |
| "loss": 0.4067, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 3.0987460815047023, | |
| "grad_norm": 0.4923226237297058, | |
| "learning_rate": 2.694143468665685e-06, | |
| "loss": 0.4271, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 3.103448275862069, | |
| "grad_norm": 0.5270125269889832, | |
| "learning_rate": 2.6874616670316338e-06, | |
| "loss": 0.3946, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 3.1081504702194356, | |
| "grad_norm": 0.5231687426567078, | |
| "learning_rate": 2.6807785184312618e-06, | |
| "loss": 0.414, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 3.1128526645768027, | |
| "grad_norm": 0.506938099861145, | |
| "learning_rate": 2.674094070884926e-06, | |
| "loss": 0.4138, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 3.1175548589341693, | |
| "grad_norm": 0.5096138119697571, | |
| "learning_rate": 2.6674083724223166e-06, | |
| "loss": 0.433, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 3.122257053291536, | |
| "grad_norm": 0.5991004705429077, | |
| "learning_rate": 2.6607214710821112e-06, | |
| "loss": 0.4049, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 3.126959247648903, | |
| "grad_norm": 0.5364269018173218, | |
| "learning_rate": 2.6540334149116304e-06, | |
| "loss": 0.4182, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 3.1316614420062696, | |
| "grad_norm": 0.5317479968070984, | |
| "learning_rate": 2.647344251966493e-06, | |
| "loss": 0.415, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 3.1363636363636362, | |
| "grad_norm": 0.5510737895965576, | |
| "learning_rate": 2.6406540303102714e-06, | |
| "loss": 0.415, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 3.1410658307210033, | |
| "grad_norm": 0.5536981225013733, | |
| "learning_rate": 2.6339627980141425e-06, | |
| "loss": 0.4169, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 3.14576802507837, | |
| "grad_norm": 0.5343831777572632, | |
| "learning_rate": 2.6272706031565482e-06, | |
| "loss": 0.4018, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 3.1504702194357366, | |
| "grad_norm": 0.500717282295227, | |
| "learning_rate": 2.6205774938228433e-06, | |
| "loss": 0.3978, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 3.1551724137931036, | |
| "grad_norm": 0.6430208683013916, | |
| "learning_rate": 2.6138835181049556e-06, | |
| "loss": 0.4234, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 3.1598746081504703, | |
| "grad_norm": 0.5403954982757568, | |
| "learning_rate": 2.6071887241010374e-06, | |
| "loss": 0.4051, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 3.164576802507837, | |
| "grad_norm": 0.6121819615364075, | |
| "learning_rate": 2.6004931599151223e-06, | |
| "loss": 0.3929, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 3.169278996865204, | |
| "grad_norm": 0.519755482673645, | |
| "learning_rate": 2.593796873656775e-06, | |
| "loss": 0.4353, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 3.1739811912225706, | |
| "grad_norm": 0.4970746636390686, | |
| "learning_rate": 2.587099913440749e-06, | |
| "loss": 0.4155, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 3.1786833855799372, | |
| "grad_norm": 0.512956440448761, | |
| "learning_rate": 2.580402327386643e-06, | |
| "loss": 0.403, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 3.183385579937304, | |
| "grad_norm": 0.5539934635162354, | |
| "learning_rate": 2.5737041636185496e-06, | |
| "loss": 0.4095, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 3.188087774294671, | |
| "grad_norm": 0.7322019338607788, | |
| "learning_rate": 2.5670054702647146e-06, | |
| "loss": 0.3941, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 3.1927899686520376, | |
| "grad_norm": 1.063719391822815, | |
| "learning_rate": 2.5603062954571872e-06, | |
| "loss": 0.4184, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 3.197492163009404, | |
| "grad_norm": 0.5929884910583496, | |
| "learning_rate": 2.553606687331477e-06, | |
| "loss": 0.4378, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 3.2021943573667713, | |
| "grad_norm": 0.6147572994232178, | |
| "learning_rate": 2.5469066940262073e-06, | |
| "loss": 0.4078, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 3.206896551724138, | |
| "grad_norm": 0.5559990406036377, | |
| "learning_rate": 2.540206363682768e-06, | |
| "loss": 0.4004, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 3.2115987460815045, | |
| "grad_norm": 0.49420028924942017, | |
| "learning_rate": 2.533505744444972e-06, | |
| "loss": 0.4182, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 3.2163009404388716, | |
| "grad_norm": 0.4829191565513611, | |
| "learning_rate": 2.526804884458707e-06, | |
| "loss": 0.4093, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 3.2210031347962382, | |
| "grad_norm": 0.5211793780326843, | |
| "learning_rate": 2.520103831871591e-06, | |
| "loss": 0.4157, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 3.225705329153605, | |
| "grad_norm": 0.4876849055290222, | |
| "learning_rate": 2.513402634832627e-06, | |
| "loss": 0.3961, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 3.230407523510972, | |
| "grad_norm": 0.5313226580619812, | |
| "learning_rate": 2.5067013414918523e-06, | |
| "loss": 0.4005, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 3.2351097178683386, | |
| "grad_norm": 0.5361777544021606, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.414, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 3.239811912225705, | |
| "grad_norm": 0.5172957181930542, | |
| "learning_rate": 2.493298658508149e-06, | |
| "loss": 0.3953, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 3.2445141065830723, | |
| "grad_norm": 0.5602390170097351, | |
| "learning_rate": 2.4865973651673743e-06, | |
| "loss": 0.4137, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 3.249216300940439, | |
| "grad_norm": 0.5494715571403503, | |
| "learning_rate": 2.4798961681284096e-06, | |
| "loss": 0.4385, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 3.2539184952978055, | |
| "grad_norm": 0.521594762802124, | |
| "learning_rate": 2.473195115541293e-06, | |
| "loss": 0.3943, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 3.2586206896551726, | |
| "grad_norm": 0.5154503583908081, | |
| "learning_rate": 2.466494255555029e-06, | |
| "loss": 0.441, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 3.2633228840125392, | |
| "grad_norm": 0.6013544797897339, | |
| "learning_rate": 2.459793636317233e-06, | |
| "loss": 0.405, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 3.268025078369906, | |
| "grad_norm": 0.531454861164093, | |
| "learning_rate": 2.4530933059737936e-06, | |
| "loss": 0.4425, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 3.2727272727272725, | |
| "grad_norm": 0.5347344875335693, | |
| "learning_rate": 2.4463933126685236e-06, | |
| "loss": 0.4051, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 3.2774294670846396, | |
| "grad_norm": 0.5035227537155151, | |
| "learning_rate": 2.439693704542814e-06, | |
| "loss": 0.4021, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 3.282131661442006, | |
| "grad_norm": 0.4999369978904724, | |
| "learning_rate": 2.432994529735286e-06, | |
| "loss": 0.4097, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 3.2868338557993733, | |
| "grad_norm": 0.5022649765014648, | |
| "learning_rate": 2.4262958363814512e-06, | |
| "loss": 0.4189, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 3.29153605015674, | |
| "grad_norm": 0.5714133381843567, | |
| "learning_rate": 2.4195976726133574e-06, | |
| "loss": 0.4067, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 3.2962382445141065, | |
| "grad_norm": 0.5297165513038635, | |
| "learning_rate": 2.4129000865592517e-06, | |
| "loss": 0.4053, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 3.300940438871473, | |
| "grad_norm": 0.5833218693733215, | |
| "learning_rate": 2.4062031263432267e-06, | |
| "loss": 0.4269, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 3.30564263322884, | |
| "grad_norm": 0.48326241970062256, | |
| "learning_rate": 2.3995068400848785e-06, | |
| "loss": 0.404, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 3.310344827586207, | |
| "grad_norm": 0.5570845603942871, | |
| "learning_rate": 2.392811275898963e-06, | |
| "loss": 0.419, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 3.3150470219435735, | |
| "grad_norm": 0.5318753123283386, | |
| "learning_rate": 2.3861164818950448e-06, | |
| "loss": 0.387, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 3.3197492163009406, | |
| "grad_norm": 0.5461038947105408, | |
| "learning_rate": 2.379422506177157e-06, | |
| "loss": 0.4121, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 3.324451410658307, | |
| "grad_norm": 0.5123147368431091, | |
| "learning_rate": 2.372729396843453e-06, | |
| "loss": 0.4138, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 3.329153605015674, | |
| "grad_norm": 0.5471891760826111, | |
| "learning_rate": 2.366037201985858e-06, | |
| "loss": 0.4185, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 3.333855799373041, | |
| "grad_norm": 0.550929605960846, | |
| "learning_rate": 2.3593459696897294e-06, | |
| "loss": 0.3945, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 3.3385579937304075, | |
| "grad_norm": 0.5357216000556946, | |
| "learning_rate": 2.352655748033508e-06, | |
| "loss": 0.4136, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 3.343260188087774, | |
| "grad_norm": 0.5054169297218323, | |
| "learning_rate": 2.3459665850883704e-06, | |
| "loss": 0.4346, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 3.347962382445141, | |
| "grad_norm": 0.5135513544082642, | |
| "learning_rate": 2.33927852891789e-06, | |
| "loss": 0.4184, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 3.352664576802508, | |
| "grad_norm": 0.5367952585220337, | |
| "learning_rate": 2.3325916275776834e-06, | |
| "loss": 0.4398, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 3.3573667711598745, | |
| "grad_norm": 0.5645503997802734, | |
| "learning_rate": 2.3259059291150744e-06, | |
| "loss": 0.401, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 3.3620689655172415, | |
| "grad_norm": 0.5017855763435364, | |
| "learning_rate": 2.319221481568739e-06, | |
| "loss": 0.4206, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 3.366771159874608, | |
| "grad_norm": 0.5428129434585571, | |
| "learning_rate": 2.3125383329683666e-06, | |
| "loss": 0.4223, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 3.371473354231975, | |
| "grad_norm": 0.5728990435600281, | |
| "learning_rate": 2.3058565313343152e-06, | |
| "loss": 0.3896, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 3.376175548589342, | |
| "grad_norm": 0.5580390095710754, | |
| "learning_rate": 2.2991761246772623e-06, | |
| "loss": 0.405, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 3.3808777429467085, | |
| "grad_norm": 0.5162783861160278, | |
| "learning_rate": 2.2924971609978623e-06, | |
| "loss": 0.3961, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 3.385579937304075, | |
| "grad_norm": 0.5139627456665039, | |
| "learning_rate": 2.285819688286403e-06, | |
| "loss": 0.3877, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 3.3902821316614418, | |
| "grad_norm": 0.5245716571807861, | |
| "learning_rate": 2.2791437545224563e-06, | |
| "loss": 0.4341, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 3.394984326018809, | |
| "grad_norm": 0.5056993961334229, | |
| "learning_rate": 2.2724694076745397e-06, | |
| "loss": 0.4116, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 3.3996865203761755, | |
| "grad_norm": 0.5380951762199402, | |
| "learning_rate": 2.265796695699766e-06, | |
| "loss": 0.4154, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 3.4043887147335425, | |
| "grad_norm": 0.5752882361412048, | |
| "learning_rate": 2.2591256665434998e-06, | |
| "loss": 0.4177, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 3.409090909090909, | |
| "grad_norm": 0.5096545219421387, | |
| "learning_rate": 2.252456368139019e-06, | |
| "loss": 0.4139, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 3.413793103448276, | |
| "grad_norm": 0.5271265506744385, | |
| "learning_rate": 2.245788848407159e-06, | |
| "loss": 0.4223, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 3.4184952978056424, | |
| "grad_norm": 0.5337948203086853, | |
| "learning_rate": 2.2391231552559815e-06, | |
| "loss": 0.4208, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 3.4231974921630095, | |
| "grad_norm": 0.5831982493400574, | |
| "learning_rate": 2.2324593365804184e-06, | |
| "loss": 0.3903, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 3.427899686520376, | |
| "grad_norm": 0.5560181140899658, | |
| "learning_rate": 2.225797440261936e-06, | |
| "loss": 0.4321, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 3.4326018808777428, | |
| "grad_norm": 0.5280754566192627, | |
| "learning_rate": 2.219137514168187e-06, | |
| "loss": 0.3948, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 3.43730407523511, | |
| "grad_norm": 0.5112728476524353, | |
| "learning_rate": 2.212479606152667e-06, | |
| "loss": 0.426, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 3.4420062695924765, | |
| "grad_norm": 0.5265609622001648, | |
| "learning_rate": 2.205823764054372e-06, | |
| "loss": 0.4094, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 3.446708463949843, | |
| "grad_norm": 0.48872876167297363, | |
| "learning_rate": 2.199170035697453e-06, | |
| "loss": 0.4073, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 3.45141065830721, | |
| "grad_norm": 0.521526038646698, | |
| "learning_rate": 2.1925184688908735e-06, | |
| "loss": 0.4135, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 3.456112852664577, | |
| "grad_norm": 0.5472409725189209, | |
| "learning_rate": 2.185869111428067e-06, | |
| "loss": 0.4369, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 3.4608150470219434, | |
| "grad_norm": 0.5710989832878113, | |
| "learning_rate": 2.1792220110865885e-06, | |
| "loss": 0.4224, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 3.4655172413793105, | |
| "grad_norm": 0.6842237710952759, | |
| "learning_rate": 2.1725772156277795e-06, | |
| "loss": 0.4032, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 3.470219435736677, | |
| "grad_norm": 0.5267806053161621, | |
| "learning_rate": 2.165934772796417e-06, | |
| "loss": 0.412, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 3.4749216300940438, | |
| "grad_norm": 0.6110681295394897, | |
| "learning_rate": 2.159294730320374e-06, | |
| "loss": 0.4264, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 3.479623824451411, | |
| "grad_norm": 0.567146897315979, | |
| "learning_rate": 2.15265713591028e-06, | |
| "loss": 0.4145, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 3.4843260188087775, | |
| "grad_norm": 0.5455949306488037, | |
| "learning_rate": 2.1460220372591676e-06, | |
| "loss": 0.4338, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 3.489028213166144, | |
| "grad_norm": 0.5989809036254883, | |
| "learning_rate": 2.139389482042142e-06, | |
| "loss": 0.3985, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 3.493730407523511, | |
| "grad_norm": 0.5719276666641235, | |
| "learning_rate": 2.1327595179160332e-06, | |
| "loss": 0.4157, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 3.498432601880878, | |
| "grad_norm": 0.5467635989189148, | |
| "learning_rate": 2.1261321925190492e-06, | |
| "loss": 0.4055, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 3.5031347962382444, | |
| "grad_norm": 0.5934366583824158, | |
| "learning_rate": 2.1195075534704433e-06, | |
| "loss": 0.3826, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 3.507836990595611, | |
| "grad_norm": 0.5331693291664124, | |
| "learning_rate": 2.1128856483701625e-06, | |
| "loss": 0.4114, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 3.512539184952978, | |
| "grad_norm": 0.5111117959022522, | |
| "learning_rate": 2.10626652479851e-06, | |
| "loss": 0.3927, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 3.5172413793103448, | |
| "grad_norm": 0.5242960453033447, | |
| "learning_rate": 2.0996502303158057e-06, | |
| "loss": 0.4079, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 3.521943573667712, | |
| "grad_norm": 1.4825061559677124, | |
| "learning_rate": 2.0930368124620385e-06, | |
| "loss": 0.4089, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 3.5266457680250785, | |
| "grad_norm": 0.5873427391052246, | |
| "learning_rate": 2.086426318756531e-06, | |
| "loss": 0.4258, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 3.531347962382445, | |
| "grad_norm": 0.5490135550498962, | |
| "learning_rate": 2.0798187966975917e-06, | |
| "loss": 0.4313, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 3.5360501567398117, | |
| "grad_norm": 0.5256242752075195, | |
| "learning_rate": 2.073214293762179e-06, | |
| "loss": 0.4205, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 3.540752351097179, | |
| "grad_norm": 0.5382546782493591, | |
| "learning_rate": 2.0666128574055575e-06, | |
| "loss": 0.4182, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 3.5454545454545454, | |
| "grad_norm": 1.0182400941848755, | |
| "learning_rate": 2.0600145350609585e-06, | |
| "loss": 0.4176, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 3.5501567398119125, | |
| "grad_norm": 0.5244695544242859, | |
| "learning_rate": 2.053419374139235e-06, | |
| "loss": 0.4162, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 3.554858934169279, | |
| "grad_norm": 0.509304940700531, | |
| "learning_rate": 2.0468274220285295e-06, | |
| "loss": 0.4149, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 3.5595611285266457, | |
| "grad_norm": 0.5326141119003296, | |
| "learning_rate": 2.0402387260939224e-06, | |
| "loss": 0.4105, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 3.5642633228840124, | |
| "grad_norm": 0.5248592495918274, | |
| "learning_rate": 2.033653333677103e-06, | |
| "loss": 0.4214, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 3.5689655172413794, | |
| "grad_norm": 0.5611410737037659, | |
| "learning_rate": 2.02707129209602e-06, | |
| "loss": 0.4034, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 3.573667711598746, | |
| "grad_norm": 0.49410030245780945, | |
| "learning_rate": 2.0204926486445463e-06, | |
| "loss": 0.4229, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 3.5783699059561127, | |
| "grad_norm": 0.49964067339897156, | |
| "learning_rate": 2.0139174505921403e-06, | |
| "loss": 0.4433, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 3.58307210031348, | |
| "grad_norm": 0.5891857147216797, | |
| "learning_rate": 2.0073457451835e-06, | |
| "loss": 0.3995, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 3.5877742946708464, | |
| "grad_norm": 0.5028961896896362, | |
| "learning_rate": 2.0007775796382335e-06, | |
| "loss": 0.4242, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 3.592476489028213, | |
| "grad_norm": 0.5589643716812134, | |
| "learning_rate": 1.994213001150508e-06, | |
| "loss": 0.3929, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 3.5971786833855797, | |
| "grad_norm": 0.6315639615058899, | |
| "learning_rate": 1.9876520568887207e-06, | |
| "loss": 0.4196, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 3.6018808777429467, | |
| "grad_norm": 0.538480818271637, | |
| "learning_rate": 1.981094793995155e-06, | |
| "loss": 0.403, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 3.6065830721003134, | |
| "grad_norm": 0.5254942774772644, | |
| "learning_rate": 1.974541259585641e-06, | |
| "loss": 0.4313, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 3.6112852664576804, | |
| "grad_norm": 16.726457595825195, | |
| "learning_rate": 1.9679915007492194e-06, | |
| "loss": 0.4131, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 3.615987460815047, | |
| "grad_norm": 0.572813093662262, | |
| "learning_rate": 1.9614455645478047e-06, | |
| "loss": 0.4022, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 3.6206896551724137, | |
| "grad_norm": 0.5614109039306641, | |
| "learning_rate": 1.9549034980158403e-06, | |
| "loss": 0.4149, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 3.6253918495297803, | |
| "grad_norm": 0.5074586272239685, | |
| "learning_rate": 1.9483653481599697e-06, | |
| "loss": 0.42, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 3.6300940438871474, | |
| "grad_norm": 0.5631129145622253, | |
| "learning_rate": 1.9418311619586897e-06, | |
| "loss": 0.4257, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 3.634796238244514, | |
| "grad_norm": 0.5475037097930908, | |
| "learning_rate": 1.935300986362018e-06, | |
| "loss": 0.3986, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 3.639498432601881, | |
| "grad_norm": 0.5358164310455322, | |
| "learning_rate": 1.9287748682911582e-06, | |
| "loss": 0.4308, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 3.6442006269592477, | |
| "grad_norm": 0.612288773059845, | |
| "learning_rate": 1.9222528546381543e-06, | |
| "loss": 0.4216, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 3.6489028213166144, | |
| "grad_norm": 0.569261908531189, | |
| "learning_rate": 1.9157349922655648e-06, | |
| "loss": 0.3987, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 3.653605015673981, | |
| "grad_norm": 0.5050970911979675, | |
| "learning_rate": 1.909221328006114e-06, | |
| "loss": 0.4084, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 3.658307210031348, | |
| "grad_norm": 0.5210846066474915, | |
| "learning_rate": 1.9027119086623647e-06, | |
| "loss": 0.4222, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 3.6630094043887147, | |
| "grad_norm": 0.5151328444480896, | |
| "learning_rate": 1.8962067810063806e-06, | |
| "loss": 0.4373, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 3.6677115987460818, | |
| "grad_norm": 0.5783085823059082, | |
| "learning_rate": 1.8897059917793844e-06, | |
| "loss": 0.3932, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 3.6724137931034484, | |
| "grad_norm": 0.5122448205947876, | |
| "learning_rate": 1.8832095876914268e-06, | |
| "loss": 0.4111, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 3.677115987460815, | |
| "grad_norm": 0.5330267548561096, | |
| "learning_rate": 1.8767176154210537e-06, | |
| "loss": 0.4178, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 3.6818181818181817, | |
| "grad_norm": 0.543250560760498, | |
| "learning_rate": 1.8702301216149616e-06, | |
| "loss": 0.4066, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 3.6865203761755487, | |
| "grad_norm": 0.5525115728378296, | |
| "learning_rate": 1.8637471528876727e-06, | |
| "loss": 0.4079, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 3.6912225705329154, | |
| "grad_norm": 0.5233202576637268, | |
| "learning_rate": 1.8572687558211923e-06, | |
| "loss": 0.4194, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 3.695924764890282, | |
| "grad_norm": 0.6125171780586243, | |
| "learning_rate": 1.850794976964677e-06, | |
| "loss": 0.3814, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 3.700626959247649, | |
| "grad_norm": 0.5407599210739136, | |
| "learning_rate": 1.8443258628341026e-06, | |
| "loss": 0.4144, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 3.7053291536050157, | |
| "grad_norm": 0.5853657126426697, | |
| "learning_rate": 1.837861459911925e-06, | |
| "loss": 0.4249, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 3.7100313479623823, | |
| "grad_norm": 0.7031234502792358, | |
| "learning_rate": 1.8314018146467505e-06, | |
| "loss": 0.4052, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 3.714733542319749, | |
| "grad_norm": 1.9762593507766724, | |
| "learning_rate": 1.8249469734529995e-06, | |
| "loss": 0.3881, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 3.719435736677116, | |
| "grad_norm": 0.5328689217567444, | |
| "learning_rate": 1.818496982710572e-06, | |
| "loss": 0.4246, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 3.7241379310344827, | |
| "grad_norm": 0.5613765120506287, | |
| "learning_rate": 1.81205188876452e-06, | |
| "loss": 0.424, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 3.7288401253918497, | |
| "grad_norm": 0.6554239392280579, | |
| "learning_rate": 1.8056117379247078e-06, | |
| "loss": 0.4273, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 3.7335423197492164, | |
| "grad_norm": 1.2649489641189575, | |
| "learning_rate": 1.7991765764654813e-06, | |
| "loss": 0.4147, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 3.738244514106583, | |
| "grad_norm": 0.5478984713554382, | |
| "learning_rate": 1.7927464506253394e-06, | |
| "loss": 0.4252, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 3.7429467084639496, | |
| "grad_norm": 0.6170595288276672, | |
| "learning_rate": 1.7863214066065951e-06, | |
| "loss": 0.3919, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 3.7476489028213167, | |
| "grad_norm": 0.5435987710952759, | |
| "learning_rate": 1.779901490575051e-06, | |
| "loss": 0.4191, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 3.7523510971786833, | |
| "grad_norm": 0.49888044595718384, | |
| "learning_rate": 1.7734867486596596e-06, | |
| "loss": 0.4214, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 3.7570532915360504, | |
| "grad_norm": 0.6968030333518982, | |
| "learning_rate": 1.767077226952198e-06, | |
| "loss": 0.4273, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 3.761755485893417, | |
| "grad_norm": 0.5169183611869812, | |
| "learning_rate": 1.7606729715069349e-06, | |
| "loss": 0.4148, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 3.7664576802507836, | |
| "grad_norm": 0.5539206862449646, | |
| "learning_rate": 1.7542740283402981e-06, | |
| "loss": 0.4184, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 3.7711598746081503, | |
| "grad_norm": 0.6863934397697449, | |
| "learning_rate": 1.7478804434305466e-06, | |
| "loss": 0.4005, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 3.7758620689655173, | |
| "grad_norm": 0.6211932897567749, | |
| "learning_rate": 1.741492262717438e-06, | |
| "loss": 0.4177, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 3.780564263322884, | |
| "grad_norm": 0.6639383435249329, | |
| "learning_rate": 1.7351095321018974e-06, | |
| "loss": 0.3871, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 3.785266457680251, | |
| "grad_norm": 0.5569302439689636, | |
| "learning_rate": 1.7287322974456933e-06, | |
| "loss": 0.3931, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 3.7899686520376177, | |
| "grad_norm": 0.5468054413795471, | |
| "learning_rate": 1.7223606045711006e-06, | |
| "loss": 0.4165, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 3.7946708463949843, | |
| "grad_norm": 0.5721184015274048, | |
| "learning_rate": 1.7159944992605774e-06, | |
| "loss": 0.4209, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 3.799373040752351, | |
| "grad_norm": 0.5125740170478821, | |
| "learning_rate": 1.7096340272564318e-06, | |
| "loss": 0.4092, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 3.804075235109718, | |
| "grad_norm": 0.5384911298751831, | |
| "learning_rate": 1.7032792342604947e-06, | |
| "loss": 0.4004, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 3.8087774294670846, | |
| "grad_norm": 0.5560373067855835, | |
| "learning_rate": 1.6969301659337944e-06, | |
| "loss": 0.4157, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 3.8134796238244513, | |
| "grad_norm": 0.5534592866897583, | |
| "learning_rate": 1.6905868678962225e-06, | |
| "loss": 0.4208, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 3.8181818181818183, | |
| "grad_norm": 0.5902019739151001, | |
| "learning_rate": 1.684249385726211e-06, | |
| "loss": 0.4125, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 3.822884012539185, | |
| "grad_norm": 0.549802839756012, | |
| "learning_rate": 1.677917764960404e-06, | |
| "loss": 0.4017, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 3.8275862068965516, | |
| "grad_norm": 0.5346619486808777, | |
| "learning_rate": 1.6715920510933277e-06, | |
| "loss": 0.4317, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 3.8322884012539182, | |
| "grad_norm": 0.5298053622245789, | |
| "learning_rate": 1.6652722895770676e-06, | |
| "loss": 0.4281, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 3.8369905956112853, | |
| "grad_norm": 0.5283401608467102, | |
| "learning_rate": 1.6589585258209383e-06, | |
| "loss": 0.3778, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 3.841692789968652, | |
| "grad_norm": 0.5301955342292786, | |
| "learning_rate": 1.6526508051911588e-06, | |
| "loss": 0.4182, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 3.846394984326019, | |
| "grad_norm": 0.527755856513977, | |
| "learning_rate": 1.6463491730105282e-06, | |
| "loss": 0.4071, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 3.8510971786833856, | |
| "grad_norm": 0.5193698406219482, | |
| "learning_rate": 1.6400536745580955e-06, | |
| "loss": 0.3893, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 3.8557993730407523, | |
| "grad_norm": 0.5560773611068726, | |
| "learning_rate": 1.6337643550688408e-06, | |
| "loss": 0.4165, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 3.860501567398119, | |
| "grad_norm": 0.5100756883621216, | |
| "learning_rate": 1.627481259733343e-06, | |
| "loss": 0.3977, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 3.865203761755486, | |
| "grad_norm": 0.6014723181724548, | |
| "learning_rate": 1.6212044336974598e-06, | |
| "loss": 0.3951, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 3.8699059561128526, | |
| "grad_norm": 0.5284562706947327, | |
| "learning_rate": 1.614933922062003e-06, | |
| "loss": 0.4074, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 3.8746081504702197, | |
| "grad_norm": 0.563565731048584, | |
| "learning_rate": 1.6086697698824144e-06, | |
| "loss": 0.4023, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 3.8793103448275863, | |
| "grad_norm": 0.5110756158828735, | |
| "learning_rate": 1.6024120221684373e-06, | |
| "loss": 0.4003, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 3.884012539184953, | |
| "grad_norm": 0.5191101431846619, | |
| "learning_rate": 1.5961607238838022e-06, | |
| "loss": 0.4081, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 3.8887147335423196, | |
| "grad_norm": 0.5377500653266907, | |
| "learning_rate": 1.589915919945894e-06, | |
| "loss": 0.4184, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 3.8934169278996866, | |
| "grad_norm": 0.5440651774406433, | |
| "learning_rate": 1.5836776552254386e-06, | |
| "loss": 0.4355, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 3.8981191222570533, | |
| "grad_norm": 0.603829026222229, | |
| "learning_rate": 1.5774459745461711e-06, | |
| "loss": 0.4056, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 3.9028213166144203, | |
| "grad_norm": 0.5026826858520508, | |
| "learning_rate": 1.5712209226845201e-06, | |
| "loss": 0.3824, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 3.907523510971787, | |
| "grad_norm": 0.7123918533325195, | |
| "learning_rate": 1.565002544369286e-06, | |
| "loss": 0.4157, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 3.9122257053291536, | |
| "grad_norm": 0.541552722454071, | |
| "learning_rate": 1.5587908842813142e-06, | |
| "loss": 0.4057, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 3.91692789968652, | |
| "grad_norm": 0.5404677391052246, | |
| "learning_rate": 1.5525859870531823e-06, | |
| "loss": 0.4211, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 3.9216300940438873, | |
| "grad_norm": 0.6348632574081421, | |
| "learning_rate": 1.5463878972688707e-06, | |
| "loss": 0.4092, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 3.926332288401254, | |
| "grad_norm": 0.5390607714653015, | |
| "learning_rate": 1.5401966594634483e-06, | |
| "loss": 0.435, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 3.9310344827586206, | |
| "grad_norm": 0.5333203077316284, | |
| "learning_rate": 1.5340123181227495e-06, | |
| "loss": 0.4245, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 3.9357366771159876, | |
| "grad_norm": 0.5764657855033875, | |
| "learning_rate": 1.527834917683058e-06, | |
| "loss": 0.3909, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 3.9404388714733543, | |
| "grad_norm": 0.6087055802345276, | |
| "learning_rate": 1.5216645025307813e-06, | |
| "loss": 0.4059, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 3.945141065830721, | |
| "grad_norm": 0.558167576789856, | |
| "learning_rate": 1.5155011170021399e-06, | |
| "loss": 0.4142, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 3.9498432601880875, | |
| "grad_norm": 0.5065332055091858, | |
| "learning_rate": 1.5093448053828402e-06, | |
| "loss": 0.4237, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 3.9545454545454546, | |
| "grad_norm": 0.5446473360061646, | |
| "learning_rate": 1.503195611907764e-06, | |
| "loss": 0.4235, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 3.959247648902821, | |
| "grad_norm": 0.6033996939659119, | |
| "learning_rate": 1.4970535807606453e-06, | |
| "loss": 0.384, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 3.9639498432601883, | |
| "grad_norm": 0.5554048418998718, | |
| "learning_rate": 1.4909187560737542e-06, | |
| "loss": 0.3953, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 3.968652037617555, | |
| "grad_norm": 0.533640444278717, | |
| "learning_rate": 1.4847911819275829e-06, | |
| "loss": 0.4072, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 3.9733542319749215, | |
| "grad_norm": 0.5807821750640869, | |
| "learning_rate": 1.4786709023505224e-06, | |
| "loss": 0.3975, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 3.978056426332288, | |
| "grad_norm": 0.6907732486724854, | |
| "learning_rate": 1.4725579613185549e-06, | |
| "loss": 0.4232, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 3.9827586206896552, | |
| "grad_norm": 0.6130005717277527, | |
| "learning_rate": 1.4664524027549291e-06, | |
| "loss": 0.4094, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 3.987460815047022, | |
| "grad_norm": 0.6130813360214233, | |
| "learning_rate": 1.4603542705298493e-06, | |
| "loss": 0.3954, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 3.992163009404389, | |
| "grad_norm": 0.5968356132507324, | |
| "learning_rate": 1.4542636084601624e-06, | |
| "loss": 0.3675, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 3.9968652037617556, | |
| "grad_norm": 0.6450978517532349, | |
| "learning_rate": 1.4481804603090358e-06, | |
| "loss": 0.4113, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 4.004702194357367, | |
| "grad_norm": 1.4493160247802734, | |
| "learning_rate": 1.4421048697856494e-06, | |
| "loss": 0.7551, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 4.009404388714733, | |
| "grad_norm": 0.5255843997001648, | |
| "learning_rate": 1.4360368805448788e-06, | |
| "loss": 0.3801, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 4.0141065830721, | |
| "grad_norm": 0.5161401629447937, | |
| "learning_rate": 1.4299765361869837e-06, | |
| "loss": 0.3959, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 4.018808777429467, | |
| "grad_norm": 0.5007433295249939, | |
| "learning_rate": 1.4239238802572908e-06, | |
| "loss": 0.3553, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 4.023510971786834, | |
| "grad_norm": 0.5718136429786682, | |
| "learning_rate": 1.4178789562458847e-06, | |
| "loss": 0.3937, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 4.028213166144201, | |
| "grad_norm": 0.5353648066520691, | |
| "learning_rate": 1.4118418075872936e-06, | |
| "loss": 0.3816, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 4.032915360501567, | |
| "grad_norm": 0.5066031813621521, | |
| "learning_rate": 1.405812477660178e-06, | |
| "loss": 0.3774, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 4.037617554858934, | |
| "grad_norm": 0.5610586404800415, | |
| "learning_rate": 1.3997910097870165e-06, | |
| "loss": 0.4023, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 4.0423197492163006, | |
| "grad_norm": 0.5356720089912415, | |
| "learning_rate": 1.3937774472337994e-06, | |
| "loss": 0.4041, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 4.047021943573668, | |
| "grad_norm": 0.5321263670921326, | |
| "learning_rate": 1.3877718332097146e-06, | |
| "loss": 0.3905, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 4.051724137931035, | |
| "grad_norm": 0.5932567119598389, | |
| "learning_rate": 1.3817742108668333e-06, | |
| "loss": 0.3989, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 4.056426332288401, | |
| "grad_norm": 0.5155854225158691, | |
| "learning_rate": 1.3757846232998118e-06, | |
| "loss": 0.3369, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 4.061128526645768, | |
| "grad_norm": 0.9726412892341614, | |
| "learning_rate": 1.369803113545566e-06, | |
| "loss": 0.4116, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 4.065830721003135, | |
| "grad_norm": 0.5043352842330933, | |
| "learning_rate": 1.3638297245829762e-06, | |
| "loss": 0.4054, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 4.070532915360501, | |
| "grad_norm": 0.531753659248352, | |
| "learning_rate": 1.3578644993325701e-06, | |
| "loss": 0.4036, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 4.075235109717869, | |
| "grad_norm": 0.5960082411766052, | |
| "learning_rate": 1.3519074806562165e-06, | |
| "loss": 0.3946, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 4.079937304075235, | |
| "grad_norm": 0.5632782578468323, | |
| "learning_rate": 1.3459587113568208e-06, | |
| "loss": 0.3951, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 4.084639498432602, | |
| "grad_norm": 0.5298274755477905, | |
| "learning_rate": 1.340018234178009e-06, | |
| "loss": 0.3962, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 4.089341692789969, | |
| "grad_norm": 0.5057002902030945, | |
| "learning_rate": 1.3340860918038295e-06, | |
| "loss": 0.3635, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 4.094043887147335, | |
| "grad_norm": 0.5192775130271912, | |
| "learning_rate": 1.328162326858442e-06, | |
| "loss": 0.3871, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 4.098746081504702, | |
| "grad_norm": 0.5322587490081787, | |
| "learning_rate": 1.3222469819058112e-06, | |
| "loss": 0.3987, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 4.103448275862069, | |
| "grad_norm": 0.5546263456344604, | |
| "learning_rate": 1.3163400994494025e-06, | |
| "loss": 0.3965, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 4.108150470219436, | |
| "grad_norm": 0.5492004156112671, | |
| "learning_rate": 1.3104417219318762e-06, | |
| "loss": 0.3837, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 4.112852664576803, | |
| "grad_norm": 0.5725740790367126, | |
| "learning_rate": 1.3045518917347791e-06, | |
| "loss": 0.3935, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 4.117554858934169, | |
| "grad_norm": 0.6411507725715637, | |
| "learning_rate": 1.2986706511782476e-06, | |
| "loss": 0.3881, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 4.122257053291536, | |
| "grad_norm": 0.5744498372077942, | |
| "learning_rate": 1.2927980425206968e-06, | |
| "loss": 0.3913, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 4.1269592476489025, | |
| "grad_norm": 0.5495030283927917, | |
| "learning_rate": 1.2869341079585184e-06, | |
| "loss": 0.3919, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 4.131661442006269, | |
| "grad_norm": 0.5531148910522461, | |
| "learning_rate": 1.2810788896257804e-06, | |
| "loss": 0.3603, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 4.136363636363637, | |
| "grad_norm": 0.5476847290992737, | |
| "learning_rate": 1.2752324295939178e-06, | |
| "loss": 0.3956, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 4.141065830721003, | |
| "grad_norm": 0.5382871627807617, | |
| "learning_rate": 1.2693947698714409e-06, | |
| "loss": 0.4183, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 4.14576802507837, | |
| "grad_norm": 0.6912056803703308, | |
| "learning_rate": 1.263565952403622e-06, | |
| "loss": 0.383, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 4.150470219435737, | |
| "grad_norm": 0.5323772430419922, | |
| "learning_rate": 1.2577460190722013e-06, | |
| "loss": 0.3964, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 4.155172413793103, | |
| "grad_norm": 0.5875644683837891, | |
| "learning_rate": 1.2519350116950842e-06, | |
| "loss": 0.4001, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 4.15987460815047, | |
| "grad_norm": 0.5375142097473145, | |
| "learning_rate": 1.2461329720260403e-06, | |
| "loss": 0.3871, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 4.164576802507837, | |
| "grad_norm": 2.5649774074554443, | |
| "learning_rate": 1.2403399417544033e-06, | |
| "loss": 0.3983, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 4.169278996865204, | |
| "grad_norm": 0.5615472793579102, | |
| "learning_rate": 1.2345559625047718e-06, | |
| "loss": 0.404, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 4.173981191222571, | |
| "grad_norm": 0.567313015460968, | |
| "learning_rate": 1.2287810758367104e-06, | |
| "loss": 0.4089, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 4.178683385579937, | |
| "grad_norm": 0.718047559261322, | |
| "learning_rate": 1.2230153232444511e-06, | |
| "loss": 0.3899, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 4.183385579937304, | |
| "grad_norm": 0.5390134453773499, | |
| "learning_rate": 1.217258746156594e-06, | |
| "loss": 0.3802, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 4.1880877742946705, | |
| "grad_norm": 0.502026379108429, | |
| "learning_rate": 1.2115113859358118e-06, | |
| "loss": 0.3855, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 4.192789968652038, | |
| "grad_norm": 0.5020443797111511, | |
| "learning_rate": 1.2057732838785514e-06, | |
| "loss": 0.368, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 4.197492163009405, | |
| "grad_norm": 0.5389068126678467, | |
| "learning_rate": 1.2000444812147333e-06, | |
| "loss": 0.3808, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 4.202194357366771, | |
| "grad_norm": 0.5966452360153198, | |
| "learning_rate": 1.1943250191074664e-06, | |
| "loss": 0.3995, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 4.206896551724138, | |
| "grad_norm": 0.5827345848083496, | |
| "learning_rate": 1.188614938652738e-06, | |
| "loss": 0.4069, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 4.2115987460815045, | |
| "grad_norm": 0.6623309254646301, | |
| "learning_rate": 1.1829142808791294e-06, | |
| "loss": 0.3683, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 4.216300940438871, | |
| "grad_norm": 0.523257315158844, | |
| "learning_rate": 1.177223086747516e-06, | |
| "loss": 0.3805, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 4.221003134796238, | |
| "grad_norm": 0.5709425806999207, | |
| "learning_rate": 1.1715413971507747e-06, | |
| "loss": 0.3942, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 4.225705329153605, | |
| "grad_norm": 1.240659475326538, | |
| "learning_rate": 1.1658692529134888e-06, | |
| "loss": 0.3796, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 4.230407523510972, | |
| "grad_norm": 0.565609335899353, | |
| "learning_rate": 1.1602066947916565e-06, | |
| "loss": 0.4021, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 4.235109717868339, | |
| "grad_norm": 0.6735373139381409, | |
| "learning_rate": 1.154553763472396e-06, | |
| "loss": 0.3477, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 4.239811912225705, | |
| "grad_norm": 0.8588801026344299, | |
| "learning_rate": 1.1489104995736543e-06, | |
| "loss": 0.3809, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 4.244514106583072, | |
| "grad_norm": 0.5612583756446838, | |
| "learning_rate": 1.1432769436439162e-06, | |
| "loss": 0.3935, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 4.2492163009404385, | |
| "grad_norm": 0.49488264322280884, | |
| "learning_rate": 1.1376531361619105e-06, | |
| "loss": 0.4034, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 4.253918495297806, | |
| "grad_norm": 0.5561050176620483, | |
| "learning_rate": 1.1320391175363225e-06, | |
| "loss": 0.3779, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 4.258620689655173, | |
| "grad_norm": 0.5452584624290466, | |
| "learning_rate": 1.126434928105497e-06, | |
| "loss": 0.3842, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 4.263322884012539, | |
| "grad_norm": 0.6411417126655579, | |
| "learning_rate": 1.1208406081371612e-06, | |
| "loss": 0.3875, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 4.268025078369906, | |
| "grad_norm": 0.5172016024589539, | |
| "learning_rate": 1.11525619782812e-06, | |
| "loss": 0.3999, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 4.2727272727272725, | |
| "grad_norm": 0.5747371912002563, | |
| "learning_rate": 1.1096817373039773e-06, | |
| "loss": 0.4031, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 4.277429467084639, | |
| "grad_norm": 0.5466158390045166, | |
| "learning_rate": 1.104117266618846e-06, | |
| "loss": 0.3962, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 4.282131661442007, | |
| "grad_norm": 0.6000977158546448, | |
| "learning_rate": 1.0985628257550575e-06, | |
| "loss": 0.384, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 4.286833855799373, | |
| "grad_norm": 0.543651819229126, | |
| "learning_rate": 1.0930184546228769e-06, | |
| "loss": 0.3908, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 4.29153605015674, | |
| "grad_norm": 0.5661547183990479, | |
| "learning_rate": 1.087484193060215e-06, | |
| "loss": 0.3624, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 4.2962382445141065, | |
| "grad_norm": 0.5777263045310974, | |
| "learning_rate": 1.0819600808323424e-06, | |
| "loss": 0.3981, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 4.300940438871473, | |
| "grad_norm": 0.6490275859832764, | |
| "learning_rate": 1.0764461576316041e-06, | |
| "loss": 0.3801, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 4.30564263322884, | |
| "grad_norm": 0.8035992980003357, | |
| "learning_rate": 1.0709424630771333e-06, | |
| "loss": 0.3973, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 4.310344827586207, | |
| "grad_norm": 0.603229820728302, | |
| "learning_rate": 1.0654490367145684e-06, | |
| "loss": 0.3878, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 4.315047021943574, | |
| "grad_norm": 0.5470160841941833, | |
| "learning_rate": 1.0599659180157678e-06, | |
| "loss": 0.4047, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 4.3197492163009406, | |
| "grad_norm": 5.193161487579346, | |
| "learning_rate": 1.0544931463785237e-06, | |
| "loss": 0.4272, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 4.324451410658307, | |
| "grad_norm": 0.5719714760780334, | |
| "learning_rate": 1.049030761126287e-06, | |
| "loss": 0.4009, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 4.329153605015674, | |
| "grad_norm": 0.577610969543457, | |
| "learning_rate": 1.043578801507874e-06, | |
| "loss": 0.3806, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 4.33385579937304, | |
| "grad_norm": 0.5377278923988342, | |
| "learning_rate": 1.038137306697193e-06, | |
| "loss": 0.402, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 4.338557993730408, | |
| "grad_norm": 0.6189285516738892, | |
| "learning_rate": 1.0327063157929582e-06, | |
| "loss": 0.3915, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 4.343260188087775, | |
| "grad_norm": 0.5795995593070984, | |
| "learning_rate": 1.027285867818411e-06, | |
| "loss": 0.3957, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 4.347962382445141, | |
| "grad_norm": 0.49627596139907837, | |
| "learning_rate": 1.021876001721039e-06, | |
| "loss": 0.3395, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 4.352664576802508, | |
| "grad_norm": 0.6332801580429077, | |
| "learning_rate": 1.016476756372295e-06, | |
| "loss": 0.3891, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 4.3573667711598745, | |
| "grad_norm": 0.5847935676574707, | |
| "learning_rate": 1.011088170567319e-06, | |
| "loss": 0.3783, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 4.362068965517241, | |
| "grad_norm": 0.6362956166267395, | |
| "learning_rate": 1.0057102830246596e-06, | |
| "loss": 0.372, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 4.366771159874608, | |
| "grad_norm": 0.549068033695221, | |
| "learning_rate": 1.0003431323859943e-06, | |
| "loss": 0.4023, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 4.371473354231975, | |
| "grad_norm": 0.5768754482269287, | |
| "learning_rate": 9.949867572158544e-07, | |
| "loss": 0.3876, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 4.376175548589342, | |
| "grad_norm": 0.7501256465911865, | |
| "learning_rate": 9.896411960013455e-07, | |
| "loss": 0.4011, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 4.3808777429467085, | |
| "grad_norm": 0.5689511895179749, | |
| "learning_rate": 9.843064871518694e-07, | |
| "loss": 0.3816, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 4.385579937304075, | |
| "grad_norm": 0.6056525111198425, | |
| "learning_rate": 9.78982668998856e-07, | |
| "loss": 0.3764, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 4.390282131661442, | |
| "grad_norm": 0.5028645396232605, | |
| "learning_rate": 9.736697797954766e-07, | |
| "loss": 0.3996, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 4.394984326018808, | |
| "grad_norm": 0.5384755730628967, | |
| "learning_rate": 9.683678577163788e-07, | |
| "loss": 0.3924, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 4.399686520376176, | |
| "grad_norm": 0.5420899391174316, | |
| "learning_rate": 9.630769408574065e-07, | |
| "loss": 0.3664, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 4.4043887147335425, | |
| "grad_norm": 1.1258292198181152, | |
| "learning_rate": 9.577970672353274e-07, | |
| "loss": 0.372, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 4.409090909090909, | |
| "grad_norm": 0.5689091682434082, | |
| "learning_rate": 9.525282747875636e-07, | |
| "loss": 0.3684, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 4.413793103448276, | |
| "grad_norm": 0.5591897964477539, | |
| "learning_rate": 9.472706013719113e-07, | |
| "loss": 0.3958, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 4.418495297805642, | |
| "grad_norm": 0.5757090449333191, | |
| "learning_rate": 9.420240847662759e-07, | |
| "loss": 0.3796, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 4.423197492163009, | |
| "grad_norm": 0.5598184466362, | |
| "learning_rate": 9.367887626683975e-07, | |
| "loss": 0.4052, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 4.427899686520377, | |
| "grad_norm": 0.573148787021637, | |
| "learning_rate": 9.315646726955798e-07, | |
| "loss": 0.3831, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 4.432601880877743, | |
| "grad_norm": 0.5196001529693604, | |
| "learning_rate": 9.263518523844211e-07, | |
| "loss": 0.382, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 4.43730407523511, | |
| "grad_norm": 0.5644182562828064, | |
| "learning_rate": 9.211503391905446e-07, | |
| "loss": 0.3858, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 4.4420062695924765, | |
| "grad_norm": 0.5429992079734802, | |
| "learning_rate": 9.159601704883253e-07, | |
| "loss": 0.3919, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 4.446708463949843, | |
| "grad_norm": 0.5335330367088318, | |
| "learning_rate": 9.107813835706303e-07, | |
| "loss": 0.3594, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 4.45141065830721, | |
| "grad_norm": 0.5579376816749573, | |
| "learning_rate": 9.056140156485385e-07, | |
| "loss": 0.3768, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 4.456112852664576, | |
| "grad_norm": 1.2909058332443237, | |
| "learning_rate": 9.004581038510865e-07, | |
| "loss": 0.3877, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 4.460815047021944, | |
| "grad_norm": 0.5664709806442261, | |
| "learning_rate": 8.953136852249922e-07, | |
| "loss": 0.4057, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 4.4655172413793105, | |
| "grad_norm": 0.6413213610649109, | |
| "learning_rate": 8.901807967343898e-07, | |
| "loss": 0.4006, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 4.470219435736677, | |
| "grad_norm": 0.5393944978713989, | |
| "learning_rate": 8.850594752605712e-07, | |
| "loss": 0.3957, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 4.474921630094044, | |
| "grad_norm": 0.6381353139877319, | |
| "learning_rate": 8.79949757601711e-07, | |
| "loss": 0.3861, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 4.47962382445141, | |
| "grad_norm": 0.5642807483673096, | |
| "learning_rate": 8.748516804726096e-07, | |
| "loss": 0.3867, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 4.484326018808777, | |
| "grad_norm": 0.5574389696121216, | |
| "learning_rate": 8.697652805044265e-07, | |
| "loss": 0.3644, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 4.4890282131661445, | |
| "grad_norm": 0.5668476223945618, | |
| "learning_rate": 8.646905942444172e-07, | |
| "loss": 0.3735, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 4.493730407523511, | |
| "grad_norm": 0.5889971256256104, | |
| "learning_rate": 8.59627658155671e-07, | |
| "loss": 0.392, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 4.498432601880878, | |
| "grad_norm": 0.5788328647613525, | |
| "learning_rate": 8.545765086168484e-07, | |
| "loss": 0.3845, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 4.503134796238244, | |
| "grad_norm": 0.9370825886726379, | |
| "learning_rate": 8.495371819219206e-07, | |
| "loss": 0.3985, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 4.507836990595611, | |
| "grad_norm": 0.5701330900192261, | |
| "learning_rate": 8.44509714279908e-07, | |
| "loss": 0.4099, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 4.512539184952978, | |
| "grad_norm": 0.5952461361885071, | |
| "learning_rate": 8.394941418146202e-07, | |
| "loss": 0.4003, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 4.517241379310345, | |
| "grad_norm": 0.5201880931854248, | |
| "learning_rate": 8.344905005643967e-07, | |
| "loss": 0.4023, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 4.521943573667712, | |
| "grad_norm": 0.5678507089614868, | |
| "learning_rate": 8.294988264818488e-07, | |
| "loss": 0.3913, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 4.5266457680250785, | |
| "grad_norm": 0.5855143070220947, | |
| "learning_rate": 8.245191554335963e-07, | |
| "loss": 0.3852, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 4.531347962382445, | |
| "grad_norm": 0.5465763807296753, | |
| "learning_rate": 8.1955152320002e-07, | |
| "loss": 0.387, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 4.536050156739812, | |
| "grad_norm": 0.5709990859031677, | |
| "learning_rate": 8.145959654749924e-07, | |
| "loss": 0.3997, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 4.540752351097178, | |
| "grad_norm": 0.8095040321350098, | |
| "learning_rate": 8.096525178656306e-07, | |
| "loss": 0.3686, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 4.545454545454545, | |
| "grad_norm": 0.5552055239677429, | |
| "learning_rate": 8.047212158920362e-07, | |
| "loss": 0.3958, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 4.5501567398119125, | |
| "grad_norm": 0.5294213891029358, | |
| "learning_rate": 7.998020949870402e-07, | |
| "loss": 0.4134, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 4.554858934169279, | |
| "grad_norm": 0.5582411289215088, | |
| "learning_rate": 7.948951904959504e-07, | |
| "loss": 0.3799, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 4.559561128526646, | |
| "grad_norm": 0.5506302714347839, | |
| "learning_rate": 7.900005376762948e-07, | |
| "loss": 0.3899, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 4.564263322884012, | |
| "grad_norm": 0.5499927997589111, | |
| "learning_rate": 7.851181716975703e-07, | |
| "loss": 0.398, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 4.568965517241379, | |
| "grad_norm": 0.530657172203064, | |
| "learning_rate": 7.802481276409896e-07, | |
| "loss": 0.3622, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 4.5736677115987465, | |
| "grad_norm": 0.9928011894226074, | |
| "learning_rate": 7.75390440499228e-07, | |
| "loss": 0.3735, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 4.578369905956113, | |
| "grad_norm": 0.6327497363090515, | |
| "learning_rate": 7.705451451761734e-07, | |
| "loss": 0.3686, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 4.58307210031348, | |
| "grad_norm": 1.5083693265914917, | |
| "learning_rate": 7.657122764866754e-07, | |
| "loss": 0.3706, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 4.587774294670846, | |
| "grad_norm": 0.5787728428840637, | |
| "learning_rate": 7.608918691562914e-07, | |
| "loss": 0.4063, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 4.592476489028213, | |
| "grad_norm": 0.580129861831665, | |
| "learning_rate": 7.560839578210466e-07, | |
| "loss": 0.3693, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 4.59717868338558, | |
| "grad_norm": 0.5664162635803223, | |
| "learning_rate": 7.512885770271722e-07, | |
| "loss": 0.393, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 4.601880877742946, | |
| "grad_norm": 0.6353042125701904, | |
| "learning_rate": 7.465057612308676e-07, | |
| "loss": 0.3902, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 4.606583072100314, | |
| "grad_norm": 0.5410131812095642, | |
| "learning_rate": 7.417355447980484e-07, | |
| "loss": 0.3948, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 4.61128526645768, | |
| "grad_norm": 0.5826568603515625, | |
| "learning_rate": 7.369779620041001e-07, | |
| "loss": 0.3828, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 4.615987460815047, | |
| "grad_norm": 0.5563565492630005, | |
| "learning_rate": 7.322330470336314e-07, | |
| "loss": 0.4095, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 4.620689655172414, | |
| "grad_norm": 0.5592769980430603, | |
| "learning_rate": 7.275008339802295e-07, | |
| "loss": 0.3924, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 4.62539184952978, | |
| "grad_norm": 0.744817316532135, | |
| "learning_rate": 7.227813568462141e-07, | |
| "loss": 0.3737, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 4.630094043887147, | |
| "grad_norm": 0.5768243074417114, | |
| "learning_rate": 7.180746495423946e-07, | |
| "loss": 0.3906, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 4.6347962382445145, | |
| "grad_norm": 0.5390825867652893, | |
| "learning_rate": 7.133807458878247e-07, | |
| "loss": 0.3872, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 4.639498432601881, | |
| "grad_norm": 0.5862735509872437, | |
| "learning_rate": 7.086996796095599e-07, | |
| "loss": 0.3839, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 4.644200626959248, | |
| "grad_norm": 0.5880054235458374, | |
| "learning_rate": 7.040314843424173e-07, | |
| "loss": 0.3936, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 4.648902821316614, | |
| "grad_norm": 0.522564172744751, | |
| "learning_rate": 6.99376193628728e-07, | |
| "loss": 0.3827, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 4.653605015673981, | |
| "grad_norm": 0.5806529521942139, | |
| "learning_rate": 6.947338409181056e-07, | |
| "loss": 0.387, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 4.658307210031348, | |
| "grad_norm": 0.5591483116149902, | |
| "learning_rate": 6.90104459567196e-07, | |
| "loss": 0.3896, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 4.663009404388715, | |
| "grad_norm": 0.8104233145713806, | |
| "learning_rate": 6.854880828394442e-07, | |
| "loss": 0.3795, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 4.667711598746082, | |
| "grad_norm": 0.5946338772773743, | |
| "learning_rate": 6.808847439048524e-07, | |
| "loss": 0.4052, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 4.672413793103448, | |
| "grad_norm": 0.6517843008041382, | |
| "learning_rate": 6.762944758397432e-07, | |
| "loss": 0.3922, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 4.677115987460815, | |
| "grad_norm": 0.526491641998291, | |
| "learning_rate": 6.717173116265208e-07, | |
| "loss": 0.3853, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 4.681818181818182, | |
| "grad_norm": 0.5598382353782654, | |
| "learning_rate": 6.671532841534345e-07, | |
| "loss": 0.3931, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 4.686520376175548, | |
| "grad_norm": 0.6231300830841064, | |
| "learning_rate": 6.626024262143421e-07, | |
| "loss": 0.401, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 4.691222570532915, | |
| "grad_norm": 0.5638094544410706, | |
| "learning_rate": 6.58064770508475e-07, | |
| "loss": 0.3838, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 4.695924764890282, | |
| "grad_norm": 0.5840059518814087, | |
| "learning_rate": 6.535403496402023e-07, | |
| "loss": 0.3726, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 4.700626959247649, | |
| "grad_norm": 0.5452519655227661, | |
| "learning_rate": 6.490291961187975e-07, | |
| "loss": 0.3735, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 4.705329153605016, | |
| "grad_norm": 0.6175218224525452, | |
| "learning_rate": 6.445313423582039e-07, | |
| "loss": 0.4116, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 4.710031347962382, | |
| "grad_norm": 0.5606091022491455, | |
| "learning_rate": 6.400468206768004e-07, | |
| "loss": 0.3907, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 4.714733542319749, | |
| "grad_norm": 0.5126330256462097, | |
| "learning_rate": 6.35575663297176e-07, | |
| "loss": 0.3978, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 4.7194357366771165, | |
| "grad_norm": 0.542105495929718, | |
| "learning_rate": 6.31117902345888e-07, | |
| "loss": 0.3408, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 4.724137931034483, | |
| "grad_norm": 0.7690749168395996, | |
| "learning_rate": 6.266735698532392e-07, | |
| "loss": 0.3865, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 4.72884012539185, | |
| "grad_norm": 0.5448989868164062, | |
| "learning_rate": 6.222426977530449e-07, | |
| "loss": 0.4078, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 4.733542319749216, | |
| "grad_norm": 0.5502550601959229, | |
| "learning_rate": 6.178253178824029e-07, | |
| "loss": 0.3987, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 4.738244514106583, | |
| "grad_norm": 0.5256698131561279, | |
| "learning_rate": 6.134214619814657e-07, | |
| "loss": 0.3797, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 4.74294670846395, | |
| "grad_norm": 0.5733087062835693, | |
| "learning_rate": 6.090311616932127e-07, | |
| "loss": 0.3729, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 4.747648902821316, | |
| "grad_norm": 0.5204405188560486, | |
| "learning_rate": 6.04654448563221e-07, | |
| "loss": 0.3798, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 4.752351097178684, | |
| "grad_norm": 0.5861319899559021, | |
| "learning_rate": 6.002913540394417e-07, | |
| "loss": 0.3605, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 4.75705329153605, | |
| "grad_norm": 0.5983589887619019, | |
| "learning_rate": 5.959419094719713e-07, | |
| "loss": 0.406, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 4.761755485893417, | |
| "grad_norm": 0.5816555023193359, | |
| "learning_rate": 5.916061461128269e-07, | |
| "loss": 0.3813, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 4.766457680250784, | |
| "grad_norm": 0.6101461052894592, | |
| "learning_rate": 5.872840951157241e-07, | |
| "loss": 0.3656, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 4.77115987460815, | |
| "grad_norm": 0.5770082473754883, | |
| "learning_rate": 5.829757875358477e-07, | |
| "loss": 0.3824, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 4.775862068965517, | |
| "grad_norm": 0.5549518465995789, | |
| "learning_rate": 5.786812543296372e-07, | |
| "loss": 0.3919, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 4.7805642633228835, | |
| "grad_norm": 0.5969197750091553, | |
| "learning_rate": 5.744005263545538e-07, | |
| "loss": 0.4094, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 4.785266457680251, | |
| "grad_norm": 0.6006177663803101, | |
| "learning_rate": 5.701336343688671e-07, | |
| "loss": 0.4076, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 4.789968652037618, | |
| "grad_norm": 0.5562496185302734, | |
| "learning_rate": 5.658806090314322e-07, | |
| "loss": 0.3756, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 4.794670846394984, | |
| "grad_norm": 0.619269609451294, | |
| "learning_rate": 5.616414809014647e-07, | |
| "loss": 0.3803, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 4.799373040752351, | |
| "grad_norm": 0.5273744463920593, | |
| "learning_rate": 5.574162804383293e-07, | |
| "loss": 0.3903, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 4.804075235109718, | |
| "grad_norm": 0.5756296515464783, | |
| "learning_rate": 5.532050380013115e-07, | |
| "loss": 0.3834, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 4.808777429467085, | |
| "grad_norm": 0.6292474269866943, | |
| "learning_rate": 5.490077838494079e-07, | |
| "loss": 0.4114, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 4.813479623824452, | |
| "grad_norm": 0.5505243539810181, | |
| "learning_rate": 5.448245481411041e-07, | |
| "loss": 0.3919, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 4.818181818181818, | |
| "grad_norm": 0.5497763156890869, | |
| "learning_rate": 5.406553609341586e-07, | |
| "loss": 0.3913, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 4.822884012539185, | |
| "grad_norm": 0.5497800707817078, | |
| "learning_rate": 5.365002521853882e-07, | |
| "loss": 0.3753, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 4.827586206896552, | |
| "grad_norm": 0.5281540155410767, | |
| "learning_rate": 5.32359251750452e-07, | |
| "loss": 0.3828, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 4.832288401253918, | |
| "grad_norm": 0.5442911982536316, | |
| "learning_rate": 5.282323893836347e-07, | |
| "loss": 0.4074, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 4.836990595611285, | |
| "grad_norm": 0.5267000794410706, | |
| "learning_rate": 5.241196947376382e-07, | |
| "loss": 0.3844, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 4.841692789968652, | |
| "grad_norm": 0.5562646985054016, | |
| "learning_rate": 5.200211973633632e-07, | |
| "loss": 0.4098, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 4.846394984326019, | |
| "grad_norm": 0.53923499584198, | |
| "learning_rate": 5.15936926709699e-07, | |
| "loss": 0.3987, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 4.851097178683386, | |
| "grad_norm": 0.5662857890129089, | |
| "learning_rate": 5.118669121233127e-07, | |
| "loss": 0.3928, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 4.855799373040752, | |
| "grad_norm": 0.5717837810516357, | |
| "learning_rate": 5.078111828484347e-07, | |
| "loss": 0.3914, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 4.860501567398119, | |
| "grad_norm": 0.6961056590080261, | |
| "learning_rate": 5.037697680266565e-07, | |
| "loss": 0.394, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 4.8652037617554855, | |
| "grad_norm": 0.6069244742393494, | |
| "learning_rate": 4.997426966967106e-07, | |
| "loss": 0.3949, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 4.869905956112853, | |
| "grad_norm": 0.51577228307724, | |
| "learning_rate": 4.957299977942704e-07, | |
| "loss": 0.3792, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 4.87460815047022, | |
| "grad_norm": 0.5260924696922302, | |
| "learning_rate": 4.917317001517389e-07, | |
| "loss": 0.3851, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 4.879310344827586, | |
| "grad_norm": 0.5808457732200623, | |
| "learning_rate": 4.877478324980412e-07, | |
| "loss": 0.4056, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 4.884012539184953, | |
| "grad_norm": 0.5486719012260437, | |
| "learning_rate": 4.837784234584194e-07, | |
| "loss": 0.3748, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 4.88871473354232, | |
| "grad_norm": 0.5316773653030396, | |
| "learning_rate": 4.79823501554226e-07, | |
| "loss": 0.3973, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 4.893416927899686, | |
| "grad_norm": 0.5671694874763489, | |
| "learning_rate": 4.7588309520271934e-07, | |
| "loss": 0.3817, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 4.898119122257054, | |
| "grad_norm": 0.5756440758705139, | |
| "learning_rate": 4.7195723271685893e-07, | |
| "loss": 0.4129, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 4.90282131661442, | |
| "grad_norm": 0.5772919058799744, | |
| "learning_rate": 4.6804594230510286e-07, | |
| "loss": 0.4097, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 4.907523510971787, | |
| "grad_norm": 0.5606109499931335, | |
| "learning_rate": 4.641492520712043e-07, | |
| "loss": 0.3859, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 4.912225705329154, | |
| "grad_norm": 0.6109121441841125, | |
| "learning_rate": 4.60267190014011e-07, | |
| "loss": 0.3974, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 4.91692789968652, | |
| "grad_norm": 0.5530210137367249, | |
| "learning_rate": 4.563997840272602e-07, | |
| "loss": 0.3813, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 4.921630094043887, | |
| "grad_norm": 0.527423083782196, | |
| "learning_rate": 4.5254706189938545e-07, | |
| "loss": 0.3682, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 4.9263322884012535, | |
| "grad_norm": 0.8316900134086609, | |
| "learning_rate": 4.4870905131330827e-07, | |
| "loss": 0.4091, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 4.931034482758621, | |
| "grad_norm": 0.5423528552055359, | |
| "learning_rate": 4.448857798462455e-07, | |
| "loss": 0.4069, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 4.935736677115988, | |
| "grad_norm": 0.590660572052002, | |
| "learning_rate": 4.4107727496950913e-07, | |
| "loss": 0.3789, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 4.940438871473354, | |
| "grad_norm": 0.6766872406005859, | |
| "learning_rate": 4.372835640483089e-07, | |
| "loss": 0.4012, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 4.945141065830721, | |
| "grad_norm": 0.6196874380111694, | |
| "learning_rate": 4.3350467434155526e-07, | |
| "loss": 0.3943, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 4.9498432601880875, | |
| "grad_norm": 0.6170504689216614, | |
| "learning_rate": 4.297406330016643e-07, | |
| "loss": 0.3849, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 4.954545454545455, | |
| "grad_norm": 0.6729781627655029, | |
| "learning_rate": 4.25991467074362e-07, | |
| "loss": 0.3746, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 4.959247648902822, | |
| "grad_norm": 0.7265579700469971, | |
| "learning_rate": 4.2225720349849063e-07, | |
| "loss": 0.3881, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 4.963949843260188, | |
| "grad_norm": 0.5406692624092102, | |
| "learning_rate": 4.185378691058145e-07, | |
| "loss": 0.3832, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 4.968652037617555, | |
| "grad_norm": 0.6976690292358398, | |
| "learning_rate": 4.148334906208273e-07, | |
| "loss": 0.4138, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 4.9733542319749215, | |
| "grad_norm": 0.5301910042762756, | |
| "learning_rate": 4.1114409466056107e-07, | |
| "loss": 0.3883, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 4.978056426332288, | |
| "grad_norm": 0.5670992136001587, | |
| "learning_rate": 4.0746970773439115e-07, | |
| "loss": 0.4175, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 4.982758620689655, | |
| "grad_norm": 0.5474900007247925, | |
| "learning_rate": 4.0381035624385336e-07, | |
| "loss": 0.4017, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 4.987460815047022, | |
| "grad_norm": 0.5067923069000244, | |
| "learning_rate": 4.0016606648244555e-07, | |
| "loss": 0.3591, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 4.992163009404389, | |
| "grad_norm": 0.6014583706855774, | |
| "learning_rate": 3.9653686463544447e-07, | |
| "loss": 0.4094, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 4.996865203761756, | |
| "grad_norm": 0.549430251121521, | |
| "learning_rate": 3.929227767797153e-07, | |
| "loss": 0.4017, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 5.004702194357367, | |
| "grad_norm": 1.0579490661621094, | |
| "learning_rate": 3.8932382888352547e-07, | |
| "loss": 0.7698, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 5.009404388714733, | |
| "grad_norm": 0.5798449516296387, | |
| "learning_rate": 3.8574004680635686e-07, | |
| "loss": 0.368, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 5.0141065830721, | |
| "grad_norm": 0.588994026184082, | |
| "learning_rate": 3.8217145629872054e-07, | |
| "loss": 0.3894, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 5.018808777429467, | |
| "grad_norm": 0.5300963521003723, | |
| "learning_rate": 3.786180830019717e-07, | |
| "loss": 0.4152, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 5.023510971786834, | |
| "grad_norm": 0.5091548562049866, | |
| "learning_rate": 3.7507995244812636e-07, | |
| "loss": 0.3316, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 5.028213166144201, | |
| "grad_norm": 0.5705922842025757, | |
| "learning_rate": 3.7155709005967544e-07, | |
| "loss": 0.3764, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 5.032915360501567, | |
| "grad_norm": 0.6238323450088501, | |
| "learning_rate": 3.6804952114940504e-07, | |
| "loss": 0.3683, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 5.037617554858934, | |
| "grad_norm": 0.6332188248634338, | |
| "learning_rate": 3.645572709202136e-07, | |
| "loss": 0.3607, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 5.0423197492163006, | |
| "grad_norm": 0.5465915203094482, | |
| "learning_rate": 3.610803644649269e-07, | |
| "loss": 0.3374, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 5.047021943573668, | |
| "grad_norm": 0.5820398330688477, | |
| "learning_rate": 3.576188267661271e-07, | |
| "loss": 0.3743, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 5.051724137931035, | |
| "grad_norm": 0.5405742526054382, | |
| "learning_rate": 3.5417268269596186e-07, | |
| "loss": 0.3891, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 5.056426332288401, | |
| "grad_norm": 0.5952848792076111, | |
| "learning_rate": 3.5074195701597423e-07, | |
| "loss": 0.376, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 5.061128526645768, | |
| "grad_norm": 0.5295292735099792, | |
| "learning_rate": 3.4732667437692075e-07, | |
| "loss": 0.3871, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 5.065830721003135, | |
| "grad_norm": 0.5709607005119324, | |
| "learning_rate": 3.439268593185957e-07, | |
| "loss": 0.3772, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 5.070532915360501, | |
| "grad_norm": 0.5744038224220276, | |
| "learning_rate": 3.4054253626965404e-07, | |
| "loss": 0.3533, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 5.075235109717869, | |
| "grad_norm": 0.5315141081809998, | |
| "learning_rate": 3.371737295474359e-07, | |
| "loss": 0.3876, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 5.079937304075235, | |
| "grad_norm": 0.6047268509864807, | |
| "learning_rate": 3.338204633577924e-07, | |
| "loss": 0.362, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 5.084639498432602, | |
| "grad_norm": 0.5665130019187927, | |
| "learning_rate": 3.3048276179491135e-07, | |
| "loss": 0.4093, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 5.089341692789969, | |
| "grad_norm": 0.5911765694618225, | |
| "learning_rate": 3.271606488411447e-07, | |
| "loss": 0.3732, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 5.094043887147335, | |
| "grad_norm": 0.5678281784057617, | |
| "learning_rate": 3.238541483668345e-07, | |
| "loss": 0.3423, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 5.098746081504702, | |
| "grad_norm": 0.5383968353271484, | |
| "learning_rate": 3.2056328413014456e-07, | |
| "loss": 0.3985, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 5.103448275862069, | |
| "grad_norm": 0.6322943568229675, | |
| "learning_rate": 3.172880797768849e-07, | |
| "loss": 0.3656, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 5.108150470219436, | |
| "grad_norm": 0.5436883568763733, | |
| "learning_rate": 3.1402855884034856e-07, | |
| "loss": 0.3585, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 5.112852664576803, | |
| "grad_norm": 0.5579142570495605, | |
| "learning_rate": 3.1078474474113497e-07, | |
| "loss": 0.3675, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 5.117554858934169, | |
| "grad_norm": 0.5703343152999878, | |
| "learning_rate": 3.075566607869876e-07, | |
| "loss": 0.3896, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 5.122257053291536, | |
| "grad_norm": 0.5619482398033142, | |
| "learning_rate": 3.04344330172624e-07, | |
| "loss": 0.3736, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 5.1269592476489025, | |
| "grad_norm": 0.5970786809921265, | |
| "learning_rate": 3.0114777597956835e-07, | |
| "loss": 0.352, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 5.131661442006269, | |
| "grad_norm": 0.620596706867218, | |
| "learning_rate": 2.9796702117598884e-07, | |
| "loss": 0.3814, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 5.136363636363637, | |
| "grad_norm": 0.546761155128479, | |
| "learning_rate": 2.948020886165279e-07, | |
| "loss": 0.3747, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 5.141065830721003, | |
| "grad_norm": 0.6689497232437134, | |
| "learning_rate": 2.91653001042142e-07, | |
| "loss": 0.3887, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 5.14576802507837, | |
| "grad_norm": 0.6103398203849792, | |
| "learning_rate": 2.885197810799367e-07, | |
| "loss": 0.3457, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 5.150470219435737, | |
| "grad_norm": 0.6375635862350464, | |
| "learning_rate": 2.854024512430043e-07, | |
| "loss": 0.3776, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 5.155172413793103, | |
| "grad_norm": 0.537567138671875, | |
| "learning_rate": 2.8230103393026094e-07, | |
| "loss": 0.365, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 5.15987460815047, | |
| "grad_norm": 0.6254602074623108, | |
| "learning_rate": 2.792155514262887e-07, | |
| "loss": 0.3839, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 5.164576802507837, | |
| "grad_norm": 0.4944222867488861, | |
| "learning_rate": 2.761460259011703e-07, | |
| "loss": 0.352, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 5.169278996865204, | |
| "grad_norm": 0.5793887972831726, | |
| "learning_rate": 2.7309247941033623e-07, | |
| "loss": 0.3857, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 5.173981191222571, | |
| "grad_norm": 0.9634398818016052, | |
| "learning_rate": 2.700549338944014e-07, | |
| "loss": 0.3639, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 5.178683385579937, | |
| "grad_norm": 0.6770529747009277, | |
| "learning_rate": 2.6703341117900905e-07, | |
| "loss": 0.3818, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 5.183385579937304, | |
| "grad_norm": 0.5654211640357971, | |
| "learning_rate": 2.6402793297467476e-07, | |
| "loss": 0.3899, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 5.1880877742946705, | |
| "grad_norm": 0.5664350390434265, | |
| "learning_rate": 2.6103852087662753e-07, | |
| "loss": 0.3627, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 5.192789968652038, | |
| "grad_norm": 0.5438733100891113, | |
| "learning_rate": 2.580651963646602e-07, | |
| "loss": 0.379, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 5.197492163009405, | |
| "grad_norm": 1.721574306488037, | |
| "learning_rate": 2.5510798080296827e-07, | |
| "loss": 0.3759, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 5.202194357366771, | |
| "grad_norm": 0.704701840877533, | |
| "learning_rate": 2.5216689544000193e-07, | |
| "loss": 0.3846, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 5.206896551724138, | |
| "grad_norm": 0.5502007603645325, | |
| "learning_rate": 2.4924196140831027e-07, | |
| "loss": 0.4047, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 5.2115987460815045, | |
| "grad_norm": 0.5678089261054993, | |
| "learning_rate": 2.4633319972439064e-07, | |
| "loss": 0.3535, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 5.216300940438871, | |
| "grad_norm": 0.5587364435195923, | |
| "learning_rate": 2.434406312885376e-07, | |
| "loss": 0.3565, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 5.221003134796238, | |
| "grad_norm": 0.5112608671188354, | |
| "learning_rate": 2.405642768846925e-07, | |
| "loss": 0.3894, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 5.225705329153605, | |
| "grad_norm": 0.6091886162757874, | |
| "learning_rate": 2.3770415718029349e-07, | |
| "loss": 0.3616, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 5.230407523510972, | |
| "grad_norm": 0.6871199607849121, | |
| "learning_rate": 2.3486029272612842e-07, | |
| "loss": 0.3906, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 5.235109717868339, | |
| "grad_norm": 1.1539103984832764, | |
| "learning_rate": 2.320327039561865e-07, | |
| "loss": 0.3703, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 5.239811912225705, | |
| "grad_norm": 0.6069610714912415, | |
| "learning_rate": 2.29221411187511e-07, | |
| "loss": 0.399, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 5.244514106583072, | |
| "grad_norm": 0.5559518337249756, | |
| "learning_rate": 2.2642643462005454e-07, | |
| "loss": 0.4039, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 5.2492163009404385, | |
| "grad_norm": 0.5976861715316772, | |
| "learning_rate": 2.236477943365309e-07, | |
| "loss": 0.376, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 5.253918495297806, | |
| "grad_norm": 0.730370283126831, | |
| "learning_rate": 2.2088551030227668e-07, | |
| "loss": 0.3866, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 5.258620689655173, | |
| "grad_norm": 0.5441141128540039, | |
| "learning_rate": 2.181396023651003e-07, | |
| "loss": 0.3926, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 5.263322884012539, | |
| "grad_norm": 0.5770934224128723, | |
| "learning_rate": 2.1541009025514536e-07, | |
| "loss": 0.3639, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 5.268025078369906, | |
| "grad_norm": 0.5627783536911011, | |
| "learning_rate": 2.1269699358474617e-07, | |
| "loss": 0.3792, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 5.2727272727272725, | |
| "grad_norm": 0.8002972602844238, | |
| "learning_rate": 2.100003318482871e-07, | |
| "loss": 0.3875, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 5.277429467084639, | |
| "grad_norm": 0.5331479907035828, | |
| "learning_rate": 2.073201244220635e-07, | |
| "loss": 0.3925, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 5.282131661442007, | |
| "grad_norm": 0.589150607585907, | |
| "learning_rate": 2.0465639056414106e-07, | |
| "loss": 0.367, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 5.286833855799373, | |
| "grad_norm": 0.5835588574409485, | |
| "learning_rate": 2.0200914941421817e-07, | |
| "loss": 0.3584, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 5.29153605015674, | |
| "grad_norm": 0.5520695447921753, | |
| "learning_rate": 1.9937841999348866e-07, | |
| "loss": 0.3851, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 5.2962382445141065, | |
| "grad_norm": 0.5931273698806763, | |
| "learning_rate": 1.9676422120450455e-07, | |
| "loss": 0.3719, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 5.300940438871473, | |
| "grad_norm": 0.7620764374732971, | |
| "learning_rate": 1.9416657183104038e-07, | |
| "loss": 0.3873, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 5.30564263322884, | |
| "grad_norm": 0.8696703314781189, | |
| "learning_rate": 1.915854905379594e-07, | |
| "loss": 0.4264, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 5.310344827586207, | |
| "grad_norm": 0.5184534192085266, | |
| "learning_rate": 1.8902099587107592e-07, | |
| "loss": 0.3946, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 5.315047021943574, | |
| "grad_norm": 0.6142277717590332, | |
| "learning_rate": 1.8647310625702796e-07, | |
| "loss": 0.3804, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 5.3197492163009406, | |
| "grad_norm": 0.5770272016525269, | |
| "learning_rate": 1.8394184000313815e-07, | |
| "loss": 0.3836, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 5.324451410658307, | |
| "grad_norm": 0.562314510345459, | |
| "learning_rate": 1.814272152972879e-07, | |
| "loss": 0.379, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 5.329153605015674, | |
| "grad_norm": 0.5886692404747009, | |
| "learning_rate": 1.78929250207783e-07, | |
| "loss": 0.353, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 5.33385579937304, | |
| "grad_norm": 0.6114895343780518, | |
| "learning_rate": 1.7644796268322523e-07, | |
| "loss": 0.4035, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 5.338557993730408, | |
| "grad_norm": 0.5147929787635803, | |
| "learning_rate": 1.7398337055238385e-07, | |
| "loss": 0.3775, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 5.343260188087775, | |
| "grad_norm": 0.5390143990516663, | |
| "learning_rate": 1.7153549152406608e-07, | |
| "loss": 0.3616, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 5.347962382445141, | |
| "grad_norm": 0.5406209826469421, | |
| "learning_rate": 1.6910434318699153e-07, | |
| "loss": 0.3723, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 5.352664576802508, | |
| "grad_norm": 0.5647180676460266, | |
| "learning_rate": 1.6668994300966385e-07, | |
| "loss": 0.3894, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 5.3573667711598745, | |
| "grad_norm": 0.5575543642044067, | |
| "learning_rate": 1.642923083402473e-07, | |
| "loss": 0.3793, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 5.362068965517241, | |
| "grad_norm": 0.5549313426017761, | |
| "learning_rate": 1.6191145640644057e-07, | |
| "loss": 0.3451, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 5.366771159874608, | |
| "grad_norm": 0.5195758938789368, | |
| "learning_rate": 1.5954740431535442e-07, | |
| "loss": 0.3707, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 5.371473354231975, | |
| "grad_norm": 0.615749716758728, | |
| "learning_rate": 1.5720016905338558e-07, | |
| "loss": 0.3943, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 5.376175548589342, | |
| "grad_norm": 0.5402339100837708, | |
| "learning_rate": 1.548697674861005e-07, | |
| "loss": 0.3718, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 5.3808777429467085, | |
| "grad_norm": 0.5729634165763855, | |
| "learning_rate": 1.5255621635810737e-07, | |
| "loss": 0.398, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 5.385579937304075, | |
| "grad_norm": 0.6112332940101624, | |
| "learning_rate": 1.5025953229294094e-07, | |
| "loss": 0.365, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 5.390282131661442, | |
| "grad_norm": 0.5643470883369446, | |
| "learning_rate": 1.4797973179294072e-07, | |
| "loss": 0.3614, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 5.394984326018808, | |
| "grad_norm": 0.5690374970436096, | |
| "learning_rate": 1.45716831239133e-07, | |
| "loss": 0.3897, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 5.399686520376176, | |
| "grad_norm": 0.5822629928588867, | |
| "learning_rate": 1.4347084689111307e-07, | |
| "loss": 0.3822, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 5.4043887147335425, | |
| "grad_norm": 0.5914065837860107, | |
| "learning_rate": 1.4124179488692823e-07, | |
| "loss": 0.3689, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 5.409090909090909, | |
| "grad_norm": 0.5669475793838501, | |
| "learning_rate": 1.3902969124296228e-07, | |
| "loss": 0.3866, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 5.413793103448276, | |
| "grad_norm": 0.5467401742935181, | |
| "learning_rate": 1.3683455185382e-07, | |
| "loss": 0.3809, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 5.418495297805642, | |
| "grad_norm": 0.5471454858779907, | |
| "learning_rate": 1.3465639249221313e-07, | |
| "loss": 0.4075, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 5.423197492163009, | |
| "grad_norm": 0.5841071009635925, | |
| "learning_rate": 1.324952288088466e-07, | |
| "loss": 0.369, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 5.427899686520377, | |
| "grad_norm": 0.6020098924636841, | |
| "learning_rate": 1.3035107633230737e-07, | |
| "loss": 0.3529, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 5.432601880877743, | |
| "grad_norm": 0.584348201751709, | |
| "learning_rate": 1.2822395046895032e-07, | |
| "loss": 0.3594, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 5.43730407523511, | |
| "grad_norm": 0.6113020181655884, | |
| "learning_rate": 1.2611386650279167e-07, | |
| "loss": 0.3668, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 5.4420062695924765, | |
| "grad_norm": 0.8762810826301575, | |
| "learning_rate": 1.240208395953943e-07, | |
| "loss": 0.3678, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 5.446708463949843, | |
| "grad_norm": 0.5709835290908813, | |
| "learning_rate": 1.2194488478576266e-07, | |
| "loss": 0.3955, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 5.45141065830721, | |
| "grad_norm": 0.5076249241828918, | |
| "learning_rate": 1.1988601699023244e-07, | |
| "loss": 0.3826, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 5.456112852664576, | |
| "grad_norm": 0.6045148372650146, | |
| "learning_rate": 1.1784425100236419e-07, | |
| "loss": 0.3839, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 5.460815047021944, | |
| "grad_norm": 0.5593690872192383, | |
| "learning_rate": 1.1581960149283839e-07, | |
| "loss": 0.3642, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 5.4655172413793105, | |
| "grad_norm": 0.6259530782699585, | |
| "learning_rate": 1.138120830093467e-07, | |
| "loss": 0.3913, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 5.470219435736677, | |
| "grad_norm": 0.5679107308387756, | |
| "learning_rate": 1.1182170997649067e-07, | |
| "loss": 0.3739, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 5.474921630094044, | |
| "grad_norm": 0.5973315238952637, | |
| "learning_rate": 1.0984849669567616e-07, | |
| "loss": 0.407, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 5.47962382445141, | |
| "grad_norm": 0.5076819658279419, | |
| "learning_rate": 1.0789245734501186e-07, | |
| "loss": 0.3971, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 5.484326018808777, | |
| "grad_norm": 0.5315993428230286, | |
| "learning_rate": 1.0595360597920629e-07, | |
| "loss": 0.3712, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 5.4890282131661445, | |
| "grad_norm": 0.5396918058395386, | |
| "learning_rate": 1.0403195652946784e-07, | |
| "loss": 0.374, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 5.493730407523511, | |
| "grad_norm": 0.591569185256958, | |
| "learning_rate": 1.0212752280340327e-07, | |
| "loss": 0.3774, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 5.498432601880878, | |
| "grad_norm": 0.5628752112388611, | |
| "learning_rate": 1.0024031848492044e-07, | |
| "loss": 0.3855, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 5.503134796238244, | |
| "grad_norm": 0.55401611328125, | |
| "learning_rate": 9.837035713412823e-08, | |
| "loss": 0.3865, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 5.507836990595611, | |
| "grad_norm": 0.535001277923584, | |
| "learning_rate": 9.651765218724018e-08, | |
| "loss": 0.3903, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 5.512539184952978, | |
| "grad_norm": 0.5949495434761047, | |
| "learning_rate": 9.468221695647789e-08, | |
| "loss": 0.3658, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 5.517241379310345, | |
| "grad_norm": 0.5502374768257141, | |
| "learning_rate": 9.286406462997305e-08, | |
| "loss": 0.392, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 5.521943573667712, | |
| "grad_norm": 0.5518045425415039, | |
| "learning_rate": 9.106320827167809e-08, | |
| "loss": 0.3807, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 5.5266457680250785, | |
| "grad_norm": 0.5890054106712341, | |
| "learning_rate": 8.927966082126566e-08, | |
| "loss": 0.3893, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 5.531347962382445, | |
| "grad_norm": 0.5351389050483704, | |
| "learning_rate": 8.75134350940407e-08, | |
| "loss": 0.3645, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 5.536050156739812, | |
| "grad_norm": 0.5266129374504089, | |
| "learning_rate": 8.57645437808463e-08, | |
| "loss": 0.3871, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 5.540752351097178, | |
| "grad_norm": 0.5650208592414856, | |
| "learning_rate": 8.403299944797244e-08, | |
| "loss": 0.3679, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 5.545454545454545, | |
| "grad_norm": 0.591468870639801, | |
| "learning_rate": 8.231881453706625e-08, | |
| "loss": 0.3666, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 5.5501567398119125, | |
| "grad_norm": 0.5969362854957581, | |
| "learning_rate": 8.062200136504217e-08, | |
| "loss": 0.3774, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 5.554858934169279, | |
| "grad_norm": 0.5466634035110474, | |
| "learning_rate": 7.894257212399393e-08, | |
| "loss": 0.3797, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 5.559561128526646, | |
| "grad_norm": 0.5653045177459717, | |
| "learning_rate": 7.728053888110681e-08, | |
| "loss": 0.3842, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 5.564263322884012, | |
| "grad_norm": 0.523288905620575, | |
| "learning_rate": 7.563591357857003e-08, | |
| "loss": 0.3733, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 5.568965517241379, | |
| "grad_norm": 0.5463464260101318, | |
| "learning_rate": 7.40087080334928e-08, | |
| "loss": 0.3617, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 5.5736677115987465, | |
| "grad_norm": 0.534662663936615, | |
| "learning_rate": 7.239893393781783e-08, | |
| "loss": 0.3976, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 5.578369905956113, | |
| "grad_norm": 0.5874859094619751, | |
| "learning_rate": 7.080660285823687e-08, | |
| "loss": 0.3792, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 5.58307210031348, | |
| "grad_norm": 0.5846700072288513, | |
| "learning_rate": 6.923172623611057e-08, | |
| "loss": 0.3679, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 5.587774294670846, | |
| "grad_norm": 0.5732503533363342, | |
| "learning_rate": 6.767431538738268e-08, | |
| "loss": 0.3827, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 5.592476489028213, | |
| "grad_norm": 0.8341772556304932, | |
| "learning_rate": 6.613438150250062e-08, | |
| "loss": 0.3566, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 5.59717868338558, | |
| "grad_norm": 0.5577627420425415, | |
| "learning_rate": 6.461193564633538e-08, | |
| "loss": 0.3832, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 5.601880877742946, | |
| "grad_norm": 0.6295877695083618, | |
| "learning_rate": 6.310698875810068e-08, | |
| "loss": 0.3651, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 5.606583072100314, | |
| "grad_norm": 0.5273407697677612, | |
| "learning_rate": 6.16195516512752e-08, | |
| "loss": 0.3939, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 5.61128526645768, | |
| "grad_norm": 0.925127387046814, | |
| "learning_rate": 6.014963501352556e-08, | |
| "loss": 0.382, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 5.615987460815047, | |
| "grad_norm": 0.5775822401046753, | |
| "learning_rate": 5.8697249406627354e-08, | |
| "loss": 0.3628, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 5.620689655172414, | |
| "grad_norm": 0.5419414043426514, | |
| "learning_rate": 5.726240526639199e-08, | |
| "loss": 0.3853, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 5.62539184952978, | |
| "grad_norm": 0.5734292268753052, | |
| "learning_rate": 5.5845112902589703e-08, | |
| "loss": 0.3817, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 5.630094043887147, | |
| "grad_norm": 0.569153904914856, | |
| "learning_rate": 5.44453824988761e-08, | |
| "loss": 0.3593, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 5.6347962382445145, | |
| "grad_norm": 0.49767062067985535, | |
| "learning_rate": 5.3063224112719355e-08, | |
| "loss": 0.3858, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 5.639498432601881, | |
| "grad_norm": 0.5975450873374939, | |
| "learning_rate": 5.169864767532673e-08, | |
| "loss": 0.3584, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 5.644200626959248, | |
| "grad_norm": 0.5690646171569824, | |
| "learning_rate": 5.0351662991575677e-08, | |
| "loss": 0.3968, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 5.648902821316614, | |
| "grad_norm": 0.5771529674530029, | |
| "learning_rate": 4.9022279739940335e-08, | |
| "loss": 0.3823, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 5.653605015673981, | |
| "grad_norm": 0.5986964106559753, | |
| "learning_rate": 4.7710507472424336e-08, | |
| "loss": 0.3653, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 5.658307210031348, | |
| "grad_norm": 1.1150685548782349, | |
| "learning_rate": 4.641635561449087e-08, | |
| "loss": 0.3695, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 5.663009404388715, | |
| "grad_norm": 0.5487309694290161, | |
| "learning_rate": 4.513983346499523e-08, | |
| "loss": 0.3859, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 5.667711598746082, | |
| "grad_norm": 0.551460862159729, | |
| "learning_rate": 4.3880950196118764e-08, | |
| "loss": 0.3438, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 5.672413793103448, | |
| "grad_norm": 0.5822682976722717, | |
| "learning_rate": 4.263971485330198e-08, | |
| "loss": 0.3767, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 5.677115987460815, | |
| "grad_norm": 0.5359184741973877, | |
| "learning_rate": 4.141613635517988e-08, | |
| "loss": 0.3743, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 5.681818181818182, | |
| "grad_norm": 0.6006045937538147, | |
| "learning_rate": 4.021022349351838e-08, | |
| "loss": 0.3743, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 5.686520376175548, | |
| "grad_norm": 0.5705166459083557, | |
| "learning_rate": 3.902198493314968e-08, | |
| "loss": 0.3691, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 5.691222570532915, | |
| "grad_norm": 0.5757796764373779, | |
| "learning_rate": 3.785142921191198e-08, | |
| "loss": 0.4, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 5.695924764890282, | |
| "grad_norm": 0.525772750377655, | |
| "learning_rate": 3.669856474058708e-08, | |
| "loss": 0.3527, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 5.700626959247649, | |
| "grad_norm": 0.583878755569458, | |
| "learning_rate": 3.556339980283929e-08, | |
| "loss": 0.3818, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 5.705329153605016, | |
| "grad_norm": 0.49472200870513916, | |
| "learning_rate": 3.4445942555157706e-08, | |
| "loss": 0.3816, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 5.710031347962382, | |
| "grad_norm": 0.5557237863540649, | |
| "learning_rate": 3.3346201026795696e-08, | |
| "loss": 0.4004, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 5.714733542319749, | |
| "grad_norm": 0.5274887084960938, | |
| "learning_rate": 3.2264183119714296e-08, | |
| "loss": 0.3711, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 5.7194357366771165, | |
| "grad_norm": 0.5763286352157593, | |
| "learning_rate": 3.1199896608525014e-08, | |
| "loss": 0.3871, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 5.724137931034483, | |
| "grad_norm": 0.5784947276115417, | |
| "learning_rate": 3.0153349140435165e-08, | |
| "loss": 0.3585, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 5.72884012539185, | |
| "grad_norm": 0.607018232345581, | |
| "learning_rate": 2.9124548235190397e-08, | |
| "loss": 0.3495, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 5.733542319749216, | |
| "grad_norm": 0.6274241209030151, | |
| "learning_rate": 2.811350128502338e-08, | |
| "loss": 0.3479, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 5.738244514106583, | |
| "grad_norm": 0.6364694833755493, | |
| "learning_rate": 2.7120215554598538e-08, | |
| "loss": 0.3785, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 5.74294670846395, | |
| "grad_norm": 0.518100380897522, | |
| "learning_rate": 2.6144698180961548e-08, | |
| "loss": 0.3768, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 5.747648902821316, | |
| "grad_norm": 0.5653648972511292, | |
| "learning_rate": 2.5186956173487152e-08, | |
| "loss": 0.3718, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 5.752351097178684, | |
| "grad_norm": 0.5562875270843506, | |
| "learning_rate": 2.424699641382866e-08, | |
| "loss": 0.3673, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 5.75705329153605, | |
| "grad_norm": 0.6033557057380676, | |
| "learning_rate": 2.33248256558688e-08, | |
| "loss": 0.3907, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 5.761755485893417, | |
| "grad_norm": 0.7889437675476074, | |
| "learning_rate": 2.2420450525671155e-08, | |
| "loss": 0.3678, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 5.766457680250784, | |
| "grad_norm": 0.6031072735786438, | |
| "learning_rate": 2.1533877521433267e-08, | |
| "loss": 0.3983, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 5.77115987460815, | |
| "grad_norm": 0.6203309297561646, | |
| "learning_rate": 2.066511301343832e-08, | |
| "loss": 0.3593, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 5.775862068965517, | |
| "grad_norm": 0.5420303344726562, | |
| "learning_rate": 1.9814163244010754e-08, | |
| "loss": 0.3767, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 5.7805642633228835, | |
| "grad_norm": 0.9023717045783997, | |
| "learning_rate": 1.8981034327470727e-08, | |
| "loss": 0.3856, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 5.785266457680251, | |
| "grad_norm": 0.5445950627326965, | |
| "learning_rate": 1.8165732250090828e-08, | |
| "loss": 0.3909, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 5.789968652037618, | |
| "grad_norm": 0.5818548798561096, | |
| "learning_rate": 1.736826287005222e-08, | |
| "loss": 0.3875, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 5.794670846394984, | |
| "grad_norm": 0.5768299698829651, | |
| "learning_rate": 1.6588631917403285e-08, | |
| "loss": 0.4014, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 5.799373040752351, | |
| "grad_norm": 0.7270597219467163, | |
| "learning_rate": 1.5826844994017986e-08, | |
| "loss": 0.3782, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 5.804075235109718, | |
| "grad_norm": 0.5477209687232971, | |
| "learning_rate": 1.5082907573555906e-08, | |
| "loss": 0.3848, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 5.808777429467085, | |
| "grad_norm": 0.5744673013687134, | |
| "learning_rate": 1.435682500142227e-08, | |
| "loss": 0.3689, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 5.813479623824452, | |
| "grad_norm": 0.527204692363739, | |
| "learning_rate": 1.3648602494730768e-08, | |
| "loss": 0.3674, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 5.818181818181818, | |
| "grad_norm": 1.9482752084732056, | |
| "learning_rate": 1.2958245142265235e-08, | |
| "loss": 0.4015, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 5.822884012539185, | |
| "grad_norm": 0.6324442028999329, | |
| "learning_rate": 1.2285757904442475e-08, | |
| "loss": 0.3813, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 5.827586206896552, | |
| "grad_norm": 0.541420042514801, | |
| "learning_rate": 1.1631145613278105e-08, | |
| "loss": 0.3788, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 5.832288401253918, | |
| "grad_norm": 0.609536349773407, | |
| "learning_rate": 1.0994412972351043e-08, | |
| "loss": 0.3898, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 5.836990595611285, | |
| "grad_norm": 0.5510598421096802, | |
| "learning_rate": 1.0375564556769357e-08, | |
| "loss": 0.3969, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 5.841692789968652, | |
| "grad_norm": 0.5160980224609375, | |
| "learning_rate": 9.774604813138078e-09, | |
| "loss": 0.3743, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 5.846394984326019, | |
| "grad_norm": 0.5274291038513184, | |
| "learning_rate": 9.191538059526717e-09, | |
| "loss": 0.3726, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 5.851097178683386, | |
| "grad_norm": 0.5729535222053528, | |
| "learning_rate": 8.626368485438742e-09, | |
| "loss": 0.4018, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 5.855799373040752, | |
| "grad_norm": 0.5532820820808411, | |
| "learning_rate": 8.07910015178104e-09, | |
| "loss": 0.3816, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 5.860501567398119, | |
| "grad_norm": 0.7647237181663513, | |
| "learning_rate": 7.549736990835054e-09, | |
| "loss": 0.38, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 5.8652037617554855, | |
| "grad_norm": 0.5721030235290527, | |
| "learning_rate": 7.0382828062279254e-09, | |
| "loss": 0.3848, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 5.869905956112853, | |
| "grad_norm": 0.5395824909210205, | |
| "learning_rate": 6.544741272906385e-09, | |
| "loss": 0.3568, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 5.87460815047022, | |
| "grad_norm": 0.5219895243644714, | |
| "learning_rate": 6.0691159371087386e-09, | |
| "loss": 0.3623, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 5.879310344827586, | |
| "grad_norm": 0.5917700529098511, | |
| "learning_rate": 5.611410216340984e-09, | |
| "loss": 0.4068, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 5.884012539184953, | |
| "grad_norm": 0.5229591131210327, | |
| "learning_rate": 5.171627399351009e-09, | |
| "loss": 0.3817, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 5.88871473354232, | |
| "grad_norm": 0.5710911750793457, | |
| "learning_rate": 4.749770646105822e-09, | |
| "loss": 0.3973, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 5.893416927899686, | |
| "grad_norm": 0.5184536576271057, | |
| "learning_rate": 4.3458429877679675e-09, | |
| "loss": 0.3815, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 5.898119122257054, | |
| "grad_norm": 0.5801165699958801, | |
| "learning_rate": 3.959847326674704e-09, | |
| "loss": 0.405, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 5.90282131661442, | |
| "grad_norm": 0.5677738189697266, | |
| "learning_rate": 3.591786436316358e-09, | |
| "loss": 0.3534, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 5.907523510971787, | |
| "grad_norm": 0.7012061476707458, | |
| "learning_rate": 3.241662961317171e-09, | |
| "loss": 0.3861, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 5.912225705329154, | |
| "grad_norm": 0.5930037498474121, | |
| "learning_rate": 2.909479417415595e-09, | |
| "loss": 0.3845, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 5.91692789968652, | |
| "grad_norm": 0.5155079960823059, | |
| "learning_rate": 2.5952381914465253e-09, | |
| "loss": 0.3804, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 5.921630094043887, | |
| "grad_norm": 0.6083419322967529, | |
| "learning_rate": 2.298941541323818e-09, | |
| "loss": 0.3804, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 5.9263322884012535, | |
| "grad_norm": 0.5572425723075867, | |
| "learning_rate": 2.020591596024746e-09, | |
| "loss": 0.3652, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 5.931034482758621, | |
| "grad_norm": 0.8666858673095703, | |
| "learning_rate": 1.7601903555744537e-09, | |
| "loss": 0.3693, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 5.935736677115988, | |
| "grad_norm": 0.6273589730262756, | |
| "learning_rate": 1.5177396910312502e-09, | |
| "loss": 0.373, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 5.940438871473354, | |
| "grad_norm": 0.5664013028144836, | |
| "learning_rate": 1.2932413444727287e-09, | |
| "loss": 0.3753, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 5.945141065830721, | |
| "grad_norm": 0.5878673195838928, | |
| "learning_rate": 1.0866969289849426e-09, | |
| "loss": 0.3906, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 5.9498432601880875, | |
| "grad_norm": 0.5575219392776489, | |
| "learning_rate": 8.98107928649361e-10, | |
| "loss": 0.3708, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 5.954545454545455, | |
| "grad_norm": 0.5593152046203613, | |
| "learning_rate": 7.274756985323205e-10, | |
| "loss": 0.4109, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 5.959247648902822, | |
| "grad_norm": 0.5827597379684448, | |
| "learning_rate": 5.748014646755895e-10, | |
| "loss": 0.3926, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 5.963949843260188, | |
| "grad_norm": 0.5260237455368042, | |
| "learning_rate": 4.4008632408831797e-10, | |
| "loss": 0.3963, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 5.968652037617555, | |
| "grad_norm": 0.5742549300193787, | |
| "learning_rate": 3.2333124473704623e-10, | |
| "loss": 0.3704, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 5.9733542319749215, | |
| "grad_norm": 0.5746361017227173, | |
| "learning_rate": 2.245370655409862e-10, | |
| "loss": 0.3807, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 5.978056426332288, | |
| "grad_norm": 0.5230088829994202, | |
| "learning_rate": 1.4370449636535998e-10, | |
| "loss": 0.3794, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 5.982758620689655, | |
| "grad_norm": 0.605073094367981, | |
| "learning_rate": 8.083411801529384e-11, | |
| "loss": 0.3865, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 5.987460815047022, | |
| "grad_norm": 0.5702431797981262, | |
| "learning_rate": 3.592638223220979e-11, | |
| "loss": 0.3883, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 5.992163009404389, | |
| "grad_norm": 0.5366142988204956, | |
| "learning_rate": 8.98161169188283e-12, | |
| "loss": 0.3731, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 5.996865203761756, | |
| "grad_norm": 0.5781266093254089, | |
| "learning_rate": 0.0, | |
| "loss": 0.3776, | |
| "step": 1272 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1272, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 6, | |
| "save_steps": 212, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8.193552390105648e+19, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
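
The object above matches the `trainer_state.json` format written by the Hugging Face `transformers` Trainer: `log_history` holds one record per logged optimizer step (`logging_steps: 1` here), and the trailing keys record run-level settings (1272 total steps over 6 epochs, a checkpoint every 212 steps). A minimal sketch of how this log could be loaded and summarized, assuming the JSON is saved verbatim as `trainer_state.json` (the file path is illustrative, not taken from the log itself):

```python
import json

# Load the trainer state shown above (the path is an assumption for illustration).
with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]  # one dict per logged training step

# Final recorded step, matching the last entry above (step 1272, lr 0.0).
last = history[-1]
print(f"step {last['step']}/{state['max_steps']}: "
      f"loss={last['loss']:.4f}, lr={last['learning_rate']:.3e}")

# Mean loss over the last 50 logged steps, as a rough convergence check.
tail = [rec["loss"] for rec in history[-50:] if "loss" in rec]
print(f"mean loss over final {len(tail)} steps: {sum(tail) / len(tail):.4f}")
```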