{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 1014,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 6.451612903225806e-06,
      "loss": 0.1129,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.2903225806451613e-05,
      "loss": 0.2075,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.935483870967742e-05,
      "loss": 0.2,
      "step": 3
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.5806451612903226e-05,
      "loss": 0.1129,
      "step": 4
    },
    {
      "epoch": 0.0,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 0.1462,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.870967741935484e-05,
      "loss": 0.1246,
      "step": 6
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.516129032258064e-05,
      "loss": 0.0955,
      "step": 7
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.161290322580645e-05,
      "loss": 0.0836,
      "step": 8
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.8064516129032266e-05,
      "loss": 0.0858,
      "step": 9
    },
    {
      "epoch": 0.01,
      "learning_rate": 6.451612903225807e-05,
      "loss": 0.0999,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.096774193548388e-05,
      "loss": 0.061,
      "step": 11
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.741935483870968e-05,
      "loss": 0.063,
      "step": 12
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.387096774193549e-05,
      "loss": 0.0535,
      "step": 13
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.032258064516129e-05,
      "loss": 0.0784,
      "step": 14
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.677419354838711e-05,
      "loss": 0.1076,
      "step": 15
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0001032258064516129,
      "loss": 0.0906,
      "step": 16
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00010967741935483871,
      "loss": 0.0865,
      "step": 17
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00011612903225806453,
      "loss": 0.0596,
      "step": 18
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00012258064516129034,
      "loss": 0.0933,
      "step": 19
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00012903225806451613,
      "loss": 0.0638,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00013548387096774193,
      "loss": 0.1095,
      "step": 21
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00014193548387096775,
      "loss": 0.0548,
      "step": 22
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00014838709677419355,
      "loss": 0.0709,
      "step": 23
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00015483870967741937,
      "loss": 0.0691,
      "step": 24
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00016129032258064516,
      "loss": 0.0586,
      "step": 25
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00016774193548387098,
      "loss": 0.0631,
      "step": 26
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00017419354838709678,
      "loss": 0.0606,
      "step": 27
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00018064516129032257,
      "loss": 0.0825,
      "step": 28
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0001870967741935484,
      "loss": 0.0313,
      "step": 29
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019354838709677422,
      "loss": 0.0438,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0002,
      "loss": 0.0662,
      "step": 31
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019999948930413158,
      "loss": 0.1139,
      "step": 32
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019999795722174243,
      "loss": 0.0644,
      "step": 33
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019999540376848117,
      "loss": 0.0836,
      "step": 34
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019999182897042857,
      "loss": 0.0507,
      "step": 35
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0001999872328640973,
      "loss": 0.0576,
      "step": 36
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0001999816154964316,
      "loss": 0.0597,
      "step": 37
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0001999749769248068,
      "loss": 0.1041,
      "step": 38
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019996731721702875,
      "loss": 0.0398,
      "step": 39
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019995863645133307,
      "loss": 0.0721,
      "step": 40
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019994893471638432,
      "loss": 0.0407,
      "step": 41
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0001999382121112753,
      "loss": 0.1591,
      "step": 42
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019992646874552578,
      "loss": 0.0404,
      "step": 43
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0001999137047390815,
      "loss": 0.0728,
      "step": 44
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019989992022231303,
      "loss": 0.025,
      "step": 45
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019988511533601425,
      "loss": 0.0897,
      "step": 46
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.000199869290231401,
      "loss": 0.0528,
      "step": 47
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019985244507010968,
      "loss": 0.0384,
      "step": 48
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019983458002419534,
      "loss": 0.1167,
      "step": 49
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019981569527613004,
      "loss": 0.1163,
      "step": 50
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0001997957910188011,
      "loss": 0.1524,
      "step": 51
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0001997748674555089,
      "loss": 0.0568,
      "step": 52
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019975292479996502,
      "loss": 0.0964,
      "step": 53
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019972996327628995,
      "loss": 0.0757,
      "step": 54
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019970598311901076,
      "loss": 0.0522,
      "step": 55
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0001996809845730588,
      "loss": 0.0124,
      "step": 56
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019965496789376718,
      "loss": 0.0804,
      "step": 57
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0001996279333468681,
      "loss": 0.0659,
      "step": 58
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019959988120849014,
      "loss": 0.067,
      "step": 59
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0001995708117651556,
      "loss": 0.1222,
      "step": 60
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019954072531377733,
      "loss": 0.0618,
      "step": 61
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019950962216165586,
      "loss": 0.0652,
      "step": 62
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0001994775026264762,
      "loss": 0.1025,
      "step": 63
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019944436703630468,
      "loss": 0.0764,
      "step": 64
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019941021572958544,
      "loss": 0.0553,
      "step": 65
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001993750490551371,
      "loss": 0.069,
      "step": 66
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001993388673721492,
      "loss": 0.0841,
      "step": 67
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019930167105017843,
      "loss": 0.0244,
      "step": 68
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019926346046914497,
      "loss": 0.0499,
      "step": 69
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019922423601932853,
      "loss": 0.0339,
      "step": 70
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019918399810136438,
      "loss": 0.0607,
      "step": 71
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001991427471262393,
      "loss": 0.046,
      "step": 72
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001991004835152874,
      "loss": 0.1192,
      "step": 73
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001990572077001856,
      "loss": 0.0888,
      "step": 74
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019901292012294955,
      "loss": 0.0402,
      "step": 75
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019896762123592895,
      "loss": 0.0915,
      "step": 76
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019892131150180283,
      "loss": 0.0385,
      "step": 77
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.000198873991393575,
      "loss": 0.0663,
      "step": 78
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019882566139456915,
      "loss": 0.0389,
      "step": 79
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019877632199842392,
      "loss": 0.0519,
      "step": 80
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019872597370908777,
      "loss": 0.0515,
      "step": 81
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019867461704081398,
      "loss": 0.0713,
      "step": 82
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019862225251815534,
      "loss": 0.0375,
      "step": 83
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019856888067595874,
      "loss": 0.0639,
      "step": 84
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019851450205935977,
      "loss": 0.0597,
      "step": 85
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019845911722377713,
      "loss": 0.0269,
      "step": 86
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019840272673490693,
      "loss": 0.0421,
      "step": 87
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019834533116871704,
      "loss": 0.0799,
      "step": 88
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019828693111144095,
      "loss": 0.0519,
      "step": 89
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019822752715957203,
      "loss": 0.1014,
      "step": 90
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019816711991985737,
      "loss": 0.0451,
      "step": 91
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019810571000929147,
      "loss": 0.1064,
      "step": 92
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019804329805511018,
      "loss": 0.0545,
      "step": 93
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0001979798846947839,
      "loss": 0.077,
      "step": 94
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019791547057601162,
      "loss": 0.0317,
      "step": 95
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0001978500563567137,
      "loss": 0.0389,
      "step": 96
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019778364270502558,
      "loss": 0.0355,
      "step": 97
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019771623029929088,
      "loss": 0.0738,
      "step": 98
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019764781982805428,
      "loss": 0.0411,
      "step": 99
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019757841199005474,
      "loss": 0.1486,
      "step": 100
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019750800749421813,
      "loss": 0.0476,
      "step": 101
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019743660705965016,
      "loss": 0.0452,
      "step": 102
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019736421141562896,
      "loss": 0.0658,
      "step": 103
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019729082130159767,
      "loss": 0.0979,
      "step": 104
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019721643746715688,
      "loss": 0.0517,
      "step": 105
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.0001971410606720569,
      "loss": 0.0838,
      "step": 106
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001970646916861901,
      "loss": 0.0204,
      "step": 107
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019698733128958295,
      "loss": 0.0198,
      "step": 108
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001969089802723882,
      "loss": 0.0208,
      "step": 109
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001968296394348766,
      "loss": 0.0265,
      "step": 110
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.000196749309587429,
      "loss": 0.13,
      "step": 111
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019666799155052778,
      "loss": 0.0808,
      "step": 112
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019658568615474862,
      "loss": 0.0651,
      "step": 113
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019650239424075206,
      "loss": 0.1267,
      "step": 114
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019641811665927484,
      "loss": 0.0508,
      "step": 115
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019633285427112117,
      "loss": 0.047,
      "step": 116
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019624660794715412,
      "loss": 0.1234,
      "step": 117
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001961593785682864,
      "loss": 0.0354,
      "step": 118
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019607116702547174,
      "loss": 0.0449,
      "step": 119
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001959819742196956,
      "loss": 0.0578,
      "step": 120
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019589180106196582,
      "loss": 0.031,
      "step": 121
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001958006484733037,
      "loss": 0.0794,
      "step": 122
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019570851738473413,
      "loss": 0.0372,
      "step": 123
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019561540873727653,
      "loss": 0.0164,
      "step": 124
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019552132348193486,
      "loss": 0.0165,
      "step": 125
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001954262625796882,
      "loss": 0.0886,
      "step": 126
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019533022700148072,
      "loss": 0.4116,
      "step": 127
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001952332177282119,
      "loss": 0.0364,
      "step": 128
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019513523575072643,
      "loss": 0.1257,
      "step": 129
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019503628206980414,
      "loss": 0.0788,
      "step": 130
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001949363576961497,
      "loss": 0.0638,
      "step": 131
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019483546365038247,
      "loss": 0.0503,
      "step": 132
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001947336009630259,
      "loss": 0.0863,
      "step": 133
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.000194630770674497,
      "loss": 0.0494,
      "step": 134
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019452697383509588,
      "loss": 0.0339,
      "step": 135
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019442221150499492,
      "loss": 0.0474,
      "step": 136
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019431648475422782,
      "loss": 0.0188,
      "step": 137
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001942097946626789,
      "loss": 0.0364,
      "step": 138
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019410214232007197,
      "loss": 0.0616,
      "step": 139
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019399352882595915,
      "loss": 0.0224,
      "step": 140
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001938839552897097,
      "loss": 0.0276,
      "step": 141
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019377342283049864,
      "loss": 0.0419,
      "step": 142
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001936619325772954,
      "loss": 0.0672,
      "step": 143
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001935494856688522,
      "loss": 0.3356,
      "step": 144
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019343608325369247,
      "loss": 0.6535,
      "step": 145
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019332172649009913,
      "loss": 0.2866,
      "step": 146
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019320641654610267,
      "loss": 0.2056,
      "step": 147
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019309015459946939,
      "loss": 0.2119,
      "step": 148
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019297294183768916,
      "loss": 0.1324,
      "step": 149
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019285477945796346,
      "loss": 0.056,
      "step": 150
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019273566866719307,
      "loss": 0.1161,
      "step": 151
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019261561068196573,
      "loss": 0.1027,
      "step": 152
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019249460672854387,
      "loss": 0.0439,
      "step": 153
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019237265804285177,
      "loss": 0.0981,
      "step": 154
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019224976587046332,
      "loss": 0.1057,
      "step": 155
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001921259314665889,
      "loss": 0.0461,
      "step": 156
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019200115609606302,
      "loss": 0.0778,
      "step": 157
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001918754410333309,
      "loss": 0.096,
      "step": 158
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019174878756243583,
      "loss": 0.1929,
      "step": 159
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019162119697700591,
      "loss": 0.0839,
      "step": 160
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019149267058024083,
      "loss": 0.0642,
      "step": 161
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019136320968489857,
      "loss": 0.0662,
      "step": 162
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019123281561328205,
      "loss": 0.0737,
      "step": 163
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001911014896972255,
      "loss": 0.0406,
      "step": 164
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019096923327808102,
      "loss": 0.0555,
      "step": 165
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019083604770670472,
      "loss": 0.0594,
      "step": 166
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019070193434344302,
      "loss": 0.0646,
      "step": 167
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019056689455811875,
      "loss": 0.0762,
      "step": 168
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019043092973001707,
      "loss": 0.0365,
      "step": 169
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019029404124787158,
      "loss": 0.0312,
      "step": 170
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019015623050984984,
      "loss": 0.029,
      "step": 171
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019001749892353943,
      "loss": 0.0742,
      "step": 172
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00018987784790593324,
      "loss": 0.0951,
      "step": 173
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001897372788834152,
      "loss": 0.0388,
      "step": 174
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001895957932917458,
      "loss": 0.0117,
      "step": 175
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00018945339257604707,
      "loss": 0.039,
      "step": 176
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00018931007819078825,
      "loss": 0.0446,
      "step": 177
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018916585159977053,
      "loss": 0.0597,
      "step": 178
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018902071427611246,
      "loss": 0.0613,
      "step": 179
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018887466770223466,
      "loss": 0.0382,
      "step": 180
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018872771336984477,
      "loss": 0.0617,
      "step": 181
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018857985277992217,
      "loss": 0.0397,
      "step": 182
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018843108744270274,
      "loss": 0.0425,
      "step": 183
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018828141887766332,
      "loss": 0.0646,
      "step": 184
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018813084861350626,
      "loss": 0.0491,
      "step": 185
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018797937818814384,
      "loss": 0.0357,
      "step": 186
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018782700914868244,
      "loss": 0.031,
      "step": 187
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018767374305140683,
      "loss": 0.0133,
      "step": 188
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018751958146176424,
      "loss": 0.0269,
      "step": 189
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001873645259543485,
      "loss": 0.0465,
      "step": 190
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018720857811288362,
      "loss": 0.3442,
      "step": 191
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001870517395302081,
      "loss": 0.0119,
      "step": 192
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018689401180825815,
      "loss": 0.0392,
      "step": 193
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018673539655805174,
      "loss": 0.0133,
      "step": 194
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018657589539967196,
      "loss": 0.0619,
      "step": 195
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001864155099622504,
      "loss": 0.0453,
      "step": 196
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001862542418839507,
      "loss": 0.0628,
      "step": 197
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001860920928119517,
      "loss": 0.0285,
      "step": 198
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018592906440243061,
      "loss": 0.0371,
      "step": 199
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001857651583205461,
      "loss": 0.0465,
      "step": 200
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001856003762404214,
      "loss": 0.0442,
      "step": 201
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.000185434719845127,
      "loss": 0.0546,
      "step": 202
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018526819082666368,
      "loss": 0.0667,
      "step": 203
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018510079088594509,
      "loss": 0.0277,
      "step": 204
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018493252173278033,
      "loss": 0.026,
      "step": 205
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018476338508585665,
      "loss": 0.045,
      "step": 206
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018459338267272184,
      "loss": 0.0179,
      "step": 207
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001844225162297664,
      "loss": 0.0399,
      "step": 208
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018425078750220614,
      "loss": 0.0647,
      "step": 209
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018407819824406408,
      "loss": 0.0235,
      "step": 210
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001839047502181526,
      "loss": 0.0365,
      "step": 211
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001837304451960556,
      "loss": 0.0586,
      "step": 212
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018355528495811005,
      "loss": 0.0216,
      "step": 213
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001833792712933882,
      "loss": 0.0477,
      "step": 214
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001832024059996791,
      "loss": 0.4065,
      "step": 215
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001830246908834702,
      "loss": 0.0568,
      "step": 216
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018284612775992906,
      "loss": 0.0383,
      "step": 217
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018266671845288464,
      "loss": 0.0467,
      "step": 218
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001824864647948088,
      "loss": 0.0256,
      "step": 219
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018230536862679747,
      "loss": 0.0341,
      "step": 220
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.000182123431798552,
      "loss": 0.042,
      "step": 221
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018194065616836008,
      "loss": 0.0376,
      "step": 222
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018175704360307688,
      "loss": 0.0331,
      "step": 223
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018157259597810602,
      "loss": 0.0188,
      "step": 224
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018138731517738026,
      "loss": 0.0229,
      "step": 225
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018120120309334238,
      "loss": 0.0726,
      "step": 226
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018101426162692587,
      "loss": 0.0887,
      "step": 227
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018082649268753542,
      "loss": 0.0735,
      "step": 228
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001806378981930274,
      "loss": 0.03,
      "step": 229
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018044848006969048,
      "loss": 0.0451,
      "step": 230
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018025824025222565,
      "loss": 0.0394,
      "step": 231
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018006718068372672,
      "loss": 0.0399,
      "step": 232
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001798753033156603,
      "loss": 0.0089,
      "step": 233
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017968261010784602,
      "loss": 0.016,
      "step": 234
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017948910302843638,
      "loss": 0.2229,
      "step": 235
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017929478405389663,
      "loss": 0.5365,
      "step": 236
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001790996551689848,
      "loss": 1.8367,
      "step": 237
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017890371836673118,
      "loss": 0.2386,
      "step": 238
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017870697564841803,
      "loss": 0.1373,
      "step": 239
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017850942902355928,
      "loss": 0.2789,
      "step": 240
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017831108050987978,
      "loss": 0.0838,
      "step": 241
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001781119321332949,
      "loss": 0.047,
      "step": 242
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017791198592788967,
      "loss": 0.0346,
      "step": 243
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017771124393589814,
      "loss": 0.0278,
      "step": 244
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001775097082076824,
      "loss": 0.0377,
      "step": 245
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017730738080171175,
      "loss": 0.0399,
      "step": 246
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001771042637845416,
      "loss": 0.0213,
      "step": 247
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017690035923079235,
      "loss": 0.0332,
      "step": 248
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017669566922312826,
      "loss": 0.0266,
      "step": 249
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017649019585223622,
      "loss": 0.0156,
      "step": 250
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017628394121680415,
      "loss": 0.0262,
      "step": 251
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017607690742349995,
      "loss": 0.1003,
      "step": 252
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017586909658694967,
      "loss": 0.0629,
      "step": 253
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017566051082971597,
      "loss": 0.0634,
      "step": 254
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017545115228227655,
      "loss": 0.0339,
      "step": 255
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017524102308300236,
      "loss": 0.0232,
      "step": 256
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017503012537813564,
      "loss": 0.0732,
      "step": 257
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017481846132176815,
      "loss": 0.0676,
      "step": 258
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017460603307581902,
      "loss": 0.0404,
      "step": 259
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017439284281001285,
      "loss": 0.0611,
      "step": 260
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017417889270185735,
      "loss": 0.0554,
      "step": 261
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017396418493662133,
      "loss": 0.0591,
      "step": 262
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017374872170731207,
      "loss": 0.0524,
      "step": 263
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017353250521465324,
      "loss": 0.0682,
      "step": 264
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017331553766706224,
      "loss": 1.7005,
      "step": 265
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017309782128062764,
      "loss": 0.0843,
      "step": 266
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017287935827908662,
      "loss": 1.3616,
      "step": 267
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017266015089380224,
      "loss": 0.0807,
      "step": 268
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00017244020136374065,
      "loss": 0.4651,
      "step": 269
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0001722195119354481,
      "loss": 0.1874,
      "step": 270
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00017199808486302823,
      "loss": 0.2203,
      "step": 271
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0001717759224081189,
      "loss": 0.148,
      "step": 272
    },
    {
      "epoch": 0.27,
| "learning_rate": 0.00017155302683986894, | |
| "loss": 0.1682, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017132940043491538, | |
| "loss": 0.0387, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017110504547735982, | |
| "loss": 0.0393, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017087996425874523, | |
| "loss": 0.0585, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017065415907803258, | |
| "loss": 0.1035, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017042763224157733, | |
| "loss": 0.0974, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.0001702003860631059, | |
| "loss": 0.072, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016997242286369196, | |
| "loss": 0.0506, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016974374497173277, | |
| "loss": 0.1191, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016951435472292543, | |
| "loss": 0.1262, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016928425446024304, | |
| "loss": 0.0686, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.0001690534465339106, | |
| "loss": 0.0656, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016882193330138124, | |
| "loss": 0.0502, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016858971712731196, | |
| "loss": 0.0332, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016835680038353957, | |
| "loss": 0.0553, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.0001681231854490565, | |
| "loss": 0.2367, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016788887470998632, | |
| "loss": 0.0867, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016765387055955958, | |
| "loss": 0.0284, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016741817539808927, | |
| "loss": 0.0891, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016718179163294629, | |
| "loss": 0.1499, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016694472167853486, | |
| "loss": 0.1557, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016670696795626793, | |
| "loss": 5.755, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016646853289454237, | |
| "loss": 2.6485, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016622941892871417, | |
| "loss": 1.1198, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016598962850107368, | |
| "loss": 0.2973, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016574916406082043, | |
| "loss": 0.3, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016550802806403843, | |
| "loss": 0.0799, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016526622297367076, | |
| "loss": 0.0629, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016502375125949467, | |
| "loss": 0.0906, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.0001647806153980962, | |
| "loss": 0.0538, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016453681787284493, | |
| "loss": 0.0623, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016429236117386866, | |
| "loss": 0.0249, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.0001640472477980279, | |
| "loss": 0.104, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016380148024889047, | |
| "loss": 0.0917, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016355506103670573, | |
| "loss": 0.0315, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.0001633079926783792, | |
| "loss": 0.05, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016306027769744666, | |
| "loss": 0.0118, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016281191862404842, | |
| "loss": 0.0436, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001625629179949036, | |
| "loss": 0.0405, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016231327835328406, | |
| "loss": 0.0233, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001620630022489884, | |
| "loss": 0.0998, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016181209223831614, | |
| "loss": 0.036, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016156055088404128, | |
| "loss": 0.0296, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016130838075538654, | |
| "loss": 0.1588, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016105558442799676, | |
| "loss": 0.0197, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016080216448391275, | |
| "loss": 0.064, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001605481235115448, | |
| "loss": 0.0335, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00016029346410564648, | |
| "loss": 0.0359, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.0001600381888672879, | |
| "loss": 0.0599, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015978230040382925, | |
| "loss": 0.0445, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015952580132889415, | |
| "loss": 0.0663, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015926869426234296, | |
| "loss": 0.1002, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015901098183024602, | |
| "loss": 0.0853, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.0001587526666648568, | |
| "loss": 0.057, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015849375140458504, | |
| "loss": 0.0328, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015823423869396988, | |
| "loss": 0.1314, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015797413118365266, | |
| "loss": 0.0366, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015771343153034997, | |
| "loss": 0.0403, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015745214239682657, | |
| "loss": 0.0395, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015719026645186806, | |
| "loss": 0.0415, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015692780637025367, | |
| "loss": 0.0373, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015666476483272907, | |
| "loss": 0.0555, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015640114452597872, | |
| "loss": 0.0173, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015613694814259866, | |
| "loss": 0.0586, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015587217838106896, | |
| "loss": 0.0505, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015560683794572602, | |
| "loss": 0.146, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015534092954673517, | |
| "loss": 0.0842, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015507445590006285, | |
| "loss": 0.0109, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015480741972744876, | |
| "loss": 0.0508, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015453982375637844, | |
| "loss": 0.0414, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.0001542716707200549, | |
| "loss": 0.0807, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015400296335737118, | |
| "loss": 0.0537, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015373370441288206, | |
| "loss": 0.073, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015346389663677614, | |
| "loss": 0.0284, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015319354278484773, | |
| "loss": 0.0164, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015292264561846879, | |
| "loss": 0.0179, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015265120790456052, | |
| "loss": 0.0321, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015237923241556538, | |
| "loss": 0.0404, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015210672192941845, | |
| "loss": 0.0637, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.0001518336792295194, | |
| "loss": 0.0503, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015156010710470381, | |
| "loss": 0.04, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015128600834921467, | |
| "loss": 0.0435, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015101138576267408, | |
| "loss": 0.0186, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015073624215005446, | |
| "loss": 0.0284, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015046058032164989, | |
| "loss": 0.0122, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015018440309304753, | |
| "loss": 0.043, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00014990771328509878, | |
| "loss": 0.011, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014963051372389046, | |
| "loss": 0.0251, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014935280724071598, | |
| "loss": 0.04, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014907459667204642, | |
| "loss": 0.0805, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014879588485950155, | |
| "loss": 0.0073, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014851667464982078, | |
| "loss": 0.0333, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014823696889483415, | |
| "loss": 0.0079, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014795677045143304, | |
| "loss": 0.0082, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014767608218154132, | |
| "loss": 0.0216, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.0001473949069520857, | |
| "loss": 0.0594, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014711324763496674, | |
| "loss": 0.0152, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.0001468311071070295, | |
| "loss": 0.0468, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014654848825003394, | |
| "loss": 0.0255, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014626539395062578, | |
| "loss": 0.0289, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014598182710030673, | |
| "loss": 0.0284, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014569779059540522, | |
| "loss": 0.0298, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014541328733704665, | |
| "loss": 0.0458, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014512832023112375, | |
| "loss": 0.0404, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.000144842892188267, | |
| "loss": 0.0334, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014455700612381488, | |
| "loss": 0.0437, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.000144270664957784, | |
| "loss": 0.0314, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014398387161483937, | |
| "loss": 0.0659, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014369662902426452, | |
| "loss": 0.0109, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.0001434089401199315, | |
| "loss": 0.0085, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014312080784027102, | |
| "loss": 0.0415, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.0001428322351282424, | |
| "loss": 0.038, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014254322493130342, | |
| "loss": 0.0402, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014225378020138042, | |
| "loss": 0.0153, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014196390389483788, | |
| "loss": 0.0215, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014167359897244853, | |
| "loss": 0.0343, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.0001413828683993628, | |
| "loss": 0.0149, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014109171514507872, | |
| "loss": 0.0431, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00014080014218341162, | |
| "loss": 0.0138, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00014050815249246363, | |
| "loss": 0.0514, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00014021574905459332, | |
| "loss": 0.0206, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.0001399229348563852, | |
| "loss": 0.0283, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00013962971288861937, | |
| "loss": 0.031, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00013933608614624072, | |
| "loss": 0.0139, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00013904205762832857, | |
| "loss": 0.0078, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.0001387476303380659, | |
| "loss": 0.0153, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00013845280728270865, | |
| "loss": 0.0255, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00013815759147355533, | |
| "loss": 0.0398, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013786198592591562, | |
| "loss": 0.0552, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013756599365908026, | |
| "loss": 0.0043, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.0001372696176962898, | |
| "loss": 0.0185, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013697286106470377, | |
| "loss": 0.0378, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013667572679536998, | |
| "loss": 0.0256, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013637821792319326, | |
| "loss": 0.0253, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013608033748690462, | |
| "loss": 0.0164, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013578208852903025, | |
| "loss": 0.0104, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013548347409586038, | |
| "loss": 0.0223, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.0001351844972374181, | |
| "loss": 0.0188, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.0001348851610074284, | |
| "loss": 0.0164, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013458546846328674, | |
| "loss": 0.0278, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.0001342854226660281, | |
| "loss": 0.0499, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.0001339850266802953, | |
| "loss": 0.0306, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013368428357430826, | |
| "loss": 0.0262, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013338319641983216, | |
| "loss": 0.0204, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.0001330817682921463, | |
| "loss": 0.0077, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.0001327800022700127, | |
| "loss": 0.0423, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013247790143564457, | |
| "loss": 0.0641, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013217546887467482, | |
| "loss": 0.0142, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.0001318727076761247, | |
| "loss": 0.0455, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.0001315696209323721, | |
| "loss": 0.0189, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00013126621173911986, | |
| "loss": 0.0619, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00013096248319536454, | |
| "loss": 0.0341, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00013065843840336428, | |
| "loss": 0.0296, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.0001303540804686075, | |
| "loss": 0.0196, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00013004941249978105, | |
| "loss": 0.0345, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00012974443760873833, | |
| "loss": 0.0204, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00012943915891046768, | |
| "loss": 0.0169, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.0001291335795230605, | |
| "loss": 0.0121, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.0001288277025676794, | |
| "loss": 0.0483, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012852153116852638, | |
| "loss": 0.0652, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012821506845281077, | |
| "loss": 0.0405, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.0001279083175507174, | |
| "loss": 0.0348, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012760128159537468, | |
| "loss": 0.0241, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012729396372282244, | |
| "loss": 0.0087, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.0001269863670719801, | |
| "loss": 0.0073, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012667849478461437, | |
| "loss": 0.0501, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.0001263703500053074, | |
| "loss": 0.0156, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012606193588142448, | |
| "loss": 0.0218, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.000125753255563082, | |
| "loss": 0.0655, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.0001254443122031152, | |
| "loss": 0.0264, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012513510895704606, | |
| "loss": 0.0102, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012482564898305099, | |
| "loss": 0.0478, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012451593544192857, | |
| "loss": 0.0186, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012420597149706732, | |
| "loss": 0.0329, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012389576031441338, | |
| "loss": 0.0247, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.0001235853050624381, | |
| "loss": 0.0321, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.0001232746089121058, | |
| "loss": 0.0431, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012296367503684128, | |
| "loss": 0.0227, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012265250661249743, | |
| "loss": 0.0644, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012234110681732288, | |
| "loss": 0.0258, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012202947883192933, | |
| "loss": 0.0395, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012171762583925932, | |
| "loss": 0.0232, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012140555102455353, | |
| "loss": 0.0505, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012109325757531835, | |
| "loss": 0.034, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012078074868129328, | |
| "loss": 0.0358, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012046802753441828, | |
| "loss": 0.0248, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012015509732880137, | |
| "loss": 0.0155, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00011984196126068576, | |
| "loss": 0.0193, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00011952862252841742, | |
| "loss": 0.0323, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011921508433241222, | |
| "loss": 0.0264, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011890134987512343, | |
| "loss": 0.0199, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011858742236100883, | |
| "loss": 0.0376, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011827330499649816, | |
| "loss": 0.0814, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011795900098996018, | |
| "loss": 0.0201, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011764451355167005, | |
| "loss": 0.0177, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011732984589377653, | |
| "loss": 0.0348, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011701500123026901, | |
| "loss": 0.0516, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.0001166999827769449, | |
| "loss": 0.031, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011638479375137663, | |
| "loss": 0.0268, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.0001160694373728789, | |
| "loss": 0.0255, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011575391686247566, | |
| "loss": 0.0185, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011543823544286737, | |
| "loss": 0.0331, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011512239633839792, | |
| "loss": 0.0226, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011480640277502186, | |
| "loss": 0.0352, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011449025798027131, | |
| "loss": 0.0391, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011417396518322312, | |
| "loss": 0.0134, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011385752761446575, | |
| "loss": 0.021, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011354094850606637, | |
| "loss": 0.0422, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011322423109153789, | |
| "loss": 0.0293, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011290737860580576, | |
| "loss": 0.0099, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011259039428517511, | |
| "loss": 0.0183, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011227328136729756, | |
| "loss": 0.0039, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011195604309113829, | |
| "loss": 0.0201, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011163868269694285, | |
| "loss": 0.0208, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011132120342620402, | |
| "loss": 0.0478, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.0001110036085216289, | |
| "loss": 0.0364, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011068590122710559, | |
| "loss": 0.0159, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011036808478767013, | |
| "loss": 0.0268, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011005016244947337, | |
| "loss": 0.031, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.0001097321374597478, | |
| "loss": 0.0158, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010941401306677441, | |
| "loss": 0.0431, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010909579251984945, | |
| "loss": 0.0104, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010877747906925126, | |
| "loss": 0.0139, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010845907596620719, | |
| "loss": 0.0351, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010814058646286013, | |
| "loss": 0.0259, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010782201381223564, | |
| "loss": 0.035, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010750336126820844, | |
| "loss": 0.0284, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010718463208546918, | |
| "loss": 0.0826, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010686582951949154, | |
| "loss": 0.0266, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010654695682649849, | |
| "loss": 0.017, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010622801726342939, | |
| "loss": 0.0489, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010590901408790663, | |
| "loss": 0.0081, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.0001055899505582022, | |
| "loss": 0.0518, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.0001052708299332047, | |
| "loss": 0.0289, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010495165547238579, | |
| "loss": 0.0471, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010463243043576702, | |
| "loss": 0.014, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.0001043131580838866, | |
| "loss": 0.0257, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010399384167776591, | |
| "loss": 0.0287, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010367448447887628, | |
| "loss": 0.0279, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010335508974910588, | |
| "loss": 0.0276, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010303566075072599, | |
| "loss": 0.0429, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010271620074635801, | |
| "loss": 0.0048, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.0001023967129989401, | |
| "loss": 0.0131, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010207720077169363, | |
| "loss": 0.0216, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010175766732809011, | |
| "loss": 0.0172, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010143811593181778, | |
| "loss": 0.0248, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.0001011185498467481, | |
| "loss": 0.0454, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010079897233690276, | |
| "loss": 0.0277, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010047938666641998, | |
| "loss": 0.0239, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.0001001597960995214, | |
| "loss": 0.045, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.984020390047866e-05, | |
| "loss": 0.0124, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.952061333358003e-05, | |
| "loss": 0.0268, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.920102766309726e-05, | |
| "loss": 0.0185, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.88814501532519e-05, | |
| "loss": 0.0458, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.856188406818226e-05, | |
| "loss": 0.034, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.82423326719099e-05, | |
| "loss": 0.01, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.792279922830637e-05, | |
| "loss": 0.0604, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.760328700105991e-05, | |
| "loss": 0.0344, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.728379925364199e-05, | |
| "loss": 0.01, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.696433924927403e-05, | |
| "loss": 0.0141, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.664491025089411e-05, | |
| "loss": 0.0078, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.632551552112374e-05, | |
| "loss": 0.0274, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.600615832223412e-05, | |
| "loss": 0.0093, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.568684191611344e-05, | |
| "loss": 0.0437, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.536756956423299e-05, | |
| "loss": 0.0295, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.504834452761426e-05, | |
| "loss": 0.0609, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.472917006679533e-05, | |
| "loss": 0.0247, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.44100494417978e-05, | |
| "loss": 0.0186, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.40909859120934e-05, | |
| "loss": 0.0437, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.377198273657061e-05, | |
| "loss": 0.0487, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.345304317350153e-05, | |
| "loss": 0.0179, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.313417048050846e-05, | |
| "loss": 0.0122, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.281536791453084e-05, | |
| "loss": 0.0119, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.24966387317916e-05, | |
| "loss": 0.0129, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.217798618776438e-05, | |
| "loss": 0.0245, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.185941353713988e-05, | |
| "loss": 0.0244, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.154092403379286e-05, | |
| "loss": 0.0237, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.122252093074876e-05, | |
| "loss": 0.0372, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.09042074801506e-05, | |
| "loss": 0.0291, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.05859869332256e-05, | |
| "loss": 0.0111, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 9.026786254025219e-05, | |
| "loss": 0.0156, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.994983755052664e-05, | |
| "loss": 0.024, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.963191521232988e-05, | |
| "loss": 0.0206, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.931409877289444e-05, | |
| "loss": 0.0147, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.899639147837108e-05, | |
| "loss": 0.023, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.867879657379599e-05, | |
| "loss": 0.0323, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.836131730305718e-05, | |
| "loss": 0.0094, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.804395690886173e-05, | |
| "loss": 0.0279, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.772671863270247e-05, | |
| "loss": 0.0357, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.740960571482495e-05, | |
| "loss": 0.0418, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.709262139419425e-05, | |
| "loss": 0.0131, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.677576890846212e-05, | |
| "loss": 0.013, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.645905149393364e-05, | |
| "loss": 0.0106, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.614247238553427e-05, | |
| "loss": 0.0133, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.582603481677692e-05, | |
| "loss": 0.0486, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.550974201972869e-05, | |
| "loss": 0.0154, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.519359722497817e-05, | |
| "loss": 0.0118, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.48776036616021e-05, | |
| "loss": 0.0112, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.456176455713268e-05, | |
| "loss": 0.0043, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.424608313752436e-05, | |
| "loss": 0.0357, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.393056262712115e-05, | |
| "loss": 0.0189, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.361520624862338e-05, | |
| "loss": 0.0276, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.330001722305511e-05, | |
| "loss": 0.0067, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.298499876973101e-05, | |
| "loss": 0.0235, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.26701541062235e-05, | |
| "loss": 0.0619, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.235548644832996e-05, | |
| "loss": 0.0081, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.204099901003983e-05, | |
| "loss": 0.0023, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.172669500350188e-05, | |
| "loss": 0.0408, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.141257763899118e-05, | |
| "loss": 0.021, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.109865012487662e-05, | |
| "loss": 0.0293, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.078491566758782e-05, | |
| "loss": 0.0467, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 8.047137747158263e-05, | |
| "loss": 0.0175, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 8.015803873931425e-05, | |
| "loss": 0.0271, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.984490267119867e-05, | |
| "loss": 0.0222, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.953197246558173e-05, | |
| "loss": 0.0149, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.921925131870673e-05, | |
| "loss": 0.0121, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.890674242468166e-05, | |
| "loss": 0.0379, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.859444897544646e-05, | |
| "loss": 0.0121, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.82823741607407e-05, | |
| "loss": 0.0087, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.797052116807068e-05, | |
| "loss": 0.0289, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.765889318267717e-05, | |
| "loss": 0.0309, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.73474933875026e-05, | |
| "loss": 0.0268, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.703632496315878e-05, | |
| "loss": 0.0101, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.672539108789423e-05, | |
| "loss": 0.0077, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.641469493756195e-05, | |
| "loss": 0.0066, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.610423968558666e-05, | |
| "loss": 0.0315, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.579402850293271e-05, | |
| "loss": 0.0136, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.548406455807146e-05, | |
| "loss": 0.0316, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.517435101694902e-05, | |
| "loss": 0.0247, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.486489104295396e-05, | |
| "loss": 0.0153, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.455568779688482e-05, | |
| "loss": 0.0242, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.424674443691804e-05, | |
| "loss": 0.0384, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.393806411857554e-05, | |
| "loss": 0.0082, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.362964999469264e-05, | |
| "loss": 0.0143, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.332150521538564e-05, | |
| "loss": 0.0078, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.301363292801997e-05, | |
| "loss": 0.0141, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.270603627717758e-05, | |
| "loss": 0.0272, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.239871840462536e-05, | |
| "loss": 0.0111, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.209168244928264e-05, | |
| "loss": 0.0282, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.178493154718924e-05, | |
| "loss": 0.0166, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.147846883147363e-05, | |
| "loss": 0.0169, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 7.117229743232058e-05, | |
| "loss": 0.0188, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 7.086642047693952e-05, | |
| "loss": 0.0136, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 7.056084108953233e-05, | |
| "loss": 0.0425, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 7.02555623912617e-05, | |
| "loss": 0.0517, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.995058750021896e-05, | |
| "loss": 0.0124, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.964591953139252e-05, | |
| "loss": 0.0101, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.934156159663574e-05, | |
| "loss": 0.0473, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.903751680463549e-05, | |
| "loss": 0.0291, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.873378826088015e-05, | |
| "loss": 0.019, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.843037906762792e-05, | |
| "loss": 0.0197, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.812729232387532e-05, | |
| "loss": 0.0193, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.782453112532516e-05, | |
| "loss": 0.0102, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.752209856435546e-05, | |
| "loss": 0.0168, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.721999772998731e-05, | |
| "loss": 0.028, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.691823170785374e-05, | |
| "loss": 0.0273, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.661680358016785e-05, | |
| "loss": 0.0023, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.631571642569179e-05, | |
| "loss": 0.017, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.60149733197047e-05, | |
| "loss": 0.0125, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.571457733397196e-05, | |
| "loss": 0.0188, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.541453153671327e-05, | |
| "loss": 0.0136, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.51148389925716e-05, | |
| "loss": 0.0251, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.481550276258193e-05, | |
| "loss": 0.0159, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.451652590413963e-05, | |
| "loss": 0.009, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.421791147096976e-05, | |
| "loss": 0.041, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.39196625130954e-05, | |
| "loss": 0.0058, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.362178207680678e-05, | |
| "loss": 0.0086, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.332427320463003e-05, | |
| "loss": 0.0465, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.302713893529626e-05, | |
| "loss": 0.0184, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.273038230371023e-05, | |
| "loss": 0.018, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.243400634091978e-05, | |
| "loss": 0.0164, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.213801407408442e-05, | |
| "loss": 0.0046, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.18424085264447e-05, | |
| "loss": 0.0431, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.154719271729134e-05, | |
| "loss": 0.0119, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.125236966193412e-05, | |
| "loss": 0.037, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.095794237167145e-05, | |
| "loss": 0.0224, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.066391385375929e-05, | |
| "loss": 0.0208, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.037028711138066e-05, | |
| "loss": 0.0066, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.007706514361482e-05, | |
| "loss": 0.0254, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 5.9784250945406737e-05, | |
| "loss": 0.0326, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 5.949184750753638e-05, | |
| "loss": 0.0098, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 5.91998578165884e-05, | |
| "loss": 0.0166, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.890828485492129e-05, | |
| "loss": 0.0316, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.861713160063721e-05, | |
| "loss": 0.0422, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.832640102755148e-05, | |
| "loss": 0.004, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.8036096105162095e-05, | |
| "loss": 0.0094, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.7746219798619625e-05, | |
| "loss": 0.0436, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.7456775068696554e-05, | |
| "loss": 0.0243, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.7167764871757636e-05, | |
| "loss": 0.008, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.687919215972899e-05, | |
| "loss": 0.0098, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.659105988006852e-05, | |
| "loss": 0.0158, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.6303370975735495e-05, | |
| "loss": 0.0705, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.601612838516066e-05, | |
| "loss": 0.0221, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.572933504221599e-05, | |
| "loss": 0.0326, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.544299387618514e-05, | |
| "loss": 0.0217, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.515710781173301e-05, | |
| "loss": 0.0207, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.487167976887627e-05, | |
| "loss": 0.0251, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.4586712662953365e-05, | |
| "loss": 0.0379, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.430220940459478e-05, | |
| "loss": 0.0259, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.401817289969331e-05, | |
| "loss": 0.039, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.373460604937424e-05, | |
| "loss": 0.0163, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.34515117499661e-05, | |
| "loss": 0.0182, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.3168892892970536e-05, | |
| "loss": 0.0156, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.288675236503328e-05, | |
| "loss": 0.0498, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.260509304791433e-05, | |
| "loss": 0.0253, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.232391781845875e-05, | |
| "loss": 0.0207, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.204322954856695e-05, | |
| "loss": 0.0397, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.176303110516587e-05, | |
| "loss": 0.0339, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.1483325350179254e-05, | |
| "loss": 0.0265, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.120411514049843e-05, | |
| "loss": 0.0323, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.092540332795359e-05, | |
| "loss": 0.03, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.0647192759284044e-05, | |
| "loss": 0.0195, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 5.0369486276109554e-05, | |
| "loss": 0.0271, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 5.0092286714901225e-05, | |
| "loss": 0.0147, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.981559690695251e-05, | |
| "loss": 0.0168, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.9539419678350104e-05, | |
| "loss": 0.0277, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.926375784994558e-05, | |
| "loss": 0.0092, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.898861423732594e-05, | |
| "loss": 0.0093, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.871399165078534e-05, | |
| "loss": 0.0215, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.8439892895296215e-05, | |
| "loss": 0.0278, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.816632077048059e-05, | |
| "loss": 0.0058, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.789327807058157e-05, | |
| "loss": 0.0296, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.7620767584434634e-05, | |
| "loss": 0.0323, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.73487920954395e-05, | |
| "loss": 0.0127, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.7077354381531244e-05, | |
| "loss": 0.0122, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.6806457215152287e-05, | |
| "loss": 0.0231, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.653610336322388e-05, | |
| "loss": 0.0439, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.6266295587117995e-05, | |
| "loss": 0.0402, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.599703664262882e-05, | |
| "loss": 0.018, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.5728329279945094e-05, | |
| "loss": 0.0261, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.5460176243621614e-05, | |
| "loss": 0.0226, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.519258027255121e-05, | |
| "loss": 0.0288, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.492554409993719e-05, | |
| "loss": 0.0215, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.465907045326483e-05, | |
| "loss": 0.0141, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.4393162054273985e-05, | |
| "loss": 0.0324, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.412782161893105e-05, | |
| "loss": 0.0056, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.386305185740136e-05, | |
| "loss": 0.0045, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.359885547402132e-05, | |
| "loss": 0.0581, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.333523516727096e-05, | |
| "loss": 0.008, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.3072193629746335e-05, | |
| "loss": 0.0091, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.280973354813197e-05, | |
| "loss": 0.0333, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.2547857603173445e-05, | |
| "loss": 0.0203, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.228656846965004e-05, | |
| "loss": 0.0124, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.202586881634739e-05, | |
| "loss": 0.0192, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.17657613060301e-05, | |
| "loss": 0.0082, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.150624859541496e-05, | |
| "loss": 0.026, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.1247333335143227e-05, | |
| "loss": 0.007, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.0989018169754e-05, | |
| "loss": 0.0198, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.0731305737657054e-05, | |
| "loss": 0.0128, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.04741986711059e-05, | |
| "loss": 0.0046, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.0217699596170754e-05, | |
| "loss": 0.0309, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 3.996181113271214e-05, | |
| "loss": 0.0352, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 3.970653589435356e-05, | |
| "loss": 0.0225, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.945187648845523e-05, | |
| "loss": 0.0239, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.919783551608729e-05, | |
| "loss": 0.0168, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.894441557200325e-05, | |
| "loss": 0.0344, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.869161924461345e-05, | |
| "loss": 0.0206, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.8439449115958726e-05, | |
| "loss": 0.0225, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.8187907761683935e-05, | |
| "loss": 0.0117, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.793699775101164e-05, | |
| "loss": 0.0151, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.7686721646715974e-05, | |
| "loss": 0.0145, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.743708200509639e-05, | |
| "loss": 0.0288, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.718808137595158e-05, | |
| "loss": 0.0412, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.693972230255336e-05, | |
| "loss": 0.0193, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.6692007321620815e-05, | |
| "loss": 0.0169, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.644493896329431e-05, | |
| "loss": 0.0315, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.619851975110953e-05, | |
| "loss": 0.0126, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.5952752201972094e-05, | |
| "loss": 0.0114, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.570763882613135e-05, | |
| "loss": 0.0139, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.546318212715508e-05, | |
| "loss": 0.0207, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.5219384601903814e-05, | |
| "loss": 0.0089, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.497624874050537e-05, | |
| "loss": 0.0016, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.473377702632924e-05, | |
| "loss": 0.0192, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.449197193596161e-05, | |
| "loss": 0.0169, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.4250835939179585e-05, | |
| "loss": 0.0064, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.401037149892635e-05, | |
| "loss": 0.0057, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.377058107128583e-05, | |
| "loss": 0.0175, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.353146710545765e-05, | |
| "loss": 0.0243, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.3293032043732076e-05, | |
| "loss": 0.0526, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.3055278321465146e-05, | |
| "loss": 0.0138, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.281820836705375e-05, | |
| "loss": 0.0202, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.2581824601910746e-05, | |
| "loss": 0.0096, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.234612944044044e-05, | |
| "loss": 0.0396, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.21111252900137e-05, | |
| "loss": 0.0085, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.1876814550943525e-05, | |
| "loss": 0.0288, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.164319961646043e-05, | |
| "loss": 0.0208, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.141028287268808e-05, | |
| "loss": 0.0241, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.11780666986188e-05, | |
| "loss": 0.0147, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.094655346608939e-05, | |
| "loss": 0.024, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.071574553975698e-05, | |
| "loss": 0.014, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.0485645277074572e-05, | |
| "loss": 0.0076, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.0256255028267245e-05, | |
| "loss": 0.0262, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.002757713630806e-05, | |
| "loss": 0.0172, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.9799613936894135e-05, | |
| "loss": 0.0534, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.957236775842266e-05, | |
| "loss": 0.0041, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.934584092196745e-05, | |
| "loss": 0.047, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.9120035741254803e-05, | |
| "loss": 0.0117, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.8894954522640204e-05, | |
| "loss": 0.011, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.8670599565084634e-05, | |
| "loss": 0.0204, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.8446973160131064e-05, | |
| "loss": 0.0276, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.8224077591881138e-05, | |
| "loss": 0.0126, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.8001915136971758e-05, | |
| "loss": 0.0239, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.7780488064551923e-05, | |
| "loss": 0.0152, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.7559798636259394e-05, | |
| "loss": 0.0103, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.7339849106197768e-05, | |
| "loss": 0.0138, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.71206417209134e-05, | |
| "loss": 0.0406, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.6902178719372405e-05, | |
| "loss": 0.0093, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.6684462332937766e-05, | |
| "loss": 0.0288, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.646749478534678e-05, | |
| "loss": 0.0075, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.6251278292687943e-05, | |
| "loss": 0.0086, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.6035815063378664e-05, | |
| "loss": 0.0154, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.5821107298142643e-05, | |
| "loss": 0.0046, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.5607157189987163e-05, | |
| "loss": 0.0186, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.539396692418099e-05, | |
| "loss": 0.0171, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.5181538678231864e-05, | |
| "loss": 0.017, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.4969874621864375e-05, | |
| "loss": 0.0195, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.4758976916997624e-05, | |
| "loss": 0.0113, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.4548847717723454e-05, | |
| "loss": 0.0112, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.4339489170284058e-05, | |
| "loss": 0.0084, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.4130903413050353e-05, | |
| "loss": 0.0034, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.392309257650005e-05, | |
| "loss": 0.0118, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.3716058783195872e-05, | |
| "loss": 0.0219, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.3509804147763838e-05, | |
| "loss": 0.0667, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.330433077687173e-05, | |
| "loss": 0.0342, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.3099640769207686e-05, | |
| "loss": 0.0069, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.289573621545843e-05, | |
| "loss": 0.0115, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.2692619198288257e-05, | |
| "loss": 0.0064, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.249029179231761e-05, | |
| "loss": 0.0279, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.22887560641019e-05, | |
| "loss": 0.0035, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.208801407211033e-05, | |
| "loss": 0.0185, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.188806786670513e-05, | |
| "loss": 0.0187, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.168891949012024e-05, | |
| "loss": 0.0264, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.149057097644075e-05, | |
| "loss": 0.012, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.129302435158198e-05, | |
| "loss": 0.0038, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.1096281633268843e-05, | |
| "loss": 0.045, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.0900344831015206e-05, | |
| "loss": 0.0109, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.070521594610336e-05, | |
| "loss": 0.0376, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.0510896971563663e-05, | |
| "loss": 0.0081, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.0317389892153972e-05, | |
| "loss": 0.0033, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.0124696684339717e-05, | |
| "loss": 0.0037, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.993281931627331e-05, | |
| "loss": 0.0241, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.974175974777437e-05, | |
| "loss": 0.0195, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.9551519930309536e-05, | |
| "loss": 0.008, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.936210180697262e-05, | |
| "loss": 0.0544, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.917350731246462e-05, | |
| "loss": 0.0097, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.8985738373074124e-05, | |
| "loss": 0.0513, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.8798796906657635e-05, | |
| "loss": 0.0087, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.8612684822619776e-05, | |
| "loss": 0.0157, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.8427404021894013e-05, | |
| "loss": 0.0055, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.8242956396923126e-05, | |
| "loss": 0.0101, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.8059343831639963e-05, | |
| "loss": 0.012, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.7876568201448006e-05, | |
| "loss": 0.0193, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.769463137320254e-05, | |
| "loss": 0.0115, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.7513535205191224e-05, | |
| "loss": 0.0141, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.7333281547115366e-05, | |
| "loss": 0.0134, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.7153872240070945e-05, | |
| "loss": 0.0089, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.6975309116529803e-05, | |
| "loss": 0.0202, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.6797594000320914e-05, | |
| "loss": 0.0067, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.662072870661181e-05, | |
| "loss": 0.0069, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.644471504188999e-05, | |
| "loss": 0.0034, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.6269554803944444e-05, | |
| "loss": 0.0204, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.6095249781847398e-05, | |
| "loss": 0.0403, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.592180175593593e-05, | |
| "loss": 0.0138, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.5749212497793865e-05, | |
| "loss": 0.022, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.5577483770233603e-05, | |
| "loss": 0.019, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.5406617327278205e-05, | |
| "loss": 0.0145, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.5236614914143365e-05, | |
| "loss": 0.0166, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.5067478267219704e-05, | |
| "loss": 0.0239, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.4899209114054946e-05, | |
| "loss": 0.0155, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.473180917333633e-05, | |
| "loss": 0.0419, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.4565280154873006e-05, | |
| "loss": 0.0072, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.4399623759578618e-05, | |
| "loss": 0.0176, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.4234841679453925e-05, | |
| "loss": 0.0216, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.4070935597569401e-05, | |
| "loss": 0.0075, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.3907907188048308e-05, | |
| "loss": 0.0353, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.37457581160493e-05, | |
| "loss": 0.015, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.3584490037749608e-05, | |
| "loss": 0.0159, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.3424104600328057e-05, | |
| "loss": 0.0151, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.3264603441948276e-05, | |
| "loss": 0.0142, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.3105988191741857e-05, | |
| "loss": 0.0171, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.2948260469791928e-05, | |
| "loss": 0.0115, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.2791421887116395e-05, | |
| "loss": 0.0059, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.2635474045651552e-05, | |
| "loss": 0.0136, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.2480418538235773e-05, | |
| "loss": 0.0073, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.2326256948593206e-05, | |
| "loss": 0.0114, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.2172990851317578e-05, | |
| "loss": 0.011, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.202062181185617e-05, | |
| "loss": 0.0299, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1869151386493748e-05, | |
| "loss": 0.0175, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1718581122336702e-05, | |
| "loss": 0.018, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1568912557297284e-05, | |
| "loss": 0.0304, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1420147220077848e-05, | |
| "loss": 0.0181, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1272286630155249e-05, | |
| "loss": 0.0164, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1125332297765345e-05, | |
| "loss": 0.0273, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.0979285723887534e-05, | |
| "loss": 0.0129, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0834148400229483e-05, | |
| "loss": 0.0291, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0689921809211767e-05, | |
| "loss": 0.0057, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0546607423952937e-05, | |
| "loss": 0.0075, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0404206708254216e-05, | |
| "loss": 0.0413, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0262721116584795e-05, | |
| "loss": 0.0088, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0122152094066783e-05, | |
| "loss": 0.0226, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 9.982501076460604e-06, | |
| "loss": 0.075, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 9.843769490150146e-06, | |
| "loss": 0.0301, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 9.705958752128442e-06, | |
| "loss": 0.0312, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 9.569070269982939e-06, | |
| "loss": 0.0224, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 9.433105441881274e-06, | |
| "loss": 0.0182, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 9.298065656556999e-06, | |
| "loss": 0.0221, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 9.163952293295296e-06, | |
| "loss": 0.1938, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 9.030766721918993e-06, | |
| "loss": 0.0283, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.898510302774499e-06, | |
| "loss": 0.0062, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.767184386717985e-06, | |
| "loss": 0.0116, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.636790315101451e-06, | |
| "loss": 0.0227, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.507329419759191e-06, | |
| "loss": 0.0092, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.378803022994109e-06, | |
| "loss": 0.0065, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.251212437564193e-06, | |
| "loss": 0.0257, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.124558966669116e-06, | |
| "loss": 0.0102, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.998843903936993e-06, | |
| "loss": 0.0122, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.874068533411095e-06, | |
| "loss": 0.024, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.750234129536693e-06, | |
| "loss": 0.0225, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.627341957148238e-06, | |
| "loss": 0.0193, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.505393271456152e-06, | |
| "loss": 0.0164, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.384389318034257e-06, | |
| "loss": 0.0078, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.26433133280695e-06, | |
| "loss": 0.0018, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.145220542036568e-06, | |
| "loss": 0.0166, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.0270581623108404e-06, | |
| "loss": 0.0077, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 6.909845400530624e-06, | |
| "loss": 0.011, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.7935834538973235e-06, | |
| "loss": 0.0258, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.678273509900878e-06, | |
| "loss": 0.0074, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.56391674630753e-06, | |
| "loss": 0.0383, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.4505143311478055e-06, | |
| "loss": 0.0244, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.338067422704619e-06, | |
| "loss": 0.0199, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.2265771695013574e-06, | |
| "loss": 0.0094, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.116044710290325e-06, | |
| "loss": 0.0139, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.0064711740408616e-06, | |
| "loss": 0.0312, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 5.8978576799280425e-06, | |
| "loss": 0.0205, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 5.790205337321109e-06, | |
| "loss": 0.0185, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.683515245772231e-06, | |
| "loss": 0.0084, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.57778849500511e-06, | |
| "loss": 0.0188, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.473026164904116e-06, | |
| "loss": 0.0352, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.369229325503022e-06, | |
| "loss": 0.0265, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.266399036974112e-06, | |
| "loss": 0.0117, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.164536349617533e-06, | |
| "loss": 0.0057, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.063642303850302e-06, | |
| "loss": 0.0071, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 4.963717930195888e-06, | |
| "loss": 0.0131, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 4.864764249273579e-06, | |
| "loss": 0.005, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 4.766782271788117e-06, | |
| "loss": 0.0206, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.669772998519295e-06, | |
| "loss": 0.0046, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.573737420311808e-06, | |
| "loss": 0.0043, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.47867651806515e-06, | |
| "loss": 0.0234, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.384591262723492e-06, | |
| "loss": 0.0038, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.291482615265874e-06, | |
| "loss": 0.0028, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.199351526696327e-06, | |
| "loss": 0.0099, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.10819893803418e-06, | |
| "loss": 0.0202, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.018025780304413e-06, | |
| "loss": 0.0159, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 3.928832974528251e-06, | |
| "loss": 0.007, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 3.84062143171362e-06, | |
| "loss": 0.0128, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.7533920528459165e-06, | |
| "loss": 0.0077, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.667145728878829e-06, | |
| "loss": 0.0072, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.5818833407251895e-06, | |
| "loss": 0.0192, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.497605759247946e-06, | |
| "loss": 0.0074, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.4143138452513932e-06, | |
| "loss": 0.0138, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.332008449472246e-06, | |
| "loss": 0.0154, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.250690412571e-06, | |
| "loss": 0.0213, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.170360565123387e-06, | |
| "loss": 0.0215, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.0910197276118214e-06, | |
| "loss": 0.0099, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.012668710417066e-06, | |
| "loss": 0.0113, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.9353083138099258e-06, | |
| "loss": 0.0158, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.858939327943122e-06, | |
| "loss": 0.0182, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.7835625328431315e-06, | |
| "loss": 0.0181, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.709178698402337e-06, | |
| "loss": 0.0096, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.635788584371057e-06, | |
| "loss": 0.0108, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.5633929403498734e-06, | |
| "loss": 0.0042, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.491992505781904e-06, | |
| "loss": 0.0166, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.421588009945286e-06, | |
| "loss": 0.0063, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.3521801719457282e-06, | |
| "loss": 0.0296, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.2837697007091265e-06, | |
| "loss": 0.0165, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.216357294974425e-06, | |
| "loss": 0.0098, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 2.149943643286334e-06, | |
| "loss": 0.0105, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 2.0845294239884127e-06, | |
| "loss": 0.0194, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 2.020115305216086e-06, | |
| "loss": 0.0092, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.9567019448898603e-06, | |
| "loss": 0.0195, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.894289990708531e-06, | |
| "loss": 0.0149, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.8328800801426626e-06, | |
| "loss": 0.006, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.7724728404279855e-06, | |
| "loss": 0.0073, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.7130688885590773e-06, | |
| "loss": 0.0217, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.6546688312829794e-06, | |
| "loss": 0.0071, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.5972732650930578e-06, | |
| "loss": 0.0348, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.540882776222874e-06, | |
| "loss": 0.0146, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.4854979406402347e-06, | |
| "loss": 0.0108, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.4311193240412746e-06, | |
| "loss": 0.032, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.3777474818446824e-06, | |
| "loss": 0.008, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.3253829591860389e-06, | |
| "loss": 0.0062, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.2740262909122558e-06, | |
| "loss": 0.0145, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.2236780015761007e-06, | |
| "loss": 0.0179, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.174338605430847e-06, | |
| "loss": 0.0173, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.1260086064250108e-06, | |
| "loss": 0.007, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.078688498197189e-06, | |
| "loss": 0.0011, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.0323787640710626e-06, | |
| "loss": 0.0227, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 9.870798770504563e-07, | |
| "loss": 0.014, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 9.427922998144323e-07, | |
| "loss": 0.0042, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 8.99516484712648e-07, | |
| "loss": 0.0122, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 8.572528737607055e-07, | |
| "loss": 0.0124, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 8.160018986356433e-07, | |
| "loss": 0.0329, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 7.757639806714956e-07, | |
| "loss": 0.0171, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 7.365395308550405e-07, | |
| "loss": 0.0182, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 6.983289498215806e-07, | |
| "loss": 0.0296, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 6.611326278508135e-07, | |
| "loss": 0.0394, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 6.249509448629009e-07, | |
| "loss": 0.0228, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 5.897842704145728e-07, | |
| "loss": 0.0162, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 5.556329636953295e-07, | |
| "loss": 0.0162, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 5.224973735238004e-07, | |
| "loss": 0.0089, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 4.903778383441581e-07, | |
| "loss": 0.0135, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 4.592746862226882e-07, | |
| "loss": 0.0093, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 4.291882348444132e-07, | |
| "loss": 0.0047, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 4.0011879150986256e-07, | |
| "loss": 0.011, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 3.7206665313191946e-07, | |
| "loss": 0.0318, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 3.4503210623282323e-07, | |
| "loss": 0.0067, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 3.1901542694119377e-07, | |
| "loss": 0.0038, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.9401688098924517e-07, | |
| "loss": 0.012, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.700367237100543e-07, | |
| "loss": 0.0427, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.4707520003497406e-07, | |
| "loss": 0.0197, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.2513254449110234e-07, | |
| "loss": 0.0101, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.0420898119891675e-07, | |
| "loss": 0.0124, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.843047238699658e-07, | |
| "loss": 0.0234, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.6541997580468148e-07, | |
| "loss": 0.0118, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.4755492989031449e-07, | |
| "loss": 0.0264, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.307097685989911e-07, | |
| "loss": 0.0163, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.1488466398577036e-07, | |
| "loss": 0.0303, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.0007977768697885e-07, | |
| "loss": 0.0177, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 8.629526091848972e-08, | |
| "loss": 0.0134, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 7.353125447423504e-08, | |
| "loss": 0.0136, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 6.178788872470698e-08, | |
| "loss": 0.0286, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 5.106528361568108e-08, | |
| "loss": 0.0097, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 4.1363548666961684e-08, | |
| "loss": 0.0107, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 3.26827829712606e-08, | |
| "loss": 0.0164, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.5023075193209012e-08, | |
| "loss": 0.0271, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.8384503568413813e-08, | |
| "loss": 0.0082, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.2767135902713723e-08, | |
| "loss": 0.0124, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 8.171029571435452e-09, | |
| "loss": 0.0298, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.596231518827487e-09, | |
| "loss": 0.0151, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 2.0427782575715983e-09, | |
| "loss": 0.0041, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 5.10695868449762e-10, | |
| "loss": 0.026, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 0.0, | |
| "loss": 0.0089, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 1014, | |
| "total_flos": 55825951334400.0, | |
| "train_loss": 0.054662292419071265, | |
| "train_runtime": 7573.6964, | |
| "train_samples_per_second": 2.141, | |
| "train_steps_per_second": 0.134 | |
| } | |
| ], | |
| "max_steps": 1014, | |
| "num_train_epochs": 1, | |
| "total_flos": 55825951334400.0, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |