{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1040,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 6.25e-06,
      "loss": 1.354,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.25e-05,
      "loss": 1.4196,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 1.4008,
      "step": 3
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.5e-05,
      "loss": 1.3531,
      "step": 4
    },
    {
      "epoch": 0.0,
      "learning_rate": 3.125e-05,
      "loss": 1.2091,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 1.2213,
      "step": 6
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.375e-05,
      "loss": 1.18,
      "step": 7
    },
    {
      "epoch": 0.01,
      "learning_rate": 5e-05,
      "loss": 1.0744,
      "step": 8
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.6250000000000005e-05,
      "loss": 1.2032,
      "step": 9
    },
    {
      "epoch": 0.01,
      "learning_rate": 6.25e-05,
      "loss": 1.0925,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 6.875e-05,
      "loss": 1.1905,
      "step": 11
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.1024,
      "step": 12
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.125000000000001e-05,
      "loss": 1.066,
      "step": 13
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.75e-05,
      "loss": 0.9857,
      "step": 14
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.375e-05,
      "loss": 1.0583,
      "step": 15
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0001,
      "loss": 1.1359,
      "step": 16
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00010625000000000001,
      "loss": 0.3346,
      "step": 17
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00011250000000000001,
      "loss": 0.9871,
      "step": 18
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00011875,
      "loss": 1.0403,
      "step": 19
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.000125,
      "loss": 1.0968,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00013125000000000002,
      "loss": 1.0546,
      "step": 21
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0001375,
      "loss": 1.0389,
      "step": 22
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00014375,
      "loss": 1.028,
      "step": 23
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00015000000000000001,
      "loss": 1.0471,
      "step": 24
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00015625,
      "loss": 0.9374,
      "step": 25
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00016250000000000002,
      "loss": 1.0271,
      "step": 26
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00016875,
      "loss": 0.3417,
      "step": 27
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.000175,
      "loss": 0.9645,
      "step": 28
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00018125000000000001,
      "loss": 1.0597,
      "step": 29
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0001875,
      "loss": 0.9678,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019375000000000002,
      "loss": 0.9314,
      "step": 31
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0002,
      "loss": 1.04,
      "step": 32
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019999951432210904,
      "loss": 1.0053,
      "step": 33
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019999805729315381,
      "loss": 0.9822,
      "step": 34
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0001999956289272873,
      "loss": 0.9289,
      "step": 35
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0001999922292480975,
      "loss": 0.9671,
      "step": 36
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019998785828860742,
      "loss": 0.9869,
      "step": 37
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019998251609127464,
      "loss": 0.9501,
      "step": 38
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019997620270799092,
      "loss": 0.8762,
      "step": 39
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019996891820008164,
      "loss": 0.9656,
      "step": 40
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019996066263830531,
      "loss": 0.9619,
      "step": 41
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019995143610285277,
      "loss": 0.9327,
      "step": 42
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019994123868334655,
      "loss": 0.9848,
      "step": 43
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019993007047883988,
      "loss": 1.0161,
      "step": 44
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019991793159781569,
      "loss": 0.9219,
      "step": 45
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0001999048221581858,
      "loss": 0.953,
      "step": 46
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0001998907422872894,
      "loss": 0.9215,
      "step": 47
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019987569212189224,
      "loss": 0.996,
      "step": 48
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0001998596718081849,
      "loss": 0.9969,
      "step": 49
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019984268150178167,
      "loss": 0.9982,
      "step": 50
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019982472136771882,
      "loss": 0.3213,
      "step": 51
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0001998057915804532,
      "loss": 1.0021,
      "step": 52
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019978589232386035,
      "loss": 0.9302,
      "step": 53
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0001997650237912329,
      "loss": 0.9078,
      "step": 54
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019974318618527849,
      "loss": 0.9411,
      "step": 55
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019972037971811802,
      "loss": 0.9986,
      "step": 56
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019969660461128342,
      "loss": 0.9609,
      "step": 57
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019967186109571552,
      "loss": 0.935,
      "step": 58
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019964614941176195,
      "loss": 0.9725,
      "step": 59
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019961946980917456,
      "loss": 0.9642,
      "step": 60
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0001995918225471073,
      "loss": 0.9792,
      "step": 61
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019956320789411337,
      "loss": 0.9111,
      "step": 62
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019953362612814296,
      "loss": 0.9991,
      "step": 63
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019950307753654017,
      "loss": 0.9489,
      "step": 64
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0001994715624160405,
      "loss": 0.892,
      "step": 65
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019943908107276798,
      "loss": 0.9708,
      "step": 66
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019940563382223197,
      "loss": 0.2971,
      "step": 67
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019937122098932428,
      "loss": 0.9065,
      "step": 68
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001993358429083159,
      "loss": 0.9531,
      "step": 69
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019929949992285396,
      "loss": 0.9471,
      "step": 70
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001992621923859581,
      "loss": 0.9695,
      "step": 71
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019922392066001722,
      "loss": 0.9679,
      "step": 72
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019918468511678596,
      "loss": 0.9238,
      "step": 73
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019914448613738106,
      "loss": 0.9605,
      "step": 74
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019910332411227758,
      "loss": 0.9854,
      "step": 75
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001990611994413053,
      "loss": 0.9069,
      "step": 76
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019901811253364456,
      "loss": 0.8975,
      "step": 77
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019897406380782261,
      "loss": 0.8872,
      "step": 78
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019892905369170926,
      "loss": 0.9219,
      "step": 79
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019888308262251285,
      "loss": 0.9985,
      "step": 80
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019883615104677608,
      "loss": 0.9769,
      "step": 81
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019878825942037148,
      "loss": 0.9303,
      "step": 82
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019873940820849714,
      "loss": 0.9734,
      "step": 83
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019868959788567212,
      "loss": 0.9821,
      "step": 84
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019863882893573188,
      "loss": 0.9149,
      "step": 85
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.0001985871018518236,
      "loss": 0.994,
      "step": 86
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.0001985344171364012,
      "loss": 0.9824,
      "step": 87
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019848077530122083,
      "loss": 0.938,
      "step": 88
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019842617686733545,
      "loss": 0.9199,
      "step": 89
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019837062236509014,
      "loss": 0.9361,
      "step": 90
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019831411233411678,
      "loss": 0.8936,
      "step": 91
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019825664732332884,
      "loss": 0.9604,
      "step": 92
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019819822789091598,
      "loss": 0.9126,
      "step": 93
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019813885460433879,
      "loss": 0.3002,
      "step": 94
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019807852804032305,
      "loss": 0.9704,
      "step": 95
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019801724878485438,
      "loss": 0.9617,
      "step": 96
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0001979550174331724,
      "loss": 0.9704,
      "step": 97
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019789183458976484,
      "loss": 0.9209,
      "step": 98
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.000197827700868362,
      "loss": 0.9234,
      "step": 99
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019776261689193048,
      "loss": 0.9371,
      "step": 100
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019769658329266718,
      "loss": 1.0014,
      "step": 101
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019762960071199333,
      "loss": 0.9554,
      "step": 102
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019756166980054813,
      "loss": 0.9479,
      "step": 103
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019749279121818235,
      "loss": 0.8958,
      "step": 104
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019742296563395216,
      "loss": 0.9782,
      "step": 105
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019735219372611233,
      "loss": 0.9272,
      "step": 106
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019728047618210995,
      "loss": 0.9196,
      "step": 107
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019720781369857746,
      "loss": 0.9326,
      "step": 108
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019713420698132614,
      "loss": 0.8558,
      "step": 109
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001970596567453391,
      "loss": 0.9383,
      "step": 110
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019698416371476433,
      "loss": 0.9137,
      "step": 111
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001969077286229078,
      "loss": 0.3247,
      "step": 112
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019683035221222618,
      "loss": 0.9465,
      "step": 113
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019675203523431966,
      "loss": 0.8971,
      "step": 114
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019667277844992475,
      "loss": 0.8971,
      "step": 115
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019659258262890683,
      "loss": 0.8856,
      "step": 116
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019651144855025265,
      "loss": 0.3271,
      "step": 117
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001964293770020628,
      "loss": 0.9045,
      "step": 118
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001963463687815439,
      "loss": 0.9437,
      "step": 119
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001962624246950012,
      "loss": 0.9322,
      "step": 120
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019617754555783043,
      "loss": 0.9018,
      "step": 121
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019609173219450998,
      "loss": 0.9784,
      "step": 122
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001960049854385929,
      "loss": 0.9623,
      "step": 123
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001959173061326988,
      "loss": 0.3051,
      "step": 124
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019582869512850575,
      "loss": 0.9444,
      "step": 125
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001957391532867418,
      "loss": 0.9439,
      "step": 126
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001956486814771769,
      "loss": 0.931,
      "step": 127
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001955572805786141,
      "loss": 0.9075,
      "step": 128
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019546495147888132,
      "loss": 0.9113,
      "step": 129
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001953716950748227,
      "loss": 0.9328,
      "step": 130
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019527751227228963,
      "loss": 0.9031,
      "step": 131
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019518240398613227,
      "loss": 0.8926,
      "step": 132
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019508637114019038,
      "loss": 0.9489,
      "step": 133
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001949894146672846,
      "loss": 0.2985,
      "step": 134
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019489153550920728,
      "loss": 0.9203,
      "step": 135
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001947927346167132,
      "loss": 0.9452,
      "step": 136
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001946930129495106,
      "loss": 0.9127,
      "step": 137
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001945923714762516,
      "loss": 0.9909,
      "step": 138
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019449081117452302,
      "loss": 0.8861,
      "step": 139
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019438833303083678,
      "loss": 0.9719,
      "step": 140
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001942849380406201,
      "loss": 0.952,
      "step": 141
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019418062720820637,
      "loss": 0.9007,
      "step": 142
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019407540154682472,
      "loss": 0.921,
      "step": 143
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019396926207859084,
      "loss": 0.9232,
      "step": 144
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019386220983449653,
      "loss": 0.9495,
      "step": 145
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019375424585439994,
      "loss": 0.3129,
      "step": 146
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019364537118701542,
      "loss": 0.9249,
      "step": 147
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001935355868899034,
      "loss": 0.9079,
      "step": 148
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019342489402945998,
      "loss": 0.9685,
      "step": 149
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019331329368090666,
      "loss": 0.9026,
      "step": 150
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019320078692827987,
      "loss": 0.9082,
      "step": 151
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019308737486442045,
      "loss": 0.9867,
      "step": 152
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019297305859096304,
      "loss": 0.9287,
      "step": 153
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019285783921832537,
      "loss": 0.92,
      "step": 154
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001927417178656975,
      "loss": 0.8397,
      "step": 155
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001926246956610309,
      "loss": 0.9437,
      "step": 156
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001925067737410275,
      "loss": 0.902,
      "step": 157
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001923879532511287,
      "loss": 0.9219,
      "step": 158
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019226823534550418,
      "loss": 0.3306,
      "step": 159
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019214762118704076,
      "loss": 0.9146,
      "step": 160
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019202611194733108,
      "loss": 0.9856,
      "step": 161
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019190370880666207,
      "loss": 0.8802,
      "step": 162
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019178041295400382,
      "loss": 0.933,
      "step": 163
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019165622558699763,
      "loss": 0.8891,
      "step": 164
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019153114791194473,
      "loss": 0.9589,
      "step": 165
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019140518114379434,
      "loss": 0.3159,
      "step": 166
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019127832650613189,
      "loss": 0.9367,
      "step": 167
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019115058523116733,
      "loss": 0.8948,
      "step": 168
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019102195855972287,
      "loss": 0.9252,
      "step": 169
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001908924477412211,
      "loss": 0.917,
      "step": 170
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019076205403367285,
      "loss": 0.9382,
      "step": 171
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.000190630778703665,
      "loss": 0.9195,
      "step": 172
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.000190498623026348,
      "loss": 0.8703,
      "step": 173
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019036558828542367,
      "loss": 0.8912,
      "step": 174
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019023167577313264,
      "loss": 0.8815,
      "step": 175
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001900968867902419,
      "loss": 0.9702,
      "step": 176
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00018996122264603203,
      "loss": 0.8715,
      "step": 177
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00018982468465828442,
      "loss": 0.9003,
      "step": 178
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00018968727415326884,
      "loss": 0.8853,
      "step": 179
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001895489924657301,
      "loss": 0.9172,
      "step": 180
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00018940984093887542,
      "loss": 0.9066,
      "step": 181
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00018926982092436117,
      "loss": 0.8922,
      "step": 182
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018912893378227985,
      "loss": 0.9051,
      "step": 183
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001889871808811469,
      "loss": 0.8885,
      "step": 184
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018884456359788724,
      "loss": 0.9208,
      "step": 185
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018870108331782217,
      "loss": 0.3049,
      "step": 186
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018855674143465566,
      "loss": 0.9022,
      "step": 187
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018841153935046098,
      "loss": 0.9392,
      "step": 188
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018826547847566693,
      "loss": 0.9543,
      "step": 189
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018811856022904423,
      "loss": 0.9483,
      "step": 190
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018797078603769184,
      "loss": 0.8272,
      "step": 191
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018782215733702286,
      "loss": 0.9363,
      "step": 192
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001876726755707508,
      "loss": 0.8628,
      "step": 193
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018752234219087538,
      "loss": 0.9399,
      "step": 194
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018737115865766863,
      "loss": 0.9488,
      "step": 195
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018721912643966055,
      "loss": 0.9155,
      "step": 196
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018706624701362483,
      "loss": 0.9434,
      "step": 197
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018691252186456465,
      "loss": 0.9321,
      "step": 198
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018675795248569813,
      "loss": 0.9033,
      "step": 199
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018660254037844388,
      "loss": 0.8924,
      "step": 200
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018644628705240636,
      "loss": 0.8941,
      "step": 201
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001862891940253613,
      "loss": 0.9788,
      "step": 202
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018613126282324092,
      "loss": 0.9482,
      "step": 203
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018597249498011903,
      "loss": 0.9189,
      "step": 204
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001858128920381963,
      "loss": 0.971,
      "step": 205
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018565245554778517,
      "loss": 0.928,
      "step": 206
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018549118706729468,
      "loss": 0.8613,
      "step": 207
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018532908816321558,
      "loss": 0.9415,
      "step": 208
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018516616041010494,
      "loss": 0.8505,
      "step": 209
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001850024053905709,
      "loss": 0.9253,
      "step": 210
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018483782469525738,
      "loss": 0.9365,
      "step": 211
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018467241992282843,
      "loss": 0.8936,
      "step": 212
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018450619267995283,
      "loss": 0.9627,
      "step": 213
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001843391445812886,
      "loss": 0.8926,
      "step": 214
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018417127724946702,
      "loss": 0.9064,
      "step": 215
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018400259231507717,
      "loss": 0.9148,
      "step": 216
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001838330914166499,
      "loss": 0.9293,
      "step": 217
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018366277620064197,
      "loss": 0.3,
      "step": 218
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018349164832142013,
      "loss": 0.9553,
      "step": 219
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001833197094412449,
      "loss": 0.865,
      "step": 220
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018314696123025454,
      "loss": 0.9238,
      "step": 221
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018297340536644875,
      "loss": 0.9083,
      "step": 222
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018279904353567253,
      "loss": 0.916,
      "step": 223
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001826238774315995,
      "loss": 0.9253,
      "step": 224
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001824479087557158,
      "loss": 0.9343,
      "step": 225
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018227113921730334,
      "loss": 0.9062,
      "step": 226
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018209357053342325,
      "loss": 0.9876,
      "step": 227
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001819152044288992,
      "loss": 0.9446,
      "step": 228
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018173604263630063,
      "loss": 0.9048,
      "step": 229
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018155608689592604,
      "loss": 0.9808,
      "step": 230
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018137533895578583,
      "loss": 0.9091,
      "step": 231
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018119380057158568,
      "loss": 0.8511,
      "step": 232
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018101147350670907,
      "loss": 0.9621,
      "step": 233
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018082835953220054,
      "loss": 0.89,
      "step": 234
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018064446042674828,
      "loss": 0.9617,
      "step": 235
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018045977797666684,
      "loss": 0.9026,
      "step": 236
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018027431397587992,
      "loss": 0.8714,
      "step": 237
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018008807022590283,
      "loss": 0.9611,
      "step": 238
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017990104853582493,
      "loss": 0.9143,
      "step": 239
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017971325072229226,
      "loss": 0.9591,
      "step": 240
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017952467860948973,
      "loss": 0.8614,
      "step": 241
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017933533402912354,
      "loss": 0.8888,
      "step": 242
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001791452188204031,
      "loss": 0.906,
      "step": 243
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017895433483002354,
      "loss": 0.9466,
      "step": 244
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017876268391214754,
      "loss": 0.9391,
      "step": 245
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017857026792838737,
      "loss": 0.9269,
      "step": 246
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017837708874778683,
      "loss": 0.8756,
      "step": 247
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.000178183148246803,
      "loss": 0.9507,
      "step": 248
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017798844830928817,
      "loss": 0.9492,
      "step": 249
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017779299082647148,
      "loss": 0.9581,
      "step": 250
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001775967776969405,
      "loss": 0.8927,
      "step": 251
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017739981082662276,
      "loss": 0.9197,
      "step": 252
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017720209212876737,
      "loss": 0.8919,
      "step": 253
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001770036235239263,
      "loss": 0.8873,
      "step": 254
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017680440693993585,
      "loss": 0.9195,
      "step": 255
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0001766044443118978,
      "loss": 0.9198,
      "step": 256
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017640373758216077,
      "loss": 0.9114,
      "step": 257
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017620228870030108,
      "loss": 0.9289,
      "step": 258
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017600009962310417,
      "loss": 0.9017,
      "step": 259
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0001757971723145453,
      "loss": 0.9194,
      "step": 260
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017559350874577065,
      "loss": 0.9143,
      "step": 261
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.000175389110895078,
      "loss": 0.913,
      "step": 262
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017518398074789775,
      "loss": 0.8694,
      "step": 263
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017497812029677344,
      "loss": 0.8792,
      "step": 264
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017477153154134243,
      "loss": 0.9098,
      "step": 265
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017456421648831655,
      "loss": 0.9043,
      "step": 266
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017435617715146263,
      "loss": 0.919,
      "step": 267
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017414741555158266,
      "loss": 0.9182,
      "step": 268
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017393793371649462,
      "loss": 0.9128,
      "step": 269
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0001737277336810124,
      "loss": 0.8936,
      "step": 270
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0001735168174869262,
      "loss": 0.9161,
      "step": 271
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017330518718298264,
      "loss": 0.9185,
      "step": 272
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017309284482486495,
      "loss": 0.9087,
      "step": 273
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017287979247517286,
      "loss": 0.9441,
      "step": 274
    },
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.0001726660322034027, | |
| "loss": 0.8839, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017245156608592727, | |
| "loss": 0.3174, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017223639620597556, | |
| "loss": 0.8911, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017202052465361268, | |
| "loss": 0.9058, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.0001718039535257194, | |
| "loss": 0.8988, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017158668492597186, | |
| "loss": 0.9677, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.0001713687209648212, | |
| "loss": 0.9123, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017115006375947303, | |
| "loss": 0.9187, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017093071543386668, | |
| "loss": 0.9103, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017071067811865476, | |
| "loss": 0.9507, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017048995395118252, | |
| "loss": 0.8716, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00017026854507546692, | |
| "loss": 0.3181, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00017004645364217583, | |
| "loss": 0.9239, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016982368180860728, | |
| "loss": 0.9222, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016960023173866835, | |
| "loss": 0.9329, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016937610560285418, | |
| "loss": 0.9381, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016915130557822695, | |
| "loss": 0.8579, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.0001689258338483947, | |
| "loss": 0.8982, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016869969260349018, | |
| "loss": 0.9199, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016847288404014935, | |
| "loss": 0.3165, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016824541036149037, | |
| "loss": 0.8695, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00016801727377709194, | |
| "loss": 0.8743, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016778847650297197, | |
| "loss": 0.9202, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016755902076156604, | |
| "loss": 0.858, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.0001673289087817057, | |
| "loss": 0.9668, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016709814279859702, | |
| "loss": 0.8471, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.0001668667250537987, | |
| "loss": 0.8535, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.0001666346577952004, | |
| "loss": 0.8837, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016640194327700086, | |
| "loss": 0.9569, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016616858375968595, | |
| "loss": 0.9127, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016593458151000688, | |
| "loss": 0.9326, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00016569993880095806, | |
| "loss": 0.929, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016546465791175496, | |
| "loss": 0.9183, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016522874112781213, | |
| "loss": 0.9495, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016499219074072086, | |
| "loss": 0.8918, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016475500904822706, | |
| "loss": 0.9207, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016451719835420877, | |
| "loss": 0.9345, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016427876096865394, | |
| "loss": 0.9267, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016403969920763788, | |
| "loss": 0.3331, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.0001638000153933009, | |
| "loss": 0.9455, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016355971185382545, | |
| "loss": 0.8989, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.000163318790923414, | |
| "loss": 0.298, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00016307725494226584, | |
| "loss": 0.2894, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016283510625655472, | |
| "loss": 0.9875, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016259234721840591, | |
| "loss": 0.886, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016234898018587337, | |
| "loss": 0.2947, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001621050075229168, | |
| "loss": 0.8589, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016186043159937882, | |
| "loss": 0.8434, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016161525479096178, | |
| "loss": 0.8448, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016136947947920476, | |
| "loss": 0.9115, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001611231080514605, | |
| "loss": 0.8815, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00016087614290087208, | |
| "loss": 0.9008, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001606285864263498, | |
| "loss": 0.9077, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00016038044103254775, | |
| "loss": 0.8836, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00016013170912984058, | |
| "loss": 0.8678, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015988239313430004, | |
| "loss": 0.8614, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015963249546767144, | |
| "loss": 0.8878, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015938201855735014, | |
| "loss": 0.8884, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015913096483635824, | |
| "loss": 0.9089, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015887933674332046, | |
| "loss": 0.884, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00015862713672244093, | |
| "loss": 0.9471, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.000158374367223479, | |
| "loss": 0.8965, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.0001581210307017259, | |
| "loss": 0.8936, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.0001578671296179806, | |
| "loss": 0.9244, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015761266643852587, | |
| "loss": 0.9062, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.0001573576436351046, | |
| "loss": 0.8906, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015710206368489552, | |
| "loss": 0.8824, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015684592907048926, | |
| "loss": 0.9276, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015658924227986414, | |
| "loss": 0.8786, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.0001563320058063622, | |
| "loss": 0.8581, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.0001560742221486648, | |
| "loss": 0.2759, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.0001558158938107684, | |
| "loss": 0.8955, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015555702330196023, | |
| "loss": 0.8646, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00015529761313679393, | |
| "loss": 0.8917, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015503766583506524, | |
| "loss": 0.9241, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015477718392178716, | |
| "loss": 0.9011, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.0001545161699271659, | |
| "loss": 0.8722, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015425462638657595, | |
| "loss": 0.2777, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015399255584053567, | |
| "loss": 0.8589, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.0001537299608346824, | |
| "loss": 0.8806, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015346684391974794, | |
| "loss": 0.8985, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015320320765153367, | |
| "loss": 0.8618, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00015293905459088568, | |
| "loss": 0.9165, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.0001526743873036701, | |
| "loss": 0.9175, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015240920836074776, | |
| "loss": 0.822, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.0001521435203379498, | |
| "loss": 0.8497, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015187732581605217, | |
| "loss": 0.9076, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015161062738075067, | |
| "loss": 0.8994, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015134342762263605, | |
| "loss": 0.878, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015107572913716858, | |
| "loss": 0.2954, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015080753452465296, | |
| "loss": 0.8821, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.0001505388463902131, | |
| "loss": 0.8977, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.0001502696673437667, | |
| "loss": 0.8907, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00015000000000000001, | |
| "loss": 0.8698, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.0001497298469783424, | |
| "loss": 0.9107, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014945921090294076, | |
| "loss": 0.8447, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014918809440263436, | |
| "loss": 0.8651, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014891650011092896, | |
| "loss": 0.8568, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014864443066597139, | |
| "loss": 0.8937, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.000148371888710524, | |
| "loss": 0.9191, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014809887689193877, | |
| "loss": 0.8641, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014782539786213183, | |
| "loss": 0.806, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014755145427755754, | |
| "loss": 0.916, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.0001472770487991827, | |
| "loss": 0.8781, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00014700218409246087, | |
| "loss": 0.8897, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.0001467268628273062, | |
| "loss": 0.905, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014645108767806777, | |
| "loss": 0.8497, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014617486132350343, | |
| "loss": 0.8647, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014589818644675378, | |
| "loss": 0.9031, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.0001456210657353163, | |
| "loss": 0.8881, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014534350188101904, | |
| "loss": 0.8955, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014506549757999454, | |
| "loss": 0.8831, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014478705553265362, | |
| "loss": 0.8145, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014450817844365921, | |
| "loss": 0.8334, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00014422886902190014, | |
| "loss": 0.9174, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.0001439491299804645, | |
| "loss": 0.8833, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.0001436689640366137, | |
| "loss": 0.8729, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014338837391175582, | |
| "loss": 0.8865, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014310736233141925, | |
| "loss": 0.8064, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014282593202522627, | |
| "loss": 0.8814, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014254408572686642, | |
| "loss": 0.8626, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014226182617406996, | |
| "loss": 0.8826, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014197915610858144, | |
| "loss": 0.9192, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014169607827613283, | |
| "loss": 0.8771, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014141259542641704, | |
| "loss": 0.8874, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00014112871031306119, | |
| "loss": 0.8448, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00014084442569359964, | |
| "loss": 0.9049, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00014055974432944753, | |
| "loss": 0.8688, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00014027466898587374, | |
| "loss": 0.8754, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00013998920243197407, | |
| "loss": 0.834, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.0001397033474406445, | |
| "loss": 0.8895, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00013941710678855396, | |
| "loss": 0.88, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.0001391304832561175, | |
| "loss": 0.9417, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00013884347962746948, | |
| "loss": 0.3594, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.0001385560986904362, | |
| "loss": 0.8719, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.000138268343236509, | |
| "loss": 0.8576, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013798021606081714, | |
| "loss": 0.8475, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013769171996210052, | |
| "loss": 0.2999, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013740285774268283, | |
| "loss": 0.9098, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013711363220844379, | |
| "loss": 0.8808, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013682404616879247, | |
| "loss": 0.2962, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013653410243663952, | |
| "loss": 0.8367, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013624380382837016, | |
| "loss": 0.9031, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013595315316381677, | |
| "loss": 0.8969, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.0001356621532662313, | |
| "loss": 0.9031, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013537080696225814, | |
| "loss": 0.8838, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00013507911708190645, | |
| "loss": 0.8571, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013478708645852272, | |
| "loss": 0.9161, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013449471792876334, | |
| "loss": 0.8864, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013420201433256689, | |
| "loss": 0.8672, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013390897851312667, | |
| "loss": 0.8653, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.0001336156133168631, | |
| "loss": 0.8862, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013332192159339594, | |
| "loss": 0.9147, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013302790619551674, | |
| "loss": 0.8628, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013273356997916104, | |
| "loss": 0.8262, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013243891580338072, | |
| "loss": 0.8678, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00013214394653031616, | |
| "loss": 0.8903, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00013184866502516845, | |
| "loss": 0.9309, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00013155307415617154, | |
| "loss": 0.8421, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00013125717679456447, | |
| "loss": 0.8575, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.0001309609758145633, | |
| "loss": 0.8858, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00013066447409333345, | |
| "loss": 0.9004, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00013036767451096148, | |
| "loss": 0.9071, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00013007057995042732, | |
| "loss": 0.9333, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00012977319329757615, | |
| "loss": 0.8617, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00012947551744109043, | |
| "loss": 0.925, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00012917755527246179, | |
| "loss": 0.9664, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.000128879309685963, | |
| "loss": 0.894, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.0001285807835786198, | |
| "loss": 0.8646, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012828197985018276, | |
| "loss": 0.8991, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012798290140309923, | |
| "loss": 0.9965, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012768355114248494, | |
| "loss": 0.8435, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012738393197609602, | |
| "loss": 0.8812, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012708404681430053, | |
| "loss": 0.8779, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012678389857005034, | |
| "loss": 0.863, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012648349015885273, | |
| "loss": 0.9237, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012618282449874222, | |
| "loss": 0.9368, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00012588190451025207, | |
| "loss": 0.9207, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012558073311638604, | |
| "loss": 0.8304, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012527931324258975, | |
| "loss": 0.8987, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.0001249776478167227, | |
| "loss": 0.9162, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012467573976902935, | |
| "loss": 0.8434, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012437359203211108, | |
| "loss": 0.8428, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012407120754089732, | |
| "loss": 0.8859, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012376858923261733, | |
| "loss": 0.9039, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012346574004677154, | |
| "loss": 0.8998, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012316266292510306, | |
| "loss": 0.9422, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00012285936081156897, | |
| "loss": 0.329, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012255583665231196, | |
| "loss": 0.7916, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012225209339563145, | |
| "loss": 0.909, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012194813399195518, | |
| "loss": 0.8475, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012164396139381029, | |
| "loss": 0.303, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012133957855579501, | |
| "loss": 0.874, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012103498843454959, | |
| "loss": 0.9269, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012073019398872778, | |
| "loss": 0.9084, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012042519817896804, | |
| "loss": 0.8731, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00012012000396786485, | |
| "loss": 0.8178, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00011981461431993977, | |
| "loss": 0.8434, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00011950903220161285, | |
| "loss": 0.9207, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011920326058117364, | |
| "loss": 0.9476, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011889730242875243, | |
| "loss": 0.9027, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011859116071629149, | |
| "loss": 0.8772, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011828483841751596, | |
| "loss": 0.8397, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011797833850790527, | |
| "loss": 0.9034, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011767166396466403, | |
| "loss": 0.8309, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011736481776669306, | |
| "loss": 0.935, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011705780289456068, | |
| "loss": 0.8743, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011675062233047364, | |
| "loss": 0.8202, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011644327905824808, | |
| "loss": 0.8972, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011613577606328068, | |
| "loss": 0.8696, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.0001158281163325195, | |
| "loss": 0.8518, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011552030285443515, | |
| "loss": 0.8939, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011521233861899167, | |
| "loss": 0.8419, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011490422661761744, | |
| "loss": 0.8328, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011459596984317622, | |
| "loss": 0.8727, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011428757128993802, | |
| "loss": 0.8926, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011397903395354996, | |
| "loss": 0.9024, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011367036083100735, | |
| "loss": 0.8846, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011336155492062439, | |
| "loss": 0.8723, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011305261922200519, | |
| "loss": 0.858, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011274355673601444, | |
| "loss": 0.896, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011243437046474853, | |
| "loss": 0.8354, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011212506341150615, | |
| "loss": 0.8706, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.0001118156385807593, | |
| "loss": 0.9189, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011150609897812387, | |
| "loss": 0.8471, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011119644761033078, | |
| "loss": 0.8705, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011088668748519647, | |
| "loss": 0.8107, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011057682161159379, | |
| "loss": 0.8993, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00011026685299942285, | |
| "loss": 0.8812, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00010995678465958168, | |
| "loss": 0.9024, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010964661960393703, | |
| "loss": 0.8895, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010933636084529506, | |
| "loss": 0.8727, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010902601139737225, | |
| "loss": 0.2772, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010871557427476583, | |
| "loss": 0.9014, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010840505249292476, | |
| "loss": 0.7737, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010809444906812033, | |
| "loss": 0.8699, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010778376701741689, | |
| "loss": 0.8054, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010747300935864243, | |
| "loss": 0.8488, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010716217911035951, | |
| "loss": 0.919, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010685127929183567, | |
| "loss": 0.9037, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010654031292301432, | |
| "loss": 0.8981, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010622928302448523, | |
| "loss": 0.876, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010591819261745528, | |
| "loss": 0.8164, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010560704472371919, | |
| "loss": 0.868, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010529584236562995, | |
| "loss": 0.2778, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010498458856606972, | |
| "loss": 0.8673, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010467328634842024, | |
| "loss": 0.9099, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010436193873653361, | |
| "loss": 0.8871, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010405054875470286, | |
| "loss": 0.845, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.0001037391194276326, | |
| "loss": 0.8649, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010342765378040953, | |
| "loss": 0.8826, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010311615483847332, | |
| "loss": 0.8452, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.0001028046256275869, | |
| "loss": 0.844, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.0001024930691738073, | |
| "loss": 0.8781, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010218148850345613, | |
| "loss": 0.8892, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010186988664309023, | |
| "loss": 0.9034, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010155826661947231, | |
| "loss": 0.8881, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010124663145954152, | |
| "loss": 0.8243, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010093498419038394, | |
| "loss": 0.9244, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010062332783920336, | |
| "loss": 0.8509, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010031166543329178, | |
| "loss": 0.9, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 0.0001, | |
| "loss": 0.8533, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.968833456670824e-05, | |
| "loss": 0.8462, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.937667216079665e-05, | |
| "loss": 0.8059, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.90650158096161e-05, | |
| "loss": 0.8353, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.875336854045851e-05, | |
| "loss": 0.8665, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.84417333805277e-05, | |
| "loss": 0.8581, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.813011335690981e-05, | |
| "loss": 0.9199, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.78185114965439e-05, | |
| "loss": 0.8579, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.750693082619273e-05, | |
| "loss": 0.9114, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.719537437241312e-05, | |
| "loss": 0.8554, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.688384516152672e-05, | |
| "loss": 0.8714, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.657234621959051e-05, | |
| "loss": 0.3176, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.626088057236745e-05, | |
| "loss": 0.914, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.594945124529719e-05, | |
| "loss": 0.8922, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.563806126346642e-05, | |
| "loss": 0.8667, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.53267136515798e-05, | |
| "loss": 0.3195, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.501541143393028e-05, | |
| "loss": 0.8902, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.470415763437004e-05, | |
| "loss": 0.8256, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.439295527628081e-05, | |
| "loss": 0.9084, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.408180738254471e-05, | |
| "loss": 0.7874, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.37707169755148e-05, | |
| "loss": 0.9122, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.345968707698569e-05, | |
| "loss": 0.9025, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.314872070816434e-05, | |
| "loss": 0.8803, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.283782088964049e-05, | |
| "loss": 0.8841, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.252699064135758e-05, | |
| "loss": 0.8256, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.221623298258315e-05, | |
| "loss": 0.9302, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.190555093187967e-05, | |
| "loss": 0.846, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.159494750707526e-05, | |
| "loss": 0.9032, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.128442572523417e-05, | |
| "loss": 0.8943, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.097398860262776e-05, | |
| "loss": 0.8538, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.066363915470495e-05, | |
| "loss": 0.819, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 9.0353380396063e-05, | |
| "loss": 0.8628, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 9.004321534041835e-05, | |
| "loss": 0.8669, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.973314700057717e-05, | |
| "loss": 0.929, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.942317838840623e-05, | |
| "loss": 0.903, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.911331251480357e-05, | |
| "loss": 0.9046, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.880355238966923e-05, | |
| "loss": 0.8923, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.849390102187614e-05, | |
| "loss": 0.8832, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.818436141924072e-05, | |
| "loss": 0.8654, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.787493658849386e-05, | |
| "loss": 0.7729, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.756562953525152e-05, | |
| "loss": 0.8695, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 8.725644326398558e-05, | |
| "loss": 0.858, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.694738077799488e-05, | |
| "loss": 0.8157, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.663844507937562e-05, | |
| "loss": 0.8872, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.632963916899268e-05, | |
| "loss": 0.8417, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.602096604645009e-05, | |
| "loss": 0.8257, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.571242871006202e-05, | |
| "loss": 0.8416, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.540403015682381e-05, | |
| "loss": 0.8232, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.509577338238255e-05, | |
| "loss": 0.8865, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.478766138100834e-05, | |
| "loss": 0.9215, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.447969714556484e-05, | |
| "loss": 0.8856, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 8.417188366748052e-05, | |
| "loss": 0.8861, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.386422393671933e-05, | |
| "loss": 0.8931, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.35567209417519e-05, | |
| "loss": 0.8967, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.324937766952638e-05, | |
| "loss": 0.8643, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.294219710543932e-05, | |
| "loss": 0.8765, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.263518223330697e-05, | |
| "loss": 0.8973, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.2328336035336e-05, | |
| "loss": 0.9187, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.202166149209474e-05, | |
| "loss": 0.867, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.171516158248406e-05, | |
| "loss": 0.8898, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.140883928370855e-05, | |
| "loss": 0.8804, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.11026975712476e-05, | |
| "loss": 0.8615, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.07967394188264e-05, | |
| "loss": 0.9, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 8.049096779838719e-05, | |
| "loss": 0.8256, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 8.018538568006027e-05, | |
| "loss": 0.8613, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.987999603213519e-05, | |
| "loss": 0.8533, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.957480182103198e-05, | |
| "loss": 0.8545, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.926980601127225e-05, | |
| "loss": 0.9173, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.896501156545045e-05, | |
| "loss": 0.8456, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.866042144420502e-05, | |
| "loss": 0.7872, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.835603860618972e-05, | |
| "loss": 0.8519, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.805186600804489e-05, | |
| "loss": 0.8202, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 7.774790660436858e-05, | |
| "loss": 0.9014, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.744416334768808e-05, | |
| "loss": 0.8634, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.714063918843106e-05, | |
| "loss": 0.8986, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.683733707489699e-05, | |
| "loss": 0.8941, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.653425995322851e-05, | |
| "loss": 0.8697, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.623141076738271e-05, | |
| "loss": 0.2966, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.592879245910273e-05, | |
| "loss": 0.8992, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.562640796788892e-05, | |
| "loss": 0.8272, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.532426023097063e-05, | |
| "loss": 0.8195, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.502235218327731e-05, | |
| "loss": 0.8716, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 7.472068675741025e-05, | |
| "loss": 0.8775, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.4419266883614e-05, | |
| "loss": 0.8286, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.411809548974792e-05, | |
| "loss": 0.8105, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.38171755012578e-05, | |
| "loss": 0.8623, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.351650984114728e-05, | |
| "loss": 0.8618, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.32161014299497e-05, | |
| "loss": 0.8763, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.291595318569951e-05, | |
| "loss": 0.8745, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.2616068023904e-05, | |
| "loss": 0.868, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.231644885751507e-05, | |
| "loss": 0.8591, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.20170985969008e-05, | |
| "loss": 0.8007, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.171802014981726e-05, | |
| "loss": 0.8453, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 7.141921642138025e-05, | |
| "loss": 0.8404, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 7.112069031403704e-05, | |
| "loss": 0.3112, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 7.082244472753822e-05, | |
| "loss": 0.8688, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 7.052448255890957e-05, | |
| "loss": 0.9115, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 7.022680670242387e-05, | |
| "loss": 0.9139, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.992942004957271e-05, | |
| "loss": 0.8984, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.963232548903853e-05, | |
| "loss": 0.8946, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.933552590666659e-05, | |
| "loss": 0.8437, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.903902418543671e-05, | |
| "loss": 0.2748, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.874282320543557e-05, | |
| "loss": 0.8656, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 6.844692584382848e-05, | |
| "loss": 0.8439, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.815133497483157e-05, | |
| "loss": 0.9071, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.785605346968386e-05, | |
| "loss": 0.8897, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.756108419661931e-05, | |
| "loss": 0.9197, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.7266430020839e-05, | |
| "loss": 0.8341, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.697209380448333e-05, | |
| "loss": 0.8032, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.66780784066041e-05, | |
| "loss": 0.865, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.638438668313695e-05, | |
| "loss": 0.8774, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.609102148687333e-05, | |
| "loss": 0.8925, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.579798566743314e-05, | |
| "loss": 0.8886, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.550528207123668e-05, | |
| "loss": 0.8489, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 6.521291354147727e-05, | |
| "loss": 0.8443, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.492088291809354e-05, | |
| "loss": 0.8838, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.462919303774186e-05, | |
| "loss": 0.8804, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.43378467337687e-05, | |
| "loss": 0.9203, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.404684683618325e-05, | |
| "loss": 0.8373, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.375619617162985e-05, | |
| "loss": 0.8708, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.34658975633605e-05, | |
| "loss": 0.8661, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.317595383120756e-05, | |
| "loss": 0.7935, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.288636779155621e-05, | |
| "loss": 0.9064, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.259714225731718e-05, | |
| "loss": 0.8391, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 6.230828003789949e-05, | |
| "loss": 0.863, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.20197839391829e-05, | |
| "loss": 0.8561, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.173165676349103e-05, | |
| "loss": 0.8073, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.144390130956384e-05, | |
| "loss": 0.8308, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.115652037253053e-05, | |
| "loss": 0.8818, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.0869516743882516e-05, | |
| "loss": 0.8434, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.058289321144608e-05, | |
| "loss": 0.8498, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.02966525593555e-05, | |
| "loss": 0.9072, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 6.001079756802592e-05, | |
| "loss": 0.8502, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 5.9725331014126294e-05, | |
| "loss": 0.8717, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 5.944025567055251e-05, | |
| "loss": 0.8952, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.9155574306400395e-05, | |
| "loss": 0.8307, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.887128968693887e-05, | |
| "loss": 0.3252, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.858740457358298e-05, | |
| "loss": 0.7903, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.830392172386723e-05, | |
| "loss": 0.8551, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.802084389141862e-05, | |
| "loss": 0.8756, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.773817382593008e-05, | |
| "loss": 0.8274, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.7455914273133646e-05, | |
| "loss": 0.8234, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.717406797477372e-05, | |
| "loss": 0.8398, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.6892637668580716e-05, | |
| "loss": 0.8431, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.6611626088244194e-05, | |
| "loss": 0.8118, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 5.6331035963386315e-05, | |
| "loss": 0.2887, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.6050870019535494e-05, | |
| "loss": 0.8779, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.577113097809989e-05, | |
| "loss": 0.8255, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.549182155634076e-05, | |
| "loss": 0.896, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.521294446734637e-05, | |
| "loss": 0.8906, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.493450242000546e-05, | |
| "loss": 0.8753, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.465649811898098e-05, | |
| "loss": 0.8464, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.43789342646837e-05, | |
| "loss": 0.8555, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.410181355324622e-05, | |
| "loss": 0.8804, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.382513867649663e-05, | |
| "loss": 0.9589, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 5.3548912321932244e-05, | |
| "loss": 0.8507, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.32731371726938e-05, | |
| "loss": 0.7999, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.2997815907539164e-05, | |
| "loss": 0.8169, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.272295120081732e-05, | |
| "loss": 0.8257, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.2448545722442486e-05, | |
| "loss": 0.8272, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.217460213786821e-05, | |
| "loss": 0.8695, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.190112310806126e-05, | |
| "loss": 0.8687, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.162811128947602e-05, | |
| "loss": 0.8309, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.135556933402862e-05, | |
| "loss": 0.773, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.108349988907111e-05, | |
| "loss": 0.7721, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 5.0811905597365684e-05, | |
| "loss": 0.8258, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 5.054078909705926e-05, | |
| "loss": 0.8433, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 5.0270153021657674e-05, | |
| "loss": 0.3298, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 5.000000000000002e-05, | |
| "loss": 0.8541, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.973033265623332e-05, | |
| "loss": 0.8208, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.946115360978696e-05, | |
| "loss": 0.2857, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.919246547534708e-05, | |
| "loss": 0.8367, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.892427086283147e-05, | |
| "loss": 0.8102, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.8656572377363974e-05, | |
| "loss": 0.8184, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.8389372619249326e-05, | |
| "loss": 0.8862, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.8122674183947836e-05, | |
| "loss": 0.8097, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.78564796620502e-05, | |
| "loss": 0.8373, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.759079163925223e-05, | |
| "loss": 0.8101, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.732561269632992e-05, | |
| "loss": 0.9126, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.706094540911429e-05, | |
| "loss": 0.7949, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.6796792348466356e-05, | |
| "loss": 0.7682, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.653315608025207e-05, | |
| "loss": 0.8947, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.6270039165317605e-05, | |
| "loss": 0.2755, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.600744415946438e-05, | |
| "loss": 0.8345, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.574537361342407e-05, | |
| "loss": 0.8849, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.548383007283412e-05, | |
| "loss": 0.933, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 4.522281607821288e-05, | |
| "loss": 0.8122, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.4962334164934806e-05, | |
| "loss": 0.7714, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.470238686320606e-05, | |
| "loss": 0.8433, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.444297669803981e-05, | |
| "loss": 0.8141, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.4184106189231625e-05, | |
| "loss": 0.7967, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.3925777851335206e-05, | |
| "loss": 0.8322, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.3667994193637796e-05, | |
| "loss": 0.8329, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.341075772013589e-05, | |
| "loss": 0.831, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.315407092951078e-05, | |
| "loss": 0.8835, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.289793631510448e-05, | |
| "loss": 0.8034, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.264235636489542e-05, | |
| "loss": 0.8084, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.238733356147414e-05, | |
| "loss": 0.8064, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.213287038201943e-05, | |
| "loss": 0.8606, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.187896929827414e-05, | |
| "loss": 0.8635, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.1625632776521037e-05, | |
| "loss": 0.8556, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.137286327755913e-05, | |
| "loss": 0.8133, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.112066325667954e-05, | |
| "loss": 0.8767, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.086903516364179e-05, | |
| "loss": 0.3187, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.0617981442649855e-05, | |
| "loss": 0.7819, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.03675045323286e-05, | |
| "loss": 0.8333, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 4.0117606865699975e-05, | |
| "loss": 0.8633, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 3.9868290870159405e-05, | |
| "loss": 0.8909, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.961955896745224e-05, | |
| "loss": 0.7902, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.937141357365023e-05, | |
| "loss": 0.8443, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.9123857099127936e-05, | |
| "loss": 0.8641, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.887689194853951e-05, | |
| "loss": 0.9134, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.863052052079528e-05, | |
| "loss": 0.2913, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.838474520903825e-05, | |
| "loss": 0.8777, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.813956840062118e-05, | |
| "loss": 0.8308, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.7894992477083224e-05, | |
| "loss": 0.8855, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.7651019814126654e-05, | |
| "loss": 0.9062, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.7407652781594095e-05, | |
| "loss": 0.8469, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 3.716489374344527e-05, | |
| "loss": 0.8527, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.692274505773419e-05, | |
| "loss": 0.812, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.668120907658603e-05, | |
| "loss": 0.8586, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.644028814617454e-05, | |
| "loss": 0.8885, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.6199984606699155e-05, | |
| "loss": 0.3097, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.596030079236212e-05, | |
| "loss": 0.8789, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.5721239031346066e-05, | |
| "loss": 0.7722, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.548280164579126e-05, | |
| "loss": 0.8457, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.5244990951772974e-05, | |
| "loss": 0.9218, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.500780925927915e-05, | |
| "loss": 0.7969, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.477125887218792e-05, | |
| "loss": 0.8946, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.453534208824507e-05, | |
| "loss": 0.8432, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.430006119904197e-05, | |
| "loss": 0.8237, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.406541848999312e-05, | |
| "loss": 0.8608, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.383141624031408e-05, | |
| "loss": 0.9161, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.359805672299918e-05, | |
| "loss": 0.847, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.336534220479961e-05, | |
| "loss": 0.8219, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.3133274946201334e-05, | |
| "loss": 0.8323, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.290185720140301e-05, | |
| "loss": 0.9159, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.2671091218294284e-05, | |
| "loss": 0.8473, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 3.244097923843398e-05, | |
| "loss": 0.7981, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.2211523497028015e-05, | |
| "loss": 0.8305, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.198272622290804e-05, | |
| "loss": 0.8608, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.1754589638509644e-05, | |
| "loss": 0.8392, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.152711595985065e-05, | |
| "loss": 0.8483, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.130030739650983e-05, | |
| "loss": 0.8795, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.10741661516053e-05, | |
| "loss": 0.8519, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.0848694421773075e-05, | |
| "loss": 0.8352, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.062389439714584e-05, | |
| "loss": 0.2794, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.0399768261331662e-05, | |
| "loss": 0.827, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.0176318191392726e-05, | |
| "loss": 0.8537, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 2.995354635782417e-05, | |
| "loss": 0.8556, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.9731454924533086e-05, | |
| "loss": 0.83, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.9510046048817497e-05, | |
| "loss": 0.8731, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.9289321881345254e-05, | |
| "loss": 0.8531, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.906928456613336e-05, | |
| "loss": 0.8603, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.8849936240527008e-05, | |
| "loss": 0.8553, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.8631279035178793e-05, | |
| "loss": 0.8384, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.8413315074028158e-05, | |
| "loss": 0.8604, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.8196046474280667e-05, | |
| "loss": 0.8735, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.797947534638736e-05, | |
| "loss": 0.8304, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 2.776360379402445e-05, | |
| "loss": 0.8339, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.7548433914072734e-05, | |
| "loss": 0.8729, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.7333967796597315e-05, | |
| "loss": 0.8982, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.7120207524827168e-05, | |
| "loss": 0.8141, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.690715517513508e-05, | |
| "loss": 0.8074, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.669481281701739e-05, | |
| "loss": 0.8095, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.6483182513073835e-05, | |
| "loss": 0.8131, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.6272266318987603e-05, | |
| "loss": 0.301, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.6062066283505403e-05, | |
| "loss": 0.8453, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.5852584448417328e-05, | |
| "loss": 0.8641, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 2.564382284853738e-05, | |
| "loss": 0.859, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.5435783511683443e-05, | |
| "loss": 0.8272, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.5228468458657584e-05, | |
| "loss": 0.8128, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.502187970322657e-05, | |
| "loss": 0.8507, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.4816019252102273e-05, | |
| "loss": 0.8377, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.461088910492202e-05, | |
| "loss": 0.8904, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.440649125422937e-05, | |
| "loss": 0.8466, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.420282768545469e-05, | |
| "loss": 0.8064, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.3999900376895845e-05, | |
| "loss": 0.8307, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.3797711299698923e-05, | |
| "loss": 0.8666, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.3596262417839255e-05, | |
| "loss": 0.8841, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 2.339555568810221e-05, | |
| "loss": 0.8866, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.319559306006417e-05, | |
| "loss": 0.8206, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.2996376476073723e-05, | |
| "loss": 0.8701, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.279790787123267e-05, | |
| "loss": 0.2702, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.260018917337726e-05, | |
| "loss": 0.7411, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.240322230305951e-05, | |
| "loss": 0.9003, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.2207009173528527e-05, | |
| "loss": 0.8311, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.201155169071184e-05, | |
| "loss": 0.8651, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.181685175319702e-05, | |
| "loss": 0.8779, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.1622911252213197e-05, | |
| "loss": 0.8515, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 2.142973207161265e-05, | |
| "loss": 0.8093, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.1237316087852466e-05, | |
| "loss": 0.837, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.1045665169976468e-05, | |
| "loss": 0.3116, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.0854781179596938e-05, | |
| "loss": 0.8511, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.0664665970876496e-05, | |
| "loss": 0.8117, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.047532139051026e-05, | |
| "loss": 0.8685, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.0286749277707782e-05, | |
| "loss": 0.8709, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 2.009895146417512e-05, | |
| "loss": 0.8471, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.9911929774097215e-05, | |
| "loss": 0.8662, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.9725686024120095e-05, | |
| "loss": 0.8608, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.9540222023333166e-05, | |
| "loss": 0.853, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.9355539573251734e-05, | |
| "loss": 0.8789, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.917164046779948e-05, | |
| "loss": 0.8602, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.898852649329095e-05, | |
| "loss": 0.8645, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.880619942841435e-05, | |
| "loss": 0.8072, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.8624661044214152e-05, | |
| "loss": 0.8481, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.8443913104073983e-05, | |
| "loss": 0.7949, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.826395736369937e-05, | |
| "loss": 0.8679, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.808479557110081e-05, | |
| "loss": 0.8335, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.7906429466576767e-05, | |
| "loss": 0.3107, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.7728860782696664e-05, | |
| "loss": 0.8665, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.7552091244284197e-05, | |
| "loss": 0.2925, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.7376122568400532e-05, | |
| "loss": 0.8225, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.720095646432751e-05, | |
| "loss": 0.8583, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.702659463355125e-05, | |
| "loss": 0.8405, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.6853038769745467e-05, | |
| "loss": 0.8011, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.668029055875512e-05, | |
| "loss": 0.8382, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.6508351678579882e-05, | |
| "loss": 0.8465, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.6337223799358026e-05, | |
| "loss": 0.7947, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.616690858335014e-05, | |
| "loss": 0.8132, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.5997407684922862e-05, | |
| "loss": 0.8854, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.5828722750533008e-05, | |
| "loss": 0.878, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.566085541871145e-05, | |
| "loss": 0.7978, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.549380732004718e-05, | |
| "loss": 0.8901, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.5327580077171587e-05, | |
| "loss": 0.8652, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.5162175304742632e-05, | |
| "loss": 0.8608, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.4997594609429088e-05, | |
| "loss": 0.7125, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.4833839589895071e-05, | |
| "loss": 0.8811, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.467091183678444e-05, | |
| "loss": 0.8627, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.4508812932705363e-05, | |
| "loss": 0.8571, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.4347544452214868e-05, | |
| "loss": 0.8631, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.4187107961803703e-05, | |
| "loss": 0.8395, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.402750501988097e-05, | |
| "loss": 0.3013, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.3868737176759106e-05, | |
| "loss": 0.8407, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.3710805974638696e-05, | |
| "loss": 0.8315, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.3553712947593656e-05, | |
| "loss": 0.8442, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.339745962155613e-05, | |
| "loss": 0.8315, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.3242047514301858e-05, | |
| "loss": 0.8043, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.308747813543536e-05, | |
| "loss": 0.8817, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.293375298637518e-05, | |
| "loss": 0.812, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.2780873560339468e-05, | |
| "loss": 0.832, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.2628841342331388e-05, | |
| "loss": 0.284, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.2477657809124631e-05, | |
| "loss": 0.8757, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.2327324429249232e-05, | |
| "loss": 0.9018, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.2177842662977135e-05, | |
| "loss": 0.8504, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.2029213962308172e-05, | |
| "loss": 0.8857, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.188143977095576e-05, | |
| "loss": 0.8646, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1734521524333086e-05, | |
| "loss": 0.7393, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1588460649539035e-05, | |
| "loss": 0.824, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1443258565344329e-05, | |
| "loss": 0.8312, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.129891668217783e-05, | |
| "loss": 0.826, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1155436402112785e-05, | |
| "loss": 0.8526, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.1012819118853147e-05, | |
| "loss": 0.8498, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.0871066217720172e-05, | |
| "loss": 0.8111, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0730179075638868e-05, | |
| "loss": 0.7918, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0590159061124605e-05, | |
| "loss": 0.8257, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0451007534269907e-05, | |
| "loss": 0.8581, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0312725846731175e-05, | |
| "loss": 0.8336, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.0175315341715597e-05, | |
| "loss": 0.8532, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.003877735396801e-05, | |
| "loss": 0.8009, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 9.903113209758096e-06, | |
| "loss": 0.8773, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 9.768324226867353e-06, | |
| "loss": 0.8002, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 9.634411714576353e-06, | |
| "loss": 0.8647, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 9.501376973651999e-06, | |
| "loss": 0.8663, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 9.369221296335006e-06, | |
| "loss": 0.868, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 9.237945966327133e-06, | |
| "loss": 0.8456, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 9.107552258778907e-06, | |
| "loss": 0.885, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.978041440277163e-06, | |
| "loss": 0.8014, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.849414768832687e-06, | |
| "loss": 0.2948, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.72167349386811e-06, | |
| "loss": 0.8277, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.5948188562057e-06, | |
| "loss": 0.8294, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.46885208805529e-06, | |
| "loss": 0.9009, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.343774413002381e-06, | |
| "loss": 0.7945, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 8.219587045996203e-06, | |
| "loss": 0.776, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 8.096291193337934e-06, | |
| "loss": 0.8616, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.973888052668943e-06, | |
| "loss": 0.8768, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.852378812959227e-06, | |
| "loss": 0.9217, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.731764654495832e-06, | |
| "loss": 0.8185, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.612046748871327e-06, | |
| "loss": 0.8605, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.493226258972519e-06, | |
| "loss": 0.7954, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.375304338969136e-06, | |
| "loss": 0.8045, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.258282134302519e-06, | |
| "loss": 0.88, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.142160781674645e-06, | |
| "loss": 0.8009, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 7.026941409036991e-06, | |
| "loss": 0.8587, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 6.9126251355795864e-06, | |
| "loss": 0.3098, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.7992130717201564e-06, | |
| "loss": 0.8006, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.68670631909335e-06, | |
| "loss": 0.8399, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.5751059705400295e-06, | |
| "loss": 0.8155, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.464413110096601e-06, | |
| "loss": 0.7898, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.354628812984576e-06, | |
| "loss": 0.833, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.245754145600091e-06, | |
| "loss": 0.8139, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.137790165503499e-06, | |
| "loss": 0.8778, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 6.030737921409169e-06, | |
| "loss": 0.8698, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 5.9245984531752784e-06, | |
| "loss": 0.8491, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 5.8193727917936536e-06, | |
| "loss": 0.8826, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.715061959379875e-06, | |
| "loss": 0.8765, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.611666969163243e-06, | |
| "loss": 0.8123, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.509188825476963e-06, | |
| "loss": 0.8342, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.4076285237483984e-06, | |
| "loss": 0.9165, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.306987050489442e-06, | |
| "loss": 0.3141, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.20726538328683e-06, | |
| "loss": 0.785, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.1084644907927526e-06, | |
| "loss": 0.8165, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 5.0105853327154e-06, | |
| "loss": 0.8333, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 4.913628859809638e-06, | |
| "loss": 0.8787, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 4.817596013867764e-06, | |
| "loss": 0.8619, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 4.722487727710368e-06, | |
| "loss": 0.8821, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.628304925177318e-06, | |
| "loss": 0.8327, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.5350485211186675e-06, | |
| "loss": 0.308, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.442719421385922e-06, | |
| "loss": 0.8425, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.351318522823133e-06, | |
| "loss": 0.8183, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.260846713258193e-06, | |
| "loss": 0.8255, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.171304871494264e-06, | |
| "loss": 0.8506, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.082693867301224e-06, | |
| "loss": 0.8721, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 3.99501456140714e-06, | |
| "loss": 0.9019, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 3.908267805490051e-06, | |
| "loss": 0.2917, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 3.822454442169576e-06, | |
| "loss": 0.2966, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.7375753049987973e-06, | |
| "loss": 0.8575, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.6536312184560993e-06, | |
| "loss": 0.8433, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.570622997937234e-06, | |
| "loss": 0.8301, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.488551449747357e-06, | |
| "loss": 0.8446, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.40741737109318e-06, | |
| "loss": 0.8303, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.3272215500752658e-06, | |
| "loss": 0.8115, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.247964765680389e-06, | |
| "loss": 0.8307, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.169647787773866e-06, | |
| "loss": 0.8876, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.092271377092215e-06, | |
| "loss": 0.8771, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.0158362852356626e-06, | |
| "loss": 0.854, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.9403432546609043e-06, | |
| "loss": 0.8442, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.8657930186738567e-06, | |
| "loss": 0.8397, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.7921863014225503e-06, | |
| "loss": 0.8023, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.7195238178900684e-06, | |
| "loss": 0.7843, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.647806273887665e-06, | |
| "loss": 0.7725, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.577034366047848e-06, | |
| "loss": 0.8278, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.5072087818176382e-06, | |
| "loss": 0.8379, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.438330199451877e-06, | |
| "loss": 0.8656, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.3703992880066638e-06, | |
| "loss": 0.2981, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.3034167073328284e-06, | |
| "loss": 0.8223, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 2.237383108069546e-06, | |
| "loss": 0.8424, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 2.1722991316380003e-06, | |
| "loss": 0.8296, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 2.1081654102351635e-06, | |
| "loss": 0.8222, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 2.0449825668276244e-06, | |
| "loss": 0.7831, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.9827512151456173e-06, | |
| "loss": 0.8514, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.921471959676957e-06, | |
| "loss": 0.816, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.8611453956612347e-06, | |
| "loss": 0.9289, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.8017721090840322e-06, | |
| "loss": 0.7767, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.7433526766711728e-06, | |
| "loss": 0.8143, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.6858876658832233e-06, | |
| "loss": 0.837, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.6293776349098677e-06, | |
| "loss": 0.3026, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.5738231326645758e-06, | |
| "loss": 0.8983, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.5192246987791981e-06, | |
| "loss": 0.8715, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.465582863598791e-06, | |
| "loss": 0.8796, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.4128981481764115e-06, | |
| "loss": 0.7794, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.3611710642681076e-06, | |
| "loss": 0.9197, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.3104021143278911e-06, | |
| "loss": 0.8703, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.2605917915028742e-06, | |
| "loss": 0.9204, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.2117405796285286e-06, | |
| "loss": 0.8409, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.163848953223934e-06, | |
| "loss": 0.838, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.1169173774871478e-06, | |
| "loss": 0.7999, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.0709463082907545e-06, | |
| "loss": 0.8404, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.0259361921774013e-06, | |
| "loss": 0.8348, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 9.818874663554357e-07, | |
| "loss": 0.8589, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 9.388005586947191e-07, | |
| "loss": 0.7929, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 8.966758877224201e-07, | |
| "loss": 0.8093, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 8.555138626189618e-07, | |
| "loss": 0.8276, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 8.153148832140466e-07, | |
| "loss": 0.3251, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 7.760793399827937e-07, | |
| "loss": 0.8685, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 7.378076140419187e-07, | |
| "loss": 0.8518, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 7.00500077146038e-07, | |
| "loss": 0.8802, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 6.641570916840922e-07, | |
| "loss": 0.8251, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 6.287790106757396e-07, | |
| "loss": 0.8853, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 5.943661777680354e-07, | |
| "loss": 0.822, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 5.609189272320237e-07, | |
| "loss": 0.7566, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 5.284375839594957e-07, | |
| "loss": 0.8198, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 4.969224634598591e-07, | |
| "loss": 0.7951, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 4.6637387185706206e-07, | |
| "loss": 0.8869, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 4.3679210588661866e-07, | |
| "loss": 0.8629, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 4.081774528927218e-07, | |
| "loss": 0.8464, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 3.805301908254455e-07, | |
| "loss": 0.7668, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 3.5385058823809156e-07, | |
| "loss": 0.8875, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 3.281389042844918e-07, | |
| "loss": 0.8502, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 3.0339538871660965e-07, | |
| "loss": 0.8048, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.7962028188198706e-07, | |
| "loss": 0.8466, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.5681381472151267e-07, | |
| "loss": 0.8698, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.3497620876711257e-07, | |
| "loss": 0.8727, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.141076761396521e-07, | |
| "loss": 0.9219, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.9420841954681525e-07, | |
| "loss": 0.8588, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.752786322811839e-07, | |
| "loss": 0.8077, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.5731849821833954e-07, | |
| "loss": 0.8445, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.403281918150978e-07, | |
| "loss": 0.2915, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.2430787810776555e-07, | |
| "loss": 0.8511, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.0925771271058649e-07, | |
| "loss": 0.8297, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 9.517784181422019e-08, | |
| "loss": 0.8965, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 8.206840218430989e-08, | |
| "loss": 0.8259, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 6.992952116013918e-08, | |
| "loss": 0.3133, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 5.8761316653455076e-08, | |
| "loss": 0.8463, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 4.856389714723575e-08, | |
| "loss": 0.8733, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 3.933736169471347e-08, | |
| "loss": 0.8935, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 3.1081799918375454e-08, | |
| "loss": 0.8309, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.379729200908676e-08, | |
| "loss": 0.8611, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.7483908725357545e-08, | |
| "loss": 0.823, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.2141711392588129e-08, | |
| "loss": 0.8602, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 7.770751902513862e-09, | |
| "loss": 0.842, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.371072712727742e-09, | |
| "loss": 0.8857, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.9427068461808083e-09, | |
| "loss": 0.8334, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.856778909601012e-10, | |
| "loss": 0.3639, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 0.0, | |
| "loss": 0.5759, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 1040, | |
| "total_flos": 1494862192181248.0, | |
| "train_loss": 0.8496783473457281, | |
| "train_runtime": 12739.3979, | |
| "train_samples_per_second": 10.445, | |
| "train_steps_per_second": 0.082 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 1040, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 1000, | |
| "total_flos": 1494862192181248.0, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |