| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.0, |
| "eval_steps": 100, |
| "global_step": 2000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 2.7669, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 2.782, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.5e-06, |
| "loss": 2.7742, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 2.6943, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.5e-06, |
| "loss": 2.5422, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3e-06, |
| "loss": 2.3945, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.5000000000000004e-06, |
| "loss": 2.3055, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 2.2746, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.5e-06, |
| "loss": 2.1986, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 5e-06, |
| "loss": 2.1832, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 5.500000000000001e-06, |
| "loss": 2.122, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 6e-06, |
| "loss": 2.0978, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 6.5000000000000004e-06, |
| "loss": 2.1149, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 7.000000000000001e-06, |
| "loss": 2.0188, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 7.5e-06, |
| "loss": 1.9337, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 1.9943, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 8.500000000000002e-06, |
| "loss": 1.9404, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 9e-06, |
| "loss": 1.9063, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 9.5e-06, |
| "loss": 1.9297, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1e-05, |
| "loss": 1.9235, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.05e-05, |
| "loss": 1.7984, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.1000000000000001e-05, |
| "loss": 1.8552, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.1500000000000002e-05, |
| "loss": 1.8237, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.2e-05, |
| "loss": 1.7755, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.25e-05, |
| "loss": 1.7878, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.3000000000000001e-05, |
| "loss": 1.7853, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.3500000000000001e-05, |
| "loss": 1.7371, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.4000000000000001e-05, |
| "loss": 1.789, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.45e-05, |
| "loss": 1.7502, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.5e-05, |
| "loss": 1.7098, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.55e-05, |
| "loss": 1.7511, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 1.7727, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.65e-05, |
| "loss": 1.7315, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7000000000000003e-05, |
| "loss": 1.7496, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.75e-05, |
| "loss": 1.6851, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8e-05, |
| "loss": 1.6738, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.85e-05, |
| "loss": 1.6682, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9e-05, |
| "loss": 1.6803, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9500000000000003e-05, |
| "loss": 1.6743, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2e-05, |
| "loss": 1.6752, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.05e-05, |
| "loss": 1.6464, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.1e-05, |
| "loss": 1.6593, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.15e-05, |
| "loss": 1.6329, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2000000000000003e-05, |
| "loss": 1.6469, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.25e-05, |
| "loss": 1.6659, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3000000000000003e-05, |
| "loss": 1.6389, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.35e-05, |
| "loss": 1.6583, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4e-05, |
| "loss": 1.6079, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.45e-05, |
| "loss": 1.6551, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.5e-05, |
| "loss": 1.643, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.5500000000000003e-05, |
| "loss": 1.5879, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.6000000000000002e-05, |
| "loss": 1.681, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.6500000000000004e-05, |
| "loss": 1.5742, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.7000000000000002e-05, |
| "loss": 1.6519, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.7500000000000004e-05, |
| "loss": 1.5784, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.8000000000000003e-05, |
| "loss": 1.5889, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.8499999999999998e-05, |
| "loss": 1.5601, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.9e-05, |
| "loss": 1.586, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.95e-05, |
| "loss": 1.5837, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3e-05, |
| "loss": 1.5894, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.05e-05, |
| "loss": 1.6306, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.1e-05, |
| "loss": 1.5699, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.15e-05, |
| "loss": 1.6232, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.2000000000000005e-05, |
| "loss": 1.6154, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.2500000000000004e-05, |
| "loss": 1.5821, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.3e-05, |
| "loss": 1.6217, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.35e-05, |
| "loss": 1.5406, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.4000000000000007e-05, |
| "loss": 1.5578, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.45e-05, |
| "loss": 1.5548, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.5e-05, |
| "loss": 1.6024, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.55e-05, |
| "loss": 1.5928, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.6e-05, |
| "loss": 1.6047, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.65e-05, |
| "loss": 1.5717, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.7e-05, |
| "loss": 1.5685, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.7500000000000003e-05, |
| "loss": 1.5319, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.8e-05, |
| "loss": 1.545, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.85e-05, |
| "loss": 1.5537, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9000000000000006e-05, |
| "loss": 1.5679, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9500000000000005e-05, |
| "loss": 1.5901, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4e-05, |
| "loss": 1.5615, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.05e-05, |
| "loss": 1.5512, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.1e-05, |
| "loss": 1.5537, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.15e-05, |
| "loss": 1.5052, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.2e-05, |
| "loss": 1.5378, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.25e-05, |
| "loss": 1.5745, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.3e-05, |
| "loss": 1.5877, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.35e-05, |
| "loss": 1.5404, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.4000000000000006e-05, |
| "loss": 1.5483, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.4500000000000004e-05, |
| "loss": 1.5316, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.5e-05, |
| "loss": 1.5617, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.55e-05, |
| "loss": 1.5685, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.600000000000001e-05, |
| "loss": 1.5747, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.6500000000000005e-05, |
| "loss": 1.5253, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.7e-05, |
| "loss": 1.5176, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.75e-05, |
| "loss": 1.5684, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.8e-05, |
| "loss": 1.5148, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.85e-05, |
| "loss": 1.5389, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.9e-05, |
| "loss": 1.5272, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.9500000000000004e-05, |
| "loss": 1.5633, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 5e-05, |
| "loss": 1.519, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.05, |
| "eval_acceptance_rate": 0.6372560858726501, |
| "eval_acceptance_rate_1": 0.6372109651565552, |
| "eval_acceptance_rate_2": 0.6359634399414062, |
| "eval_acceptance_rate_3": 0.6371628642082214, |
| "eval_acceptance_rate_4": 0.6373171806335449, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 1.9736412211350056, |
| "eval_expected_tokens": 2.467051526418757, |
| "eval_loss": 1.508664608001709, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.36676288160468923, |
| "eval_runtime": 121.2026, |
| "eval_samples_per_second": 4.084, |
| "eval_steps_per_second": 0.132, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.9999965825477314e-05, |
| "loss": 1.4603, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.999986330200268e-05, |
| "loss": 1.5116, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.999969242985639e-05, |
| "loss": 1.5292, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.999945320950562e-05, |
| "loss": 1.5279, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.999914564160437e-05, |
| "loss": 1.4961, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.999876972699352e-05, |
| "loss": 1.5504, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.999832546670082e-05, |
| "loss": 1.5631, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.999781286194085e-05, |
| "loss": 1.4472, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.9997231914115064e-05, |
| "loss": 1.4779, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9996582624811725e-05, |
| "loss": 1.5227, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.999586499580599e-05, |
| "loss": 1.5161, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9995079029059824e-05, |
| "loss": 1.5485, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.999422472672202e-05, |
| "loss": 1.5273, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.999330209112822e-05, |
| "loss": 1.5809, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9992311124800875e-05, |
| "loss": 1.523, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.999125183044924e-05, |
| "loss": 1.4913, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.999012421096938e-05, |
| "loss": 1.4649, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.998892826944418e-05, |
| "loss": 1.4986, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.998766400914329e-05, |
| "loss": 1.5137, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9986331433523156e-05, |
| "loss": 1.4823, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9984930546226975e-05, |
| "loss": 1.4663, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9983461351084735e-05, |
| "loss": 1.4819, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9981923852113145e-05, |
| "loss": 1.5535, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9980318053515686e-05, |
| "loss": 1.4416, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.997864395968252e-05, |
| "loss": 1.4651, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.997690157519059e-05, |
| "loss": 1.4648, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.99750909048035e-05, |
| "loss": 1.5525, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.997321195347154e-05, |
| "loss": 1.501, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.99712647263317e-05, |
| "loss": 1.4397, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.996924922870762e-05, |
| "loss": 1.4644, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.99671654661096e-05, |
| "loss": 1.4249, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.996501344423456e-05, |
| "loss": 1.4381, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.996279316896606e-05, |
| "loss": 1.463, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.996050464637423e-05, |
| "loss": 1.4517, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.995814788271582e-05, |
| "loss": 1.5317, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.9955722884434114e-05, |
| "loss": 1.4306, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.995322965815898e-05, |
| "loss": 1.4642, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.995066821070679e-05, |
| "loss": 1.5041, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.9948038549080456e-05, |
| "loss": 1.4405, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.994534068046937e-05, |
| "loss": 1.4534, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.9942574612249394e-05, |
| "loss": 1.4645, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.9939740351982856e-05, |
| "loss": 1.4777, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.993683790741852e-05, |
| "loss": 1.487, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.993386728649156e-05, |
| "loss": 1.4415, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.9930828497323526e-05, |
| "loss": 1.406, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.9927721548222374e-05, |
| "loss": 1.4193, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.992454644768236e-05, |
| "loss": 1.4266, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.9921303204384104e-05, |
| "loss": 1.4054, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.991799182719451e-05, |
| "loss": 1.4332, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.991461232516675e-05, |
| "loss": 1.4127, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.991116470754025e-05, |
| "loss": 1.4296, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.990764898374067e-05, |
| "loss": 1.4264, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.990406516337987e-05, |
| "loss": 1.4147, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.9900413256255876e-05, |
| "loss": 1.4413, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.9896693272352846e-05, |
| "loss": 1.4793, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.98929052218411e-05, |
| "loss": 1.4607, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.9889049115077005e-05, |
| "loss": 1.4065, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.988512496260301e-05, |
| "loss": 1.4479, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.988113277514761e-05, |
| "loss": 1.396, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.9877072563625285e-05, |
| "loss": 1.4241, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.9872944339136503e-05, |
| "loss": 1.3647, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.986874811296767e-05, |
| "loss": 1.4595, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.9864483896591094e-05, |
| "loss": 1.4043, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.9860151701665004e-05, |
| "loss": 1.4215, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.985575154003345e-05, |
| "loss": 1.3932, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.985128342372629e-05, |
| "loss": 1.4363, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.984674736495919e-05, |
| "loss": 1.4503, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.984214337613357e-05, |
| "loss": 1.4012, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.983747146983656e-05, |
| "loss": 1.3571, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.9832731658840956e-05, |
| "loss": 1.3973, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.982792395610524e-05, |
| "loss": 1.4165, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.982304837477348e-05, |
| "loss": 1.3847, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.981810492817532e-05, |
| "loss": 1.426, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.981309362982598e-05, |
| "loss": 1.3925, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.980801449342613e-05, |
| "loss": 1.4092, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.980286753286195e-05, |
| "loss": 1.3513, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.9797652762205025e-05, |
| "loss": 1.4103, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.979237019571235e-05, |
| "loss": 1.4159, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.978701984782625e-05, |
| "loss": 1.4049, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.978160173317438e-05, |
| "loss": 1.3426, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.9776115866569654e-05, |
| "loss": 1.3792, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.9770562263010225e-05, |
| "loss": 1.3671, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.976494093767943e-05, |
| "loss": 1.4226, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.975925190594575e-05, |
| "loss": 1.3503, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.9753495183362796e-05, |
| "loss": 1.3956, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.974767078566922e-05, |
| "loss": 1.3911, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.9741778728788705e-05, |
| "loss": 1.3835, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.97358190288299e-05, |
| "loss": 1.4129, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.9729791702086414e-05, |
| "loss": 1.3966, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.972369676503672e-05, |
| "loss": 1.3682, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.971753423434413e-05, |
| "loss": 1.343, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.971130412685679e-05, |
| "loss": 1.3597, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.970500645960756e-05, |
| "loss": 1.396, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.9698641249814036e-05, |
| "loss": 1.3425, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.9692208514878444e-05, |
| "loss": 1.3531, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.968570827238764e-05, |
| "loss": 1.3967, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.967914054011305e-05, |
| "loss": 1.4169, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.967250533601059e-05, |
| "loss": 1.3789, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.966580267822065e-05, |
| "loss": 1.368, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.965903258506806e-05, |
| "loss": 1.3936, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.1, |
| "eval_acceptance_rate": 0.6973184943199158, |
| "eval_acceptance_rate_1": 0.6964073777198792, |
| "eval_acceptance_rate_2": 0.696760356426239, |
| "eval_acceptance_rate_3": 0.6975700855255127, |
| "eval_acceptance_rate_4": 0.6972681879997253, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.2072695235703748, |
| "eval_expected_tokens": 2.759086904462969, |
| "eval_loss": 1.2887985706329346, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.4397717261157423, |
| "eval_runtime": 114.4635, |
| "eval_samples_per_second": 4.325, |
| "eval_steps_per_second": 0.14, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.965219507506198e-05, |
| "loss": 1.3912, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.9645290166895906e-05, |
| "loss": 1.4015, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.9638317879447606e-05, |
| "loss": 1.3633, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.963127823177902e-05, |
| "loss": 1.3772, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.96241712431363e-05, |
| "loss": 1.357, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.9616996932949666e-05, |
| "loss": 1.2806, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.960975532083342e-05, |
| "loss": 1.3648, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.960244642658585e-05, |
| "loss": 1.3354, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.959507027018918e-05, |
| "loss": 1.3347, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.958762687180956e-05, |
| "loss": 1.344, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.958011625179695e-05, |
| "loss": 1.3561, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.9572538430685094e-05, |
| "loss": 1.3511, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.956489342919147e-05, |
| "loss": 1.3671, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.9557181268217227e-05, |
| "loss": 1.3954, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.95494019688471e-05, |
| "loss": 1.3535, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.95415555523494e-05, |
| "loss": 1.3675, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.953364204017595e-05, |
| "loss": 1.3354, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.952566145396197e-05, |
| "loss": 1.3595, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.951761381552609e-05, |
| "loss": 1.3392, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.9509499146870236e-05, |
| "loss": 1.3648, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.9501317470179606e-05, |
| "loss": 1.3735, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.94930688078226e-05, |
| "loss": 1.3218, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.948475318235073e-05, |
| "loss": 1.3222, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.9476370616498617e-05, |
| "loss": 1.3465, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.946792113318386e-05, |
| "loss": 1.3207, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.945940475550703e-05, |
| "loss": 1.3665, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.945082150675159e-05, |
| "loss": 1.3443, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.944217141038379e-05, |
| "loss": 1.3433, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.9433454490052675e-05, |
| "loss": 1.3194, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.9424670769589984e-05, |
| "loss": 1.3737, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.941582027301006e-05, |
| "loss": 1.3505, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.940690302450982e-05, |
| "loss": 1.2845, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.939791904846869e-05, |
| "loss": 1.3478, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.938886836944851e-05, |
| "loss": 1.3168, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.93797510121935e-05, |
| "loss": 1.2953, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.937056700163015e-05, |
| "loss": 1.3489, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.9361316362867215e-05, |
| "loss": 1.3853, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.935199912119558e-05, |
| "loss": 1.3054, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.934261530208823e-05, |
| "loss": 1.3143, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.933316493120015e-05, |
| "loss": 1.3086, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.9323648034368316e-05, |
| "loss": 1.3011, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.931406463761154e-05, |
| "loss": 1.2483, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.930441476713049e-05, |
| "loss": 1.302, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.929469844930753e-05, |
| "loss": 1.3263, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.9284915710706695e-05, |
| "loss": 1.387, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.9275066578073626e-05, |
| "loss": 1.3013, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.926515107833547e-05, |
| "loss": 1.3018, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.925516923860083e-05, |
| "loss": 1.331, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.9245121086159674e-05, |
| "loss": 1.3337, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.923500664848326e-05, |
| "loss": 1.2895, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.9224825953224084e-05, |
| "loss": 1.318, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.9214579028215776e-05, |
| "loss": 1.2952, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.920426590147304e-05, |
| "loss": 1.3644, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.919388660119156e-05, |
| "loss": 1.3444, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.918344115574796e-05, |
| "loss": 1.361, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.917292959369968e-05, |
| "loss": 1.274, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.9162351943784935e-05, |
| "loss": 1.3637, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.9151708234922603e-05, |
| "loss": 1.2705, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.91409984962122e-05, |
| "loss": 1.3022, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.913022275693372e-05, |
| "loss": 1.3317, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.9119381046547636e-05, |
| "loss": 1.3392, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.910847339469477e-05, |
| "loss": 1.3158, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.9097499831196216e-05, |
| "loss": 1.3178, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.908646038605329e-05, |
| "loss": 1.2786, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.907535508944741e-05, |
| "loss": 1.2851, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.906418397174002e-05, |
| "loss": 1.2989, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.905294706347255e-05, |
| "loss": 1.263, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.904164439536626e-05, |
| "loss": 1.3328, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.903027599832223e-05, |
| "loss": 1.2766, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.901884190342121e-05, |
| "loss": 1.3073, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.900734214192358e-05, |
| "loss": 1.3622, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8995776745269254e-05, |
| "loss": 1.344, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8984145745077584e-05, |
| "loss": 1.3522, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8972449173147276e-05, |
| "loss": 1.2848, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8960687061456324e-05, |
| "loss": 1.3402, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8948859442161874e-05, |
| "loss": 1.3081, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.893696634760019e-05, |
| "loss": 1.2651, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8925007810286546e-05, |
| "loss": 1.3102, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.891298386291513e-05, |
| "loss": 1.3975, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8900894538358944e-05, |
| "loss": 1.3574, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.888873986966974e-05, |
| "loss": 1.2603, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8876519890077926e-05, |
| "loss": 1.3324, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8864234632992457e-05, |
| "loss": 1.3366, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.885188413200075e-05, |
| "loss": 1.3091, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8839468420868606e-05, |
| "loss": 1.3026, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8826987533540114e-05, |
| "loss": 1.2572, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.8814441504137534e-05, |
| "loss": 1.2946, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.880183036696123e-05, |
| "loss": 1.3107, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.878915415648957e-05, |
| "loss": 1.28, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.877641290737884e-05, |
| "loss": 1.2945, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.876360665446312e-05, |
| "loss": 1.2664, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.875073543275422e-05, |
| "loss": 1.2705, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.8737799277441566e-05, |
| "loss": 1.2677, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.872479822389211e-05, |
| "loss": 1.278, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.871173230765024e-05, |
| "loss": 1.3051, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.8698601564437675e-05, |
| "loss": 1.3449, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.868540603015335e-05, |
| "loss": 1.3078, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.8672145740873374e-05, |
| "loss": 1.2072, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.865882073285086e-05, |
| "loss": 1.3015, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.864543104251587e-05, |
| "loss": 1.2994, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.15, |
| "eval_acceptance_rate": 0.7273669242858887, |
| "eval_acceptance_rate_1": 0.7265855073928833, |
| "eval_acceptance_rate_2": 0.7276649475097656, |
| "eval_acceptance_rate_3": 0.7265880703926086, |
| "eval_acceptance_rate_4": 0.7270616292953491, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.3369276109043398, |
| "eval_expected_tokens": 2.9211595136304247, |
| "eval_loss": 1.1850454807281494, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.4802898784076063, |
| "eval_runtime": 123.3228, |
| "eval_samples_per_second": 4.014, |
| "eval_steps_per_second": 0.13, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.863197670647531e-05, |
| "loss": 1.2556, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.861845776151281e-05, |
| "loss": 1.282, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.860487424458867e-05, |
| "loss": 1.2981, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.8591226192839696e-05, |
| "loss": 1.2678, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.857751364357913e-05, |
| "loss": 1.2905, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.856373663429657e-05, |
| "loss": 1.3769, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.8549895202657844e-05, |
| "loss": 1.3233, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.853598938650487e-05, |
| "loss": 1.2517, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.852201922385564e-05, |
| "loss": 1.2923, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.850798475290403e-05, |
| "loss": 1.3197, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.8493886012019765e-05, |
| "loss": 1.3041, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.8479723039748246e-05, |
| "loss": 1.3085, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.846549587481052e-05, |
| "loss": 1.2549, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.845120455610309e-05, |
| "loss": 1.2561, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.8436849122697883e-05, |
| "loss": 1.2595, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.842242961384211e-05, |
| "loss": 1.1942, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.8407946068958165e-05, |
| "loss": 1.271, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.83933985276435e-05, |
| "loss": 1.3538, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.837878702967052e-05, |
| "loss": 1.2728, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.8364111614986527e-05, |
| "loss": 1.2275, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.834937232371353e-05, |
| "loss": 1.2514, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.83345691961482e-05, |
| "loss": 1.3102, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.831970227276171e-05, |
| "loss": 1.2467, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.830477159419966e-05, |
| "loss": 1.3083, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.8289777201281974e-05, |
| "loss": 1.2467, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.827471913500274e-05, |
| "loss": 1.2743, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.8259597436530125e-05, |
| "loss": 1.2776, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.8244412147206284e-05, |
| "loss": 1.3385, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.822916330854722e-05, |
| "loss": 1.2746, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.821385096224268e-05, |
| "loss": 1.2242, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.819847515015602e-05, |
| "loss": 1.2818, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.8183035914324136e-05, |
| "loss": 1.2727, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.81675332969573e-05, |
| "loss": 1.2286, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.815196734043909e-05, |
| "loss": 1.2432, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.8136338087326216e-05, |
| "loss": 1.275, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.812064558034847e-05, |
| "loss": 1.2502, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.810488986240858e-05, |
| "loss": 1.2694, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.8089070976582054e-05, |
| "loss": 1.2416, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.8073188966117126e-05, |
| "loss": 1.2345, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.805724387443462e-05, |
| "loss": 1.244, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.80412357451278e-05, |
| "loss": 1.2536, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.8025164621962284e-05, |
| "loss": 1.262, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.8009030548875896e-05, |
| "loss": 1.2491, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.799283356997859e-05, |
| "loss": 1.3092, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.797657372955228e-05, |
| "loss": 1.2588, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.796025107205075e-05, |
| "loss": 1.223, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.794386564209953e-05, |
| "loss": 1.2916, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.792741748449575e-05, |
| "loss": 1.2725, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.7910906644208054e-05, |
| "loss": 1.2933, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.789433316637644e-05, |
| "loss": 1.2601, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.787769709631217e-05, |
| "loss": 1.298, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.786099847949761e-05, |
| "loss": 1.2316, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.784423736158616e-05, |
| "loss": 1.2318, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.7827413788402077e-05, |
| "loss": 1.2691, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.781052780594034e-05, |
| "loss": 1.2232, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.779357946036661e-05, |
| "loss": 1.2884, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.777656879801701e-05, |
| "loss": 1.2336, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.775949586539803e-05, |
| "loss": 1.2298, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.774236070918643e-05, |
| "loss": 1.2701, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.7725163376229064e-05, |
| "loss": 1.2777, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.770790391354279e-05, |
| "loss": 1.2515, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.7690582368314304e-05, |
| "loss": 1.2428, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.7673198787900063e-05, |
| "loss": 1.2477, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.7655753219826114e-05, |
| "loss": 1.2741, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.763824571178798e-05, |
| "loss": 1.236, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.762067631165049e-05, |
| "loss": 1.243, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.760304506744774e-05, |
| "loss": 1.2307, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.7585352027382877e-05, |
| "loss": 1.2442, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.7567597239827974e-05, |
| "loss": 1.221, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.754978075332398e-05, |
| "loss": 1.1956, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.753190261658045e-05, |
| "loss": 1.1754, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.751396287847556e-05, |
| "loss": 1.2031, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.7495961588055836e-05, |
| "loss": 1.2026, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.747789879453615e-05, |
| "loss": 1.1978, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.7459774547299475e-05, |
| "loss": 1.2589, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.7441588895896805e-05, |
| "loss": 1.2708, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.742334189004704e-05, |
| "loss": 1.2407, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.7405033579636756e-05, |
| "loss": 1.2278, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.738666401472022e-05, |
| "loss": 1.2109, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.736823324551909e-05, |
| "loss": 1.1796, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.73497413224224e-05, |
| "loss": 1.241, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.733118829598635e-05, |
| "loss": 1.2697, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.7312574216934225e-05, |
| "loss": 1.2478, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.7293899136156184e-05, |
| "loss": 1.2407, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.72751631047092e-05, |
| "loss": 1.1911, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.725636617381686e-05, |
| "loss": 1.2688, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.723750839486926e-05, |
| "loss": 1.239, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.721858981942284e-05, |
| "loss": 1.2337, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.719961049920027e-05, |
| "loss": 1.2041, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.71805704860903e-05, |
| "loss": 1.2164, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.716146983214757e-05, |
| "loss": 1.223, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.7142308589592556e-05, |
| "loss": 1.2392, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.7123086810811356e-05, |
| "loss": 1.1836, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.710380454835559e-05, |
| "loss": 1.2215, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.708446185494222e-05, |
| "loss": 1.208, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.7065058783453424e-05, |
| "loss": 1.2059, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.704559538693647e-05, |
| "loss": 1.2306, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.7026071718603536e-05, |
| "loss": 1.2309, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.700648783183159e-05, |
| "loss": 1.2496, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.698684378016222e-05, |
| "loss": 1.223, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.2, |
| "eval_acceptance_rate": 0.7480568289756775, |
| "eval_acceptance_rate_1": 0.7477021813392639, |
| "eval_acceptance_rate_2": 0.7478978633880615, |
| "eval_acceptance_rate_3": 0.7468696236610413, |
| "eval_acceptance_rate_4": 0.7482213377952576, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.431512085060141, |
| "eval_expected_tokens": 3.039390106325176, |
| "eval_loss": 1.1151978969573975, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.509847526581294, |
| "eval_runtime": 117.7232, |
| "eval_samples_per_second": 4.205, |
| "eval_steps_per_second": 0.136, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.696713961730154e-05, |
| "loss": 1.2496, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.694737539711994e-05, |
| "loss": 1.2241, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.6927551173652075e-05, |
| "loss": 1.2261, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.690766700109659e-05, |
| "loss": 1.2289, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.6887722933816076e-05, |
| "loss": 1.2335, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.686771902633684e-05, |
| "loss": 1.2269, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.684765533334879e-05, |
| "loss": 1.1942, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.682753190970533e-05, |
| "loss": 1.1907, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.68073488104231e-05, |
| "loss": 1.233, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.678710609068193e-05, |
| "loss": 1.268, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.6766803805824655e-05, |
| "loss": 1.1887, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.674644201135694e-05, |
| "loss": 1.1867, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.672602076294714e-05, |
| "loss": 1.2096, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.670554011642619e-05, |
| "loss": 1.1924, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.668500012778738e-05, |
| "loss": 1.2003, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.666440085318626e-05, |
| "loss": 1.2171, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.6643742348940464e-05, |
| "loss": 1.1882, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.662302467152955e-05, |
| "loss": 1.2127, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.660224787759486e-05, |
| "loss": 1.2136, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.6581412023939354e-05, |
| "loss": 1.2099, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.656051716752745e-05, |
| "loss": 1.2396, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.65395633654849e-05, |
| "loss": 1.1966, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.65185506750986e-05, |
| "loss": 1.2578, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.649747915381643e-05, |
| "loss": 1.222, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.6476348859247134e-05, |
| "loss": 1.1825, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.645515984916013e-05, |
| "loss": 1.2415, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.643391218148536e-05, |
| "loss": 1.1667, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.6412605914313144e-05, |
| "loss": 1.2437, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.639124110589399e-05, |
| "loss": 1.1796, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.6369817814638475e-05, |
| "loss": 1.2253, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.634833609911706e-05, |
| "loss": 1.2117, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.6326796018059936e-05, |
| "loss": 1.2087, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.630519763035687e-05, |
| "loss": 1.205, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.6283540995057004e-05, |
| "loss": 1.2467, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.6261826171368774e-05, |
| "loss": 1.1599, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.6240053218659674e-05, |
| "loss": 1.1935, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.621822219645612e-05, |
| "loss": 1.2211, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.6196333164443295e-05, |
| "loss": 1.1631, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.617438618246498e-05, |
| "loss": 1.2275, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.6152381310523387e-05, |
| "loss": 1.1759, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.6130318608778995e-05, |
| "loss": 1.2127, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.610819813755038e-05, |
| "loss": 1.1816, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.608601995731407e-05, |
| "loss": 1.2486, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.6063784128704367e-05, |
| "loss": 1.1841, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.604149071251318e-05, |
| "loss": 1.2022, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.601913976968985e-05, |
| "loss": 1.2143, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.5996731361340994e-05, |
| "loss": 1.1515, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.597426554873037e-05, |
| "loss": 1.2121, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.595174239327862e-05, |
| "loss": 1.2078, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.592916195656322e-05, |
| "loss": 1.1593, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.59065243003182e-05, |
| "loss": 1.1824, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.588382948643406e-05, |
| "loss": 1.2159, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.586107757695755e-05, |
| "loss": 1.1879, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.5838268634091524e-05, |
| "loss": 1.1936, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.581540272019476e-05, |
| "loss": 1.2864, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.579247989778179e-05, |
| "loss": 1.2219, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.576950022952274e-05, |
| "loss": 1.2052, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.574646377824315e-05, |
| "loss": 1.1897, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.572337060692379e-05, |
| "loss": 1.2513, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.5700220778700504e-05, |
| "loss": 1.1641, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.567701435686404e-05, |
| "loss": 1.2097, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.565375140485989e-05, |
| "loss": 1.1747, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.563043198628806e-05, |
| "loss": 1.1827, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.560705616490294e-05, |
| "loss": 1.2405, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.5583624004613145e-05, |
| "loss": 1.1829, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.556013556948131e-05, |
| "loss": 1.1286, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.5536590923723906e-05, |
| "loss": 1.1309, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.551299013171111e-05, |
| "loss": 1.2024, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.548933325796658e-05, |
| "loss": 1.1834, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.546562036716732e-05, |
| "loss": 1.1864, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.544185152414343e-05, |
| "loss": 1.1689, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.541802679387806e-05, |
| "loss": 1.1941, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.539414624150708e-05, |
| "loss": 1.1407, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.537020993231904e-05, |
| "loss": 1.1923, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.534621793175487e-05, |
| "loss": 1.236, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.532217030540781e-05, |
| "loss": 1.2171, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.5298067119023114e-05, |
| "loss": 1.1601, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.5273908438498e-05, |
| "loss": 1.1809, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.524969432988138e-05, |
| "loss": 1.1645, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.522542485937369e-05, |
| "loss": 1.2201, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.520110009332674e-05, |
| "loss": 1.1901, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.517672009824351e-05, |
| "loss": 1.1686, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.515228494077798e-05, |
| "loss": 1.1632, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.512779468773494e-05, |
| "loss": 1.1922, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.510324940606979e-05, |
| "loss": 1.2166, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.5078649162888406e-05, |
| "loss": 1.2058, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.505399402544692e-05, |
| "loss": 1.1951, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.502928406115152e-05, |
| "loss": 1.1916, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.500451933755833e-05, |
| "loss": 1.1497, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.497969992237312e-05, |
| "loss": 1.1552, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.495482588345126e-05, |
| "loss": 1.1439, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.49298972887974e-05, |
| "loss": 1.1831, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.490491420656537e-05, |
| "loss": 1.2012, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.487987670505798e-05, |
| "loss": 1.1972, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.4854784852726776e-05, |
| "loss": 1.1618, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.4829638718171954e-05, |
| "loss": 1.2118, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.480443837014205e-05, |
| "loss": 1.1706, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.477918387753388e-05, |
| "loss": 1.1799, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.4753875309392266e-05, |
| "loss": 1.112, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.4728512734909844e-05, |
| "loss": 1.154, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.25, |
| "eval_acceptance_rate": 0.7612475752830505, |
| "eval_acceptance_rate_1": 0.7604774236679077, |
| "eval_acceptance_rate_2": 0.7610577940940857, |
| "eval_acceptance_rate_3": 0.7606703042984009, |
| "eval_acceptance_rate_4": 0.7614873051643372, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.4941636620720145, |
| "eval_expected_tokens": 3.1177045775900183, |
| "eval_loss": 1.0697674751281738, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5294261443975046, |
| "eval_runtime": 120.7194, |
| "eval_samples_per_second": 4.1, |
| "eval_steps_per_second": 0.133, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.470309622342694e-05, |
| "loss": 1.1853, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.467762584443131e-05, |
| "loss": 1.1983, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.465210166755803e-05, |
| "loss": 1.2206, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.46265237625892e-05, |
| "loss": 1.211, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.460089219945382e-05, |
| "loss": 1.2069, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.457520704822765e-05, |
| "loss": 1.2131, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.454946837913287e-05, |
| "loss": 1.1619, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.452367626253805e-05, |
| "loss": 1.1813, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.449783076895783e-05, |
| "loss": 1.1799, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.4471931969052816e-05, |
| "loss": 1.1601, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.4445979933629324e-05, |
| "loss": 1.1948, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.4419974733639244e-05, |
| "loss": 1.2295, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.4393916440179786e-05, |
| "loss": 1.1814, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.436780512449334e-05, |
| "loss": 1.2158, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.434164085796724e-05, |
| "loss": 1.1483, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.4315423712133595e-05, |
| "loss": 1.1577, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.4289153758669075e-05, |
| "loss": 1.1561, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.426283106939474e-05, |
| "loss": 1.207, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.42364557162758e-05, |
| "loss": 1.1995, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.421002777142148e-05, |
| "loss": 1.2024, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.418354730708476e-05, |
| "loss": 1.1775, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.415701439566223e-05, |
| "loss": 1.1377, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.413042910969385e-05, |
| "loss": 1.1504, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.4103791521862784e-05, |
| "loss": 1.1651, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.4077101704995166e-05, |
| "loss": 1.1886, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.405035973205994e-05, |
| "loss": 1.1605, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.4023565676168655e-05, |
| "loss": 1.1652, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.399671961057522e-05, |
| "loss": 1.1908, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.396982160867575e-05, |
| "loss": 1.1913, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.3942871744008374e-05, |
| "loss": 1.1393, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.391587009025297e-05, |
| "loss": 1.1878, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.388881672123105e-05, |
| "loss": 1.1439, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.386171171090547e-05, |
| "loss": 1.1588, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.383455513338032e-05, |
| "loss": 1.1719, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.3807347062900624e-05, |
| "loss": 1.204, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.3780087573852213e-05, |
| "loss": 1.1343, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.375277674076149e-05, |
| "loss": 1.1705, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.3725414638295235e-05, |
| "loss": 1.1341, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.369800134126039e-05, |
| "loss": 1.1727, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.367053692460385e-05, |
| "loss": 1.1999, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.3643021463412294e-05, |
| "loss": 1.171, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.3615455032911946e-05, |
| "loss": 1.2348, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.358783770846836e-05, |
| "loss": 1.1795, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.356016956558625e-05, |
| "loss": 1.1452, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.3532450679909274e-05, |
| "loss": 1.2039, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.3504681127219793e-05, |
| "loss": 1.1606, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.3476860983438714e-05, |
| "loss": 1.171, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.3448990324625244e-05, |
| "loss": 1.175, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.342106922697669e-05, |
| "loss": 1.1518, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.3393097766828293e-05, |
| "loss": 1.2225, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.336507602065293e-05, |
| "loss": 1.2039, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.3337004065061e-05, |
| "loss": 1.1321, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.3308881976800146e-05, |
| "loss": 1.1624, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.3280709832755094e-05, |
| "loss": 1.152, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.325248770994741e-05, |
| "loss": 1.1204, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.3224215685535294e-05, |
| "loss": 1.1581, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.319589383681338e-05, |
| "loss": 1.1694, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.316752224121252e-05, |
| "loss": 1.1878, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.313910097629959e-05, |
| "loss": 1.1455, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.311063011977723e-05, |
| "loss": 1.1626, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.308210974948367e-05, |
| "loss": 1.1247, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.305353994339252e-05, |
| "loss": 1.1165, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.302492077961253e-05, |
| "loss": 1.159, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.2996252336387414e-05, |
| "loss": 1.1264, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.29675346920956e-05, |
| "loss": 1.1833, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.293876792525002e-05, |
| "loss": 1.1058, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.2909952114497925e-05, |
| "loss": 1.1406, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.288108733862064e-05, |
| "loss": 1.1515, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.2852173676533356e-05, |
| "loss": 1.1789, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.282321120728493e-05, |
| "loss": 1.1295, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.279420001005764e-05, |
| "loss": 1.2086, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.2765140164166984e-05, |
| "loss": 1.133, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.273603174906149e-05, |
| "loss": 1.1454, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.270687484432243e-05, |
| "loss": 1.1213, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.267766952966369e-05, |
| "loss": 1.1569, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.2648415884931476e-05, |
| "loss": 1.2088, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.261911399010413e-05, |
| "loss": 1.2193, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.258976392529192e-05, |
| "loss": 1.1351, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.256036577073681e-05, |
| "loss": 1.164, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.2530919606812216e-05, |
| "loss": 1.1552, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.250142551402284e-05, |
| "loss": 1.1633, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.247188357300439e-05, |
| "loss": 1.0952, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.244229386452342e-05, |
| "loss": 1.1708, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.241265646947706e-05, |
| "loss": 1.1491, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.2382971468892806e-05, |
| "loss": 1.0903, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.235323894392832e-05, |
| "loss": 1.169, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.23234589758712e-05, |
| "loss": 1.1661, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.229363164613874e-05, |
| "loss": 1.1544, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.2263757036277705e-05, |
| "loss": 1.2005, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.223383522796415e-05, |
| "loss": 1.1497, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.220386630300315e-05, |
| "loss": 1.1374, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.217385034332861e-05, |
| "loss": 1.1383, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.214378743100302e-05, |
| "loss": 1.1291, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.211367764821722e-05, |
| "loss": 1.1526, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.2083521077290213e-05, |
| "loss": 1.1522, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.205331780066892e-05, |
| "loss": 1.1595, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.202306790092792e-05, |
| "loss": 1.1817, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.199277146076933e-05, |
| "loss": 1.1368, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.1962428563022414e-05, |
| "loss": 1.1418, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.193203929064353e-05, |
| "loss": 1.109, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.3, |
| "eval_acceptance_rate": 0.7722666263580322, |
| "eval_acceptance_rate_1": 0.7711466550827026, |
| "eval_acceptance_rate_2": 0.7723726034164429, |
| "eval_acceptance_rate_3": 0.7718802094459534, |
| "eval_acceptance_rate_4": 0.7722136378288269, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.5479414221081074, |
| "eval_expected_tokens": 3.184926777635134, |
| "eval_loss": 1.0362516641616821, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5462316944087835, |
| "eval_runtime": 115.5731, |
| "eval_samples_per_second": 4.283, |
| "eval_steps_per_second": 0.138, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.1901603726715766e-05, |
| "loss": 1.1199, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.18711219544488e-05, |
| "loss": 1.1416, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.184059405717863e-05, |
| "loss": 1.1544, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.181002011836737e-05, |
| "loss": 1.1035, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.177940022160299e-05, |
| "loss": 1.1592, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.174873445059913e-05, |
| "loss": 1.1491, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.171802288919482e-05, |
| "loss": 1.1718, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.1687265621354314e-05, |
| "loss": 1.1692, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.165646273116681e-05, |
| "loss": 1.1169, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.16256143028462e-05, |
| "loss": 1.1261, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.159472042073096e-05, |
| "loss": 1.1371, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.156378116928375e-05, |
| "loss": 1.1792, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.1532796633091296e-05, |
| "loss": 1.1732, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.150176689686417e-05, |
| "loss": 1.1309, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.147069204543645e-05, |
| "loss": 1.1263, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.143957216376561e-05, |
| "loss": 1.1767, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.140840733693223e-05, |
| "loss": 1.1351, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.137719765013974e-05, |
| "loss": 1.1663, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.134594318871423e-05, |
| "loss": 1.1027, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.131464403810422e-05, |
| "loss": 1.1137, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.128330028388038e-05, |
| "loss": 1.1601, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.1251912011735326e-05, |
| "loss": 1.1421, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.122047930748343e-05, |
| "loss": 1.0937, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.118900225706047e-05, |
| "loss": 1.1721, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.115748094652352e-05, |
| "loss": 1.1249, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.112591546205064e-05, |
| "loss": 1.1497, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.1094305889940646e-05, |
| "loss": 1.125, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.106265231661292e-05, |
| "loss": 1.146, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 4.1030954828607095e-05, |
| "loss": 1.095, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.099921351258292e-05, |
| "loss": 1.1813, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.0967428455319925e-05, |
| "loss": 1.1432, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.093559974371725e-05, |
| "loss": 1.1015, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.090372746479337e-05, |
| "loss": 1.1351, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.0871811705685883e-05, |
| "loss": 1.1564, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.0839852553651265e-05, |
| "loss": 1.1309, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.0807850096064605e-05, |
| "loss": 1.1105, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.0775804420419404e-05, |
| "loss": 1.1595, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.0743715614327317e-05, |
| "loss": 1.1385, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.07115837655179e-05, |
| "loss": 1.1302, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.067940896183843e-05, |
| "loss": 1.1351, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.064719129125356e-05, |
| "loss": 1.1174, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.06149308418452e-05, |
| "loss": 1.1146, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.058262770181217e-05, |
| "loss": 1.1536, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.0550281959470023e-05, |
| "loss": 1.131, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.051789370325078e-05, |
| "loss": 1.1646, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.048546302170271e-05, |
| "loss": 1.1256, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.0452990003490047e-05, |
| "loss": 1.1432, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.042047473739278e-05, |
| "loss": 1.1254, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 4.0387917312306414e-05, |
| "loss": 1.1108, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.03553178172417e-05, |
| "loss": 1.1011, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.0322676341324415e-05, |
| "loss": 1.1704, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.028999297379511e-05, |
| "loss": 1.1681, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.025726780400886e-05, |
| "loss": 1.1494, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.0224500921435035e-05, |
| "loss": 1.0603, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.019169241565703e-05, |
| "loss": 1.145, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.0158842376372064e-05, |
| "loss": 1.1126, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.0125950893390876e-05, |
| "loss": 1.0923, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.009301805663752e-05, |
| "loss": 1.1282, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.006004395614913e-05, |
| "loss": 1.1105, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 4.002702868207563e-05, |
| "loss": 1.1426, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.999397232467952e-05, |
| "loss": 1.1869, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.996087497433562e-05, |
| "loss": 1.145, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.9927736721530805e-05, |
| "loss": 1.1091, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.9894557656863823e-05, |
| "loss": 1.1219, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.9861337871044954e-05, |
| "loss": 1.1093, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.9828077454895815e-05, |
| "loss": 1.1313, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.9794776499349126e-05, |
| "loss": 1.153, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.976143509544843e-05, |
| "loss": 1.1286, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.972805333434784e-05, |
| "loss": 1.0872, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.969463130731183e-05, |
| "loss": 1.1666, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.966116910571494e-05, |
| "loss": 1.1418, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.9627666821041545e-05, |
| "loss": 1.0809, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.9594124544885615e-05, |
| "loss": 1.1292, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.956054236895046e-05, |
| "loss": 1.0884, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.952692038504846e-05, |
| "loss": 1.1141, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.949325868510083e-05, |
| "loss": 1.1354, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.945955736113739e-05, |
| "loss": 1.0922, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.942581650529625e-05, |
| "loss": 1.1635, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.9392036209823644e-05, |
| "loss": 1.1277, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.935821656707359e-05, |
| "loss": 1.1573, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.932435766950772e-05, |
| "loss": 1.1519, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.929045960969494e-05, |
| "loss": 1.1483, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.925652248031127e-05, |
| "loss": 1.1415, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.9222546374139533e-05, |
| "loss": 1.1372, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.9188531384069096e-05, |
| "loss": 1.1498, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.915447760309564e-05, |
| "loss": 1.1528, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.9120385124320894e-05, |
| "loss": 1.1207, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.9086254040952416e-05, |
| "loss": 1.1115, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.905208444630327e-05, |
| "loss": 1.1155, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.901787643379182e-05, |
| "loss": 1.1199, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.898363009694148e-05, |
| "loss": 1.1424, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.894934552938041e-05, |
| "loss": 1.0838, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.891502282484132e-05, |
| "loss": 1.2201, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.888066207716117e-05, |
| "loss": 1.1223, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.884626338028094e-05, |
| "loss": 1.1345, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.8811826828245334e-05, |
| "loss": 1.1092, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.877735251520258e-05, |
| "loss": 1.1209, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.874284053540416e-05, |
| "loss": 1.1368, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.870829098320446e-05, |
| "loss": 1.1273, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.867370395306068e-05, |
| "loss": 1.0906, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.35, |
| "eval_acceptance_rate": 0.7809614539146423, |
| "eval_acceptance_rate_1": 0.7801964282989502, |
| "eval_acceptance_rate_2": 0.7812386751174927, |
| "eval_acceptance_rate_3": 0.7801474332809448, |
| "eval_acceptance_rate_4": 0.7808693647384644, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.5913201862101434, |
| "eval_expected_tokens": 3.2391502327626793, |
| "eval_loss": 1.0097919702529907, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5597875581906698, |
| "eval_runtime": 116.5895, |
| "eval_samples_per_second": 4.246, |
| "eval_steps_per_second": 0.137, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.8639079539532405e-05, |
| "loss": 1.1175, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.860441783728149e-05, |
| "loss": 1.136, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.8569718941071684e-05, |
| "loss": 1.1491, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.853498294576845e-05, |
| "loss": 1.1229, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.850020994633868e-05, |
| "loss": 1.0972, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.846540003785042e-05, |
| "loss": 1.0989, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.8430553315472626e-05, |
| "loss": 1.0977, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.8395669874474915e-05, |
| "loss": 1.1266, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.8360749810227286e-05, |
| "loss": 1.1067, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.832579321819985e-05, |
| "loss": 1.1327, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.829080019396259e-05, |
| "loss": 1.1055, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.825577083318512e-05, |
| "loss": 1.1305, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.822070523163636e-05, |
| "loss": 1.1188, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.818560348518433e-05, |
| "loss": 1.1313, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.8150465689795854e-05, |
| "loss": 1.1256, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.8115291941536345e-05, |
| "loss": 1.0979, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.8080082336569455e-05, |
| "loss": 1.1199, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.8044836971156936e-05, |
| "loss": 1.1361, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.800955594165825e-05, |
| "loss": 1.1286, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.797423934453038e-05, |
| "loss": 1.0763, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.793888727632757e-05, |
| "loss": 1.143, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.7903499833701006e-05, |
| "loss": 1.1314, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.786807711339863e-05, |
| "loss": 1.1358, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.783261921226479e-05, |
| "loss": 1.1273, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.779712622724003e-05, |
| "loss": 1.1515, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.776159825536082e-05, |
| "loss": 1.1058, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.7726035393759285e-05, |
| "loss": 1.0671, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.7690437739662924e-05, |
| "loss": 1.0752, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.7654805390394366e-05, |
| "loss": 1.0842, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.76191384433711e-05, |
| "loss": 1.117, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.7583436996105185e-05, |
| "loss": 1.0982, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.7547701146203005e-05, |
| "loss": 1.1008, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.751193099136505e-05, |
| "loss": 1.1313, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.747612662938552e-05, |
| "loss": 1.1067, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.7440288158152187e-05, |
| "loss": 1.1422, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.7404415675646054e-05, |
| "loss": 1.0987, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.7368509279941134e-05, |
| "loss": 1.1174, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.733256906920413e-05, |
| "loss": 1.1447, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.72965951416942e-05, |
| "loss": 1.1129, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.726058759576271e-05, |
| "loss": 1.1683, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.722454652985289e-05, |
| "loss": 1.141, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.718847204249966e-05, |
| "loss": 1.1411, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.715236423232928e-05, |
| "loss": 1.1417, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.711622319805913e-05, |
| "loss": 1.1284, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.708004903849741e-05, |
| "loss": 1.158, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.704384185254288e-05, |
| "loss": 1.1625, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.700760173918463e-05, |
| "loss": 1.0862, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.697132879750174e-05, |
| "loss": 1.151, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.693502312666304e-05, |
| "loss": 1.0727, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.689868482592684e-05, |
| "loss": 1.0726, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.6862313994640686e-05, |
| "loss": 1.1454, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.6825910732241026e-05, |
| "loss": 1.1174, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.678947513825299e-05, |
| "loss": 1.1179, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.675300731229012e-05, |
| "loss": 1.0917, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.671650735405404e-05, |
| "loss": 1.0923, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.667997536333424e-05, |
| "loss": 1.1101, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.6643411440007804e-05, |
| "loss": 1.0979, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.66068156840391e-05, |
| "loss": 1.0926, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.65701881954795e-05, |
| "loss": 1.0618, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.65335290744672e-05, |
| "loss": 1.1073, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.649683842122681e-05, |
| "loss": 1.1045, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.6460116336069176e-05, |
| "loss": 1.1433, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.642336291939109e-05, |
| "loss": 1.1615, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.6386578271674984e-05, |
| "loss": 1.1436, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.634976249348867e-05, |
| "loss": 1.14, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.631291568548509e-05, |
| "loss": 1.1574, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.627603794840201e-05, |
| "loss": 1.1343, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.623912938306176e-05, |
| "loss": 1.0863, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.6202190090370944e-05, |
| "loss": 1.1124, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.616522017132017e-05, |
| "loss": 1.1027, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.6128219726983784e-05, |
| "loss": 1.0998, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.6091188858519607e-05, |
| "loss": 1.0973, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.6054127667168596e-05, |
| "loss": 1.1419, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.601703625425464e-05, |
| "loss": 1.1095, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.597991472118426e-05, |
| "loss": 1.0861, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.5942763169446295e-05, |
| "loss": 1.0426, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.590558170061168e-05, |
| "loss": 1.1339, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.586837041633312e-05, |
| "loss": 1.1308, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.5831129418344845e-05, |
| "loss": 1.1134, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.579385880846232e-05, |
| "loss": 1.1633, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.5756558688581985e-05, |
| "loss": 1.101, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.571922916068094e-05, |
| "loss": 1.1154, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.568187032681667e-05, |
| "loss": 1.0932, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.564448228912682e-05, |
| "loss": 1.0782, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.5607065149828843e-05, |
| "loss": 1.0861, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.556961901121978e-05, |
| "loss": 1.1374, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.553214397567595e-05, |
| "loss": 1.0708, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.549464014565265e-05, |
| "loss": 1.0721, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.545710762368392e-05, |
| "loss": 1.1101, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.5419546512382266e-05, |
| "loss": 1.1279, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.5381956914438305e-05, |
| "loss": 1.1433, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.534433893262058e-05, |
| "loss": 1.0469, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.530669266977521e-05, |
| "loss": 1.0985, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.526901822882564e-05, |
| "loss": 1.1376, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.523131571277235e-05, |
| "loss": 1.1125, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.519358522469259e-05, |
| "loss": 1.1431, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.515582686774007e-05, |
| "loss": 1.1175, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.511804074514468e-05, |
| "loss": 1.0595, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.508022696021226e-05, |
| "loss": 1.0998, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.504238561632424e-05, |
| "loss": 1.139, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.4, |
| "eval_acceptance_rate": 0.7878292202949524, |
| "eval_acceptance_rate_1": 0.7870592474937439, |
| "eval_acceptance_rate_2": 0.7882823348045349, |
| "eval_acceptance_rate_3": 0.7869051098823547, |
| "eval_acceptance_rate_4": 0.7877203822135925, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.6261817718295335, |
| "eval_expected_tokens": 3.282727214786917, |
| "eval_loss": 0.9893876910209656, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5706818036967292, |
| "eval_runtime": 123.1218, |
| "eval_samples_per_second": 4.02, |
| "eval_steps_per_second": 0.13, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.500451681693741e-05, |
| "loss": 1.1112, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.496662066558363e-05, |
| "loss": 1.1186, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.4928697265869515e-05, |
| "loss": 1.1539, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.489074672147621e-05, |
| "loss": 1.1095, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.485276913615905e-05, |
| "loss": 1.1393, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.4814764613747286e-05, |
| "loss": 1.1348, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.4776733258143854e-05, |
| "loss": 1.1079, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.473867517332501e-05, |
| "loss": 1.115, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.470059046334011e-05, |
| "loss": 1.0997, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.4662479232311306e-05, |
| "loss": 1.1019, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.4624341584433244e-05, |
| "loss": 1.1012, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.458617762397279e-05, |
| "loss": 1.11, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.454798745526876e-05, |
| "loss": 1.0878, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.450977118273162e-05, |
| "loss": 1.1063, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.447152891084319e-05, |
| "loss": 1.062, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.4433260744156396e-05, |
| "loss": 1.0861, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.439496678729493e-05, |
| "loss": 1.1144, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.435664714495301e-05, |
| "loss": 1.1233, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.4318301921895084e-05, |
| "loss": 1.076, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.427993122295552e-05, |
| "loss": 1.0954, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.424153515303835e-05, |
| "loss": 1.1487, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.4203113817116957e-05, |
| "loss": 1.1348, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.41646673202338e-05, |
| "loss": 1.0922, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.412619576750014e-05, |
| "loss": 1.0858, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.4087699264095745e-05, |
| "loss": 1.1319, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.4049177915268576e-05, |
| "loss": 1.0388, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.4010631826334526e-05, |
| "loss": 1.0843, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.397206110267713e-05, |
| "loss": 1.1308, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.3933465849747275e-05, |
| "loss": 1.0888, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.389484617306292e-05, |
| "loss": 1.0859, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.385620217820877e-05, |
| "loss": 1.1502, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.381753397083604e-05, |
| "loss": 1.0971, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.377884165666212e-05, |
| "loss": 1.1102, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.3740125341470336e-05, |
| "loss": 1.0611, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.3701385131109616e-05, |
| "loss": 1.1284, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.3662621131494204e-05, |
| "loss": 1.0967, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.36238334486034e-05, |
| "loss": 1.1029, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.358502218848125e-05, |
| "loss": 1.0541, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.3546187457236244e-05, |
| "loss": 1.1006, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.350732936104108e-05, |
| "loss": 1.119, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.346844800613229e-05, |
| "loss": 1.0804, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.342954349881001e-05, |
| "loss": 1.1324, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.33906159454377e-05, |
| "loss": 1.116, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.335166545244178e-05, |
| "loss": 1.0748, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.3312692126311425e-05, |
| "loss": 1.0553, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.327369607359821e-05, |
| "loss": 1.0633, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.3234677400915865e-05, |
| "loss": 1.0415, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.319563621493994e-05, |
| "loss": 1.0678, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.3156572622407565e-05, |
| "loss": 1.0809, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.311748673011709e-05, |
| "loss": 1.0527, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.307837864492786e-05, |
| "loss": 1.1094, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.3039248473759885e-05, |
| "loss": 1.0822, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.300009632359357e-05, |
| "loss": 1.1206, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.2960922301469385e-05, |
| "loss": 1.0881, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.2921726514487614e-05, |
| "loss": 1.1375, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.2882509069808044e-05, |
| "loss": 1.0995, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.284327007464966e-05, |
| "loss": 1.0886, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.28040096362904e-05, |
| "loss": 1.0945, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.276472786206679e-05, |
| "loss": 1.119, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.272542485937369e-05, |
| "loss": 1.0826, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.2686100735664024e-05, |
| "loss": 1.0542, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.264675559844844e-05, |
| "loss": 1.09, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.260738955529504e-05, |
| "loss": 1.0797, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.2568002713829084e-05, |
| "loss": 1.0397, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.252859518173269e-05, |
| "loss": 1.1289, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.2489167066744547e-05, |
| "loss": 1.0691, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.244971847665962e-05, |
| "loss": 1.0645, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.241024951932885e-05, |
| "loss": 1.1049, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.237076030265884e-05, |
| "loss": 1.0716, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.2331250934611624e-05, |
| "loss": 1.0983, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.229172152320429e-05, |
| "loss": 1.0458, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.225217217650876e-05, |
| "loss": 1.0907, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.22126030026514e-05, |
| "loss": 1.083, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.217301410981285e-05, |
| "loss": 1.0795, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.213340560622763e-05, |
| "loss": 1.0982, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.2093777600183875e-05, |
| "loss": 1.0506, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.205413020002303e-05, |
| "loss": 1.1047, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.201446351413958e-05, |
| "loss": 1.0556, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.1974777650980735e-05, |
| "loss": 1.1373, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.1935072719046115e-05, |
| "loss": 1.0722, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.18953488268875e-05, |
| "loss": 1.0447, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.185560608310849e-05, |
| "loss": 1.07, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.181584459636423e-05, |
| "loss": 1.0674, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.1776064475361114e-05, |
| "loss": 1.0579, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.173626582885645e-05, |
| "loss": 1.0901, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.169644876565824e-05, |
| "loss": 1.0997, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.165661339462481e-05, |
| "loss": 1.054, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.161675982466454e-05, |
| "loss": 1.09, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.1576888164735575e-05, |
| "loss": 1.1098, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.1536998523845494e-05, |
| "loss": 1.0551, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.149709101105107e-05, |
| "loss": 1.1155, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.145716573545792e-05, |
| "loss": 1.1034, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.141722280622021e-05, |
| "loss": 1.103, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.137726233254038e-05, |
| "loss": 1.1217, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.133728442366885e-05, |
| "loss": 1.0947, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.129728918890371e-05, |
| "loss": 1.1059, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.1257276737590365e-05, |
| "loss": 1.1023, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.121724717912138e-05, |
| "loss": 1.0828, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.117720062293599e-05, |
| "loss": 1.0746, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.1137137178519985e-05, |
| "loss": 1.089, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.45, |
| "eval_acceptance_rate": 0.7923885583877563, |
| "eval_acceptance_rate_1": 0.791546106338501, |
| "eval_acceptance_rate_2": 0.7925243973731995, |
| "eval_acceptance_rate_3": 0.7917414307594299, |
| "eval_acceptance_rate_4": 0.7924862504005432, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.6496205162262405, |
| "eval_expected_tokens": 3.3120256452828007, |
| "eval_loss": 0.9742996692657471, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5780064113207001, |
| "eval_runtime": 122.5543, |
| "eval_samples_per_second": 4.039, |
| "eval_steps_per_second": 0.131, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.1097056955405274e-05, |
| "loss": 1.0531, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.105696006316966e-05, |
| "loss": 1.1154, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.101684661143653e-05, |
| "loss": 1.122, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.0976716709874496e-05, |
| "loss": 1.0532, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.093657046819722e-05, |
| "loss": 1.0593, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.0896407996162954e-05, |
| "loss": 1.0343, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.08562294035744e-05, |
| "loss": 1.0496, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.081603480027826e-05, |
| "loss": 1.0795, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 3.077582429616506e-05, |
| "loss": 1.0683, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.073559800116879e-05, |
| "loss": 1.1216, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.06953560252666e-05, |
| "loss": 1.0314, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.065509847847851e-05, |
| "loss": 1.0508, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.061482547086712e-05, |
| "loss": 1.0959, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.05745371125373e-05, |
| "loss": 1.11, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.053423351363586e-05, |
| "loss": 1.1324, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.0493914784351328e-05, |
| "loss": 1.0934, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.045358103491357e-05, |
| "loss": 1.058, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.0413232375593497e-05, |
| "loss": 1.0738, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.037286891670281e-05, |
| "loss": 1.0387, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.0332490768593675e-05, |
| "loss": 1.1324, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.0292098041658397e-05, |
| "loss": 1.0277, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.025169084632915e-05, |
| "loss": 1.0649, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.021126929307766e-05, |
| "loss": 1.0698, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.017083349241492e-05, |
| "loss": 1.0828, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.0130383554890856e-05, |
| "loss": 1.0868, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.008991959109406e-05, |
| "loss": 1.1356, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.004944171165146e-05, |
| "loss": 1.079, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 3.0008950027228033e-05, |
| "loss": 1.0532, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.9968444648526493e-05, |
| "loss": 1.0455, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9927925686287006e-05, |
| "loss": 1.0617, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.988739325128687e-05, |
| "loss": 1.0588, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.984684745434021e-05, |
| "loss": 1.0922, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9806288406297676e-05, |
| "loss": 1.0807, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9765716218046175e-05, |
| "loss": 1.0941, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.972513100050851e-05, |
| "loss": 1.0444, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9684532864643122e-05, |
| "loss": 1.06, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.964392192144375e-05, |
| "loss": 1.0972, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9603298281939178e-05, |
| "loss": 1.0861, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.956266205719288e-05, |
| "loss": 1.0482, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.952201335830275e-05, |
| "loss": 1.0848, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9481352296400766e-05, |
| "loss": 1.0709, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.944067898265272e-05, |
| "loss": 1.0574, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9399993528257902e-05, |
| "loss": 1.0338, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9359296044448794e-05, |
| "loss": 1.0411, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9318586642490763e-05, |
| "loss": 1.0973, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.927786543368175e-05, |
| "loss": 1.1025, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9237132529351996e-05, |
| "loss": 1.0788, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9196388040863693e-05, |
| "loss": 1.0344, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.915563207961074e-05, |
| "loss": 1.1088, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.9114864757018352e-05, |
| "loss": 1.08, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.9074086184542843e-05, |
| "loss": 1.1063, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.9033296473671278e-05, |
| "loss": 1.1185, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8992495735921165e-05, |
| "loss": 1.0879, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8951684082840163e-05, |
| "loss": 1.1085, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8910861626005776e-05, |
| "loss": 1.1001, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8870028477025042e-05, |
| "loss": 1.0557, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8829184747534214e-05, |
| "loss": 1.0364, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.878833054919851e-05, |
| "loss": 1.0747, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.874746599371175e-05, |
| "loss": 1.0824, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.870659119279605e-05, |
| "loss": 1.0787, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8665706258201574e-05, |
| "loss": 1.0667, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.862481130170615e-05, |
| "loss": 1.0696, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8583906435115047e-05, |
| "loss": 1.0751, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8542991770260608e-05, |
| "loss": 1.0816, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.850206741900195e-05, |
| "loss": 1.0741, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.846113349322469e-05, |
| "loss": 1.1111, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8420190104840628e-05, |
| "loss": 1.0798, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8379237365787426e-05, |
| "loss": 1.0841, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8338275388028295e-05, |
| "loss": 1.075, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.8297304283551728e-05, |
| "loss": 1.0658, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.825632416437115e-05, |
| "loss": 1.0991, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.8215335142524657e-05, |
| "loss": 1.0533, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.817433733007466e-05, |
| "loss": 1.0409, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.8133330839107608e-05, |
| "loss": 1.0434, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.8092315781733696e-05, |
| "loss": 1.0965, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.8051292270086503e-05, |
| "loss": 1.0593, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.8010260416322774e-05, |
| "loss": 1.0305, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.7969220332622003e-05, |
| "loss": 1.1153, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.792817213118623e-05, |
| "loss": 1.0635, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.788711592423966e-05, |
| "loss": 1.0628, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.7846051824028386e-05, |
| "loss": 1.0642, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.7804979942820113e-05, |
| "loss": 1.0516, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.776390039290378e-05, |
| "loss": 1.0787, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.7722813286589316e-05, |
| "loss": 1.0905, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.7681718736207298e-05, |
| "loss": 1.0554, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.764061685410865e-05, |
| "loss": 1.0823, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.7599507752664354e-05, |
| "loss": 0.9882, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.755839154426513e-05, |
| "loss": 1.0977, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.7517268341321112e-05, |
| "loss": 1.0862, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.7476138256261575e-05, |
| "loss": 1.0822, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.7435001401534586e-05, |
| "loss": 1.0608, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.7393857889606756e-05, |
| "loss": 1.0532, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.7352707832962865e-05, |
| "loss": 1.0531, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.731155134410559e-05, |
| "loss": 1.0396, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.727038853555521e-05, |
| "loss": 1.0583, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.722921951984927e-05, |
| "loss": 1.0939, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.7188044409542278e-05, |
| "loss": 1.0158, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.7146863317205427e-05, |
| "loss": 1.0531, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.7105676355426248e-05, |
| "loss": 1.0627, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.7064483636808313e-05, |
| "loss": 1.1029, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.5, |
| "eval_acceptance_rate": 0.7963886857032776, |
| "eval_acceptance_rate_1": 0.7957209944725037, |
| "eval_acceptance_rate_2": 0.7964814901351929, |
| "eval_acceptance_rate_3": 0.7954385876655579, |
| "eval_acceptance_rate_4": 0.7965638041496277, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.6703800889432756, |
| "eval_expected_tokens": 3.3379751111790945, |
| "eval_loss": 0.9622464776039124, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5844937777947735, |
| "eval_runtime": 117.5443, |
| "eval_samples_per_second": 4.211, |
| "eval_steps_per_second": 0.136, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.7023285273970945e-05, |
| "loss": 1.0857, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.6982081379548896e-05, |
| "loss": 1.0756, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.6940872066192052e-05, |
| "loss": 1.024, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.689965744656508e-05, |
| "loss": 1.0708, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.6858437633347194e-05, |
| "loss": 1.0548, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.681721273923178e-05, |
| "loss": 1.0898, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.6775982876926126e-05, |
| "loss": 1.1023, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.6734748159151102e-05, |
| "loss": 1.0682, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.6693508698640852e-05, |
| "loss": 1.0774, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.6652264608142484e-05, |
| "loss": 1.0695, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.661101600041577e-05, |
| "loss": 1.0483, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.656976298823284e-05, |
| "loss": 1.0645, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.652850568437783e-05, |
| "loss": 1.0311, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.6487244201646645e-05, |
| "loss": 1.0826, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.6445978652846602e-05, |
| "loss": 1.0644, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.6404709150796137e-05, |
| "loss": 1.0863, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.63634358083245e-05, |
| "loss": 1.0993, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.632215873827142e-05, |
| "loss": 1.0334, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.6280878053486828e-05, |
| "loss": 1.0576, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.623959386683056e-05, |
| "loss": 1.1187, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.6198306291171993e-05, |
| "loss": 1.0788, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.6157015439389774e-05, |
| "loss": 1.0775, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.6115721424371532e-05, |
| "loss": 1.1013, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.6074424359013517e-05, |
| "loss": 1.066, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.6033124356220328e-05, |
| "loss": 1.0695, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.599182152890461e-05, |
| "loss": 1.068, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.5950515989986697e-05, |
| "loss": 1.0418, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.5909207852394363e-05, |
| "loss": 1.0706, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.5867897229062478e-05, |
| "loss": 1.0937, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5826584232932706e-05, |
| "loss": 1.1007, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.578526897695321e-05, |
| "loss": 1.0272, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5743951574078314e-05, |
| "loss": 1.0467, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5702632137268223e-05, |
| "loss": 1.0459, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5661310779488695e-05, |
| "loss": 1.0844, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5619987613710756e-05, |
| "loss": 1.0614, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5578662752910347e-05, |
| "loss": 1.0805, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.553733631006807e-05, |
| "loss": 1.0927, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5496008398168843e-05, |
| "loss": 1.0433, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5454679130201593e-05, |
| "loss": 1.0463, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5413348619158967e-05, |
| "loss": 1.1019, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5372016978036995e-05, |
| "loss": 1.0926, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5330684319834814e-05, |
| "loss": 1.0412, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.528935075755432e-05, |
| "loss": 1.0451, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5248016404199908e-05, |
| "loss": 1.0732, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5206681372778124e-05, |
| "loss": 1.0617, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5165345776297355e-05, |
| "loss": 1.0603, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.512400972776755e-05, |
| "loss": 1.0688, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.508267334019988e-05, |
| "loss": 1.0741, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.5041336726606457e-05, |
| "loss": 1.0621, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.5e-05, |
| "loss": 1.0775, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4958663273393546e-05, |
| "loss": 1.0548, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4917326659800123e-05, |
| "loss": 1.0343, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.487599027223246e-05, |
| "loss": 1.0501, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.483465422370265e-05, |
| "loss": 1.1063, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4793318627221878e-05, |
| "loss": 1.0481, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.475198359580009e-05, |
| "loss": 1.0129, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4710649242445686e-05, |
| "loss": 1.0714, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.46693156801652e-05, |
| "loss": 1.071, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4627983021963014e-05, |
| "loss": 1.0617, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.458665138084104e-05, |
| "loss": 1.0716, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4545320869798406e-05, |
| "loss": 1.0762, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4503991601831163e-05, |
| "loss": 1.078, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4462663689931935e-05, |
| "loss": 1.0802, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4421337247089655e-05, |
| "loss": 1.0348, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.438001238628925e-05, |
| "loss": 1.0986, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4338689220511304e-05, |
| "loss": 1.0836, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4297367862731783e-05, |
| "loss": 1.0953, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.425604842592169e-05, |
| "loss": 1.082, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.4214731023046793e-05, |
| "loss": 1.0634, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.4173415767067297e-05, |
| "loss": 1.0953, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.413210277093753e-05, |
| "loss": 1.1048, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.4090792147605647e-05, |
| "loss": 1.0331, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.404948401001331e-05, |
| "loss": 1.0468, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.4008178471095397e-05, |
| "loss": 1.0733, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3966875643779667e-05, |
| "loss": 1.067, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.392557564098649e-05, |
| "loss": 1.0541, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.388427857562847e-05, |
| "loss": 1.0561, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3842984560610228e-05, |
| "loss": 1.0801, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3801693708828013e-05, |
| "loss": 1.108, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3760406133169443e-05, |
| "loss": 1.0695, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3719121946513174e-05, |
| "loss": 1.0585, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.367784126172859e-05, |
| "loss": 1.0683, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3636564191675507e-05, |
| "loss": 1.0416, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3595290849203862e-05, |
| "loss": 1.0576, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3554021347153403e-05, |
| "loss": 1.0409, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.351275579835336e-05, |
| "loss": 1.0671, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3471494315622177e-05, |
| "loss": 1.0665, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3430237011767167e-05, |
| "loss": 1.0606, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.3388983999584224e-05, |
| "loss": 1.0902, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.334773539185752e-05, |
| "loss": 1.0823, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.330649130135915e-05, |
| "loss": 1.0549, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.32652518408489e-05, |
| "loss": 1.0574, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.3224017123073877e-05, |
| "loss": 1.067, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.3182787260768223e-05, |
| "loss": 1.0771, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.3141562366652812e-05, |
| "loss": 1.0409, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.3100342553434924e-05, |
| "loss": 1.0722, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.3059127933807954e-05, |
| "loss": 1.0269, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.30179186204511e-05, |
| "loss": 1.103, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.2976714726029068e-05, |
| "loss": 1.0702, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.2935516363191693e-05, |
| "loss": 1.0696, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.55, |
| "eval_acceptance_rate": 0.7997491955757141, |
| "eval_acceptance_rate_1": 0.7989146113395691, |
| "eval_acceptance_rate_2": 0.8000128865242004, |
| "eval_acceptance_rate_3": 0.7989808917045593, |
| "eval_acceptance_rate_4": 0.7997729778289795, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.6879625375133878, |
| "eval_expected_tokens": 3.3599531718917346, |
| "eval_loss": 0.9537034630775452, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5899882929729336, |
| "eval_runtime": 125.036, |
| "eval_samples_per_second": 3.959, |
| "eval_steps_per_second": 0.128, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.2894323644573758e-05, |
| "loss": 1.0623, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.2853136682794575e-05, |
| "loss": 1.0298, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.281195559045772e-05, |
| "loss": 1.0234, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.2770780480150744e-05, |
| "loss": 1.0601, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.2729611464444794e-05, |
| "loss": 1.071, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.2688448655894415e-05, |
| "loss": 1.1104, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.2647292167037144e-05, |
| "loss": 1.0179, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.2606142110393247e-05, |
| "loss": 1.0864, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.2564998598465423e-05, |
| "loss": 1.0685, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.2523861743738434e-05, |
| "loss": 1.0662, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.248273165867889e-05, |
| "loss": 1.0783, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.2441608455734873e-05, |
| "loss": 1.0326, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.240049224733566e-05, |
| "loss": 1.0112, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.2359383145891364e-05, |
| "loss": 1.0759, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.231828126379271e-05, |
| "loss": 1.0492, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.2277186713410687e-05, |
| "loss": 1.0746, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.223609960709622e-05, |
| "loss": 1.0831, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.2195020057179896e-05, |
| "loss": 1.0267, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.215394817597162e-05, |
| "loss": 1.1172, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.2112884075760347e-05, |
| "loss": 1.0778, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.2071827868813774e-05, |
| "loss": 1.0356, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.2030779667377996e-05, |
| "loss": 1.0894, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.1989739583677238e-05, |
| "loss": 1.0672, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.19487077299135e-05, |
| "loss": 0.9969, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.190768421826631e-05, |
| "loss": 1.0739, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.186666916089239e-05, |
| "loss": 1.0887, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.1825662669925354e-05, |
| "loss": 1.0433, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.1784664857475352e-05, |
| "loss": 1.0055, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.1743675835628856e-05, |
| "loss": 1.0935, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.1702695716448278e-05, |
| "loss": 1.0663, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1661724611971708e-05, |
| "loss": 1.0657, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1620762634212586e-05, |
| "loss": 1.0548, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1579809895159375e-05, |
| "loss": 1.0691, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.153886650677531e-05, |
| "loss": 1.0483, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1497932580998053e-05, |
| "loss": 1.0064, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1457008229739394e-05, |
| "loss": 1.0678, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.141609356488496e-05, |
| "loss": 1.0393, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1375188698293855e-05, |
| "loss": 1.0745, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1334293741798432e-05, |
| "loss": 1.0285, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1293408807203947e-05, |
| "loss": 1.0815, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1252534006288264e-05, |
| "loss": 1.0833, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1211669450801493e-05, |
| "loss": 1.1144, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.117081525246579e-05, |
| "loss": 1.0715, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1129971522974967e-05, |
| "loss": 0.9991, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1089138373994223e-05, |
| "loss": 1.0727, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.1048315917159846e-05, |
| "loss": 1.0665, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.100750426407884e-05, |
| "loss": 1.0329, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.0966703526328728e-05, |
| "loss": 1.0818, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.0925913815457153e-05, |
| "loss": 1.0683, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.088513524298165e-05, |
| "loss": 1.0691, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0844367920389273e-05, |
| "loss": 1.0868, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.080361195913631e-05, |
| "loss": 1.0347, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0762867470648013e-05, |
| "loss": 1.0336, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.072213456631825e-05, |
| "loss": 1.0622, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.068141335750925e-05, |
| "loss": 1.0389, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0640703955551212e-05, |
| "loss": 1.021, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0600006471742104e-05, |
| "loss": 1.0481, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0559321017347285e-05, |
| "loss": 1.0539, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0518647703599236e-05, |
| "loss": 1.0805, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.047798664169726e-05, |
| "loss": 1.0347, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.043733794280712e-05, |
| "loss": 1.0473, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0396701718060824e-05, |
| "loss": 1.0746, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.035607807855625e-05, |
| "loss": 1.0589, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.031546713535688e-05, |
| "loss": 1.0741, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.02748689994915e-05, |
| "loss": 1.0455, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0234283781953834e-05, |
| "loss": 1.0328, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0193711593702326e-05, |
| "loss": 1.0738, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0153152545659798e-05, |
| "loss": 1.0643, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0112606748713138e-05, |
| "loss": 1.077, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.0072074313712997e-05, |
| "loss": 1.0995, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 2.003155535147351e-05, |
| "loss": 1.0389, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9991049972771972e-05, |
| "loss": 1.0526, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9950558288348542e-05, |
| "loss": 1.0182, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9910080408905944e-05, |
| "loss": 1.0181, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9869616445109147e-05, |
| "loss": 1.072, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9829166507585083e-05, |
| "loss": 1.0588, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9788730706922342e-05, |
| "loss": 1.0817, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.974830915367086e-05, |
| "loss": 1.0473, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9707901958341612e-05, |
| "loss": 1.0257, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9667509231406334e-05, |
| "loss": 1.0812, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.962713108329719e-05, |
| "loss": 1.0369, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.958676762440651e-05, |
| "loss": 1.0576, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9546418965086442e-05, |
| "loss": 1.0561, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9506085215648675e-05, |
| "loss": 1.0647, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9465766486364143e-05, |
| "loss": 1.0738, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9425462887462712e-05, |
| "loss": 1.0721, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9385174529132884e-05, |
| "loss": 1.0369, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.93449015215215e-05, |
| "loss": 1.0593, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.930464397473341e-05, |
| "loss": 1.0127, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.9264401998831213e-05, |
| "loss": 1.0549, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.922417570383494e-05, |
| "loss": 1.0684, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.9183965199721745e-05, |
| "loss": 1.0996, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.9143770596425615e-05, |
| "loss": 1.0973, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.9103592003837045e-05, |
| "loss": 1.0669, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.9063429531802786e-05, |
| "loss": 1.0473, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.90232832901255e-05, |
| "loss": 1.0443, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8983153388563486e-05, |
| "loss": 1.0229, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8943039936830344e-05, |
| "loss": 1.0435, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8902943044594735e-05, |
| "loss": 1.033, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8862862821480025e-05, |
| "loss": 1.0436, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.6, |
| "eval_acceptance_rate": 0.8013065457344055, |
| "eval_acceptance_rate_1": 0.8006454110145569, |
| "eval_acceptance_rate_2": 0.801694929599762, |
| "eval_acceptance_rate_3": 0.8004037737846375, |
| "eval_acceptance_rate_4": 0.8012261986732483, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.6961550087048733, |
| "eval_expected_tokens": 3.3701937608810915, |
| "eval_loss": 0.9475569128990173, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.592548440220273, |
| "eval_runtime": 115.6606, |
| "eval_samples_per_second": 4.28, |
| "eval_steps_per_second": 0.138, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8822799377064014e-05, |
| "loss": 1.0242, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8782752820878634e-05, |
| "loss": 1.0798, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8742723262409634e-05, |
| "loss": 1.0341, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.87027108110963e-05, |
| "loss": 1.061, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.866271557633115e-05, |
| "loss": 1.0348, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8622737667459623e-05, |
| "loss": 1.0868, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8582777193779805e-05, |
| "loss": 1.0313, |
| "step": 1207 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8542834264542092e-05, |
| "loss": 1.0401, |
| "step": 1208 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.850290898894893e-05, |
| "loss": 1.057, |
| "step": 1209 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8463001476154508e-05, |
| "loss": 1.0521, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.8423111835264434e-05, |
| "loss": 1.0316, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.8383240175335464e-05, |
| "loss": 1.1016, |
| "step": 1212 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.8343386605375192e-05, |
| "loss": 1.0224, |
| "step": 1213 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.8303551234341763e-05, |
| "loss": 1.0724, |
| "step": 1214 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.826373417114355e-05, |
| "loss": 1.0664, |
| "step": 1215 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.8223935524638898e-05, |
| "loss": 1.0341, |
| "step": 1216 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.818415540363577e-05, |
| "loss": 1.1081, |
| "step": 1217 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.814439391689151e-05, |
| "loss": 1.0463, |
| "step": 1218 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.8104651173112503e-05, |
| "loss": 1.0667, |
| "step": 1219 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.806492728095389e-05, |
| "loss": 1.0352, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.802522234901927e-05, |
| "loss": 1.0096, |
| "step": 1221 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.798553648586042e-05, |
| "loss": 1.0155, |
| "step": 1222 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.7945869799976973e-05, |
| "loss": 1.0623, |
| "step": 1223 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.7906222399816124e-05, |
| "loss": 0.9985, |
| "step": 1224 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.7866594393772373e-05, |
| "loss": 1.066, |
| "step": 1225 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.7826985890187148e-05, |
| "loss": 1.0624, |
| "step": 1226 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.7787396997348604e-05, |
| "loss": 1.0523, |
| "step": 1227 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.7747827823491252e-05, |
| "loss": 1.0252, |
| "step": 1228 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.770827847679571e-05, |
| "loss": 1.0527, |
| "step": 1229 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.7668749065388385e-05, |
| "loss": 1.0294, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7629239697341165e-05, |
| "loss": 1.0102, |
| "step": 1231 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.758975048067116e-05, |
| "loss": 1.0569, |
| "step": 1232 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7550281523340382e-05, |
| "loss": 1.0251, |
| "step": 1233 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7510832933255463e-05, |
| "loss": 1.0192, |
| "step": 1234 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7471404818267316e-05, |
| "loss": 1.0713, |
| "step": 1235 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7431997286170922e-05, |
| "loss": 1.043, |
| "step": 1236 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7392610444704966e-05, |
| "loss": 1.0782, |
| "step": 1237 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7353244401551566e-05, |
| "loss": 1.0445, |
| "step": 1238 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7313899264335985e-05, |
| "loss": 1.0483, |
| "step": 1239 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7274575140626318e-05, |
| "loss": 1.0852, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.723527213793322e-05, |
| "loss": 1.0298, |
| "step": 1241 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7195990363709604e-05, |
| "loss": 1.0794, |
| "step": 1242 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7156729925350336e-05, |
| "loss": 1.0514, |
| "step": 1243 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7117490930191965e-05, |
| "loss": 1.0075, |
| "step": 1244 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.7078273485512392e-05, |
| "loss": 1.0509, |
| "step": 1245 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.703907769853062e-05, |
| "loss": 1.1256, |
| "step": 1246 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.699990367640643e-05, |
| "loss": 1.0699, |
| "step": 1247 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.696075152624012e-05, |
| "loss": 1.0245, |
| "step": 1248 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.6921621355072144e-05, |
| "loss": 1.0455, |
| "step": 1249 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.6882513269882917e-05, |
| "loss": 1.051, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.684342737759244e-05, |
| "loss": 1.0808, |
| "step": 1251 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6804363785060056e-05, |
| "loss": 1.065, |
| "step": 1252 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6765322599084147e-05, |
| "loss": 1.0606, |
| "step": 1253 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6726303926401792e-05, |
| "loss": 1.0661, |
| "step": 1254 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.668730787368858e-05, |
| "loss": 1.0482, |
| "step": 1255 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6648334547558226e-05, |
| "loss": 1.0518, |
| "step": 1256 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6609384054562305e-05, |
| "loss": 1.0909, |
| "step": 1257 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6570456501189998e-05, |
| "loss": 1.0817, |
| "step": 1258 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6531551993867717e-05, |
| "loss": 1.0685, |
| "step": 1259 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6492670638958924e-05, |
| "loss": 1.0819, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6453812542763752e-05, |
| "loss": 1.067, |
| "step": 1261 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.641497781151877e-05, |
| "loss": 1.0387, |
| "step": 1262 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6376166551396607e-05, |
| "loss": 1.0154, |
| "step": 1263 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6337378868505805e-05, |
| "loss": 1.0503, |
| "step": 1264 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6298614868890387e-05, |
| "loss": 1.0694, |
| "step": 1265 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6259874658529663e-05, |
| "loss": 1.0069, |
| "step": 1266 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.622115834333789e-05, |
| "loss": 1.0337, |
| "step": 1267 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6182466029163975e-05, |
| "loss": 1.0248, |
| "step": 1268 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.614379782179124e-05, |
| "loss": 1.0355, |
| "step": 1269 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.6105153826937085e-05, |
| "loss": 1.0345, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.6066534150252727e-05, |
| "loss": 1.0513, |
| "step": 1271 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.602793889732288e-05, |
| "loss": 1.018, |
| "step": 1272 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.598936817366548e-05, |
| "loss": 1.0813, |
| "step": 1273 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.5950822084731426e-05, |
| "loss": 1.0543, |
| "step": 1274 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.591230073590425e-05, |
| "loss": 1.0384, |
| "step": 1275 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.5873804232499863e-05, |
| "loss": 1.0532, |
| "step": 1276 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.583533267976621e-05, |
| "loss": 1.0492, |
| "step": 1277 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.5796886182883053e-05, |
| "loss": 1.0111, |
| "step": 1278 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.5758464846961657e-05, |
| "loss": 1.052, |
| "step": 1279 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.5720068777044476e-05, |
| "loss": 1.0407, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.5681698078104926e-05, |
| "loss": 1.039, |
| "step": 1281 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.5643352855046996e-05, |
| "loss": 1.0545, |
| "step": 1282 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.560503321270507e-05, |
| "loss": 1.0622, |
| "step": 1283 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.5566739255843606e-05, |
| "loss": 1.0539, |
| "step": 1284 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.5528471089156804e-05, |
| "loss": 1.0274, |
| "step": 1285 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.549022881726839e-05, |
| "loss": 1.0528, |
| "step": 1286 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.5452012544731245e-05, |
| "loss": 1.0162, |
| "step": 1287 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.541382237602721e-05, |
| "loss": 1.0221, |
| "step": 1288 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.537565841556676e-05, |
| "loss": 1.0372, |
| "step": 1289 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.5337520767688703e-05, |
| "loss": 1.047, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.5299409536659895e-05, |
| "loss": 1.0357, |
| "step": 1291 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.5261324826675e-05, |
| "loss": 1.0522, |
| "step": 1292 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.5223266741856152e-05, |
| "loss": 1.0266, |
| "step": 1293 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.5185235386252717e-05, |
| "loss": 1.063, |
| "step": 1294 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.5147230863840966e-05, |
| "loss": 1.0149, |
| "step": 1295 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.5109253278523799e-05, |
| "loss": 1.0265, |
| "step": 1296 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.5071302734130489e-05, |
| "loss": 1.0762, |
| "step": 1297 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.5033379334416375e-05, |
| "loss": 1.0528, |
| "step": 1298 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.499548318306259e-05, |
| "loss": 1.0235, |
| "step": 1299 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.495761438367577e-05, |
| "loss": 1.0173, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.65, |
| "eval_acceptance_rate": 0.8032432198524475, |
| "eval_acceptance_rate_1": 0.8025344014167786, |
| "eval_acceptance_rate_2": 0.8036085367202759, |
| "eval_acceptance_rate_3": 0.802588701248169, |
| "eval_acceptance_rate_4": 0.8030030727386475, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.706382212110599, |
| "eval_expected_tokens": 3.3829777651382487, |
| "eval_loss": 0.9430884122848511, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5957444412845622, |
| "eval_runtime": 115.4683, |
| "eval_samples_per_second": 4.287, |
| "eval_steps_per_second": 0.139, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4919773039787748e-05, |
| "loss": 1.0457, |
| "step": 1301 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4881959254855324e-05, |
| "loss": 1.0641, |
| "step": 1302 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4844173132259933e-05, |
| "loss": 0.9923, |
| "step": 1303 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4806414775307418e-05, |
| "loss": 1.0533, |
| "step": 1304 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4768684287227652e-05, |
| "loss": 1.0136, |
| "step": 1305 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4730981771174369e-05, |
| "loss": 1.0206, |
| "step": 1306 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4693307330224798e-05, |
| "loss": 1.0024, |
| "step": 1307 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.465566106737942e-05, |
| "loss": 1.047, |
| "step": 1308 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4618043085561702e-05, |
| "loss": 0.9761, |
| "step": 1309 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4580453487617745e-05, |
| "loss": 1.0514, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4542892376316078e-05, |
| "loss": 1.059, |
| "step": 1311 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4505359854347361e-05, |
| "loss": 1.0643, |
| "step": 1312 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4467856024324056e-05, |
| "loss": 1.0965, |
| "step": 1313 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4430380988780218e-05, |
| "loss": 1.1107, |
| "step": 1314 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.439293485017116e-05, |
| "loss": 1.0131, |
| "step": 1315 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4355517710873184e-05, |
| "loss": 1.0364, |
| "step": 1316 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4318129673183333e-05, |
| "loss": 1.0873, |
| "step": 1317 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4280770839319071e-05, |
| "loss": 1.03, |
| "step": 1318 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4243441311418013e-05, |
| "loss": 1.0264, |
| "step": 1319 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4206141191537682e-05, |
| "loss": 1.0223, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4168870581655159e-05, |
| "loss": 1.022, |
| "step": 1321 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4131629583666888e-05, |
| "loss": 1.0765, |
| "step": 1322 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4094418299388331e-05, |
| "loss": 1.0306, |
| "step": 1323 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4057236830553704e-05, |
| "loss": 1.0315, |
| "step": 1324 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4020085278815745e-05, |
| "loss": 1.0226, |
| "step": 1325 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.3982963745745352e-05, |
| "loss": 1.0242, |
| "step": 1326 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.3945872332831412e-05, |
| "loss": 1.0382, |
| "step": 1327 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.3908811141480408e-05, |
| "loss": 1.0098, |
| "step": 1328 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.3871780273016215e-05, |
| "loss": 1.0667, |
| "step": 1329 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.383477982867984e-05, |
| "loss": 1.0581, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3797809909629058e-05, |
| "loss": 1.0291, |
| "step": 1331 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3760870616938248e-05, |
| "loss": 1.0711, |
| "step": 1332 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3723962051597988e-05, |
| "loss": 1.0387, |
| "step": 1333 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3687084314514907e-05, |
| "loss": 1.097, |
| "step": 1334 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3650237506511331e-05, |
| "loss": 1.0019, |
| "step": 1335 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3613421728325018e-05, |
| "loss": 1.0201, |
| "step": 1336 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3576637080608923e-05, |
| "loss": 1.0126, |
| "step": 1337 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.353988366393083e-05, |
| "loss": 1.0421, |
| "step": 1338 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3503161578773193e-05, |
| "loss": 1.0552, |
| "step": 1339 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.346647092553281e-05, |
| "loss": 1.0585, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3429811804520492e-05, |
| "loss": 1.0129, |
| "step": 1341 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3393184315960918e-05, |
| "loss": 1.0877, |
| "step": 1342 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.33565885599922e-05, |
| "loss": 1.0335, |
| "step": 1343 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3320024636665757e-05, |
| "loss": 1.0139, |
| "step": 1344 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3283492645945966e-05, |
| "loss": 1.0425, |
| "step": 1345 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3246992687709889e-05, |
| "loss": 1.0431, |
| "step": 1346 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3210524861747015e-05, |
| "loss": 1.0346, |
| "step": 1347 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3174089267758983e-05, |
| "loss": 1.0184, |
| "step": 1348 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.3137686005359318e-05, |
| "loss": 1.0431, |
| "step": 1349 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3101315174073162e-05, |
| "loss": 1.0161, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.306497687333697e-05, |
| "loss": 1.0257, |
| "step": 1351 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3028671202498261e-05, |
| "loss": 1.0514, |
| "step": 1352 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2992398260815369e-05, |
| "loss": 1.0614, |
| "step": 1353 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2956158147457115e-05, |
| "loss": 1.0438, |
| "step": 1354 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2919950961502603e-05, |
| "loss": 1.0316, |
| "step": 1355 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2883776801940884e-05, |
| "loss": 1.066, |
| "step": 1356 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2847635767670722e-05, |
| "loss": 1.0638, |
| "step": 1357 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2811527957500345e-05, |
| "loss": 1.0276, |
| "step": 1358 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2775453470147106e-05, |
| "loss": 1.0446, |
| "step": 1359 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2739412404237306e-05, |
| "loss": 1.0407, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2703404858305806e-05, |
| "loss": 1.0204, |
| "step": 1361 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2667430930795877e-05, |
| "loss": 1.0152, |
| "step": 1362 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2631490720058875e-05, |
| "loss": 1.0735, |
| "step": 1363 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2595584324353943e-05, |
| "loss": 1.0463, |
| "step": 1364 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.255971184184783e-05, |
| "loss": 1.0835, |
| "step": 1365 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2523873370614489e-05, |
| "loss": 1.0654, |
| "step": 1366 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2488069008634954e-05, |
| "loss": 1.014, |
| "step": 1367 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.245229885379699e-05, |
| "loss": 1.0756, |
| "step": 1368 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.2416563003894826e-05, |
| "loss": 1.0627, |
| "step": 1369 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.2380861556628915e-05, |
| "loss": 1.0369, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.2345194609605636e-05, |
| "loss": 1.1073, |
| "step": 1371 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.2309562260337073e-05, |
| "loss": 1.0814, |
| "step": 1372 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.2273964606240718e-05, |
| "loss": 1.0399, |
| "step": 1373 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.2238401744639185e-05, |
| "loss": 1.0231, |
| "step": 1374 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.2202873772759981e-05, |
| "loss": 0.9938, |
| "step": 1375 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.216738078773522e-05, |
| "loss": 1.0156, |
| "step": 1376 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.2131922886601368e-05, |
| "loss": 1.0335, |
| "step": 1377 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.209650016629899e-05, |
| "loss": 1.0594, |
| "step": 1378 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.2061112723672438e-05, |
| "loss": 1.0885, |
| "step": 1379 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.202576065546963e-05, |
| "loss": 1.0391, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1990444058341757e-05, |
| "loss": 1.0322, |
| "step": 1381 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1955163028843063e-05, |
| "loss": 1.0315, |
| "step": 1382 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1919917663430552e-05, |
| "loss": 0.9996, |
| "step": 1383 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1884708058463668e-05, |
| "loss": 1.01, |
| "step": 1384 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1849534310204152e-05, |
| "loss": 1.0394, |
| "step": 1385 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1814396514815676e-05, |
| "loss": 1.0346, |
| "step": 1386 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1779294768363639e-05, |
| "loss": 1.0432, |
| "step": 1387 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1744229166814888e-05, |
| "loss": 1.088, |
| "step": 1388 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.170919980603741e-05, |
| "loss": 1.0689, |
| "step": 1389 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1674206781800162e-05, |
| "loss": 1.0677, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1639250189772718e-05, |
| "loss": 1.042, |
| "step": 1391 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1604330125525079e-05, |
| "loss": 1.0653, |
| "step": 1392 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1569446684527383e-05, |
| "loss": 1.0066, |
| "step": 1393 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1534599962149587e-05, |
| "loss": 1.0591, |
| "step": 1394 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1499790053661327e-05, |
| "loss": 1.0406, |
| "step": 1395 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.146501705423155e-05, |
| "loss": 1.0649, |
| "step": 1396 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1430281058928324e-05, |
| "loss": 1.02, |
| "step": 1397 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1395582162718525e-05, |
| "loss": 1.0406, |
| "step": 1398 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1360920460467598e-05, |
| "loss": 1.0342, |
| "step": 1399 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1326296046939333e-05, |
| "loss": 1.0464, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.7, |
| "eval_acceptance_rate": 0.8042372465133667, |
| "eval_acceptance_rate_1": 0.8035486340522766, |
| "eval_acceptance_rate_2": 0.8043899536132812, |
| "eval_acceptance_rate_3": 0.8036088347434998, |
| "eval_acceptance_rate_4": 0.8041674494743347, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.711648435017575, |
| "eval_expected_tokens": 3.389560543771969, |
| "eval_loss": 0.940096914768219, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5973901359429922, |
| "eval_runtime": 123.2454, |
| "eval_samples_per_second": 4.016, |
| "eval_steps_per_second": 0.13, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.129170901679554e-05, |
| "loss": 1.042, |
| "step": 1401 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1257159464595855e-05, |
| "loss": 1.0294, |
| "step": 1402 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1222647484797422e-05, |
| "loss": 1.0263, |
| "step": 1403 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.118817317175467e-05, |
| "loss": 1.0384, |
| "step": 1404 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1153736619719077e-05, |
| "loss": 1.0327, |
| "step": 1405 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1119337922838832e-05, |
| "loss": 1.0409, |
| "step": 1406 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1084977175158687e-05, |
| "loss": 1.0396, |
| "step": 1407 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1050654470619601e-05, |
| "loss": 1.1275, |
| "step": 1408 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.1016369903058529e-05, |
| "loss": 1.0538, |
| "step": 1409 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.0982123566208185e-05, |
| "loss": 1.0855, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0947915553696742e-05, |
| "loss": 0.9956, |
| "step": 1411 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.091374595904759e-05, |
| "loss": 1.0467, |
| "step": 1412 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0879614875679109e-05, |
| "loss": 1.0299, |
| "step": 1413 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0845522396904367e-05, |
| "loss": 1.0187, |
| "step": 1414 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0811468615930911e-05, |
| "loss": 1.0109, |
| "step": 1415 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0777453625860472e-05, |
| "loss": 1.0297, |
| "step": 1416 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0743477519688725e-05, |
| "loss": 1.0798, |
| "step": 1417 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0709540390305061e-05, |
| "loss": 1.0548, |
| "step": 1418 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0675642330492286e-05, |
| "loss": 1.0965, |
| "step": 1419 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.064178343292641e-05, |
| "loss": 1.0221, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0607963790176365e-05, |
| "loss": 1.0737, |
| "step": 1421 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0574183494703748e-05, |
| "loss": 1.0514, |
| "step": 1422 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0540442638862618e-05, |
| "loss": 1.096, |
| "step": 1423 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0506741314899166e-05, |
| "loss": 1.0143, |
| "step": 1424 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0473079614951545e-05, |
| "loss": 1.0495, |
| "step": 1425 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0439457631049549e-05, |
| "loss": 1.0386, |
| "step": 1426 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0405875455114387e-05, |
| "loss": 1.1013, |
| "step": 1427 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0372333178958462e-05, |
| "loss": 1.0383, |
| "step": 1428 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0338830894285065e-05, |
| "loss": 1.0655, |
| "step": 1429 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.0305368692688174e-05, |
| "loss": 1.0374, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.0271946665652166e-05, |
| "loss": 1.088, |
| "step": 1431 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.0238564904551574e-05, |
| "loss": 1.0178, |
| "step": 1432 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.0205223500650876e-05, |
| "loss": 1.0377, |
| "step": 1433 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.0171922545104184e-05, |
| "loss": 1.0714, |
| "step": 1434 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.0138662128955053e-05, |
| "loss": 1.0635, |
| "step": 1435 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.0105442343136184e-05, |
| "loss": 1.0433, |
| "step": 1436 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.0072263278469194e-05, |
| "loss": 0.9967, |
| "step": 1437 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.0039125025664392e-05, |
| "loss": 1.0536, |
| "step": 1438 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.0006027675320493e-05, |
| "loss": 1.0511, |
| "step": 1439 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.972971317924374e-06, |
| "loss": 1.0873, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.939956043850876e-06, |
| "loss": 1.0639, |
| "step": 1441 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.90698194336248e-06, |
| "loss": 1.0858, |
| "step": 1442 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.874049106609135e-06, |
| "loss": 1.0387, |
| "step": 1443 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.841157623627947e-06, |
| "loss": 1.0666, |
| "step": 1444 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.808307584342971e-06, |
| "loss": 1.0573, |
| "step": 1445 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.775499078564973e-06, |
| "loss": 1.0618, |
| "step": 1446 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.742732195991142e-06, |
| "loss": 1.0088, |
| "step": 1447 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.710007026204895e-06, |
| "loss": 1.0615, |
| "step": 1448 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.677323658675594e-06, |
| "loss": 1.0663, |
| "step": 1449 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.644682182758306e-06, |
| "loss": 1.0105, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.612082687693597e-06, |
| "loss": 1.0695, |
| "step": 1451 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.579525262607226e-06, |
| "loss": 1.0658, |
| "step": 1452 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.547009996509964e-06, |
| "loss": 1.0246, |
| "step": 1453 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.514536978297303e-06, |
| "loss": 0.9943, |
| "step": 1454 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.482106296749221e-06, |
| "loss": 1.0844, |
| "step": 1455 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.449718040529987e-06, |
| "loss": 1.1073, |
| "step": 1456 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.417372298187833e-06, |
| "loss": 1.0455, |
| "step": 1457 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.385069158154805e-06, |
| "loss": 1.0369, |
| "step": 1458 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.352808708746441e-06, |
| "loss": 1.0404, |
| "step": 1459 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.320591038161574e-06, |
| "loss": 1.0611, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.2884162344821e-06, |
| "loss": 1.0744, |
| "step": 1461 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.25628438567269e-06, |
| "loss": 1.0264, |
| "step": 1462 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.224195579580602e-06, |
| "loss": 1.0279, |
| "step": 1463 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.192149903935405e-06, |
| "loss": 1.0402, |
| "step": 1464 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.160147446348739e-06, |
| "loss": 1.0319, |
| "step": 1465 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.128188294314119e-06, |
| "loss": 1.0546, |
| "step": 1466 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.096272535206641e-06, |
| "loss": 1.0245, |
| "step": 1467 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.064400256282757e-06, |
| "loss": 1.0581, |
| "step": 1468 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.032571544680086e-06, |
| "loss": 1.0473, |
| "step": 1469 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.000786487417085e-06, |
| "loss": 1.1013, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.969045171392909e-06, |
| "loss": 1.0264, |
| "step": 1471 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.937347683387095e-06, |
| "loss": 1.0607, |
| "step": 1472 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.905694110059353e-06, |
| "loss": 1.0356, |
| "step": 1473 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.874084537949364e-06, |
| "loss": 1.0532, |
| "step": 1474 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.842519053476476e-06, |
| "loss": 1.0576, |
| "step": 1475 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.810997742939531e-06, |
| "loss": 1.0658, |
| "step": 1476 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.77952069251658e-06, |
| "loss": 1.0597, |
| "step": 1477 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.74808798826467e-06, |
| "loss": 1.0723, |
| "step": 1478 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.71669971611963e-06, |
| "loss": 1.0384, |
| "step": 1479 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.685355961895784e-06, |
| "loss": 1.0108, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.654056811285772e-06, |
| "loss": 1.0575, |
| "step": 1481 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.622802349860268e-06, |
| "loss": 1.0468, |
| "step": 1482 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.591592663067771e-06, |
| "loss": 1.0431, |
| "step": 1483 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.56042783623439e-06, |
| "loss": 1.0367, |
| "step": 1484 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.52930795456355e-06, |
| "loss": 1.0272, |
| "step": 1485 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.49823310313584e-06, |
| "loss": 1.0274, |
| "step": 1486 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.467203366908707e-06, |
| "loss": 1.0355, |
| "step": 1487 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.436218830716258e-06, |
| "loss": 1.0592, |
| "step": 1488 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.405279579269046e-06, |
| "loss": 1.0564, |
| "step": 1489 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.374385697153792e-06, |
| "loss": 1.0333, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.343537268833199e-06, |
| "loss": 1.0266, |
| "step": 1491 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.31273437864569e-06, |
| "loss": 1.0348, |
| "step": 1492 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.281977110805177e-06, |
| "loss": 1.0625, |
| "step": 1493 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.251265549400877e-06, |
| "loss": 1.0341, |
| "step": 1494 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.220599778397017e-06, |
| "loss": 1.0775, |
| "step": 1495 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.189979881632634e-06, |
| "loss": 1.0235, |
| "step": 1496 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.159405942821375e-06, |
| "loss": 1.0356, |
| "step": 1497 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.1288780455512e-06, |
| "loss": 1.032, |
| "step": 1498 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.098396273284236e-06, |
| "loss": 1.0012, |
| "step": 1499 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.067960709356478e-06, |
| "loss": 1.0359, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.75, |
| "eval_acceptance_rate": 0.8049868941307068, |
| "eval_acceptance_rate_1": 0.8042713403701782, |
| "eval_acceptance_rate_2": 0.8051593899726868, |
| "eval_acceptance_rate_3": 0.8043556213378906, |
| "eval_acceptance_rate_4": 0.8048893213272095, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.7156275956240408, |
| "eval_expected_tokens": 3.3945344945300513, |
| "eval_loss": 0.938217043876648, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5986336236325127, |
| "eval_runtime": 120.8862, |
| "eval_samples_per_second": 4.095, |
| "eval_steps_per_second": 0.132, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.037571436977582e-06, |
| "loss": 1.0471, |
| "step": 1501 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 8.007228539230676e-06, |
| "loss": 0.9997, |
| "step": 1502 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.976932099072068e-06, |
| "loss": 1.0484, |
| "step": 1503 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.946682199331088e-06, |
| "loss": 1.0266, |
| "step": 1504 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.91647892270979e-06, |
| "loss": 1.0376, |
| "step": 1505 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.886322351782783e-06, |
| "loss": 1.0515, |
| "step": 1506 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.856212568996987e-06, |
| "loss": 1.0684, |
| "step": 1507 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.826149656671386e-06, |
| "loss": 1.0676, |
| "step": 1508 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.796133696996858e-06, |
| "loss": 1.0405, |
| "step": 1509 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.766164772035856e-06, |
| "loss": 1.0213, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.736242963722299e-06, |
| "loss": 1.0327, |
| "step": 1511 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.706368353861269e-06, |
| "loss": 1.0809, |
| "step": 1512 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.676541024128798e-06, |
| "loss": 1.0638, |
| "step": 1513 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.646761056071686e-06, |
| "loss": 1.0103, |
| "step": 1514 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.617028531107201e-06, |
| "loss": 1.0343, |
| "step": 1515 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.587343530522945e-06, |
| "loss": 1.0095, |
| "step": 1516 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.5577061354765835e-06, |
| "loss": 1.0968, |
| "step": 1517 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.528116426995604e-06, |
| "loss": 1.0377, |
| "step": 1518 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.498574485977172e-06, |
| "loss": 1.0598, |
| "step": 1519 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.469080393187786e-06, |
| "loss": 1.0418, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.4396342292631895e-06, |
| "loss": 1.014, |
| "step": 1521 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.410236074708077e-06, |
| "loss": 1.0752, |
| "step": 1522 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.380886009895874e-06, |
| "loss": 1.0723, |
| "step": 1523 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.351584115068535e-06, |
| "loss": 1.0302, |
| "step": 1524 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.3223304703363135e-06, |
| "loss": 1.0601, |
| "step": 1525 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.293125155677566e-06, |
| "loss": 1.0232, |
| "step": 1526 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.263968250938516e-06, |
| "loss": 1.0432, |
| "step": 1527 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.234859835833021e-06, |
| "loss": 1.0607, |
| "step": 1528 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.205799989942372e-06, |
| "loss": 1.073, |
| "step": 1529 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 7.176788792715075e-06, |
| "loss": 1.047, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 7.147826323466638e-06, |
| "loss": 1.0289, |
| "step": 1531 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 7.118912661379368e-06, |
| "loss": 1.0914, |
| "step": 1532 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 7.090047885502077e-06, |
| "loss": 1.012, |
| "step": 1533 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 7.061232074749985e-06, |
| "loss": 1.0762, |
| "step": 1534 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 7.032465307904404e-06, |
| "loss": 1.029, |
| "step": 1535 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 7.003747663612581e-06, |
| "loss": 1.0638, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.9750792203874785e-06, |
| "loss": 1.0181, |
| "step": 1537 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.94646005660749e-06, |
| "loss": 1.0649, |
| "step": 1538 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.91789025051634e-06, |
| "loss": 1.0724, |
| "step": 1539 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.889369880222776e-06, |
| "loss": 1.0353, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.860899023700407e-06, |
| "loss": 1.0466, |
| "step": 1541 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.832477758787484e-06, |
| "loss": 1.0394, |
| "step": 1542 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.8041061631866245e-06, |
| "loss": 0.9814, |
| "step": 1543 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.775784314464717e-06, |
| "loss": 1.0304, |
| "step": 1544 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.747512290052596e-06, |
| "loss": 1.0376, |
| "step": 1545 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.71929016724491e-06, |
| "loss": 1.0842, |
| "step": 1546 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.691118023199861e-06, |
| "loss": 1.0342, |
| "step": 1547 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.662995934939007e-06, |
| "loss": 1.0171, |
| "step": 1548 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.634923979347074e-06, |
| "loss": 1.0236, |
| "step": 1549 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.606902233171711e-06, |
| "loss": 1.0852, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.5789307730233065e-06, |
| "loss": 1.0514, |
| "step": 1551 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.551009675374764e-06, |
| "loss": 1.0344, |
| "step": 1552 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.5231390165612884e-06, |
| "loss": 1.0183, |
| "step": 1553 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.49531887278021e-06, |
| "loss": 1.0296, |
| "step": 1554 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.46754932009073e-06, |
| "loss": 1.0207, |
| "step": 1555 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.439830434413754e-06, |
| "loss": 1.0031, |
| "step": 1556 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.412162291531651e-06, |
| "loss": 1.0529, |
| "step": 1557 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.384544967088063e-06, |
| "loss": 1.0185, |
| "step": 1558 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.3569785365877125e-06, |
| "loss": 1.0528, |
| "step": 1559 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.329463075396161e-06, |
| "loss": 1.0372, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.301998658739619e-06, |
| "loss": 1.0403, |
| "step": 1561 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.274585361704774e-06, |
| "loss": 1.0474, |
| "step": 1562 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.247223259238511e-06, |
| "loss": 1.0359, |
| "step": 1563 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.219912426147795e-06, |
| "loss": 1.0536, |
| "step": 1564 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.192652937099388e-06, |
| "loss": 1.0763, |
| "step": 1565 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.165444866619685e-06, |
| "loss": 1.0357, |
| "step": 1566 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.138288289094532e-06, |
| "loss": 1.0304, |
| "step": 1567 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.111183278768956e-06, |
| "loss": 1.0305, |
| "step": 1568 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.084129909747033e-06, |
| "loss": 1.0493, |
| "step": 1569 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 6.057128255991637e-06, |
| "loss": 1.0175, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 6.030178391324251e-06, |
| "loss": 1.038, |
| "step": 1571 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 6.003280389424789e-06, |
| "loss": 1.0722, |
| "step": 1572 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.976434323831348e-06, |
| "loss": 0.9955, |
| "step": 1573 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.9496402679400594e-06, |
| "loss": 1.0745, |
| "step": 1574 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.9228982950048416e-06, |
| "loss": 1.0231, |
| "step": 1575 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.896208478137222e-06, |
| "loss": 1.0219, |
| "step": 1576 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.869570890306153e-06, |
| "loss": 1.0268, |
| "step": 1577 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.842985604337769e-06, |
| "loss": 1.0198, |
| "step": 1578 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.816452692915242e-06, |
| "loss": 1.0343, |
| "step": 1579 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.78997222857853e-06, |
| "loss": 1.0505, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.763544283724204e-06, |
| "loss": 1.0467, |
| "step": 1581 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.737168930605272e-06, |
| "loss": 1.0451, |
| "step": 1582 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.710846241330928e-06, |
| "loss": 1.0099, |
| "step": 1583 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.684576287866411e-06, |
| "loss": 1.099, |
| "step": 1584 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.6583591420327684e-06, |
| "loss": 1.0455, |
| "step": 1585 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.632194875506663e-06, |
| "loss": 1.0892, |
| "step": 1586 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.606083559820219e-06, |
| "loss": 1.019, |
| "step": 1587 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.580025266360764e-06, |
| "loss": 1.0292, |
| "step": 1588 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.554020066370677e-06, |
| "loss": 1.0548, |
| "step": 1589 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.528068030947192e-06, |
| "loss": 1.0244, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.50216923104217e-06, |
| "loss": 1.0455, |
| "step": 1591 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.476323737461955e-06, |
| "loss": 1.0269, |
| "step": 1592 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.45053162086713e-06, |
| "loss": 1.0354, |
| "step": 1593 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.424792951772353e-06, |
| "loss": 1.0091, |
| "step": 1594 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.399107800546177e-06, |
| "loss": 1.0317, |
| "step": 1595 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.373476237410807e-06, |
| "loss": 1.0673, |
| "step": 1596 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.347898332441975e-06, |
| "loss": 1.0433, |
| "step": 1597 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.322374155568688e-06, |
| "loss": 1.043, |
| "step": 1598 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.296903776573065e-06, |
| "loss": 1.0033, |
| "step": 1599 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.271487265090163e-06, |
| "loss": 1.0322, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.8, |
| "eval_acceptance_rate": 0.8054376244544983, |
| "eval_acceptance_rate_1": 0.8046924471855164, |
| "eval_acceptance_rate_2": 0.805618166923523, |
| "eval_acceptance_rate_3": 0.804777204990387, |
| "eval_acceptance_rate_4": 0.8054026961326599, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.7180232513126814, |
| "eval_expected_tokens": 3.3975290641408518, |
| "eval_loss": 0.9369726181030273, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5993822660352129, |
| "eval_runtime": 116.5819, |
| "eval_samples_per_second": 4.246, |
| "eval_steps_per_second": 0.137, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.24612469060774e-06, |
| "loss": 1.0502, |
| "step": 1601 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.220816122466119e-06, |
| "loss": 1.0557, |
| "step": 1602 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.195561629857953e-06, |
| "loss": 1.0019, |
| "step": 1603 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.170361281828054e-06, |
| "loss": 0.987, |
| "step": 1604 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.145215147273224e-06, |
| "loss": 1.0385, |
| "step": 1605 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.120123294942022e-06, |
| "loss": 1.0036, |
| "step": 1606 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.095085793434629e-06, |
| "loss": 1.0538, |
| "step": 1607 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.070102711202607e-06, |
| "loss": 1.0107, |
| "step": 1608 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 5.045174116548745e-06, |
| "loss": 1.0824, |
| "step": 1609 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 5.0203000776268825e-06, |
| "loss": 1.0556, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.995480662441679e-06, |
| "loss": 1.0486, |
| "step": 1611 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.970715938848478e-06, |
| "loss": 1.0595, |
| "step": 1612 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.946005974553086e-06, |
| "loss": 1.0412, |
| "step": 1613 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.9213508371115935e-06, |
| "loss": 1.0516, |
| "step": 1614 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.896750593930216e-06, |
| "loss": 1.0574, |
| "step": 1615 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.872205312265074e-06, |
| "loss": 1.054, |
| "step": 1616 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.847715059222024e-06, |
| "loss": 1.0704, |
| "step": 1617 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.823279901756497e-06, |
| "loss": 1.0938, |
| "step": 1618 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.798899906673263e-06, |
| "loss": 1.0153, |
| "step": 1619 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.7745751406263165e-06, |
| "loss": 1.0324, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.750305670118629e-06, |
| "loss": 1.0414, |
| "step": 1621 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.726091561502e-06, |
| "loss": 1.0737, |
| "step": 1622 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.7019328809768895e-06, |
| "loss": 1.0583, |
| "step": 1623 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.677829694592198e-06, |
| "loss": 1.0573, |
| "step": 1624 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.653782068245127e-06, |
| "loss": 1.0613, |
| "step": 1625 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.629790067680964e-06, |
| "loss": 1.0058, |
| "step": 1626 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.605853758492914e-06, |
| "loss": 1.0071, |
| "step": 1627 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.581973206121948e-06, |
| "loss": 1.0141, |
| "step": 1628 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.5581484758565665e-06, |
| "loss": 1.0345, |
| "step": 1629 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.534379632832692e-06, |
| "loss": 1.0637, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.51066674203342e-06, |
| "loss": 1.015, |
| "step": 1631 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.487009868288888e-06, |
| "loss": 1.0177, |
| "step": 1632 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.463409076276095e-06, |
| "loss": 1.0255, |
| "step": 1633 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.439864430518692e-06, |
| "loss": 0.9817, |
| "step": 1634 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.416375995386857e-06, |
| "loss": 1.0544, |
| "step": 1635 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.392943835097069e-06, |
| "loss": 1.0541, |
| "step": 1636 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.369568013711947e-06, |
| "loss": 1.0618, |
| "step": 1637 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.346248595140112e-06, |
| "loss": 1.0363, |
| "step": 1638 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.322985643135952e-06, |
| "loss": 1.0105, |
| "step": 1639 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.299779221299499e-06, |
| "loss": 1.0419, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.27662939307622e-06, |
| "loss": 1.0737, |
| "step": 1641 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.253536221756851e-06, |
| "loss": 1.0044, |
| "step": 1642 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.230499770477258e-06, |
| "loss": 1.0848, |
| "step": 1643 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.207520102218213e-06, |
| "loss": 1.062, |
| "step": 1644 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.184597279805241e-06, |
| "loss": 1.0315, |
| "step": 1645 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.161731365908481e-06, |
| "loss": 1.0672, |
| "step": 1646 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.138922423042449e-06, |
| "loss": 1.0524, |
| "step": 1647 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.116170513565942e-06, |
| "loss": 1.0456, |
| "step": 1648 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.093475699681806e-06, |
| "loss": 1.0835, |
| "step": 1649 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.070838043436786e-06, |
| "loss": 1.0587, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 4.048257606721381e-06, |
| "loss": 1.0059, |
| "step": 1651 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 4.025734451269636e-06, |
| "loss": 1.0314, |
| "step": 1652 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 4.003268638659005e-06, |
| "loss": 1.067, |
| "step": 1653 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.98086023031016e-06, |
| "loss": 1.01, |
| "step": 1654 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.958509287486823e-06, |
| "loss": 1.0388, |
| "step": 1655 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.936215871295634e-06, |
| "loss": 1.0556, |
| "step": 1656 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.913980042685928e-06, |
| "loss": 0.9993, |
| "step": 1657 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.891801862449629e-06, |
| "loss": 1.0657, |
| "step": 1658 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.869681391221011e-06, |
| "loss": 1.0398, |
| "step": 1659 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.847618689476612e-06, |
| "loss": 1.0507, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.825613817535021e-06, |
| "loss": 1.0495, |
| "step": 1661 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.8036668355567045e-06, |
| "loss": 1.0608, |
| "step": 1662 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.78177780354389e-06, |
| "loss": 1.0432, |
| "step": 1663 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.7599467813403315e-06, |
| "loss": 1.0121, |
| "step": 1664 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.738173828631228e-06, |
| "loss": 1.0254, |
| "step": 1665 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.7164590049429987e-06, |
| "loss": 1.0461, |
| "step": 1666 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.6948023696431354e-06, |
| "loss": 1.0759, |
| "step": 1667 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.6732039819400683e-06, |
| "loss": 1.0132, |
| "step": 1668 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.65166390088294e-06, |
| "loss": 1.0444, |
| "step": 1669 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.630182185361522e-06, |
| "loss": 1.0453, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.6087588941060124e-06, |
| "loss": 1.046, |
| "step": 1671 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.5873940856868656e-06, |
| "loss": 1.0085, |
| "step": 1672 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.5660878185146463e-06, |
| "loss": 1.0241, |
| "step": 1673 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.544840150839876e-06, |
| "loss": 1.0226, |
| "step": 1674 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.523651140752868e-06, |
| "loss": 1.0297, |
| "step": 1675 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.502520846183577e-06, |
| "loss": 1.0479, |
| "step": 1676 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.4814493249014116e-06, |
| "loss": 1.0554, |
| "step": 1677 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.46043663451511e-06, |
| "loss": 1.0514, |
| "step": 1678 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.439482832472554e-06, |
| "loss": 0.9779, |
| "step": 1679 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.418587976060653e-06, |
| "loss": 1.022, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.3977521224051427e-06, |
| "loss": 1.0822, |
| "step": 1681 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.3769753284704526e-06, |
| "loss": 1.0158, |
| "step": 1682 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.3562576510595385e-06, |
| "loss": 1.0438, |
| "step": 1683 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.3355991468137394e-06, |
| "loss": 1.0723, |
| "step": 1684 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.314999872212618e-06, |
| "loss": 1.0183, |
| "step": 1685 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.2944598835738193e-06, |
| "loss": 0.9958, |
| "step": 1686 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.2739792370528628e-06, |
| "loss": 1.0672, |
| "step": 1687 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.2535579886430718e-06, |
| "loss": 1.0308, |
| "step": 1688 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.2331961941753474e-06, |
| "loss": 1.0232, |
| "step": 1689 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.2128939093180655e-06, |
| "loss": 1.0111, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 3.192651189576909e-06, |
| "loss": 1.0072, |
| "step": 1691 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 3.1724680902946753e-06, |
| "loss": 1.0377, |
| "step": 1692 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 3.152344666651208e-06, |
| "loss": 1.043, |
| "step": 1693 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 3.1322809736631654e-06, |
| "loss": 1.0183, |
| "step": 1694 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 3.11227706618393e-06, |
| "loss": 1.0676, |
| "step": 1695 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 3.092332998903416e-06, |
| "loss": 1.0629, |
| "step": 1696 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 3.072448826347932e-06, |
| "loss": 1.0335, |
| "step": 1697 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 3.0526246028800637e-06, |
| "loss": 1.0138, |
| "step": 1698 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 3.0328603826984658e-06, |
| "loss": 1.1068, |
| "step": 1699 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 3.013156219837776e-06, |
| "loss": 1.0675, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.85, |
| "eval_acceptance_rate": 0.805456280708313, |
| "eval_acceptance_rate_1": 0.8047510981559753, |
| "eval_acceptance_rate_2": 0.8056121468544006, |
| "eval_acceptance_rate_3": 0.8047822713851929, |
| "eval_acceptance_rate_4": 0.8054250478744507, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.71812246148479, |
| "eval_expected_tokens": 3.3976530768559874, |
| "eval_loss": 0.9364066123962402, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5994132692139968, |
| "eval_runtime": 120.9013, |
| "eval_samples_per_second": 4.094, |
| "eval_steps_per_second": 0.132, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.9935121681684138e-06, |
| "loss": 1.0321, |
| "step": 1701 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.9739282813964627e-06, |
| "loss": 0.995, |
| "step": 1702 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.954404613063527e-06, |
| "loss": 1.0246, |
| "step": 1703 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.9349412165465773e-06, |
| "loss": 1.0467, |
| "step": 1704 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.9155381450577863e-06, |
| "loss": 1.058, |
| "step": 1705 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.896195451644415e-06, |
| "loss": 1.0161, |
| "step": 1706 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.876913189188643e-06, |
| "loss": 1.0368, |
| "step": 1707 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.8576914104074425e-06, |
| "loss": 1.0646, |
| "step": 1708 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.838530167852435e-06, |
| "loss": 1.0333, |
| "step": 1709 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.8194295139097048e-06, |
| "loss": 1.0371, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.8003895007997274e-06, |
| "loss": 1.0161, |
| "step": 1711 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.781410180577157e-06, |
| "loss": 1.0142, |
| "step": 1712 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.7624916051307405e-06, |
| "loss": 0.9961, |
| "step": 1713 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.743633826183145e-06, |
| "loss": 1.0144, |
| "step": 1714 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.7248368952908053e-06, |
| "loss": 1.0487, |
| "step": 1715 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.706100863843822e-06, |
| "loss": 1.0585, |
| "step": 1716 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.6874257830657805e-06, |
| "loss": 1.039, |
| "step": 1717 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.6688117040136464e-06, |
| "loss": 1.0201, |
| "step": 1718 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.6502586775776076e-06, |
| "loss": 1.034, |
| "step": 1719 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.6317667544809134e-06, |
| "loss": 1.0182, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.6133359852797886e-06, |
| "loss": 1.0833, |
| "step": 1721 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.5949664203632428e-06, |
| "loss": 1.0108, |
| "step": 1722 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.576658109952973e-06, |
| "loss": 1.0262, |
| "step": 1723 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.558411104103198e-06, |
| "loss": 1.057, |
| "step": 1724 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.5402254527005287e-06, |
| "loss": 1.0137, |
| "step": 1725 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.522101205463853e-06, |
| "loss": 1.0776, |
| "step": 1726 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.5040384119441594e-06, |
| "loss": 1.0798, |
| "step": 1727 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.4860371215244484e-06, |
| "loss": 1.0189, |
| "step": 1728 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.4680973834195516e-06, |
| "loss": 1.0754, |
| "step": 1729 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.4502192466760276e-06, |
| "loss": 0.9813, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.4324027601720257e-06, |
| "loss": 1.0602, |
| "step": 1731 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.414647972617129e-06, |
| "loss": 1.0331, |
| "step": 1732 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.3969549325522618e-06, |
| "loss": 1.0336, |
| "step": 1733 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.379323688349516e-06, |
| "loss": 1.0352, |
| "step": 1734 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.361754288212031e-06, |
| "loss": 1.0061, |
| "step": 1735 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.3442467801738863e-06, |
| "loss": 1.0297, |
| "step": 1736 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.326801212099938e-06, |
| "loss": 1.0246, |
| "step": 1737 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.309417631685698e-06, |
| "loss": 1.0362, |
| "step": 1738 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.2920960864572212e-06, |
| "loss": 1.0604, |
| "step": 1739 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.2748366237709374e-06, |
| "loss": 1.0378, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.2576392908135707e-06, |
| "loss": 1.0502, |
| "step": 1741 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.24050413460197e-06, |
| "loss": 1.0255, |
| "step": 1742 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.223431201982992e-06, |
| "loss": 1.0185, |
| "step": 1743 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.2064205396333886e-06, |
| "loss": 1.0011, |
| "step": 1744 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.1894721940596554e-06, |
| "loss": 1.0398, |
| "step": 1745 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.1725862115979322e-06, |
| "loss": 1.0836, |
| "step": 1746 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.155762638413841e-06, |
| "loss": 1.0202, |
| "step": 1747 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.13900152050239e-06, |
| "loss": 1.0487, |
| "step": 1748 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.1223029036878395e-06, |
| "loss": 1.0326, |
| "step": 1749 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 2.1056668336235622e-06, |
| "loss": 1.0501, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 2.089093355791952e-06, |
| "loss": 1.0302, |
| "step": 1751 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 2.072582515504254e-06, |
| "loss": 1.0516, |
| "step": 1752 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 2.0561343579004715e-06, |
| "loss": 0.9906, |
| "step": 1753 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 2.0397489279492506e-06, |
| "loss": 1.1006, |
| "step": 1754 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 2.023426270447723e-06, |
| "loss": 1.0594, |
| "step": 1755 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 2.007166430021415e-06, |
| "loss": 1.0731, |
| "step": 1756 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.990969451124111e-06, |
| "loss": 1.0098, |
| "step": 1757 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.9748353780377234e-06, |
| "loss": 1.0514, |
| "step": 1758 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.958764254872206e-06, |
| "loss": 1.0095, |
| "step": 1759 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.9427561255653816e-06, |
| "loss": 1.0192, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.9268110338828747e-06, |
| "loss": 1.0065, |
| "step": 1761 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.9109290234179567e-06, |
| "loss": 1.0526, |
| "step": 1762 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.895110137591427e-06, |
| "loss": 1.046, |
| "step": 1763 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.87935441965153e-06, |
| "loss": 1.012, |
| "step": 1764 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.8636619126737892e-06, |
| "loss": 1.0432, |
| "step": 1765 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.8480326595609166e-06, |
| "loss": 1.0238, |
| "step": 1766 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.832466703042701e-06, |
| "loss": 1.054, |
| "step": 1767 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.8169640856758651e-06, |
| "loss": 1.0777, |
| "step": 1768 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.80152484984398e-06, |
| "loss": 1.041, |
| "step": 1769 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.7861490377573258e-06, |
| "loss": 1.0392, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.770836691452779e-06, |
| "loss": 0.9898, |
| "step": 1771 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.7555878527937164e-06, |
| "loss": 1.0174, |
| "step": 1772 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.7404025634698756e-06, |
| "loss": 1.0932, |
| "step": 1773 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.7252808649972668e-06, |
| "loss": 1.0655, |
| "step": 1774 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.710222798718028e-06, |
| "loss": 1.0313, |
| "step": 1775 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6952284058003366e-06, |
| "loss": 1.0293, |
| "step": 1776 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6802977272382974e-06, |
| "loss": 1.0253, |
| "step": 1777 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6654308038518058e-06, |
| "loss": 1.0673, |
| "step": 1778 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.650627676286473e-06, |
| "loss": 1.0366, |
| "step": 1779 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6358883850134816e-06, |
| "loss": 1.0027, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6212129703294832e-06, |
| "loss": 1.0071, |
| "step": 1781 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6066014723565116e-06, |
| "loss": 1.0011, |
| "step": 1782 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.5920539310418341e-06, |
| "loss": 1.0222, |
| "step": 1783 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.5775703861578866e-06, |
| "loss": 1.0125, |
| "step": 1784 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.5631508773021165e-06, |
| "loss": 1.0607, |
| "step": 1785 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.5487954438969139e-06, |
| "loss": 1.0145, |
| "step": 1786 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.5345041251894871e-06, |
| "loss": 1.0147, |
| "step": 1787 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.5202769602517515e-06, |
| "loss": 1.0362, |
| "step": 1788 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.5061139879802372e-06, |
| "loss": 1.0256, |
| "step": 1789 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4920152470959707e-06, |
| "loss": 1.0002, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4779807761443636e-06, |
| "loss": 1.068, |
| "step": 1791 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4640106134951316e-06, |
| "loss": 1.0429, |
| "step": 1792 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4501047973421639e-06, |
| "loss": 1.0779, |
| "step": 1793 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4362633657034248e-06, |
| "loss": 1.0725, |
| "step": 1794 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4224863564208684e-06, |
| "loss": 1.086, |
| "step": 1795 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4087738071603075e-06, |
| "loss": 1.0511, |
| "step": 1796 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.3951257554113306e-06, |
| "loss": 1.0371, |
| "step": 1797 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.3815422384871879e-06, |
| "loss": 1.0318, |
| "step": 1798 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.368023293524695e-06, |
| "loss": 1.0576, |
| "step": 1799 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.3545689574841342e-06, |
| "loss": 1.0213, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.9, |
| "eval_acceptance_rate": 0.8056168556213379, |
| "eval_acceptance_rate_1": 0.8049073219299316, |
| "eval_acceptance_rate_2": 0.8058131337165833, |
| "eval_acceptance_rate_3": 0.8049455285072327, |
| "eval_acceptance_rate_4": 0.8055549263954163, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.7189765345835317, |
| "eval_expected_tokens": 3.3987206682294144, |
| "eval_loss": 0.9362132549285889, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5996801670573536, |
| "eval_runtime": 117.7186, |
| "eval_samples_per_second": 4.205, |
| "eval_steps_per_second": 0.136, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.3411792671491424e-06, |
| "loss": 1.0487, |
| "step": 1801 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.3278542591266291e-06, |
| "loss": 1.0164, |
| "step": 1802 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.3145939698466502e-06, |
| "loss": 1.0293, |
| "step": 1803 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.3013984355623315e-06, |
| "loss": 1.1083, |
| "step": 1804 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.288267692349765e-06, |
| "loss": 1.0126, |
| "step": 1805 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2752017761078927e-06, |
| "loss": 1.0457, |
| "step": 1806 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2622007225584431e-06, |
| "loss": 1.0452, |
| "step": 1807 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2492645672457837e-06, |
| "loss": 1.0583, |
| "step": 1808 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2363933455368792e-06, |
| "loss": 1.073, |
| "step": 1809 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.2235870926211619e-06, |
| "loss": 1.0061, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.2108458435104274e-06, |
| "loss": 1.04, |
| "step": 1811 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1981696330387787e-06, |
| "loss": 1.0073, |
| "step": 1812 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1855584958624765e-06, |
| "loss": 1.0498, |
| "step": 1813 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1730124664598912e-06, |
| "loss": 1.0291, |
| "step": 1814 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1605315791313964e-06, |
| "loss": 1.0121, |
| "step": 1815 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1481158679992555e-06, |
| "loss": 1.0092, |
| "step": 1816 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.135765367007552e-06, |
| "loss": 1.0793, |
| "step": 1817 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1234801099220787e-06, |
| "loss": 1.014, |
| "step": 1818 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1112601303302605e-06, |
| "loss": 1.0659, |
| "step": 1819 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.0991054616410589e-06, |
| "loss": 1.0831, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.087016137084873e-06, |
| "loss": 0.9983, |
| "step": 1821 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.074992189713453e-06, |
| "loss": 1.0953, |
| "step": 1822 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.0630336523998113e-06, |
| "loss": 0.975, |
| "step": 1823 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.051140557838129e-06, |
| "loss": 1.0235, |
| "step": 1824 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.0393129385436824e-06, |
| "loss": 1.0468, |
| "step": 1825 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.0275508268527246e-06, |
| "loss": 1.0269, |
| "step": 1826 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.0158542549224186e-06, |
| "loss": 1.0665, |
| "step": 1827 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.004223254730749e-06, |
| "loss": 1.0247, |
| "step": 1828 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 9.926578580764234e-07, |
| "loss": 1.0367, |
| "step": 1829 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 9.811580965787965e-07, |
| "loss": 1.0595, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 9.69724001677777e-07, |
| "loss": 1.0648, |
| "step": 1831 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 9.5835560463374e-07, |
| "loss": 1.0148, |
| "step": 1832 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 9.470529365274566e-07, |
| "loss": 1.0162, |
| "step": 1833 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 9.358160282599809e-07, |
| "loss": 1.0398, |
| "step": 1834 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 9.246449105525995e-07, |
| "loss": 1.0733, |
| "step": 1835 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 9.135396139467151e-07, |
| "loss": 1.0501, |
| "step": 1836 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 9.025001688037854e-07, |
| "loss": 1.043, |
| "step": 1837 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.915266053052373e-07, |
| "loss": 1.0533, |
| "step": 1838 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.806189534523668e-07, |
| "loss": 1.019, |
| "step": 1839 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.697772430662859e-07, |
| "loss": 1.0182, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.590015037878068e-07, |
| "loss": 0.9945, |
| "step": 1841 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.48291765077397e-07, |
| "loss": 1.0192, |
| "step": 1842 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.376480562150707e-07, |
| "loss": 1.0746, |
| "step": 1843 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.270704063003232e-07, |
| "loss": 1.0299, |
| "step": 1844 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.165588442520439e-07, |
| "loss": 1.0445, |
| "step": 1845 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.061133988084391e-07, |
| "loss": 1.0117, |
| "step": 1846 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.957340985269596e-07, |
| "loss": 1.0694, |
| "step": 1847 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.854209717842231e-07, |
| "loss": 1.054, |
| "step": 1848 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.751740467759145e-07, |
| "loss": 1.0164, |
| "step": 1849 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 7.649933515167407e-07, |
| "loss": 1.043, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 7.548789138403317e-07, |
| "loss": 1.0131, |
| "step": 1851 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 7.448307613991734e-07, |
| "loss": 1.0369, |
| "step": 1852 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 7.348489216645327e-07, |
| "loss": 1.0765, |
| "step": 1853 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 7.249334219263799e-07, |
| "loss": 1.0446, |
| "step": 1854 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 7.150842892933107e-07, |
| "loss": 1.0724, |
| "step": 1855 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 7.053015506924748e-07, |
| "loss": 1.0252, |
| "step": 1856 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.955852328695056e-07, |
| "loss": 1.0499, |
| "step": 1857 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.859353623884567e-07, |
| "loss": 0.9969, |
| "step": 1858 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.763519656316914e-07, |
| "loss": 1.0718, |
| "step": 1859 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.668350687998565e-07, |
| "loss": 1.0356, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.573846979117809e-07, |
| "loss": 1.0071, |
| "step": 1861 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.480008788044223e-07, |
| "loss": 1.0012, |
| "step": 1862 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.386836371327892e-07, |
| "loss": 1.0654, |
| "step": 1863 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.2943299836985e-07, |
| "loss": 1.0438, |
| "step": 1864 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.202489878065071e-07, |
| "loss": 1.0114, |
| "step": 1865 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.111316305514925e-07, |
| "loss": 1.0471, |
| "step": 1866 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.020809515313142e-07, |
| "loss": 1.0422, |
| "step": 1867 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.930969754901843e-07, |
| "loss": 1.0628, |
| "step": 1868 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.841797269899447e-07, |
| "loss": 1.0523, |
| "step": 1869 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 5.753292304100183e-07, |
| "loss": 1.0533, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 5.665455099473221e-07, |
| "loss": 1.0418, |
| "step": 1871 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 5.578285896162106e-07, |
| "loss": 1.0207, |
| "step": 1872 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 5.491784932484173e-07, |
| "loss": 1.006, |
| "step": 1873 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 5.405952444929668e-07, |
| "loss": 1.0387, |
| "step": 1874 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 5.32078866816138e-07, |
| "loss": 1.0243, |
| "step": 1875 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 5.236293835013839e-07, |
| "loss": 1.0285, |
| "step": 1876 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 5.152468176492675e-07, |
| "loss": 1.0484, |
| "step": 1877 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 5.069311921774039e-07, |
| "loss": 1.0017, |
| "step": 1878 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.986825298203935e-07, |
| "loss": 1.0356, |
| "step": 1879 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.905008531297661e-07, |
| "loss": 1.0651, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.823861844739152e-07, |
| "loss": 1.0348, |
| "step": 1881 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.743385460380334e-07, |
| "loss": 1.061, |
| "step": 1882 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.663579598240569e-07, |
| "loss": 1.0435, |
| "step": 1883 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.5844444765059945e-07, |
| "loss": 1.0007, |
| "step": 1884 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.505980311529101e-07, |
| "loss": 1.0314, |
| "step": 1885 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.4281873178278475e-07, |
| "loss": 1.0571, |
| "step": 1886 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.351065708085328e-07, |
| "loss": 1.0501, |
| "step": 1887 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.2746156931490754e-07, |
| "loss": 1.0112, |
| "step": 1888 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.1988374820305377e-07, |
| "loss": 1.0404, |
| "step": 1889 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.1237312819044085e-07, |
| "loss": 1.013, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 4.049297298108212e-07, |
| "loss": 1.0486, |
| "step": 1891 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.9755357341415835e-07, |
| "loss": 0.9871, |
| "step": 1892 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.902446791665848e-07, |
| "loss": 1.0775, |
| "step": 1893 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.83003067050336e-07, |
| "loss": 1.0516, |
| "step": 1894 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.758287568637081e-07, |
| "loss": 1.0603, |
| "step": 1895 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.687217682209837e-07, |
| "loss": 1.0598, |
| "step": 1896 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.616821205524007e-07, |
| "loss": 1.0456, |
| "step": 1897 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.5470983310409157e-07, |
| "loss": 1.0245, |
| "step": 1898 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.478049249380194e-07, |
| "loss": 1.0201, |
| "step": 1899 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.4096741493194197e-07, |
| "loss": 1.0096, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.95, |
| "eval_acceptance_rate": 0.8056114912033081, |
| "eval_acceptance_rate_1": 0.8048818707466125, |
| "eval_acceptance_rate_2": 0.8058326840400696, |
| "eval_acceptance_rate_3": 0.8049170970916748, |
| "eval_acceptance_rate_4": 0.8055611848831177, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.7189479972053645, |
| "eval_expected_tokens": 3.398684996506706, |
| "eval_loss": 0.9361222386360168, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5996712491266765, |
| "eval_runtime": 121.0063, |
| "eval_samples_per_second": 4.091, |
| "eval_steps_per_second": 0.132, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.341973217793476e-07, |
| "loss": 1.0343, |
| "step": 1901 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.2749466398941384e-07, |
| "loss": 1.0196, |
| "step": 1902 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.2085945988695464e-07, |
| "loss": 1.0649, |
| "step": 1903 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.142917276123564e-07, |
| "loss": 1.0531, |
| "step": 1904 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.077914851215585e-07, |
| "loss": 1.0683, |
| "step": 1905 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.013587501859677e-07, |
| "loss": 1.0391, |
| "step": 1906 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.949935403924381e-07, |
| "loss": 1.0373, |
| "step": 1907 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.886958731432132e-07, |
| "loss": 0.9893, |
| "step": 1908 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.824657656558705e-07, |
| "loss": 1.0253, |
| "step": 1909 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.763032349632877e-07, |
| "loss": 1.0286, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.7020829791359057e-07, |
| "loss": 1.0266, |
| "step": 1911 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.641809711700999e-07, |
| "loss": 1.0269, |
| "step": 1912 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.582212712113008e-07, |
| "loss": 1.0465, |
| "step": 1913 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.5232921433078483e-07, |
| "loss": 1.0377, |
| "step": 1914 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.4650481663720525e-07, |
| "loss": 1.0286, |
| "step": 1915 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.4074809405425225e-07, |
| "loss": 1.0304, |
| "step": 1916 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.3505906232057505e-07, |
| "loss": 1.0213, |
| "step": 1917 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.2943773698977932e-07, |
| "loss": 1.0257, |
| "step": 1918 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.2388413343034652e-07, |
| "loss": 1.0147, |
| "step": 1919 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.1839826682562015e-07, |
| "loss": 1.0204, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.129801521737501e-07, |
| "loss": 1.081, |
| "step": 1921 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.0762980428765122e-07, |
| "loss": 1.0299, |
| "step": 1922 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.0234723779497533e-07, |
| "loss": 1.0389, |
| "step": 1923 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.9713246713805588e-07, |
| "loss": 1.0487, |
| "step": 1924 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.919855065738746e-07, |
| "loss": 1.0534, |
| "step": 1925 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.8690637017402534e-07, |
| "loss": 1.0634, |
| "step": 1926 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.8189507182467535e-07, |
| "loss": 1.0394, |
| "step": 1927 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.7695162522652353e-07, |
| "loss": 1.0369, |
| "step": 1928 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.720760438947644e-07, |
| "loss": 0.9994, |
| "step": 1929 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.6726834115904643e-07, |
| "loss": 0.9936, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.6252853016344716e-07, |
| "loss": 1.0279, |
| "step": 1931 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.578566238664314e-07, |
| "loss": 1.0518, |
| "step": 1932 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.5325263504081256e-07, |
| "loss": 0.993, |
| "step": 1933 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.4871657627371916e-07, |
| "loss": 1.0272, |
| "step": 1934 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.4424845996655888e-07, |
| "loss": 1.0401, |
| "step": 1935 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.3984829833499636e-07, |
| "loss": 1.0372, |
| "step": 1936 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.3551610340890307e-07, |
| "loss": 1.0033, |
| "step": 1937 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.3125188703233816e-07, |
| "loss": 1.0374, |
| "step": 1938 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.2705566086350097e-07, |
| "loss": 1.0516, |
| "step": 1939 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.229274363747146e-07, |
| "loss": 1.0489, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1886722485238978e-07, |
| "loss": 1.0244, |
| "step": 1941 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1487503739698869e-07, |
| "loss": 1.0664, |
| "step": 1942 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.109508849230001e-07, |
| "loss": 1.0236, |
| "step": 1943 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.0709477815890601e-07, |
| "loss": 1.0221, |
| "step": 1944 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.0330672764715387e-07, |
| "loss": 1.034, |
| "step": 1945 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 9.958674374413168e-08, |
| "loss": 1.0451, |
| "step": 1946 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 9.59348366201318e-08, |
| "loss": 1.0319, |
| "step": 1947 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 9.235101625932885e-08, |
| "loss": 1.0202, |
| "step": 1948 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 8.883529245975186e-08, |
| "loss": 1.0208, |
| "step": 1949 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 8.538767483325383e-08, |
| "loss": 1.0155, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 8.200817280549222e-08, |
| "loss": 1.0102, |
| "step": 1951 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 7.869679561589293e-08, |
| "loss": 1.0196, |
| "step": 1952 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 7.545355231763918e-08, |
| "loss": 1.0505, |
| "step": 1953 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 7.227845177762982e-08, |
| "loss": 1.0344, |
| "step": 1954 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 6.91715026764711e-08, |
| "loss": 1.012, |
| "step": 1955 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 6.613271350844608e-08, |
| "loss": 1.026, |
| "step": 1956 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 6.316209258148131e-08, |
| "loss": 1.0403, |
| "step": 1957 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 6.025964801714412e-08, |
| "loss": 1.0685, |
| "step": 1958 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 5.742538775061201e-08, |
| "loss": 1.0138, |
| "step": 1959 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 5.4659319530636633e-08, |
| "loss": 1.0136, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 5.196145091954652e-08, |
| "loss": 1.016, |
| "step": 1961 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 4.9331789293211026e-08, |
| "loss": 1.0318, |
| "step": 1962 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 4.677034184102369e-08, |
| "loss": 1.0961, |
| "step": 1963 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 4.427711556588832e-08, |
| "loss": 1.0609, |
| "step": 1964 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 4.18521172841857e-08, |
| "loss": 1.0276, |
| "step": 1965 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 3.949535362576806e-08, |
| "loss": 1.0196, |
| "step": 1966 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 3.72068310339424e-08, |
| "loss": 1.0558, |
| "step": 1967 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 3.4986555765434413e-08, |
| "loss": 0.9643, |
| "step": 1968 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 3.283453389039959e-08, |
| "loss": 1.0254, |
| "step": 1969 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 3.075077129238158e-08, |
| "loss": 1.0258, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.8735273668303863e-08, |
| "loss": 1.0483, |
| "step": 1971 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.6788046528461453e-08, |
| "loss": 1.0653, |
| "step": 1972 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.490909519650142e-08, |
| "loss": 1.0227, |
| "step": 1973 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.309842480940627e-08, |
| "loss": 1.0532, |
| "step": 1974 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.1356040317474512e-08, |
| "loss": 1.0395, |
| "step": 1975 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.9681946484320644e-08, |
| "loss": 0.9769, |
| "step": 1976 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.8076147886855744e-08, |
| "loss": 1.032, |
| "step": 1977 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.6538648915270792e-08, |
| "loss": 1.034, |
| "step": 1978 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.50694537730256e-08, |
| "loss": 1.0486, |
| "step": 1979 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.3668566476848777e-08, |
| "loss": 1.0247, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.233599085671e-08, |
| "loss": 1.0239, |
| "step": 1981 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.107173055582278e-08, |
| "loss": 0.9958, |
| "step": 1982 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 9.875789030622252e-09, |
| "loss": 1.0838, |
| "step": 1983 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 8.74816955076796e-09, |
| "loss": 1.0383, |
| "step": 1984 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 7.688875199132751e-09, |
| "loss": 1.0347, |
| "step": 1985 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 6.697908871780567e-09, |
| "loss": 1.026, |
| "step": 1986 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 5.775273277980331e-09, |
| "loss": 1.0376, |
| "step": 1987 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 4.920970940180958e-09, |
| "loss": 1.0359, |
| "step": 1988 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 4.1350041940113604e-09, |
| "loss": 1.0066, |
| "step": 1989 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 3.417375188274896e-09, |
| "loss": 1.0479, |
| "step": 1990 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.768085884943816e-09, |
| "loss": 1.015, |
| "step": 1991 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1871380591509392e-09, |
| "loss": 1.0411, |
| "step": 1992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.6745332991813245e-09, |
| "loss": 1.0089, |
| "step": 1993 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.2302730064778222e-09, |
| "loss": 1.0002, |
| "step": 1994 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 8.543583956355239e-10, |
| "loss": 0.9952, |
| "step": 1995 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 5.467904943851077e-10, |
| "loss": 1.049, |
| "step": 1996 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.0757014360671686e-10, |
| "loss": 1.0491, |
| "step": 1997 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.3669799732163313e-10, |
| "loss": 1.0033, |
| "step": 1998 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.417452268950072e-11, |
| "loss": 1.0398, |
| "step": 1999 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.0, |
| "loss": 1.0098, |
| "step": 2000 |
| }, |
| { |
| "epoch": 1.0, |
| "eval_acceptance_rate": 0.8056314587593079, |
| "eval_acceptance_rate_1": 0.8049386739730835, |
| "eval_acceptance_rate_2": 0.805794894695282, |
| "eval_acceptance_rate_3": 0.8049510717391968, |
| "eval_acceptance_rate_4": 0.8055833578109741, |
| "eval_avg_generation_length": 215.4989013671875, |
| "eval_avg_input_length": 365.6648864746094, |
| "eval_expected_speedup": 2.719054221373988, |
| "eval_expected_tokens": 3.3988177767174848, |
| "eval_loss": 0.9361461400985718, |
| "eval_max_generation_length": 424.7227783203125, |
| "eval_max_input_length": 630.4989624023438, |
| "eval_min_generation_length": 59.60786437988281, |
| "eval_min_input_length": 160.72378540039062, |
| "eval_pulsar_acceptance_rate": 0.5997044441793712, |
| "eval_runtime": 119.8126, |
| "eval_samples_per_second": 4.131, |
| "eval_steps_per_second": 0.134, |
| "step": 2000 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 2000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 1000, |
| "total_flos": 1.3440382893293568e+19, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |
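
The file above appears to be the trainer state checkpoint written by the Hugging Face Trainer (per-step entries under "log_history", periodic "eval_*" snapshots, and run metadata such as "max_steps" and "total_flos"). For readers who want to inspect the curve rather than scan entries by hand, here is a minimal Python sketch. It is not part of the original log: the filename "trainer_state.json" and the post-hoc inspection itself are assumptions; only the JSON keys it reads (loss, step, eval_loss, eval_acceptance_rate, eval_expected_speedup) come from the file above.

    # Minimal sketch: summarize a saved trainer_state.json like the one above.
    # Assumes the JSON has been saved to "trainer_state.json" (hypothetical path).
    import json

    with open("trainer_state.json") as f:
        state = json.load(f)

    # Per-step training entries carry a "loss" key; eval snapshots carry
    # "eval_loss" instead, so this split separates the two cleanly.
    train_logs = [e for e in state["log_history"] if "loss" in e]
    eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

    steps = [e["step"] for e in train_logs]
    losses = [e["loss"] for e in train_logs]
    print(f"{len(steps)} training steps logged; "
          f"loss {losses[0]:.4f} -> {losses[-1]:.4f}")

    # Periodic evaluation snapshots (two appear in the log above, at
    # steps 1900 and 2000).
    for e in eval_logs:
        print(f"step {e['step']}: eval_loss={e['eval_loss']:.4f}, "
              f"acceptance={e['eval_acceptance_rate']:.4f}, "
              f"expected_speedup={e['eval_expected_speedup']:.3f}")

On this run's data the summary would show the training loss falling from roughly 2.77 at step 1 to about 1.01 at step 2000, with both eval snapshots reporting an acceptance rate near 0.806 and an expected speedup near 2.72, i.e. the eval metrics are essentially flat between steps 1900 and 2000.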