| { |
| "best_metric": 1.4052919149398804, |
| "best_model_checkpoint": "./phi-sft-out/checkpoint-500", |
| "epoch": 2.971768202080238, |
| "eval_steps": 125, |
| "global_step": 2000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 0.0, |
| "loss": 1.3979, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.0102999566398115e-06, |
| "loss": 1.1182, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.771212547196624e-06, |
| "loss": 1.1258, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 6.020599913279623e-06, |
| "loss": 1.0675, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 6.989700043360187e-06, |
| "loss": 1.1274, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 7.781512503836437e-06, |
| "loss": 1.1541, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 8.450980400142568e-06, |
| "loss": 1.1882, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 9.030899869919434e-06, |
| "loss": 1.3336, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 9.542425094393249e-06, |
| "loss": 0.8424, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 9.999999999999999e-06, |
| "loss": 0.9589, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.041392685158225e-05, |
| "loss": 1.0256, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.0791812460476248e-05, |
| "loss": 1.1169, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.1139433523068365e-05, |
| "loss": 1.0595, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.1461280356782378e-05, |
| "loss": 0.9097, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.1760912590556813e-05, |
| "loss": 1.149, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.2041199826559246e-05, |
| "loss": 1.0937, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.230448921378274e-05, |
| "loss": 1.0001, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.2552725051033058e-05, |
| "loss": 1.0672, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.2787536009528288e-05, |
| "loss": 0.9809, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.301029995663981e-05, |
| "loss": 0.8517, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.3222192947339192e-05, |
| "loss": 0.945, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.3424226808222062e-05, |
| "loss": 0.9433, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.3617278360175927e-05, |
| "loss": 0.9072, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.380211241711606e-05, |
| "loss": 1.0349, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.3979400086720374e-05, |
| "loss": 1.0584, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.4149733479708178e-05, |
| "loss": 0.9911, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.4313637641589872e-05, |
| "loss": 1.1692, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.4471580313422191e-05, |
| "loss": 1.0986, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.4623979978989559e-05, |
| "loss": 1.0138, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.4771212547196623e-05, |
| "loss": 0.9463, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.4913616938342726e-05, |
| "loss": 0.794, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.5051499783199059e-05, |
| "loss": 0.8846, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.5185139398778874e-05, |
| "loss": 0.9289, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.531478917042255e-05, |
| "loss": 0.7834, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.5440680443502753e-05, |
| "loss": 0.8562, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.5563025007672873e-05, |
| "loss": 1.055, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.5682017240669948e-05, |
| "loss": 1.1534, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.57978359661681e-05, |
| "loss": 0.9737, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.591064607026499e-05, |
| "loss": 0.987, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.6020599913279622e-05, |
| "loss": 0.925, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.6127838567197353e-05, |
| "loss": 1.0392, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.6232492903979005e-05, |
| "loss": 0.8435, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.6334684555795865e-05, |
| "loss": 1.0128, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.6434526764861872e-05, |
| "loss": 0.9464, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.6532125137753435e-05, |
| "loss": 0.8877, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.662757831681574e-05, |
| "loss": 1.1152, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.672097857935717e-05, |
| "loss": 1.0136, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.681241237375587e-05, |
| "loss": 0.9272, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.6901960800285137e-05, |
| "loss": 1.038, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.6989700043360187e-05, |
| "loss": 0.9117, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.7075701760979363e-05, |
| "loss": 0.7915, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.716003343634799e-05, |
| "loss": 0.7899, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.724275869600789e-05, |
| "loss": 0.8657, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.7323937598229687e-05, |
| "loss": 0.983, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.7403626894942437e-05, |
| "loss": 0.9886, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.7481880270062003e-05, |
| "loss": 0.81, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.7558748556724913e-05, |
| "loss": 0.8119, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.763427993562937e-05, |
| "loss": 0.9285, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.7708520116421443e-05, |
| "loss": 0.8236, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.7781512503836432e-05, |
| "loss": 0.8626, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.7853298350107667e-05, |
| "loss": 0.922, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.7923916894982537e-05, |
| "loss": 0.8988, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.7993405494535815e-05, |
| "loss": 1.1067, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.806179973983887e-05, |
| "loss": 1.0843, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.8129133566428553e-05, |
| "loss": 0.7697, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.8195439355418686e-05, |
| "loss": 0.8646, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.8260748027008263e-05, |
| "loss": 0.8284, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.8325089127062364e-05, |
| "loss": 0.703, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.8388490907372553e-05, |
| "loss": 0.9478, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.8450980400142568e-05, |
| "loss": 0.7572, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.8512583487190752e-05, |
| "loss": 0.9038, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.857332496431268e-05, |
| "loss": 0.7708, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.8633228601204555e-05, |
| "loss": 0.9743, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.8692317197309763e-05, |
| "loss": 0.8045, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.8750612633916997e-05, |
| "loss": 0.7002, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.8808135922807914e-05, |
| "loss": 0.9808, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.8864907251724818e-05, |
| "loss": 0.8373, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.8920946026904802e-05, |
| "loss": 0.7266, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.8976270912904414e-05, |
| "loss": 0.9247, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9030899869919434e-05, |
| "loss": 1.0624, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9084850188786497e-05, |
| "loss": 0.8695, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9138138523837165e-05, |
| "loss": 0.7238, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.919078092376074e-05, |
| "loss": 1.0222, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9242792860618813e-05, |
| "loss": 0.8256, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.929418925714293e-05, |
| "loss": 0.8127, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9344984512435673e-05, |
| "loss": 1.1029, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9395192526186183e-05, |
| "loss": 0.8873, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9444826721501687e-05, |
| "loss": 0.7925, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9493900066449125e-05, |
| "loss": 1.0414, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9542425094393246e-05, |
| "loss": 1.0524, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9590413923210934e-05, |
| "loss": 1.0657, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9637878273455555e-05, |
| "loss": 0.8315, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.968482948553935e-05, |
| "loss": 0.9533, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9731278535996986e-05, |
| "loss": 0.9165, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9777236052888476e-05, |
| "loss": 0.8668, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9822712330395683e-05, |
| "loss": 0.9267, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.986771734266245e-05, |
| "loss": 0.9919, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.991226075692495e-05, |
| "loss": 0.9339, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9956351945975496e-05, |
| "loss": 0.644, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9999999999999998e-05, |
| "loss": 0.9199, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 2e-05, |
| "loss": 1.0334, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.998957790515894e-05, |
| "loss": 0.7945, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9979155810317878e-05, |
| "loss": 0.6566, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9968733715476812e-05, |
| "loss": 0.7923, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.995831162063575e-05, |
| "loss": 0.8068, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9947889525794685e-05, |
| "loss": 0.855, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9937467430953623e-05, |
| "loss": 0.7377, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.992704533611256e-05, |
| "loss": 0.8029, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.99166232412715e-05, |
| "loss": 1.0084, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9906201146430437e-05, |
| "loss": 0.9557, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.989577905158937e-05, |
| "loss": 0.7662, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.9885356956748306e-05, |
| "loss": 0.8791, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.9874934861907244e-05, |
| "loss": 0.9833, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.986451276706618e-05, |
| "loss": 0.9556, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.985409067222512e-05, |
| "loss": 0.741, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.9843668577384058e-05, |
| "loss": 0.776, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.9833246482542992e-05, |
| "loss": 0.855, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.982282438770193e-05, |
| "loss": 0.9365, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.9812402292860865e-05, |
| "loss": 0.753, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.9801980198019803e-05, |
| "loss": 0.97, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.979155810317874e-05, |
| "loss": 0.7882, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.978113600833768e-05, |
| "loss": 0.8523, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.9770713913496617e-05, |
| "loss": 0.6564, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.976029181865555e-05, |
| "loss": 0.7976, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.974986972381449e-05, |
| "loss": 0.8471, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.19, |
| "eval_loss": 1.4292019605636597, |
| "eval_runtime": 0.6812, |
| "eval_samples_per_second": 358.21, |
| "eval_steps_per_second": 45.51, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.9739447628973424e-05, |
| "loss": 0.8122, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.972902553413236e-05, |
| "loss": 0.994, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.97186034392913e-05, |
| "loss": 0.7831, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.9708181344450238e-05, |
| "loss": 0.858, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.9697759249609175e-05, |
| "loss": 0.8478, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.968733715476811e-05, |
| "loss": 0.7913, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.9676915059927048e-05, |
| "loss": 0.8237, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.9666492965085983e-05, |
| "loss": 0.7538, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.965607087024492e-05, |
| "loss": 0.9826, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.964564877540386e-05, |
| "loss": 0.8151, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.9635226680562797e-05, |
| "loss": 0.7537, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.962480458572173e-05, |
| "loss": 0.8646, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.961438249088067e-05, |
| "loss": 0.6634, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.9603960396039604e-05, |
| "loss": 1.0213, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.959353830119854e-05, |
| "loss": 0.8, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.958311620635748e-05, |
| "loss": 0.9006, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.9572694111516418e-05, |
| "loss": 0.8561, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.9562272016675355e-05, |
| "loss": 0.8626, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.955184992183429e-05, |
| "loss": 0.832, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.9541427826993228e-05, |
| "loss": 0.7907, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.9531005732152163e-05, |
| "loss": 0.807, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.95205836373111e-05, |
| "loss": 0.8199, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.951016154247004e-05, |
| "loss": 0.7954, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.9499739447628976e-05, |
| "loss": 0.791, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.948931735278791e-05, |
| "loss": 0.8333, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.947889525794685e-05, |
| "loss": 0.8163, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.9468473163105787e-05, |
| "loss": 0.911, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.945805106826472e-05, |
| "loss": 0.7676, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.944762897342366e-05, |
| "loss": 0.6159, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.9437206878582597e-05, |
| "loss": 0.8217, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.9426784783741535e-05, |
| "loss": 0.8446, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.941636268890047e-05, |
| "loss": 0.6126, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.9405940594059408e-05, |
| "loss": 0.668, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.9395518499218346e-05, |
| "loss": 0.8318, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.938509640437728e-05, |
| "loss": 0.6842, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.937467430953622e-05, |
| "loss": 0.9758, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.9364252214695156e-05, |
| "loss": 0.8164, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.935383011985409e-05, |
| "loss": 0.8072, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.934340802501303e-05, |
| "loss": 0.6641, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.9332985930171967e-05, |
| "loss": 0.8985, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.93225638353309e-05, |
| "loss": 0.9107, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.931214174048984e-05, |
| "loss": 0.7579, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.9301719645648777e-05, |
| "loss": 0.7827, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.9291297550807715e-05, |
| "loss": 0.8708, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.928087545596665e-05, |
| "loss": 0.8594, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.9270453361125588e-05, |
| "loss": 0.7278, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.9260031266284526e-05, |
| "loss": 0.8197, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.924960917144346e-05, |
| "loss": 0.7777, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.92391870766024e-05, |
| "loss": 0.8311, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.9228764981761336e-05, |
| "loss": 0.8333, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.921834288692027e-05, |
| "loss": 0.8569, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.920792079207921e-05, |
| "loss": 0.8113, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.9197498697238147e-05, |
| "loss": 0.9254, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9187076602397085e-05, |
| "loss": 0.8268, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.917665450755602e-05, |
| "loss": 0.7842, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9166232412714957e-05, |
| "loss": 0.8853, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9155810317873895e-05, |
| "loss": 0.9494, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.914538822303283e-05, |
| "loss": 0.6899, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9134966128191768e-05, |
| "loss": 0.7485, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.9124544033350706e-05, |
| "loss": 0.7627, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.911412193850964e-05, |
| "loss": 0.9534, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.910369984366858e-05, |
| "loss": 0.8938, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.9093277748827516e-05, |
| "loss": 0.8936, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.908285565398645e-05, |
| "loss": 0.8637, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.907243355914539e-05, |
| "loss": 0.7841, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.9062011464304327e-05, |
| "loss": 0.7361, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.9051589369463265e-05, |
| "loss": 0.9502, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.90411672746222e-05, |
| "loss": 0.7668, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.9030745179781137e-05, |
| "loss": 0.9525, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.9020323084940075e-05, |
| "loss": 0.9285, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.900990099009901e-05, |
| "loss": 0.8229, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.8999478895257948e-05, |
| "loss": 0.8772, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.8989056800416886e-05, |
| "loss": 0.8353, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.8978634705575824e-05, |
| "loss": 0.9087, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.896821261073476e-05, |
| "loss": 0.6933, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.8957790515893696e-05, |
| "loss": 0.7325, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.894736842105263e-05, |
| "loss": 0.7855, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.893694632621157e-05, |
| "loss": 0.8066, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.8926524231370507e-05, |
| "loss": 0.6879, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.8916102136529445e-05, |
| "loss": 0.6346, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.8905680041688383e-05, |
| "loss": 0.8345, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.8895257946847317e-05, |
| "loss": 0.7777, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.8884835852006255e-05, |
| "loss": 0.8028, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.887441375716519e-05, |
| "loss": 0.7616, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.8863991662324128e-05, |
| "loss": 0.8293, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.8853569567483066e-05, |
| "loss": 1.0718, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.8843147472642004e-05, |
| "loss": 0.8187, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.883272537780094e-05, |
| "loss": 0.9085, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.8822303282959876e-05, |
| "loss": 0.8187, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.881188118811881e-05, |
| "loss": 0.8541, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.880145909327775e-05, |
| "loss": 0.8495, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.8791036998436687e-05, |
| "loss": 0.7282, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.8780614903595625e-05, |
| "loss": 0.9282, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.8770192808754563e-05, |
| "loss": 0.8301, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.8759770713913497e-05, |
| "loss": 0.9449, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.8749348619072435e-05, |
| "loss": 0.8108, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.873892652423137e-05, |
| "loss": 0.9288, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.8728504429390308e-05, |
| "loss": 0.8278, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.8718082334549246e-05, |
| "loss": 0.832, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.8707660239708184e-05, |
| "loss": 0.6932, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.8697238144867122e-05, |
| "loss": 0.8464, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.8686816050026056e-05, |
| "loss": 0.818, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.8676393955184994e-05, |
| "loss": 0.6812, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.866597186034393e-05, |
| "loss": 0.7635, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.8655549765502867e-05, |
| "loss": 0.8627, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.8645127670661805e-05, |
| "loss": 0.8027, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.8634705575820743e-05, |
| "loss": 0.8261, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.862428348097968e-05, |
| "loss": 1.063, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.8613861386138615e-05, |
| "loss": 0.7366, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.860343929129755e-05, |
| "loss": 0.8244, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.8593017196456488e-05, |
| "loss": 0.7581, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.8582595101615426e-05, |
| "loss": 0.8713, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.8572173006774364e-05, |
| "loss": 0.8269, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.8561750911933302e-05, |
| "loss": 0.8373, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.8551328817092236e-05, |
| "loss": 1.0406, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.8540906722251174e-05, |
| "loss": 0.817, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.853048462741011e-05, |
| "loss": 0.7741, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.8520062532569047e-05, |
| "loss": 0.6475, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.8509640437727985e-05, |
| "loss": 0.9373, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.8499218342886923e-05, |
| "loss": 0.7653, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.848879624804586e-05, |
| "loss": 0.7827, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.8478374153204795e-05, |
| "loss": 0.721, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.8467952058363733e-05, |
| "loss": 0.9385, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.8457529963522668e-05, |
| "loss": 0.8429, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.8447107868681606e-05, |
| "loss": 0.7696, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.37, |
| "eval_loss": 1.419487714767456, |
| "eval_runtime": 0.6822, |
| "eval_samples_per_second": 357.684, |
| "eval_steps_per_second": 45.443, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.8436685773840544e-05, |
| "loss": 0.8948, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.8426263678999482e-05, |
| "loss": 0.8965, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.841584158415842e-05, |
| "loss": 0.865, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.8405419489317354e-05, |
| "loss": 0.7987, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.839499739447629e-05, |
| "loss": 0.6493, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.8384575299635227e-05, |
| "loss": 0.6366, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.8374153204794165e-05, |
| "loss": 0.7761, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.8363731109953103e-05, |
| "loss": 0.8842, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.835330901511204e-05, |
| "loss": 0.8538, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.8342886920270975e-05, |
| "loss": 0.7485, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.8332464825429913e-05, |
| "loss": 0.9639, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.8322042730588848e-05, |
| "loss": 0.8671, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.8311620635747786e-05, |
| "loss": 1.1155, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.8301198540906724e-05, |
| "loss": 0.6222, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.8290776446065662e-05, |
| "loss": 0.8193, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.82803543512246e-05, |
| "loss": 0.7023, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.8269932256383534e-05, |
| "loss": 0.7934, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.8259510161542472e-05, |
| "loss": 0.7694, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.8249088066701407e-05, |
| "loss": 0.5762, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.8238665971860345e-05, |
| "loss": 1.0749, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.8228243877019283e-05, |
| "loss": 0.7574, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.821782178217822e-05, |
| "loss": 0.8867, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.820739968733716e-05, |
| "loss": 0.8494, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.8196977592496093e-05, |
| "loss": 0.9332, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.818655549765503e-05, |
| "loss": 0.8358, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.8176133402813966e-05, |
| "loss": 0.8541, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.8165711307972904e-05, |
| "loss": 0.6343, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.8155289213131842e-05, |
| "loss": 0.8539, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.814486711829078e-05, |
| "loss": 0.8616, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.8134445023449714e-05, |
| "loss": 0.7041, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.8124022928608652e-05, |
| "loss": 0.8686, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.8113600833767587e-05, |
| "loss": 0.8846, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.8103178738926525e-05, |
| "loss": 0.6211, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.8092756644085463e-05, |
| "loss": 0.6801, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.80823345492444e-05, |
| "loss": 0.8719, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.807191245440334e-05, |
| "loss": 0.8244, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.8061490359562273e-05, |
| "loss": 0.9202, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.805106826472121e-05, |
| "loss": 0.7357, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.8040646169880146e-05, |
| "loss": 0.6915, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.8030224075039084e-05, |
| "loss": 0.6815, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.8019801980198022e-05, |
| "loss": 0.7258, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.800937988535696e-05, |
| "loss": 0.7624, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.7998957790515894e-05, |
| "loss": 0.8998, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.7988535695674832e-05, |
| "loss": 0.8004, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.797811360083377e-05, |
| "loss": 0.7638, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.7967691505992705e-05, |
| "loss": 0.9554, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.7957269411151643e-05, |
| "loss": 0.8383, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.794684731631058e-05, |
| "loss": 0.8609, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.793642522146952e-05, |
| "loss": 0.7594, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.7926003126628453e-05, |
| "loss": 0.6096, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.791558103178739e-05, |
| "loss": 0.7143, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.7905158936946326e-05, |
| "loss": 0.7679, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.7894736842105264e-05, |
| "loss": 0.8171, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.7884314747264202e-05, |
| "loss": 0.9959, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.787389265242314e-05, |
| "loss": 0.7385, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.7863470557582074e-05, |
| "loss": 0.9204, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.7853048462741012e-05, |
| "loss": 0.7915, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.784262636789995e-05, |
| "loss": 0.7563, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.7832204273058885e-05, |
| "loss": 0.9926, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.7821782178217823e-05, |
| "loss": 0.9249, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.781136008337676e-05, |
| "loss": 0.8747, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.78009379885357e-05, |
| "loss": 0.7658, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.7790515893694633e-05, |
| "loss": 0.8105, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.778009379885357e-05, |
| "loss": 0.7345, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.776967170401251e-05, |
| "loss": 0.6339, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.7759249609171444e-05, |
| "loss": 0.6961, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.774882751433038e-05, |
| "loss": 0.7232, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.773840541948932e-05, |
| "loss": 0.7195, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.7727983324648254e-05, |
| "loss": 0.813, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.7717561229807192e-05, |
| "loss": 0.7314, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.770713913496613e-05, |
| "loss": 0.8782, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.7696717040125068e-05, |
| "loss": 0.8681, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.7686294945284003e-05, |
| "loss": 0.765, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.767587285044294e-05, |
| "loss": 0.7984, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.766545075560188e-05, |
| "loss": 0.9159, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.7655028660760813e-05, |
| "loss": 0.7696, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.764460656591975e-05, |
| "loss": 0.7275, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.763418447107869e-05, |
| "loss": 0.7822, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.7623762376237624e-05, |
| "loss": 0.8369, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.761334028139656e-05, |
| "loss": 0.8974, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.76029181865555e-05, |
| "loss": 0.8036, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.7592496091714434e-05, |
| "loss": 0.9051, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.7582073996873372e-05, |
| "loss": 0.8013, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.757165190203231e-05, |
| "loss": 0.7581, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.7561229807191248e-05, |
| "loss": 0.7887, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.7550807712350183e-05, |
| "loss": 0.6694, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.754038561750912e-05, |
| "loss": 0.7176, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.752996352266806e-05, |
| "loss": 0.7746, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.7519541427826993e-05, |
| "loss": 0.8381, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.750911933298593e-05, |
| "loss": 0.8125, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.749869723814487e-05, |
| "loss": 0.8943, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.7488275143303807e-05, |
| "loss": 0.7914, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.747785304846274e-05, |
| "loss": 0.6652, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.746743095362168e-05, |
| "loss": 0.7164, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.7457008858780614e-05, |
| "loss": 0.7685, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.7446586763939552e-05, |
| "loss": 0.9363, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.743616466909849e-05, |
| "loss": 1.0479, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.7425742574257428e-05, |
| "loss": 0.8103, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.7415320479416366e-05, |
| "loss": 0.9324, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.74048983845753e-05, |
| "loss": 0.8677, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.739447628973424e-05, |
| "loss": 0.7647, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.7384054194893173e-05, |
| "loss": 0.7685, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.737363210005211e-05, |
| "loss": 0.9621, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.736321000521105e-05, |
| "loss": 0.9382, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.7352787910369987e-05, |
| "loss": 0.8555, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.734236581552892e-05, |
| "loss": 0.6613, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.733194372068786e-05, |
| "loss": 0.7441, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.7321521625846794e-05, |
| "loss": 0.8258, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.7311099531005732e-05, |
| "loss": 0.7499, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.730067743616467e-05, |
| "loss": 0.7054, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.7290255341323608e-05, |
| "loss": 0.7664, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.7279833246482546e-05, |
| "loss": 0.6954, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.726941115164148e-05, |
| "loss": 0.7161, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.725898905680042e-05, |
| "loss": 0.6779, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.7248566961959353e-05, |
| "loss": 0.7639, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.723814486711829e-05, |
| "loss": 0.8716, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.722772277227723e-05, |
| "loss": 0.9918, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7217300677436167e-05, |
| "loss": 0.6391, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7206878582595105e-05, |
| "loss": 0.7262, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.719645648775404e-05, |
| "loss": 0.7502, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7186034392912974e-05, |
| "loss": 0.8408, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7175612298071912e-05, |
| "loss": 0.8284, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.716519020323085e-05, |
| "loss": 0.7398, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7154768108389788e-05, |
| "loss": 0.7542, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7144346013548726e-05, |
| "loss": 0.8336, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.56, |
| "eval_loss": 1.4095131158828735, |
| "eval_runtime": 0.6814, |
| "eval_samples_per_second": 358.108, |
| "eval_steps_per_second": 45.497, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.713392391870766e-05, |
| "loss": 0.8013, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.71235018238666e-05, |
| "loss": 0.729, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7113079729025533e-05, |
| "loss": 0.5801, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.710265763418447e-05, |
| "loss": 0.8209, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.709223553934341e-05, |
| "loss": 0.9324, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.7081813444502347e-05, |
| "loss": 0.7516, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.7071391349661285e-05, |
| "loss": 0.8038, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.706096925482022e-05, |
| "loss": 0.8966, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.7050547159979158e-05, |
| "loss": 0.8014, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.7040125065138092e-05, |
| "loss": 0.8488, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.702970297029703e-05, |
| "loss": 0.534, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.7019280875455968e-05, |
| "loss": 0.8201, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.7008858780614906e-05, |
| "loss": 0.8467, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.6998436685773844e-05, |
| "loss": 0.5978, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.698801459093278e-05, |
| "loss": 0.6904, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.6977592496091716e-05, |
| "loss": 0.812, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.696717040125065e-05, |
| "loss": 0.7595, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.695674830640959e-05, |
| "loss": 0.7716, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.6946326211568527e-05, |
| "loss": 0.7191, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.6935904116727465e-05, |
| "loss": 0.8188, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.6925482021886403e-05, |
| "loss": 0.6315, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.6915059927045337e-05, |
| "loss": 0.8337, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.6904637832204272e-05, |
| "loss": 0.7527, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.689421573736321e-05, |
| "loss": 0.6687, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.6883793642522148e-05, |
| "loss": 0.8028, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.6873371547681086e-05, |
| "loss": 0.8755, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.6862949452840024e-05, |
| "loss": 0.8406, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.685252735799896e-05, |
| "loss": 0.6639, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.6842105263157896e-05, |
| "loss": 0.6692, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.683168316831683e-05, |
| "loss": 0.8156, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.682126107347577e-05, |
| "loss": 0.6696, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.6810838978634707e-05, |
| "loss": 0.72, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6800416883793645e-05, |
| "loss": 0.8898, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6789994788952583e-05, |
| "loss": 0.7381, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6779572694111517e-05, |
| "loss": 0.7516, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6769150599270455e-05, |
| "loss": 0.7757, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.675872850442939e-05, |
| "loss": 0.6418, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6748306409588328e-05, |
| "loss": 0.8466, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.6737884314747266e-05, |
| "loss": 0.7008, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.6727462219906204e-05, |
| "loss": 0.6855, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.6717040125065142e-05, |
| "loss": 0.754, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.6706618030224076e-05, |
| "loss": 0.7652, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.6696195935383014e-05, |
| "loss": 0.8674, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.668577384054195e-05, |
| "loss": 0.7409, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.6675351745700887e-05, |
| "loss": 0.6421, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6664929650859825e-05, |
| "loss": 0.7868, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6654507556018763e-05, |
| "loss": 0.7902, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.66440854611777e-05, |
| "loss": 0.8985, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6633663366336635e-05, |
| "loss": 0.7264, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.662324127149557e-05, |
| "loss": 0.9267, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6612819176654508e-05, |
| "loss": 0.7026, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.6602397081813446e-05, |
| "loss": 0.7923, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.6591974986972384e-05, |
| "loss": 0.8185, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.6581552892131322e-05, |
| "loss": 0.78, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.6571130797290256e-05, |
| "loss": 0.986, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.6560708702449194e-05, |
| "loss": 0.7487, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.655028660760813e-05, |
| "loss": 0.7466, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.6539864512767067e-05, |
| "loss": 0.6738, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.6529442417926005e-05, |
| "loss": 0.711, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.6519020323084943e-05, |
| "loss": 0.8149, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.650859822824388e-05, |
| "loss": 0.6593, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.6498176133402815e-05, |
| "loss": 0.7133, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.6487754038561753e-05, |
| "loss": 0.7012, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.6477331943720688e-05, |
| "loss": 0.739, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.6466909848879626e-05, |
| "loss": 0.8774, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.6456487754038564e-05, |
| "loss": 0.759, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.6446065659197502e-05, |
| "loss": 0.8325, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.6435643564356436e-05, |
| "loss": 0.9136, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.6425221469515374e-05, |
| "loss": 0.622, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.641479937467431e-05, |
| "loss": 0.7456, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.6404377279833247e-05, |
| "loss": 0.7104, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.6393955184992185e-05, |
| "loss": 0.8386, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.6383533090151123e-05, |
| "loss": 0.8093, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.637311099531006e-05, |
| "loss": 0.744, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.6362688900468995e-05, |
| "loss": 0.7537, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.6352266805627933e-05, |
| "loss": 0.7441, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.6341844710786868e-05, |
| "loss": 0.6012, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.6331422615945806e-05, |
| "loss": 0.731, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.6321000521104744e-05, |
| "loss": 0.8034, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.6310578426263682e-05, |
| "loss": 0.9273, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.6300156331422616e-05, |
| "loss": 0.7353, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.6289734236581554e-05, |
| "loss": 0.8091, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.6279312141740492e-05, |
| "loss": 0.682, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.6268890046899427e-05, |
| "loss": 0.7877, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.6258467952058365e-05, |
| "loss": 0.7639, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.6248045857217303e-05, |
| "loss": 0.852, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.623762376237624e-05, |
| "loss": 0.7957, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.6227201667535175e-05, |
| "loss": 0.7989, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.6216779572694113e-05, |
| "loss": 0.7723, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.620635747785305e-05, |
| "loss": 0.8614, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.6195935383011986e-05, |
| "loss": 0.8833, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.6185513288170924e-05, |
| "loss": 0.7677, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.6175091193329862e-05, |
| "loss": 0.9211, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.6164669098488796e-05, |
| "loss": 0.8102, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.6154247003647734e-05, |
| "loss": 0.6814, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.6143824908806672e-05, |
| "loss": 0.7578, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.6133402813965607e-05, |
| "loss": 0.8795, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.6122980719124545e-05, |
| "loss": 0.7981, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.6112558624283483e-05, |
| "loss": 0.621, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.610213652944242e-05, |
| "loss": 0.7823, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.6091714434601355e-05, |
| "loss": 0.6799, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.6081292339760293e-05, |
| "loss": 0.8436, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.607087024491923e-05, |
| "loss": 0.7218, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.6060448150078166e-05, |
| "loss": 0.9744, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.6050026055237104e-05, |
| "loss": 0.8046, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.6039603960396042e-05, |
| "loss": 0.7073, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.6029181865554976e-05, |
| "loss": 0.9574, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.6018759770713914e-05, |
| "loss": 0.7554, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.6008337675872852e-05, |
| "loss": 0.6525, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.599791558103179e-05, |
| "loss": 0.7979, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.5987493486190725e-05, |
| "loss": 0.8549, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.5977071391349663e-05, |
| "loss": 0.743, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.59666492965086e-05, |
| "loss": 0.8243, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.5956227201667535e-05, |
| "loss": 0.7138, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.5945805106826473e-05, |
| "loss": 0.8334, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.593538301198541e-05, |
| "loss": 0.8128, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.592496091714435e-05, |
| "loss": 0.7887, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.5914538822303284e-05, |
| "loss": 0.9216, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.5904116727462222e-05, |
| "loss": 0.8926, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.5893694632621156e-05, |
| "loss": 0.8752, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.5883272537780094e-05, |
| "loss": 0.9396, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.5872850442939032e-05, |
| "loss": 0.6856, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.586242834809797e-05, |
| "loss": 0.7895, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.5852006253256905e-05, |
| "loss": 0.8479, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.5841584158415843e-05, |
| "loss": 0.6227, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.74, |
| "eval_loss": 1.4052919149398804, |
| "eval_runtime": 0.6812, |
| "eval_samples_per_second": 358.176, |
| "eval_steps_per_second": 45.506, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.583116206357478e-05, |
| "loss": 0.6472, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.5820739968733715e-05, |
| "loss": 0.8447, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.5810317873892653e-05, |
| "loss": 0.951, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.579989577905159e-05, |
| "loss": 0.7986, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.578947368421053e-05, |
| "loss": 0.7053, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.5779051589369464e-05, |
| "loss": 0.7952, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.5768629494528402e-05, |
| "loss": 0.8477, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.5758207399687336e-05, |
| "loss": 0.697, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.5747785304846274e-05, |
| "loss": 0.8457, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.5737363210005212e-05, |
| "loss": 0.575, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.572694111516415e-05, |
| "loss": 0.7074, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.5716519020323088e-05, |
| "loss": 1.0142, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.5706096925482023e-05, |
| "loss": 0.7981, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.569567483064096e-05, |
| "loss": 0.6948, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.5685252735799895e-05, |
| "loss": 0.797, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.5674830640958833e-05, |
| "loss": 0.8838, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.566440854611777e-05, |
| "loss": 1.0186, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.565398645127671e-05, |
| "loss": 0.7033, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.5643564356435644e-05, |
| "loss": 0.8382, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.5633142261594582e-05, |
| "loss": 0.7796, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.5622720166753516e-05, |
| "loss": 0.8543, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.5612298071912454e-05, |
| "loss": 0.7737, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.5601875977071392e-05, |
| "loss": 0.859, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.559145388223033e-05, |
| "loss": 0.7694, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.5581031787389268e-05, |
| "loss": 0.735, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.5570609692548203e-05, |
| "loss": 0.6844, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.556018759770714e-05, |
| "loss": 0.8337, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.5549765502866075e-05, |
| "loss": 0.7713, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.5539343408025013e-05, |
| "loss": 0.8503, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.552892131318395e-05, |
| "loss": 0.7254, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.551849921834289e-05, |
| "loss": 0.8452, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.5508077123501827e-05, |
| "loss": 0.8226, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.5497655028660762e-05, |
| "loss": 0.7946, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.54872329338197e-05, |
| "loss": 0.7594, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.5476810838978634e-05, |
| "loss": 0.7711, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.5466388744137572e-05, |
| "loss": 0.6961, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.545596664929651e-05, |
| "loss": 0.668, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.5445544554455448e-05, |
| "loss": 0.8215, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.5435122459614386e-05, |
| "loss": 0.899, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.542470036477332e-05, |
| "loss": 0.6452, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.5414278269932255e-05, |
| "loss": 0.7401, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.5403856175091193e-05, |
| "loss": 0.7663, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.539343408025013e-05, |
| "loss": 0.8102, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.538301198540907e-05, |
| "loss": 0.6833, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.5372589890568007e-05, |
| "loss": 0.8215, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.5362167795726942e-05, |
| "loss": 0.8205, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.535174570088588e-05, |
| "loss": 0.7051, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.5341323606044814e-05, |
| "loss": 0.6459, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.5330901511203752e-05, |
| "loss": 0.6389, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.532047941636269e-05, |
| "loss": 0.8173, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.5310057321521628e-05, |
| "loss": 0.7108, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.5299635226680566e-05, |
| "loss": 0.78, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.52892131318395e-05, |
| "loss": 0.7017, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.527879103699844e-05, |
| "loss": 1.0129, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.5268368942157373e-05, |
| "loss": 0.7601, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.5257946847316311e-05, |
| "loss": 0.8861, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.5247524752475249e-05, |
| "loss": 0.9158, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.5237102657634187e-05, |
| "loss": 0.7781, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.5226680562793123e-05, |
| "loss": 0.7289, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.5216258467952058e-05, |
| "loss": 0.8442, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.5205836373110996e-05, |
| "loss": 0.7792, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.5195414278269934e-05, |
| "loss": 0.6995, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.518499218342887e-05, |
| "loss": 0.8379, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.5174570088587808e-05, |
| "loss": 0.6961, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.5164147993746744e-05, |
| "loss": 0.6868, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.5153725898905682e-05, |
| "loss": 0.7872, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.5143303804064617e-05, |
| "loss": 0.6468, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.5132881709223555e-05, |
| "loss": 0.6298, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.5122459614382491e-05, |
| "loss": 0.5914, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.5112037519541429e-05, |
| "loss": 0.8262, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.5101615424700367e-05, |
| "loss": 0.6425, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.5091193329859303e-05, |
| "loss": 0.7242, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.508077123501824e-05, |
| "loss": 0.8667, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.5070349140177176e-05, |
| "loss": 0.8544, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.5059927045336114e-05, |
| "loss": 0.8213, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.504950495049505e-05, |
| "loss": 0.8269, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.5039082855653988e-05, |
| "loss": 0.7712, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.5028660760812926e-05, |
| "loss": 0.8219, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.5018238665971862e-05, |
| "loss": 0.7262, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.5007816571130797e-05, |
| "loss": 0.7989, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.4997394476289735e-05, |
| "loss": 0.8232, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.4986972381448673e-05, |
| "loss": 0.705, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.4976550286607609e-05, |
| "loss": 0.7393, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.4966128191766547e-05, |
| "loss": 0.8477, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.4955706096925483e-05, |
| "loss": 0.7252, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.494528400208442e-05, |
| "loss": 0.6958, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.4934861907243356e-05, |
| "loss": 0.729, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.4924439812402294e-05, |
| "loss": 0.8359, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4914017717561232e-05, |
| "loss": 0.7241, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4903595622720168e-05, |
| "loss": 0.6694, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4893173527879106e-05, |
| "loss": 0.6559, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4882751433038042e-05, |
| "loss": 0.6371, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4872329338196979e-05, |
| "loss": 0.714, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4861907243355915e-05, |
| "loss": 0.9527, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4851485148514853e-05, |
| "loss": 0.683, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.4841063053673789e-05, |
| "loss": 0.9006, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.4830640958832727e-05, |
| "loss": 0.5827, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.4820218863991665e-05, |
| "loss": 0.8005, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.48097967691506e-05, |
| "loss": 0.8746, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.4799374674309538e-05, |
| "loss": 0.8321, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.4788952579468474e-05, |
| "loss": 0.8988, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.4778530484627412e-05, |
| "loss": 0.7194, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4768108389786348e-05, |
| "loss": 0.8779, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4757686294945286e-05, |
| "loss": 0.7417, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4747264200104224e-05, |
| "loss": 0.7501, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4736842105263159e-05, |
| "loss": 0.9036, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4726420010422095e-05, |
| "loss": 0.7942, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.4715997915581033e-05, |
| "loss": 0.811, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.470557582073997e-05, |
| "loss": 0.8806, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.4695153725898907e-05, |
| "loss": 0.6999, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.4684731631057845e-05, |
| "loss": 0.7964, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.467430953621678e-05, |
| "loss": 0.725, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.4663887441375718e-05, |
| "loss": 0.7521, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.4653465346534654e-05, |
| "loss": 0.6808, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.4643043251693592e-05, |
| "loss": 0.9636, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.463262115685253e-05, |
| "loss": 0.7499, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.4622199062011466e-05, |
| "loss": 0.7492, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.4611776967170404e-05, |
| "loss": 0.6387, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.4601354872329339e-05, |
| "loss": 0.9108, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.4590932777488277e-05, |
| "loss": 0.7543, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.4580510682647213e-05, |
| "loss": 0.6464, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.457008858780615e-05, |
| "loss": 0.8188, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.4559666492965087e-05, |
| "loss": 0.7667, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.4549244398124025e-05, |
| "loss": 0.9312, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.453882230328296e-05, |
| "loss": 0.7317, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.93, |
| "eval_loss": 1.4036322832107544, |
| "eval_runtime": 0.6828, |
| "eval_samples_per_second": 357.339, |
| "eval_steps_per_second": 45.4, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.4528400208441898e-05, |
| "loss": 0.804, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.4517978113600835e-05, |
| "loss": 0.7383, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.4507556018759772e-05, |
| "loss": 0.7157, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.449713392391871e-05, |
| "loss": 0.6865, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.4486711829077646e-05, |
| "loss": 0.8644, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.4476289734236584e-05, |
| "loss": 0.6549, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.4465867639395519e-05, |
| "loss": 0.7999, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.4455445544554456e-05, |
| "loss": 0.6767, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.4445023449713393e-05, |
| "loss": 0.8104, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.443460135487233e-05, |
| "loss": 0.4852, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.4424179260031269e-05, |
| "loss": 0.8359, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.4413757165190205e-05, |
| "loss": 0.7298, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.440333507034914e-05, |
| "loss": 0.9331, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.4392912975508078e-05, |
| "loss": 0.6205, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.4382490880667015e-05, |
| "loss": 0.7542, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.4372068785825952e-05, |
| "loss": 0.8073, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.436164669098489e-05, |
| "loss": 0.7385, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.4351224596143826e-05, |
| "loss": 0.6963, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.4340802501302764e-05, |
| "loss": 0.7731, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.4330380406461699e-05, |
| "loss": 0.9263, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.4319958311620636e-05, |
| "loss": 0.7894, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.4309536216779574e-05, |
| "loss": 0.803, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.429911412193851e-05, |
| "loss": 0.8345, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.4288692027097449e-05, |
| "loss": 0.7219, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.4278269932256385e-05, |
| "loss": 0.7639, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.4267847837415321e-05, |
| "loss": 0.7458, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.4257425742574257e-05, |
| "loss": 0.85, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.4247003647733195e-05, |
| "loss": 0.8162, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.4236581552892132e-05, |
| "loss": 0.7392, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.422615945805107e-05, |
| "loss": 0.6401, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.4215737363210008e-05, |
| "loss": 0.8797, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.4205315268368944e-05, |
| "loss": 0.7372, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.419489317352788e-05, |
| "loss": 0.8824, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.4184471078686816e-05, |
| "loss": 1.0634, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.4174048983845754e-05, |
| "loss": 0.6788, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.416362688900469e-05, |
| "loss": 0.7513, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.4153204794163629e-05, |
| "loss": 0.779, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.4142782699322567e-05, |
| "loss": 0.8078, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.4132360604481501e-05, |
| "loss": 0.8041, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.4121938509640437e-05, |
| "loss": 0.7182, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.4111516414799375e-05, |
| "loss": 0.8792, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.4101094319958313e-05, |
| "loss": 0.7393, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.409067222511725e-05, |
| "loss": 0.6594, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.4080250130276188e-05, |
| "loss": 0.76, |
| "step": 669 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.4069828035435124e-05, |
| "loss": 0.7869, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.405940594059406e-05, |
| "loss": 0.7759, |
| "step": 671 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.4048983845752996e-05, |
| "loss": 0.7721, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.4038561750911934e-05, |
| "loss": 0.8935, |
| "step": 673 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.4028139656070872e-05, |
| "loss": 0.478, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.4017717561229809e-05, |
| "loss": 0.7527, |
| "step": 675 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.4007295466388747e-05, |
| "loss": 0.5814, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.3996873371547681e-05, |
| "loss": 0.6559, |
| "step": 677 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.3986451276706619e-05, |
| "loss": 0.5482, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.3976029181865555e-05, |
| "loss": 0.5353, |
| "step": 679 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.3965607087024493e-05, |
| "loss": 0.4359, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.395518499218343e-05, |
| "loss": 0.5285, |
| "step": 681 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.3944762897342368e-05, |
| "loss": 0.6784, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.3934340802501306e-05, |
| "loss": 0.8082, |
| "step": 683 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.392391870766024e-05, |
| "loss": 0.7463, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.3913496612819178e-05, |
| "loss": 0.6934, |
| "step": 685 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.3903074517978114e-05, |
| "loss": 0.7304, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.3892652423137052e-05, |
| "loss": 0.5762, |
| "step": 687 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.3882230328295989e-05, |
| "loss": 0.6414, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.3871808233454927e-05, |
| "loss": 0.6352, |
| "step": 689 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.3861386138613861e-05, |
| "loss": 0.5834, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.3850964043772799e-05, |
| "loss": 0.7311, |
| "step": 691 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.3840541948931735e-05, |
| "loss": 0.6355, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.3830119854090673e-05, |
| "loss": 0.6404, |
| "step": 693 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.3819697759249611e-05, |
| "loss": 0.6473, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.3809275664408548e-05, |
| "loss": 0.6803, |
| "step": 695 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.3798853569567486e-05, |
| "loss": 0.6876, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.378843147472642e-05, |
| "loss": 0.5576, |
| "step": 697 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.3778009379885358e-05, |
| "loss": 0.566, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.3767587285044294e-05, |
| "loss": 0.5749, |
| "step": 699 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.3757165190203232e-05, |
| "loss": 0.6698, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.3746743095362169e-05, |
| "loss": 0.6797, |
| "step": 701 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.3736321000521107e-05, |
| "loss": 0.6312, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.3725898905680041e-05, |
| "loss": 0.7648, |
| "step": 703 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.3715476810838979e-05, |
| "loss": 0.7218, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.3705054715997917e-05, |
| "loss": 0.7196, |
| "step": 705 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.3694632621156853e-05, |
| "loss": 0.569, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.3684210526315791e-05, |
| "loss": 0.589, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.3673788431474728e-05, |
| "loss": 0.5285, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.3663366336633666e-05, |
| "loss": 0.6736, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.36529442417926e-05, |
| "loss": 0.5691, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.3642522146951538e-05, |
| "loss": 0.7969, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.3632100052110474e-05, |
| "loss": 0.6301, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.3621677957269412e-05, |
| "loss": 0.5835, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.361125586242835e-05, |
| "loss": 0.6131, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.3600833767587287e-05, |
| "loss": 0.5875, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.3590411672746223e-05, |
| "loss": 0.5238, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.3579989577905159e-05, |
| "loss": 0.6558, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.3569567483064097e-05, |
| "loss": 0.7193, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.3559145388223033e-05, |
| "loss": 0.7304, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.3548723293381971e-05, |
| "loss": 0.6825, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.353830119854091e-05, |
| "loss": 0.6913, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.3527879103699846e-05, |
| "loss": 0.7598, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.351745700885878e-05, |
| "loss": 0.7296, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.3507034914017718e-05, |
| "loss": 0.5977, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.3496612819176656e-05, |
| "loss": 0.7384, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.3486190724335592e-05, |
| "loss": 0.7384, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.347576862949453e-05, |
| "loss": 0.5237, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.3465346534653467e-05, |
| "loss": 0.7766, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.3454924439812403e-05, |
| "loss": 0.6077, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.3444502344971339e-05, |
| "loss": 0.4952, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.3434080250130277e-05, |
| "loss": 0.5211, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.3423658155289215e-05, |
| "loss": 0.601, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.3413236060448151e-05, |
| "loss": 0.7515, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.340281396560709e-05, |
| "loss": 0.5726, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.3392391870766026e-05, |
| "loss": 0.593, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.3381969775924962e-05, |
| "loss": 0.6977, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.3371547681083898e-05, |
| "loss": 0.7146, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.3361125586242836e-05, |
| "loss": 0.556, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.3350703491401772e-05, |
| "loss": 0.4768, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.334028139656071e-05, |
| "loss": 0.6493, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.3329859301719648e-05, |
| "loss": 0.7462, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.3319437206878583e-05, |
| "loss": 0.644, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.330901511203752e-05, |
| "loss": 0.6876, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.3298593017196457e-05, |
| "loss": 0.6153, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.3288170922355395e-05, |
| "loss": 0.6567, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.3277748827514331e-05, |
| "loss": 0.8993, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.326732673267327e-05, |
| "loss": 0.7152, |
| "step": 747 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.3256904637832207e-05, |
| "loss": 0.6741, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.3246482542991142e-05, |
| "loss": 0.8251, |
| "step": 749 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.3236060448150078e-05, |
| "loss": 0.681, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.11, |
| "eval_loss": 1.4148905277252197, |
| "eval_runtime": 0.6829, |
| "eval_samples_per_second": 357.29, |
| "eval_steps_per_second": 45.393, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.3225638353309016e-05, |
| "loss": 0.585, |
| "step": 751 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.3215216258467954e-05, |
| "loss": 0.7098, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.320479416362689e-05, |
| "loss": 0.6905, |
| "step": 753 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.3194372068785828e-05, |
| "loss": 0.5776, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.3183949973944763e-05, |
| "loss": 0.533, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.31735278791037e-05, |
| "loss": 0.8204, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.3163105784262637e-05, |
| "loss": 0.5992, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.3152683689421575e-05, |
| "loss": 0.6665, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.3142261594580513e-05, |
| "loss": 0.6687, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.313183949973945e-05, |
| "loss": 0.6453, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.3121417404898387e-05, |
| "loss": 0.6158, |
| "step": 761 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.3110995310057322e-05, |
| "loss": 0.6409, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.310057321521626e-05, |
| "loss": 0.8109, |
| "step": 763 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.3090151120375196e-05, |
| "loss": 0.6122, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.3079729025534134e-05, |
| "loss": 0.6431, |
| "step": 765 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.306930693069307e-05, |
| "loss": 0.7474, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.3058884835852008e-05, |
| "loss": 0.5338, |
| "step": 767 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.3048462741010943e-05, |
| "loss": 0.6808, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.303804064616988e-05, |
| "loss": 0.5391, |
| "step": 769 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.3027618551328817e-05, |
| "loss": 0.731, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.3017196456487755e-05, |
| "loss": 0.8208, |
| "step": 771 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.3006774361646693e-05, |
| "loss": 0.7597, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.299635226680563e-05, |
| "loss": 0.7891, |
| "step": 773 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.2985930171964567e-05, |
| "loss": 0.4956, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.2975508077123502e-05, |
| "loss": 0.6759, |
| "step": 775 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.296508598228244e-05, |
| "loss": 0.6952, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.2954663887441376e-05, |
| "loss": 0.8049, |
| "step": 777 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.2944241792600314e-05, |
| "loss": 0.7684, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.2933819697759252e-05, |
| "loss": 0.6378, |
| "step": 779 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.2923397602918188e-05, |
| "loss": 0.5973, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.2912975508077123e-05, |
| "loss": 0.6076, |
| "step": 781 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.290255341323606e-05, |
| "loss": 0.6369, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.2892131318394999e-05, |
| "loss": 0.5992, |
| "step": 783 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.2881709223553935e-05, |
| "loss": 0.6928, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.2871287128712873e-05, |
| "loss": 0.7085, |
| "step": 785 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.286086503387181e-05, |
| "loss": 0.6317, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.2850442939030747e-05, |
| "loss": 0.697, |
| "step": 787 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.2840020844189682e-05, |
| "loss": 0.6722, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.282959874934862e-05, |
| "loss": 0.5766, |
| "step": 789 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.2819176654507558e-05, |
| "loss": 0.6338, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.2808754559666494e-05, |
| "loss": 0.54, |
| "step": 791 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.2798332464825432e-05, |
| "loss": 0.5775, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.2787910369984368e-05, |
| "loss": 0.5557, |
| "step": 793 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.2777488275143304e-05, |
| "loss": 0.8882, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.276706618030224e-05, |
| "loss": 0.5439, |
| "step": 795 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.2756644085461179e-05, |
| "loss": 0.8192, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.2746221990620115e-05, |
| "loss": 0.7168, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.2735799895779053e-05, |
| "loss": 0.7053, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.272537780093799e-05, |
| "loss": 0.7466, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.2714955706096927e-05, |
| "loss": 0.602, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.2704533611255863e-05, |
| "loss": 0.585, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.26941115164148e-05, |
| "loss": 0.6028, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.2683689421573738e-05, |
| "loss": 0.6587, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.2673267326732674e-05, |
| "loss": 0.6094, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.2662845231891612e-05, |
| "loss": 0.7432, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.265242313705055e-05, |
| "loss": 0.681, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.2642001042209484e-05, |
| "loss": 0.7389, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.263157894736842e-05, |
| "loss": 0.6605, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.2621156852527359e-05, |
| "loss": 1.035, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.2610734757686297e-05, |
| "loss": 0.9414, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.2600312662845233e-05, |
| "loss": 0.6568, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.258989056800417e-05, |
| "loss": 0.7017, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.2579468473163107e-05, |
| "loss": 0.4725, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.2569046378322043e-05, |
| "loss": 0.5439, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.255862428348098e-05, |
| "loss": 0.7674, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.2548202188639918e-05, |
| "loss": 0.5886, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.2537780093798856e-05, |
| "loss": 0.6738, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.2527357998957792e-05, |
| "loss": 0.5979, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.251693590411673e-05, |
| "loss": 0.6732, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.2506513809275664e-05, |
| "loss": 0.7215, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.2496091714434602e-05, |
| "loss": 0.8035, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.2485669619593539e-05, |
| "loss": 0.791, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.2475247524752477e-05, |
| "loss": 0.879, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.2464825429911413e-05, |
| "loss": 0.6933, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.245440333507035e-05, |
| "loss": 0.6732, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.2443981240229289e-05, |
| "loss": 0.471, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.2433559145388223e-05, |
| "loss": 0.6209, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.242313705054716e-05, |
| "loss": 0.7711, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.2412714955706098e-05, |
| "loss": 0.5243, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.2402292860865036e-05, |
| "loss": 0.6372, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.2391870766023972e-05, |
| "loss": 0.6331, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.238144867118291e-05, |
| "loss": 0.724, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.2371026576341844e-05, |
| "loss": 0.6556, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.2360604481500782e-05, |
| "loss": 0.6917, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.2350182386659719e-05, |
| "loss": 0.6015, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.2339760291818657e-05, |
| "loss": 0.7699, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.2329338196977595e-05, |
| "loss": 0.685, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.231891610213653e-05, |
| "loss": 0.782, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.2308494007295469e-05, |
| "loss": 0.6774, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.2298071912454403e-05, |
| "loss": 0.638, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.2287649817613341e-05, |
| "loss": 0.7751, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.2277227722772278e-05, |
| "loss": 0.5947, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.2266805627931216e-05, |
| "loss": 0.6068, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.2256383533090152e-05, |
| "loss": 0.584, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.224596143824909e-05, |
| "loss": 0.617, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.2235539343408024e-05, |
| "loss": 0.7015, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.2225117248566962e-05, |
| "loss": 0.5526, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.22146951537259e-05, |
| "loss": 0.8132, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.2204273058884837e-05, |
| "loss": 0.6442, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.2193850964043774e-05, |
| "loss": 0.7009, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.218342886920271e-05, |
| "loss": 0.73, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2173006774361649e-05, |
| "loss": 0.5938, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2162584679520583e-05, |
| "loss": 0.5819, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2152162584679521e-05, |
| "loss": 0.6471, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2141740489838458e-05, |
| "loss": 0.6964, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2131318394997396e-05, |
| "loss": 0.6639, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.2120896300156333e-05, |
| "loss": 0.6479, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.211047420531527e-05, |
| "loss": 0.5854, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.2100052110474206e-05, |
| "loss": 0.6386, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.2089630015633142e-05, |
| "loss": 0.5664, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.207920792079208e-05, |
| "loss": 0.4708, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.2068785825951017e-05, |
| "loss": 0.6463, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.2058363731109954e-05, |
| "loss": 0.7414, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.2047941636268892e-05, |
| "loss": 0.8658, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.2037519541427829e-05, |
| "loss": 0.505, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.2027097446586763e-05, |
| "loss": 0.6083, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.2016675351745701e-05, |
| "loss": 0.5182, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.200625325690464e-05, |
| "loss": 0.5486, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.1995831162063575e-05, |
| "loss": 0.6522, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.1985409067222513e-05, |
| "loss": 0.9345, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.197498697238145e-05, |
| "loss": 0.6214, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.1964564877540386e-05, |
| "loss": 0.6844, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.1954142782699322e-05, |
| "loss": 0.7068, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.194372068785826e-05, |
| "loss": 0.7144, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.1933298593017198e-05, |
| "loss": 0.5733, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.3, |
| "eval_loss": 1.4106025695800781, |
| "eval_runtime": 0.6824, |
| "eval_samples_per_second": 357.542, |
| "eval_steps_per_second": 45.425, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.1922876498176134e-05, |
| "loss": 0.5646, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.1912454403335072e-05, |
| "loss": 0.6975, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.1902032308494009e-05, |
| "loss": 0.6477, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.1891610213652945e-05, |
| "loss": 0.6836, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.1881188118811881e-05, |
| "loss": 0.6404, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.187076602397082e-05, |
| "loss": 0.6566, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.1860343929129755e-05, |
| "loss": 0.6763, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.1849921834288693e-05, |
| "loss": 0.6033, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.1839499739447631e-05, |
| "loss": 0.5867, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.1829077644606566e-05, |
| "loss": 0.5879, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.1818655549765504e-05, |
| "loss": 0.5958, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.180823345492444e-05, |
| "loss": 0.7745, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.1797811360083378e-05, |
| "loss": 0.71, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.1787389265242314e-05, |
| "loss": 0.6798, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.1776967170401252e-05, |
| "loss": 0.7242, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.176654507556019e-05, |
| "loss": 0.7227, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.1756122980719125e-05, |
| "loss": 0.5997, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.1745700885878061e-05, |
| "loss": 0.8046, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.1735278791037e-05, |
| "loss": 0.7447, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.1724856696195937e-05, |
| "loss": 0.7086, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.1714434601354873e-05, |
| "loss": 0.6405, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.1704012506513811e-05, |
| "loss": 0.5599, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.1693590411672746e-05, |
| "loss": 0.7145, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.1683168316831684e-05, |
| "loss": 0.6469, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.167274622199062e-05, |
| "loss": 0.6017, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.1662324127149558e-05, |
| "loss": 0.6717, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.1651902032308494e-05, |
| "loss": 0.5498, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.1641479937467432e-05, |
| "loss": 0.6697, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.163105784262637e-05, |
| "loss": 0.6168, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.1620635747785305e-05, |
| "loss": 0.6073, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.1610213652944243e-05, |
| "loss": 0.7787, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.159979155810318e-05, |
| "loss": 0.714, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.1589369463262117e-05, |
| "loss": 0.5519, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.1578947368421053e-05, |
| "loss": 0.5521, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.1568525273579991e-05, |
| "loss": 0.6649, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.1558103178738926e-05, |
| "loss": 0.6734, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1547681083897864e-05, |
| "loss": 0.6343, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.15372589890568e-05, |
| "loss": 0.7617, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1526836894215738e-05, |
| "loss": 0.5289, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1516414799374676e-05, |
| "loss": 0.6656, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1505992704533612e-05, |
| "loss": 0.7067, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.149557060969255e-05, |
| "loss": 0.5984, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.1485148514851485e-05, |
| "loss": 0.592, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1474726420010423e-05, |
| "loss": 0.532, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1464304325169359e-05, |
| "loss": 0.5906, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1453882230328297e-05, |
| "loss": 0.6541, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1443460135487235e-05, |
| "loss": 0.7358, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1433038040646171e-05, |
| "loss": 0.805, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1422615945805106e-05, |
| "loss": 0.5806, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.1412193850964044e-05, |
| "loss": 0.7057, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1401771756122982e-05, |
| "loss": 0.7038, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1391349661281918e-05, |
| "loss": 0.5984, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1380927566440856e-05, |
| "loss": 0.6154, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1370505471599792e-05, |
| "loss": 0.5576, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.136008337675873e-05, |
| "loss": 0.6755, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1349661281917665e-05, |
| "loss": 0.737, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.1339239187076603e-05, |
| "loss": 0.5667, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.132881709223554e-05, |
| "loss": 0.6664, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1318394997394477e-05, |
| "loss": 0.7441, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1307972902553415e-05, |
| "loss": 0.6832, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1297550807712351e-05, |
| "loss": 0.6012, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1287128712871288e-05, |
| "loss": 0.641, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.1276706618030224e-05, |
| "loss": 0.6449, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1266284523189162e-05, |
| "loss": 0.5676, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1255862428348098e-05, |
| "loss": 0.7318, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1245440333507036e-05, |
| "loss": 0.7254, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1235018238665974e-05, |
| "loss": 0.5324, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.122459614382491e-05, |
| "loss": 0.7383, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1214174048983847e-05, |
| "loss": 0.6054, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.1203751954142783e-05, |
| "loss": 0.6625, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.119332985930172e-05, |
| "loss": 0.7526, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1182907764460657e-05, |
| "loss": 0.6867, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1172485669619595e-05, |
| "loss": 0.695, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1162063574778533e-05, |
| "loss": 0.6788, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1151641479937468e-05, |
| "loss": 0.7039, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1141219385096404e-05, |
| "loss": 0.5979, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.1130797290255342e-05, |
| "loss": 0.7379, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.112037519541428e-05, |
| "loss": 0.7438, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.1109953100573216e-05, |
| "loss": 0.6428, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.1099531005732154e-05, |
| "loss": 0.6789, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.108910891089109e-05, |
| "loss": 0.672, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.1078686816050027e-05, |
| "loss": 0.5176, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.1068264721208963e-05, |
| "loss": 0.7264, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.10578426263679e-05, |
| "loss": 0.5447, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.1047420531526839e-05, |
| "loss": 0.6478, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.1036998436685775e-05, |
| "loss": 0.5701, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.1026576341844713e-05, |
| "loss": 0.7855, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.1016154247003648e-05, |
| "loss": 0.6387, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.1005732152162586e-05, |
| "loss": 0.5867, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.0995310057321522e-05, |
| "loss": 0.6254, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.098488796248046e-05, |
| "loss": 0.6631, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.0974465867639396e-05, |
| "loss": 0.8164, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.0964043772798334e-05, |
| "loss": 0.7209, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.0953621677957272e-05, |
| "loss": 0.76, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.0943199583116207e-05, |
| "loss": 0.6395, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.0932777488275143e-05, |
| "loss": 0.7242, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.092235539343408e-05, |
| "loss": 0.6829, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.0911933298593019e-05, |
| "loss": 0.6493, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.0901511203751955e-05, |
| "loss": 0.6401, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.0891089108910893e-05, |
| "loss": 0.4674, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.0880667014069828e-05, |
| "loss": 0.6746, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.0870244919228766e-05, |
| "loss": 0.5146, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.0859822824387702e-05, |
| "loss": 0.6411, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.084940072954664e-05, |
| "loss": 0.5716, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0838978634705578e-05, |
| "loss": 0.5572, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0828556539864514e-05, |
| "loss": 0.5844, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0818134445023452e-05, |
| "loss": 0.5285, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0807712350182387e-05, |
| "loss": 0.5529, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.0797290255341324e-05, |
| "loss": 0.7165, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.078686816050026e-05, |
| "loss": 0.4933, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0776446065659199e-05, |
| "loss": 0.5561, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0766023970818135e-05, |
| "loss": 0.4758, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0755601875977073e-05, |
| "loss": 0.5724, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0745179781136008e-05, |
| "loss": 0.5707, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0734757686294945e-05, |
| "loss": 0.5184, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.0724335591453883e-05, |
| "loss": 0.6284, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.071391349661282e-05, |
| "loss": 0.5725, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0703491401771758e-05, |
| "loss": 0.6264, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0693069306930694e-05, |
| "loss": 0.5356, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0682647212089632e-05, |
| "loss": 0.707, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0672225117248567e-05, |
| "loss": 0.5491, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0661803022407504e-05, |
| "loss": 0.7105, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.065138092756644e-05, |
| "loss": 0.7774, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.0640958832725379e-05, |
| "loss": 0.6542, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.0630536737884317e-05, |
| "loss": 0.6497, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.49, |
| "eval_loss": 1.4143973588943481, |
| "eval_runtime": 0.6815, |
| "eval_samples_per_second": 358.039, |
| "eval_steps_per_second": 45.489, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.0620114643043253e-05, |
| "loss": 0.7721, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.060969254820219e-05, |
| "loss": 0.6448, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.0599270453361125e-05, |
| "loss": 0.6809, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.0588848358520063e-05, |
| "loss": 0.6446, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.0578426263679e-05, |
| "loss": 0.6307, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.0568004168837938e-05, |
| "loss": 0.6946, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0557582073996876e-05, |
| "loss": 0.4875, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0547159979155812e-05, |
| "loss": 0.6212, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0536737884314746e-05, |
| "loss": 0.7248, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0526315789473684e-05, |
| "loss": 0.6195, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0515893694632622e-05, |
| "loss": 0.6364, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.0505471599791559e-05, |
| "loss": 0.8186, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0495049504950497e-05, |
| "loss": 0.5885, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0484627410109433e-05, |
| "loss": 0.5516, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.047420531526837e-05, |
| "loss": 0.6075, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0463783220427305e-05, |
| "loss": 0.5196, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0453361125586243e-05, |
| "loss": 0.6308, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0442939030745181e-05, |
| "loss": 0.8091, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.0432516935904118e-05, |
| "loss": 0.5811, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0422094841063056e-05, |
| "loss": 0.6284, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0411672746221992e-05, |
| "loss": 0.5792, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0401250651380928e-05, |
| "loss": 0.6207, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0390828556539864e-05, |
| "loss": 0.6106, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0380406461698802e-05, |
| "loss": 0.6752, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0369984366857739e-05, |
| "loss": 0.7184, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.0359562272016677e-05, |
| "loss": 0.6681, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0349140177175615e-05, |
| "loss": 0.7036, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.033871808233455e-05, |
| "loss": 0.7062, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0328295987493485e-05, |
| "loss": 0.6072, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0317873892652423e-05, |
| "loss": 0.58, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0307451797811361e-05, |
| "loss": 0.6639, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0297029702970298e-05, |
| "loss": 0.6224, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.0286607608129236e-05, |
| "loss": 0.6674, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0276185513288172e-05, |
| "loss": 0.5734, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0265763418447108e-05, |
| "loss": 0.7933, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0255341323606044e-05, |
| "loss": 0.7328, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0244919228764982e-05, |
| "loss": 0.4796, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.023449713392392e-05, |
| "loss": 0.5682, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.0224075039082857e-05, |
| "loss": 0.6945, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.0213652944241795e-05, |
| "loss": 0.8304, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.0203230849400729e-05, |
| "loss": 0.5836, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.0192808754559667e-05, |
| "loss": 0.5384, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.0182386659718603e-05, |
| "loss": 0.7605, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.0171964564877541e-05, |
| "loss": 0.8921, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.0161542470036478e-05, |
| "loss": 0.5456, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.0151120375195416e-05, |
| "loss": 0.857, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.0140698280354354e-05, |
| "loss": 0.5074, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.0130276185513288e-05, |
| "loss": 0.7102, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.0119854090672226e-05, |
| "loss": 0.6623, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.0109431995831162e-05, |
| "loss": 0.7003, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.00990099009901e-05, |
| "loss": 0.6981, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.0088587806149037e-05, |
| "loss": 0.6992, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.0078165711307975e-05, |
| "loss": 0.565, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.0067743616466909e-05, |
| "loss": 0.6609, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.0057321521625847e-05, |
| "loss": 0.5319, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.0046899426784783e-05, |
| "loss": 0.5287, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.0036477331943721e-05, |
| "loss": 0.5939, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.002605523710266e-05, |
| "loss": 0.6506, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.0015633142261596e-05, |
| "loss": 0.4579, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 1.0005211047420534e-05, |
| "loss": 0.738, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.99478895257947e-06, |
| "loss": 0.6641, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.984366857738406e-06, |
| "loss": 0.5838, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.973944762897342e-06, |
| "loss": 0.6514, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.96352266805628e-06, |
| "loss": 0.7209, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.953100573215218e-06, |
| "loss": 0.6068, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.942678478374153e-06, |
| "loss": 0.6643, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.93225638353309e-06, |
| "loss": 0.6382, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.921834288692029e-06, |
| "loss": 0.6521, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.911412193850965e-06, |
| "loss": 0.5781, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.900990099009901e-06, |
| "loss": 0.4597, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.89056800416884e-06, |
| "loss": 0.6441, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.880145909327776e-06, |
| "loss": 0.8519, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 9.869723814486712e-06, |
| "loss": 0.7241, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.85930171964565e-06, |
| "loss": 0.6976, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.848879624804588e-06, |
| "loss": 0.5765, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.838457529963524e-06, |
| "loss": 0.4675, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.82803543512246e-06, |
| "loss": 0.5756, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.817613340281398e-06, |
| "loss": 0.634, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.807191245440335e-06, |
| "loss": 0.5635, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 9.79676915059927e-06, |
| "loss": 0.6979, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.786347055758209e-06, |
| "loss": 0.57, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.775924960917145e-06, |
| "loss": 0.6934, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.765502866076081e-06, |
| "loss": 0.616, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.75508077123502e-06, |
| "loss": 0.6176, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.744658676393956e-06, |
| "loss": 0.6425, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 9.734236581552893e-06, |
| "loss": 0.6245, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.72381448671183e-06, |
| "loss": 0.6386, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.713392391870768e-06, |
| "loss": 0.7235, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.702970297029704e-06, |
| "loss": 0.5517, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.69254820218864e-06, |
| "loss": 0.7184, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.682126107347578e-06, |
| "loss": 0.6694, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.671704012506515e-06, |
| "loss": 0.6786, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 9.66128191766545e-06, |
| "loss": 0.5875, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.650859822824389e-06, |
| "loss": 0.6556, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.640437727983325e-06, |
| "loss": 0.6521, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.630015633142263e-06, |
| "loss": 0.5984, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.6195935383012e-06, |
| "loss": 0.6923, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.609171443460136e-06, |
| "loss": 0.6851, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.598749348619073e-06, |
| "loss": 0.5771, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 9.58832725377801e-06, |
| "loss": 0.6362, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 9.577905158936948e-06, |
| "loss": 0.5773, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 9.567483064095884e-06, |
| "loss": 0.7179, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 9.55706096925482e-06, |
| "loss": 0.5209, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 9.546638874413758e-06, |
| "loss": 0.6773, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 9.536216779572694e-06, |
| "loss": 0.7015, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 9.525794684731632e-06, |
| "loss": 0.6894, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 9.515372589890569e-06, |
| "loss": 0.5376, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 9.504950495049505e-06, |
| "loss": 0.7093, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 9.494528400208443e-06, |
| "loss": 0.6872, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 9.48410630536738e-06, |
| "loss": 0.6233, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 9.473684210526315e-06, |
| "loss": 0.6311, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 9.463262115685253e-06, |
| "loss": 0.6976, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 9.452840020844191e-06, |
| "loss": 0.6252, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 9.442417926003128e-06, |
| "loss": 0.6584, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 9.431995831162064e-06, |
| "loss": 0.6392, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 9.421573736321002e-06, |
| "loss": 0.6101, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 9.411151641479938e-06, |
| "loss": 0.5382, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 9.400729546638874e-06, |
| "loss": 0.7471, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 9.390307451797812e-06, |
| "loss": 0.5256, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 9.379885356956749e-06, |
| "loss": 0.6382, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 9.369463262115685e-06, |
| "loss": 0.6343, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 9.359041167274623e-06, |
| "loss": 0.5383, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 9.348619072433561e-06, |
| "loss": 0.6111, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 9.338196977592497e-06, |
| "loss": 0.5914, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 9.327774882751433e-06, |
| "loss": 0.677, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.67, |
| "eval_loss": 1.411847710609436, |
| "eval_runtime": 0.6813, |
| "eval_samples_per_second": 358.161, |
| "eval_steps_per_second": 45.504, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 9.317352787910371e-06, |
| "loss": 0.7064, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 9.306930693069308e-06, |
| "loss": 0.5949, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 9.296508598228244e-06, |
| "loss": 0.6016, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 9.286086503387182e-06, |
| "loss": 0.578, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 9.275664408546118e-06, |
| "loss": 0.5404, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 9.265242313705054e-06, |
| "loss": 0.5496, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 9.254820218863992e-06, |
| "loss": 0.4981, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 9.24439812402293e-06, |
| "loss": 0.7691, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 9.233976029181867e-06, |
| "loss": 0.5824, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 9.223553934340803e-06, |
| "loss": 0.696, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 9.213131839499741e-06, |
| "loss": 0.5786, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 9.202709744658677e-06, |
| "loss": 0.7455, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 9.192287649817613e-06, |
| "loss": 0.604, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 9.181865554976551e-06, |
| "loss": 0.5661, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 9.171443460135488e-06, |
| "loss": 0.5715, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 9.161021365294424e-06, |
| "loss": 0.5487, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 9.150599270453362e-06, |
| "loss": 0.6885, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 9.1401771756123e-06, |
| "loss": 0.6279, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 9.129755080771236e-06, |
| "loss": 0.7855, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 9.119332985930172e-06, |
| "loss": 0.6336, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 9.10891089108911e-06, |
| "loss": 0.8321, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 9.098488796248047e-06, |
| "loss": 0.6312, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 9.088066701406983e-06, |
| "loss": 0.6152, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 9.077644606565921e-06, |
| "loss": 0.5374, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 9.067222511724857e-06, |
| "loss": 0.7545, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 9.056800416883793e-06, |
| "loss": 0.6596, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 9.046378322042731e-06, |
| "loss": 0.7634, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 9.03595622720167e-06, |
| "loss": 0.6529, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 9.025534132360606e-06, |
| "loss": 0.7558, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 9.015112037519542e-06, |
| "loss": 0.6638, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 9.00468994267848e-06, |
| "loss": 0.7319, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.994267847837416e-06, |
| "loss": 0.536, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.983845752996352e-06, |
| "loss": 0.6132, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.97342365815529e-06, |
| "loss": 0.4758, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.963001563314227e-06, |
| "loss": 0.7605, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.952579468473163e-06, |
| "loss": 0.5668, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.942157373632101e-06, |
| "loss": 0.5375, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.931735278791037e-06, |
| "loss": 0.5818, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.921313183949975e-06, |
| "loss": 0.6875, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.910891089108911e-06, |
| "loss": 0.7939, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.90046899426785e-06, |
| "loss": 0.6524, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.890046899426786e-06, |
| "loss": 0.6138, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 8.879624804585722e-06, |
| "loss": 0.534, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 8.86920270974466e-06, |
| "loss": 0.6889, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 8.858780614903596e-06, |
| "loss": 0.717, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 8.848358520062534e-06, |
| "loss": 0.7018, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 8.83793642522147e-06, |
| "loss": 0.64, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 8.827514330380407e-06, |
| "loss": 0.7527, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 8.817092235539345e-06, |
| "loss": 0.6939, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 8.80667014069828e-06, |
| "loss": 0.6369, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 8.796248045857217e-06, |
| "loss": 0.5309, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 8.785825951016155e-06, |
| "loss": 0.4973, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 8.775403856175091e-06, |
| "loss": 0.5326, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 8.76498176133403e-06, |
| "loss": 0.5437, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 8.754559666492966e-06, |
| "loss": 0.6647, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 8.744137571651904e-06, |
| "loss": 0.6631, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 8.73371547681084e-06, |
| "loss": 0.5625, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 8.723293381969776e-06, |
| "loss": 0.6866, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 8.712871287128714e-06, |
| "loss": 0.6413, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 8.70244919228765e-06, |
| "loss": 0.7603, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 8.692027097446587e-06, |
| "loss": 0.6702, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 8.681605002605525e-06, |
| "loss": 0.5994, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 8.67118290776446e-06, |
| "loss": 0.6898, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 8.660760812923397e-06, |
| "loss": 0.6075, |
| "step": 1189 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 8.650338718082335e-06, |
| "loss": 0.6365, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 8.639916623241273e-06, |
| "loss": 0.5342, |
| "step": 1191 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 8.62949452840021e-06, |
| "loss": 0.5531, |
| "step": 1192 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 8.619072433559146e-06, |
| "loss": 0.6621, |
| "step": 1193 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 8.608650338718084e-06, |
| "loss": 0.5947, |
| "step": 1194 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 8.59822824387702e-06, |
| "loss": 0.6552, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 8.587806149035956e-06, |
| "loss": 0.6817, |
| "step": 1196 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 8.577384054194894e-06, |
| "loss": 0.5532, |
| "step": 1197 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 8.56696195935383e-06, |
| "loss": 0.5102, |
| "step": 1198 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 8.556539864512767e-06, |
| "loss": 0.7545, |
| "step": 1199 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 8.546117769671705e-06, |
| "loss": 0.5834, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 8.535695674830642e-06, |
| "loss": 0.8146, |
| "step": 1201 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 8.525273579989579e-06, |
| "loss": 0.5369, |
| "step": 1202 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 8.514851485148515e-06, |
| "loss": 0.6098, |
| "step": 1203 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 8.504429390307453e-06, |
| "loss": 0.5301, |
| "step": 1204 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 8.49400729546639e-06, |
| "loss": 0.6017, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 8.483585200625326e-06, |
| "loss": 0.5611, |
| "step": 1206 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 8.473163105784263e-06, |
| "loss": 0.6197, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 8.462741010943201e-06, |
| "loss": 0.5759, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 8.452318916102136e-06, |
| "loss": 0.7369, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 8.441896821261074e-06, |
| "loss": 0.7782, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 8.431474726420012e-06, |
| "loss": 0.5824, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 8.421052631578948e-06, |
| "loss": 0.6864, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 8.410630536737885e-06, |
| "loss": 0.6121, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 8.400208441896822e-06, |
| "loss": 0.639, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 8.389786347055759e-06, |
| "loss": 0.505, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 8.379364252214695e-06, |
| "loss": 0.7913, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 8.368942157373633e-06, |
| "loss": 0.5854, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 8.358520062532571e-06, |
| "loss": 0.5374, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 8.348097967691507e-06, |
| "loss": 0.6356, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 8.337675872850443e-06, |
| "loss": 0.6565, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 8.327253778009381e-06, |
| "loss": 0.5773, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 8.316831683168318e-06, |
| "loss": 0.7198, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 8.306409588327254e-06, |
| "loss": 0.6541, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 8.295987493486192e-06, |
| "loss": 0.7711, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 8.285565398645128e-06, |
| "loss": 0.6218, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 8.275143303804064e-06, |
| "loss": 0.5316, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 8.264721208963002e-06, |
| "loss": 0.6984, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 8.25429911412194e-06, |
| "loss": 0.5554, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 8.243877019280877e-06, |
| "loss": 0.6413, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 8.233454924439813e-06, |
| "loss": 0.6028, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 8.223032829598751e-06, |
| "loss": 0.8506, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 8.212610734757687e-06, |
| "loss": 0.6611, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 8.202188639916623e-06, |
| "loss": 0.6906, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 8.191766545075561e-06, |
| "loss": 0.8589, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 8.181344450234498e-06, |
| "loss": 0.833, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 8.170922355393434e-06, |
| "loss": 0.7199, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 8.160500260552372e-06, |
| "loss": 0.6588, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 8.150078165711308e-06, |
| "loss": 0.7235, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 8.139656070870246e-06, |
| "loss": 0.5888, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 8.129233976029182e-06, |
| "loss": 0.6261, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 8.11881188118812e-06, |
| "loss": 0.6471, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 8.108389786347057e-06, |
| "loss": 0.5856, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 8.097967691505993e-06, |
| "loss": 0.8154, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 8.087545596664931e-06, |
| "loss": 0.6434, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 8.077123501823867e-06, |
| "loss": 0.8807, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 8.066701406982803e-06, |
| "loss": 0.749, |
| "step": 1246 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 8.056279312141741e-06, |
| "loss": 0.7665, |
| "step": 1247 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 8.045857217300678e-06, |
| "loss": 0.6843, |
| "step": 1248 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 8.035435122459616e-06, |
| "loss": 0.6921, |
| "step": 1249 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 8.025013027618552e-06, |
| "loss": 0.5725, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.86, |
| "eval_loss": 1.4075517654418945, |
| "eval_runtime": 0.6804, |
| "eval_samples_per_second": 358.593, |
| "eval_steps_per_second": 45.559, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 8.014590932777488e-06, |
| "loss": 0.5409, |
| "step": 1251 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 8.004168837936426e-06, |
| "loss": 0.5595, |
| "step": 1252 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 7.993746743095362e-06, |
| "loss": 0.5603, |
| "step": 1253 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 7.9833246482543e-06, |
| "loss": 0.6518, |
| "step": 1254 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 7.972902553413237e-06, |
| "loss": 0.6447, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 7.962480458572175e-06, |
| "loss": 0.4989, |
| "step": 1256 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 7.952058363731111e-06, |
| "loss": 0.7849, |
| "step": 1257 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 7.941636268890047e-06, |
| "loss": 0.4812, |
| "step": 1258 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 7.931214174048985e-06, |
| "loss": 0.6783, |
| "step": 1259 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 7.920792079207921e-06, |
| "loss": 0.6995, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 7.910369984366858e-06, |
| "loss": 0.698, |
| "step": 1261 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 7.899947889525796e-06, |
| "loss": 0.5814, |
| "step": 1262 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 7.889525794684732e-06, |
| "loss": 0.5582, |
| "step": 1263 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 7.879103699843668e-06, |
| "loss": 0.7499, |
| "step": 1264 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 7.868681605002606e-06, |
| "loss": 0.5714, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 7.858259510161544e-06, |
| "loss": 0.5645, |
| "step": 1266 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 7.84783741532048e-06, |
| "loss": 0.6369, |
| "step": 1267 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 7.837415320479417e-06, |
| "loss": 0.5001, |
| "step": 1268 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 7.826993225638355e-06, |
| "loss": 0.719, |
| "step": 1269 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 7.816571130797291e-06, |
| "loss": 0.6336, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 7.806149035956227e-06, |
| "loss": 0.5277, |
| "step": 1271 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 7.795726941115165e-06, |
| "loss": 0.5794, |
| "step": 1272 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 7.785304846274101e-06, |
| "loss": 0.633, |
| "step": 1273 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 7.774882751433038e-06, |
| "loss": 0.551, |
| "step": 1274 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 7.764460656591976e-06, |
| "loss": 0.6205, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 7.754038561750914e-06, |
| "loss": 0.6605, |
| "step": 1276 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 7.74361646690985e-06, |
| "loss": 0.6877, |
| "step": 1277 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 7.733194372068786e-06, |
| "loss": 0.6566, |
| "step": 1278 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 7.722772277227724e-06, |
| "loss": 0.7377, |
| "step": 1279 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 7.71235018238666e-06, |
| "loss": 0.6225, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 7.701928087545597e-06, |
| "loss": 0.7017, |
| "step": 1281 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 7.691505992704535e-06, |
| "loss": 0.7516, |
| "step": 1282 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 7.681083897863471e-06, |
| "loss": 0.5827, |
| "step": 1283 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 7.670661803022407e-06, |
| "loss": 0.6481, |
| "step": 1284 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 7.660239708181345e-06, |
| "loss": 0.6529, |
| "step": 1285 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 7.649817613340283e-06, |
| "loss": 0.6492, |
| "step": 1286 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 7.63939551849922e-06, |
| "loss": 0.6063, |
| "step": 1287 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 7.628973423658156e-06, |
| "loss": 0.576, |
| "step": 1288 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 7.6185513288170936e-06, |
| "loss": 0.8672, |
| "step": 1289 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 7.608129233976029e-06, |
| "loss": 0.6517, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 7.597707139134967e-06, |
| "loss": 0.7137, |
| "step": 1291 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 7.587285044293904e-06, |
| "loss": 0.8719, |
| "step": 1292 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 7.576862949452841e-06, |
| "loss": 0.6337, |
| "step": 1293 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 7.5664408546117774e-06, |
| "loss": 0.6376, |
| "step": 1294 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 7.5560187597707146e-06, |
| "loss": 0.7165, |
| "step": 1295 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 7.545596664929652e-06, |
| "loss": 0.5128, |
| "step": 1296 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 7.535174570088588e-06, |
| "loss": 0.7557, |
| "step": 1297 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 7.524752475247525e-06, |
| "loss": 0.6188, |
| "step": 1298 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 7.514330380406463e-06, |
| "loss": 0.6326, |
| "step": 1299 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 7.5039082855653985e-06, |
| "loss": 0.6014, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 7.493486190724336e-06, |
| "loss": 0.5037, |
| "step": 1301 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 7.4830640958832735e-06, |
| "loss": 0.6358, |
| "step": 1302 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 7.47264200104221e-06, |
| "loss": 0.5939, |
| "step": 1303 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 7.462219906201147e-06, |
| "loss": 0.5287, |
| "step": 1304 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 7.451797811360084e-06, |
| "loss": 0.8351, |
| "step": 1305 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 7.441375716519021e-06, |
| "loss": 0.7786, |
| "step": 1306 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 7.4309536216779574e-06, |
| "loss": 0.6495, |
| "step": 1307 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 7.4205315268368945e-06, |
| "loss": 0.748, |
| "step": 1308 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 7.4101094319958325e-06, |
| "loss": 0.5109, |
| "step": 1309 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 7.399687337154769e-06, |
| "loss": 0.6619, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 7.389265242313706e-06, |
| "loss": 0.6808, |
| "step": 1311 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 7.378843147472643e-06, |
| "loss": 0.7248, |
| "step": 1312 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 7.368421052631579e-06, |
| "loss": 0.6203, |
| "step": 1313 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 7.357998957790516e-06, |
| "loss": 0.6776, |
| "step": 1314 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 7.3475768629494535e-06, |
| "loss": 0.7585, |
| "step": 1315 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 7.33715476810839e-06, |
| "loss": 0.5965, |
| "step": 1316 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 7.326732673267327e-06, |
| "loss": 0.5494, |
| "step": 1317 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 7.316310578426265e-06, |
| "loss": 0.5453, |
| "step": 1318 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 7.305888483585202e-06, |
| "loss": 0.5375, |
| "step": 1319 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 7.295466388744138e-06, |
| "loss": 0.7838, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 7.285044293903075e-06, |
| "loss": 0.6702, |
| "step": 1321 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 7.2746221990620125e-06, |
| "loss": 0.6292, |
| "step": 1322 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 7.264200104220949e-06, |
| "loss": 0.6293, |
| "step": 1323 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 7.253778009379886e-06, |
| "loss": 0.5935, |
| "step": 1324 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 7.243355914538823e-06, |
| "loss": 0.6296, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 7.232933819697759e-06, |
| "loss": 0.8563, |
| "step": 1326 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 7.222511724856696e-06, |
| "loss": 0.7091, |
| "step": 1327 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 7.212089630015634e-06, |
| "loss": 0.5249, |
| "step": 1328 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 7.20166753517457e-06, |
| "loss": 0.6949, |
| "step": 1329 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 7.191245440333508e-06, |
| "loss": 0.7877, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 7.180823345492445e-06, |
| "loss": 0.7735, |
| "step": 1331 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 7.170401250651382e-06, |
| "loss": 0.5916, |
| "step": 1332 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 7.159979155810318e-06, |
| "loss": 0.7739, |
| "step": 1333 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 7.149557060969255e-06, |
| "loss": 0.7005, |
| "step": 1334 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 7.1391349661281925e-06, |
| "loss": 0.5461, |
| "step": 1335 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 7.128712871287129e-06, |
| "loss": 0.6582, |
| "step": 1336 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 7.118290776446066e-06, |
| "loss": 0.5393, |
| "step": 1337 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 7.107868681605004e-06, |
| "loss": 0.6856, |
| "step": 1338 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 7.09744658676394e-06, |
| "loss": 0.632, |
| "step": 1339 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 7.087024491922877e-06, |
| "loss": 0.6302, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 7.076602397081814e-06, |
| "loss": 0.5432, |
| "step": 1341 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 7.066180302240751e-06, |
| "loss": 0.6521, |
| "step": 1342 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 7.055758207399688e-06, |
| "loss": 0.7714, |
| "step": 1343 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 7.045336112558625e-06, |
| "loss": 0.7093, |
| "step": 1344 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 7.034914017717562e-06, |
| "loss": 0.4782, |
| "step": 1345 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 7.024491922876498e-06, |
| "loss": 0.5833, |
| "step": 1346 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 7.014069828035436e-06, |
| "loss": 0.5684, |
| "step": 1347 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 7.003647733194373e-06, |
| "loss": 0.6659, |
| "step": 1348 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 6.9932256383533096e-06, |
| "loss": 0.5705, |
| "step": 1349 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 6.982803543512247e-06, |
| "loss": 0.4822, |
| "step": 1350 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 6.972381448671184e-06, |
| "loss": 0.7815, |
| "step": 1351 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 6.96195935383012e-06, |
| "loss": 0.6152, |
| "step": 1352 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 6.951537258989057e-06, |
| "loss": 0.4539, |
| "step": 1353 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 6.941115164147994e-06, |
| "loss": 0.6625, |
| "step": 1354 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 6.930693069306931e-06, |
| "loss": 0.6365, |
| "step": 1355 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 6.920270974465868e-06, |
| "loss": 0.5446, |
| "step": 1356 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 6.909848879624806e-06, |
| "loss": 0.4474, |
| "step": 1357 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 6.899426784783743e-06, |
| "loss": 0.5761, |
| "step": 1358 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 6.889004689942679e-06, |
| "loss": 0.5729, |
| "step": 1359 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 6.878582595101616e-06, |
| "loss": 0.5691, |
| "step": 1360 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 6.868160500260553e-06, |
| "loss": 0.7458, |
| "step": 1361 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 6.8577384054194896e-06, |
| "loss": 0.508, |
| "step": 1362 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 6.847316310578427e-06, |
| "loss": 0.6225, |
| "step": 1363 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 6.836894215737364e-06, |
| "loss": 0.8399, |
| "step": 1364 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 6.8264721208963e-06, |
| "loss": 0.7263, |
| "step": 1365 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 6.816050026055237e-06, |
| "loss": 0.5129, |
| "step": 1366 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 6.805627931214175e-06, |
| "loss": 0.4625, |
| "step": 1367 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 6.795205836373111e-06, |
| "loss": 0.5439, |
| "step": 1368 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 6.7847837415320485e-06, |
| "loss": 0.4709, |
| "step": 1369 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 6.774361646690986e-06, |
| "loss": 0.5698, |
| "step": 1370 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 6.763939551849923e-06, |
| "loss": 0.7403, |
| "step": 1371 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 6.753517457008859e-06, |
| "loss": 0.6427, |
| "step": 1372 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 6.743095362167796e-06, |
| "loss": 0.4637, |
| "step": 1373 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 6.732673267326733e-06, |
| "loss": 0.6643, |
| "step": 1374 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 6.7222511724856695e-06, |
| "loss": 0.5957, |
| "step": 1375 |
| }, |
| { |
| "epoch": 2.04, |
| "eval_loss": 1.4207512140274048, |
| "eval_runtime": 0.6823, |
| "eval_samples_per_second": 357.593, |
| "eval_steps_per_second": 45.432, |
| "step": 1375 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 6.7118290776446075e-06, |
| "loss": 0.7083, |
| "step": 1376 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 6.701406982803545e-06, |
| "loss": 0.646, |
| "step": 1377 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 6.690984887962481e-06, |
| "loss": 0.5159, |
| "step": 1378 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 6.680562793121418e-06, |
| "loss": 0.5711, |
| "step": 1379 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 6.670140698280355e-06, |
| "loss": 0.4267, |
| "step": 1380 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 6.659718603439291e-06, |
| "loss": 0.6104, |
| "step": 1381 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 6.6492965085982285e-06, |
| "loss": 0.4375, |
| "step": 1382 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 6.638874413757166e-06, |
| "loss": 0.4105, |
| "step": 1383 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 6.628452318916104e-06, |
| "loss": 0.5392, |
| "step": 1384 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 6.618030224075039e-06, |
| "loss": 0.5736, |
| "step": 1385 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 6.607608129233977e-06, |
| "loss": 0.5493, |
| "step": 1386 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 6.597186034392914e-06, |
| "loss": 0.5316, |
| "step": 1387 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 6.58676393955185e-06, |
| "loss": 0.692, |
| "step": 1388 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 6.5763418447107875e-06, |
| "loss": 0.4327, |
| "step": 1389 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 6.565919749869725e-06, |
| "loss": 0.5193, |
| "step": 1390 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 6.555497655028661e-06, |
| "loss": 0.5096, |
| "step": 1391 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 6.545075560187598e-06, |
| "loss": 0.4802, |
| "step": 1392 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 6.534653465346535e-06, |
| "loss": 0.5468, |
| "step": 1393 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 6.524231370505471e-06, |
| "loss": 0.4853, |
| "step": 1394 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 6.5138092756644085e-06, |
| "loss": 0.55, |
| "step": 1395 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 6.5033871808233465e-06, |
| "loss": 0.5078, |
| "step": 1396 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 6.4929650859822836e-06, |
| "loss": 0.4978, |
| "step": 1397 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 6.48254299114122e-06, |
| "loss": 0.5947, |
| "step": 1398 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 6.472120896300157e-06, |
| "loss": 0.6789, |
| "step": 1399 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 6.461698801459094e-06, |
| "loss": 0.5463, |
| "step": 1400 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 6.45127670661803e-06, |
| "loss": 0.5934, |
| "step": 1401 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 6.4408546117769675e-06, |
| "loss": 0.471, |
| "step": 1402 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 6.430432516935905e-06, |
| "loss": 0.5396, |
| "step": 1403 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 6.420010422094841e-06, |
| "loss": 0.623, |
| "step": 1404 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 6.409588327253779e-06, |
| "loss": 0.6724, |
| "step": 1405 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 6.399166232412716e-06, |
| "loss": 0.5963, |
| "step": 1406 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 6.388744137571652e-06, |
| "loss": 0.5499, |
| "step": 1407 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 6.378322042730589e-06, |
| "loss": 0.6323, |
| "step": 1408 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 6.3678999478895264e-06, |
| "loss": 0.499, |
| "step": 1409 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 6.3574778530484636e-06, |
| "loss": 0.4566, |
| "step": 1410 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 6.3470557582074e-06, |
| "loss": 0.5331, |
| "step": 1411 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 6.336633663366337e-06, |
| "loss": 0.7105, |
| "step": 1412 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 6.326211568525275e-06, |
| "loss": 0.5394, |
| "step": 1413 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 6.31578947368421e-06, |
| "loss": 0.5557, |
| "step": 1414 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 6.305367378843148e-06, |
| "loss": 0.5491, |
| "step": 1415 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 6.294945284002085e-06, |
| "loss": 0.5869, |
| "step": 1416 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 6.284523189161022e-06, |
| "loss": 0.6956, |
| "step": 1417 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 6.274101094319959e-06, |
| "loss": 0.5558, |
| "step": 1418 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 6.263678999478896e-06, |
| "loss": 0.6025, |
| "step": 1419 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 6.253256904637832e-06, |
| "loss": 0.6036, |
| "step": 1420 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 6.242834809796769e-06, |
| "loss": 0.3936, |
| "step": 1421 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 6.232412714955706e-06, |
| "loss": 0.5376, |
| "step": 1422 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 6.221990620114644e-06, |
| "loss": 0.5732, |
| "step": 1423 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 6.21156852527358e-06, |
| "loss": 0.577, |
| "step": 1424 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 6.201146430432518e-06, |
| "loss": 0.616, |
| "step": 1425 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 6.190724335591455e-06, |
| "loss": 0.5335, |
| "step": 1426 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 6.180302240750391e-06, |
| "loss": 0.4471, |
| "step": 1427 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 6.169880145909328e-06, |
| "loss": 0.6473, |
| "step": 1428 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 6.159458051068265e-06, |
| "loss": 0.6074, |
| "step": 1429 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 6.149035956227202e-06, |
| "loss": 0.5544, |
| "step": 1430 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 6.138613861386139e-06, |
| "loss": 0.3948, |
| "step": 1431 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 6.128191766545076e-06, |
| "loss": 0.589, |
| "step": 1432 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 6.117769671704012e-06, |
| "loss": 0.4751, |
| "step": 1433 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 6.10734757686295e-06, |
| "loss": 0.577, |
| "step": 1434 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 6.096925482021887e-06, |
| "loss": 0.5376, |
| "step": 1435 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 6.086503387180824e-06, |
| "loss": 0.5722, |
| "step": 1436 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 6.076081292339761e-06, |
| "loss": 0.4824, |
| "step": 1437 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 6.065659197498698e-06, |
| "loss": 0.4119, |
| "step": 1438 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 6.055237102657635e-06, |
| "loss": 0.444, |
| "step": 1439 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 6.044815007816571e-06, |
| "loss": 0.6221, |
| "step": 1440 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 6.034392912975508e-06, |
| "loss": 0.5805, |
| "step": 1441 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 6.023970818134446e-06, |
| "loss": 0.6408, |
| "step": 1442 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 6.013548723293382e-06, |
| "loss": 0.5007, |
| "step": 1443 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 6.00312662845232e-06, |
| "loss": 0.5371, |
| "step": 1444 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 5.992704533611257e-06, |
| "loss": 0.5794, |
| "step": 1445 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 5.982282438770193e-06, |
| "loss": 0.5111, |
| "step": 1446 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 5.97186034392913e-06, |
| "loss": 0.5254, |
| "step": 1447 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 5.961438249088067e-06, |
| "loss": 0.5311, |
| "step": 1448 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 5.951016154247004e-06, |
| "loss": 0.5346, |
| "step": 1449 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 5.940594059405941e-06, |
| "loss": 0.7025, |
| "step": 1450 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 5.930171964564878e-06, |
| "loss": 0.6876, |
| "step": 1451 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 5.919749869723816e-06, |
| "loss": 0.4683, |
| "step": 1452 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 5.909327774882752e-06, |
| "loss": 0.635, |
| "step": 1453 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 5.898905680041689e-06, |
| "loss": 0.553, |
| "step": 1454 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 5.888483585200626e-06, |
| "loss": 0.5567, |
| "step": 1455 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 5.8780614903595625e-06, |
| "loss": 0.615, |
| "step": 1456 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 5.8676393955185e-06, |
| "loss": 0.4091, |
| "step": 1457 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 5.857217300677437e-06, |
| "loss": 0.5923, |
| "step": 1458 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 5.846795205836373e-06, |
| "loss": 0.5891, |
| "step": 1459 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 5.83637311099531e-06, |
| "loss": 0.3863, |
| "step": 1460 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 5.825951016154247e-06, |
| "loss": 0.5389, |
| "step": 1461 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 5.815528921313185e-06, |
| "loss": 0.6676, |
| "step": 1462 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 5.8051068264721214e-06, |
| "loss": 0.4971, |
| "step": 1463 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 5.7946847316310586e-06, |
| "loss": 0.6272, |
| "step": 1464 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 5.784262636789996e-06, |
| "loss": 0.6953, |
| "step": 1465 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 5.773840541948932e-06, |
| "loss": 0.4932, |
| "step": 1466 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 5.763418447107869e-06, |
| "loss": 0.5199, |
| "step": 1467 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 5.752996352266806e-06, |
| "loss": 0.6085, |
| "step": 1468 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 5.7425742574257425e-06, |
| "loss": 0.5431, |
| "step": 1469 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 5.7321521625846796e-06, |
| "loss": 0.5015, |
| "step": 1470 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 5.7217300677436175e-06, |
| "loss": 0.5943, |
| "step": 1471 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 5.711307972902553e-06, |
| "loss": 0.532, |
| "step": 1472 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 5.700885878061491e-06, |
| "loss": 0.6758, |
| "step": 1473 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 5.690463783220428e-06, |
| "loss": 0.5742, |
| "step": 1474 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 5.680041688379365e-06, |
| "loss": 0.6717, |
| "step": 1475 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 5.6696195935383014e-06, |
| "loss": 0.5265, |
| "step": 1476 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 5.6591974986972385e-06, |
| "loss": 0.8185, |
| "step": 1477 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 5.648775403856176e-06, |
| "loss": 0.5426, |
| "step": 1478 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 5.638353309015112e-06, |
| "loss": 0.5532, |
| "step": 1479 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 5.627931214174049e-06, |
| "loss": 0.6544, |
| "step": 1480 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 5.617509119332987e-06, |
| "loss": 0.7564, |
| "step": 1481 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 5.607087024491923e-06, |
| "loss": 0.5349, |
| "step": 1482 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 5.59666492965086e-06, |
| "loss": 0.6994, |
| "step": 1483 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 5.5862428348097975e-06, |
| "loss": 0.5019, |
| "step": 1484 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 5.575820739968734e-06, |
| "loss": 0.5182, |
| "step": 1485 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 5.565398645127671e-06, |
| "loss": 0.5585, |
| "step": 1486 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 5.554976550286608e-06, |
| "loss": 0.8533, |
| "step": 1487 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 5.544554455445545e-06, |
| "loss": 0.4058, |
| "step": 1488 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 5.534132360604481e-06, |
| "loss": 0.5624, |
| "step": 1489 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 5.523710265763419e-06, |
| "loss": 0.4231, |
| "step": 1490 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 5.5132881709223565e-06, |
| "loss": 0.5389, |
| "step": 1491 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 5.502866076081293e-06, |
| "loss": 0.5637, |
| "step": 1492 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 5.49244398124023e-06, |
| "loss": 0.552, |
| "step": 1493 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 5.482021886399167e-06, |
| "loss": 0.557, |
| "step": 1494 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 5.471599791558103e-06, |
| "loss": 0.5708, |
| "step": 1495 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 5.46117769671704e-06, |
| "loss": 0.4975, |
| "step": 1496 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 5.4507556018759775e-06, |
| "loss": 0.4509, |
| "step": 1497 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 5.440333507034914e-06, |
| "loss": 0.8597, |
| "step": 1498 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 5.429911412193851e-06, |
| "loss": 0.4297, |
| "step": 1499 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 5.419489317352789e-06, |
| "loss": 0.5614, |
| "step": 1500 |
| }, |
| { |
| "epoch": 2.23, |
| "eval_loss": 1.4297773838043213, |
| "eval_runtime": 0.6803, |
| "eval_samples_per_second": 358.648, |
| "eval_steps_per_second": 45.566, |
| "step": 1500 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 5.409067222511726e-06, |
| "loss": 0.5314, |
| "step": 1501 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 5.398645127670662e-06, |
| "loss": 0.5238, |
| "step": 1502 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 5.388223032829599e-06, |
| "loss": 0.5741, |
| "step": 1503 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 5.3778009379885365e-06, |
| "loss": 0.6223, |
| "step": 1504 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 5.367378843147473e-06, |
| "loss": 0.5331, |
| "step": 1505 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 5.35695674830641e-06, |
| "loss": 0.5776, |
| "step": 1506 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 5.346534653465347e-06, |
| "loss": 0.6081, |
| "step": 1507 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 5.336112558624283e-06, |
| "loss": 0.6222, |
| "step": 1508 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 5.32569046378322e-06, |
| "loss": 0.5129, |
| "step": 1509 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 5.315268368942158e-06, |
| "loss": 0.6309, |
| "step": 1510 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 5.304846274101095e-06, |
| "loss": 0.7283, |
| "step": 1511 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 5.294424179260032e-06, |
| "loss": 0.4695, |
| "step": 1512 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 5.284002084418969e-06, |
| "loss": 0.5256, |
| "step": 1513 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 5.273579989577906e-06, |
| "loss": 0.6412, |
| "step": 1514 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 5.263157894736842e-06, |
| "loss": 0.5908, |
| "step": 1515 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 5.252735799895779e-06, |
| "loss": 0.6038, |
| "step": 1516 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 5.2423137050547165e-06, |
| "loss": 0.5952, |
| "step": 1517 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 5.231891610213653e-06, |
| "loss": 0.564, |
| "step": 1518 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 5.221469515372591e-06, |
| "loss": 0.6033, |
| "step": 1519 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 5.211047420531528e-06, |
| "loss": 0.5444, |
| "step": 1520 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 5.200625325690464e-06, |
| "loss": 0.6867, |
| "step": 1521 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 5.190203230849401e-06, |
| "loss": 0.5776, |
| "step": 1522 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 5.179781136008338e-06, |
| "loss": 0.3986, |
| "step": 1523 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 5.169359041167275e-06, |
| "loss": 0.6939, |
| "step": 1524 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 5.158936946326212e-06, |
| "loss": 0.6302, |
| "step": 1525 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 5.148514851485149e-06, |
| "loss": 0.5259, |
| "step": 1526 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 5.138092756644086e-06, |
| "loss": 0.6822, |
| "step": 1527 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 5.127670661803022e-06, |
| "loss": 0.5968, |
| "step": 1528 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 5.11724856696196e-06, |
| "loss": 0.5363, |
| "step": 1529 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 5.106826472120897e-06, |
| "loss": 0.643, |
| "step": 1530 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 5.0964043772798336e-06, |
| "loss": 0.5424, |
| "step": 1531 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 5.085982282438771e-06, |
| "loss": 0.4609, |
| "step": 1532 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 5.075560187597708e-06, |
| "loss": 0.4353, |
| "step": 1533 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 5.065138092756644e-06, |
| "loss": 0.5022, |
| "step": 1534 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 5.054715997915581e-06, |
| "loss": 0.5313, |
| "step": 1535 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 5.044293903074518e-06, |
| "loss": 0.742, |
| "step": 1536 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 5.0338718082334546e-06, |
| "loss": 0.6679, |
| "step": 1537 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 5.023449713392392e-06, |
| "loss": 0.6202, |
| "step": 1538 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 5.01302761855133e-06, |
| "loss": 0.6376, |
| "step": 1539 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 5.002605523710267e-06, |
| "loss": 0.6088, |
| "step": 1540 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 4.992183428869203e-06, |
| "loss": 0.6197, |
| "step": 1541 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 4.98176133402814e-06, |
| "loss": 0.4952, |
| "step": 1542 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 4.971339239187076e-06, |
| "loss": 0.5653, |
| "step": 1543 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 4.960917144346014e-06, |
| "loss": 0.6625, |
| "step": 1544 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 4.950495049504951e-06, |
| "loss": 0.5563, |
| "step": 1545 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 4.940072954663888e-06, |
| "loss": 0.4598, |
| "step": 1546 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 4.929650859822825e-06, |
| "loss": 0.4758, |
| "step": 1547 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 4.919228764981762e-06, |
| "loss": 0.5755, |
| "step": 1548 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 4.908806670140699e-06, |
| "loss": 0.5821, |
| "step": 1549 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 4.898384575299635e-06, |
| "loss": 0.5302, |
| "step": 1550 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 4.8879624804585725e-06, |
| "loss": 0.409, |
| "step": 1551 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 4.87754038561751e-06, |
| "loss": 0.5535, |
| "step": 1552 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 4.867118290776447e-06, |
| "loss": 0.5381, |
| "step": 1553 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 4.856696195935384e-06, |
| "loss": 0.4693, |
| "step": 1554 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 4.84627410109432e-06, |
| "loss": 0.5233, |
| "step": 1555 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 4.835852006253257e-06, |
| "loss": 0.4853, |
| "step": 1556 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 4.825429911412194e-06, |
| "loss": 0.6049, |
| "step": 1557 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 4.8150078165711315e-06, |
| "loss": 0.5033, |
| "step": 1558 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 4.804585721730068e-06, |
| "loss": 0.4888, |
| "step": 1559 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 4.794163626889005e-06, |
| "loss": 0.5139, |
| "step": 1560 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 4.783741532047942e-06, |
| "loss": 0.5208, |
| "step": 1561 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 4.773319437206879e-06, |
| "loss": 0.489, |
| "step": 1562 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 4.762897342365816e-06, |
| "loss": 0.6092, |
| "step": 1563 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 4.7524752475247525e-06, |
| "loss": 0.5177, |
| "step": 1564 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 4.74205315268369e-06, |
| "loss": 0.5636, |
| "step": 1565 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 4.731631057842627e-06, |
| "loss": 0.4413, |
| "step": 1566 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 4.721208963001564e-06, |
| "loss": 0.5452, |
| "step": 1567 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 4.710786868160501e-06, |
| "loss": 0.4996, |
| "step": 1568 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 4.700364773319437e-06, |
| "loss": 0.5318, |
| "step": 1569 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 4.689942678478374e-06, |
| "loss": 0.62, |
| "step": 1570 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 4.6795205836373115e-06, |
| "loss": 0.5734, |
| "step": 1571 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 4.669098488796249e-06, |
| "loss": 0.5812, |
| "step": 1572 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 4.658676393955186e-06, |
| "loss": 0.4816, |
| "step": 1573 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 4.648254299114122e-06, |
| "loss": 0.6509, |
| "step": 1574 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 4.637832204273059e-06, |
| "loss": 0.468, |
| "step": 1575 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 4.627410109431996e-06, |
| "loss": 0.5422, |
| "step": 1576 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 4.616988014590933e-06, |
| "loss": 0.618, |
| "step": 1577 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 4.6065659197498704e-06, |
| "loss": 0.4425, |
| "step": 1578 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 4.596143824908807e-06, |
| "loss": 0.6268, |
| "step": 1579 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 4.585721730067744e-06, |
| "loss": 0.4941, |
| "step": 1580 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 4.575299635226681e-06, |
| "loss": 0.6144, |
| "step": 1581 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 4.564877540385618e-06, |
| "loss": 0.533, |
| "step": 1582 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 4.554455445544555e-06, |
| "loss": 0.5668, |
| "step": 1583 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 4.5440333507034915e-06, |
| "loss": 0.6712, |
| "step": 1584 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 4.5336112558624286e-06, |
| "loss": 0.5805, |
| "step": 1585 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 4.523189161021366e-06, |
| "loss": 0.4658, |
| "step": 1586 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 4.512767066180303e-06, |
| "loss": 0.606, |
| "step": 1587 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 4.50234497133924e-06, |
| "loss": 0.6212, |
| "step": 1588 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 4.491922876498176e-06, |
| "loss": 0.4007, |
| "step": 1589 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 4.481500781657113e-06, |
| "loss": 0.5201, |
| "step": 1590 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 4.4710786868160504e-06, |
| "loss": 0.4539, |
| "step": 1591 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 4.4606565919749875e-06, |
| "loss": 0.5943, |
| "step": 1592 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 4.450234497133925e-06, |
| "loss": 0.5188, |
| "step": 1593 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 4.439812402292861e-06, |
| "loss": 0.6455, |
| "step": 1594 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 4.429390307451798e-06, |
| "loss": 0.4858, |
| "step": 1595 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 4.418968212610735e-06, |
| "loss": 0.613, |
| "step": 1596 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 4.408546117769672e-06, |
| "loss": 0.4338, |
| "step": 1597 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 4.3981240229286085e-06, |
| "loss": 0.5126, |
| "step": 1598 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 4.387701928087546e-06, |
| "loss": 0.6405, |
| "step": 1599 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 4.377279833246483e-06, |
| "loss": 0.5013, |
| "step": 1600 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 4.36685773840542e-06, |
| "loss": 0.6717, |
| "step": 1601 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 4.356435643564357e-06, |
| "loss": 0.5712, |
| "step": 1602 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 4.346013548723293e-06, |
| "loss": 0.7613, |
| "step": 1603 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 4.33559145388223e-06, |
| "loss": 0.588, |
| "step": 1604 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 4.3251693590411675e-06, |
| "loss": 0.6183, |
| "step": 1605 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 4.314747264200105e-06, |
| "loss": 0.4745, |
| "step": 1606 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 4.304325169359042e-06, |
| "loss": 0.4361, |
| "step": 1607 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 4.293903074517978e-06, |
| "loss": 0.4841, |
| "step": 1608 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 4.283480979676915e-06, |
| "loss": 0.5249, |
| "step": 1609 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 4.273058884835852e-06, |
| "loss": 0.7436, |
| "step": 1610 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 4.262636789994789e-06, |
| "loss": 0.5277, |
| "step": 1611 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 4.2522146951537265e-06, |
| "loss": 0.3535, |
| "step": 1612 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 4.241792600312663e-06, |
| "loss": 0.655, |
| "step": 1613 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 4.231370505471601e-06, |
| "loss": 0.5238, |
| "step": 1614 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 4.220948410630537e-06, |
| "loss": 0.5948, |
| "step": 1615 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 4.210526315789474e-06, |
| "loss": 0.6959, |
| "step": 1616 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 4.200104220948411e-06, |
| "loss": 0.5273, |
| "step": 1617 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 4.1896821261073475e-06, |
| "loss": 0.6697, |
| "step": 1618 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 4.1792600312662855e-06, |
| "loss": 0.503, |
| "step": 1619 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 4.168837936425222e-06, |
| "loss": 0.5863, |
| "step": 1620 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 4.158415841584159e-06, |
| "loss": 0.4961, |
| "step": 1621 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 4.147993746743096e-06, |
| "loss": 0.5255, |
| "step": 1622 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 4.137571651902032e-06, |
| "loss": 0.4837, |
| "step": 1623 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 4.12714955706097e-06, |
| "loss": 0.5621, |
| "step": 1624 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 4.1167274622199065e-06, |
| "loss": 0.5075, |
| "step": 1625 |
| }, |
| { |
| "epoch": 2.41, |
| "eval_loss": 1.4323381185531616, |
| "eval_runtime": 0.6837, |
| "eval_samples_per_second": 356.863, |
| "eval_steps_per_second": 45.339, |
| "step": 1625 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 4.106305367378844e-06, |
| "loss": 0.5151, |
| "step": 1626 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 4.095883272537781e-06, |
| "loss": 0.5405, |
| "step": 1627 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 4.085461177696717e-06, |
| "loss": 0.5789, |
| "step": 1628 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 4.075039082855654e-06, |
| "loss": 0.475, |
| "step": 1629 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 4.064616988014591e-06, |
| "loss": 0.4902, |
| "step": 1630 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 4.054194893173528e-06, |
| "loss": 0.4199, |
| "step": 1631 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 4.0437727983324655e-06, |
| "loss": 0.7661, |
| "step": 1632 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 4.033350703491402e-06, |
| "loss": 0.5535, |
| "step": 1633 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 4.022928608650339e-06, |
| "loss": 0.4622, |
| "step": 1634 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 4.012506513809276e-06, |
| "loss": 0.5973, |
| "step": 1635 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 4.002084418968213e-06, |
| "loss": 0.6755, |
| "step": 1636 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 3.99166232412715e-06, |
| "loss": 0.6185, |
| "step": 1637 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 3.981240229286087e-06, |
| "loss": 0.6135, |
| "step": 1638 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 3.970818134445024e-06, |
| "loss": 0.5178, |
| "step": 1639 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 3.960396039603961e-06, |
| "loss": 0.6091, |
| "step": 1640 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 3.949973944762898e-06, |
| "loss": 0.7047, |
| "step": 1641 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 3.939551849921834e-06, |
| "loss": 0.5921, |
| "step": 1642 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 3.929129755080772e-06, |
| "loss": 0.6276, |
| "step": 1643 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 3.918707660239708e-06, |
| "loss": 0.6498, |
| "step": 1644 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 3.9082855653986454e-06, |
| "loss": 0.3791, |
| "step": 1645 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 3.8978634705575826e-06, |
| "loss": 0.4615, |
| "step": 1646 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 3.887441375716519e-06, |
| "loss": 0.7022, |
| "step": 1647 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 3.877019280875457e-06, |
| "loss": 0.5022, |
| "step": 1648 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 3.866597186034393e-06, |
| "loss": 0.5307, |
| "step": 1649 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 3.85617509119333e-06, |
| "loss": 0.4806, |
| "step": 1650 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 3.845752996352267e-06, |
| "loss": 0.4631, |
| "step": 1651 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 3.8353309015112036e-06, |
| "loss": 0.6401, |
| "step": 1652 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 3.8249088066701415e-06, |
| "loss": 0.5456, |
| "step": 1653 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 3.814486711829078e-06, |
| "loss": 0.6043, |
| "step": 1654 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 3.8040646169880145e-06, |
| "loss": 0.581, |
| "step": 1655 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 3.793642522146952e-06, |
| "loss": 0.4814, |
| "step": 1656 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 3.7832204273058887e-06, |
| "loss": 0.5056, |
| "step": 1657 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 3.772798332464826e-06, |
| "loss": 0.6425, |
| "step": 1658 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 3.7623762376237625e-06, |
| "loss": 0.4627, |
| "step": 1659 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 3.7519541427826992e-06, |
| "loss": 0.4298, |
| "step": 1660 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 3.7415320479416368e-06, |
| "loss": 0.4714, |
| "step": 1661 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 3.7311099531005735e-06, |
| "loss": 0.6986, |
| "step": 1662 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 3.7206878582595106e-06, |
| "loss": 0.568, |
| "step": 1663 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 3.7102657634184473e-06, |
| "loss": 0.6797, |
| "step": 1664 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 3.6998436685773844e-06, |
| "loss": 0.5133, |
| "step": 1665 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 3.6894215737363215e-06, |
| "loss": 0.5704, |
| "step": 1666 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 3.678999478895258e-06, |
| "loss": 0.6097, |
| "step": 1667 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 3.668577384054195e-06, |
| "loss": 0.5462, |
| "step": 1668 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 3.6581552892131324e-06, |
| "loss": 0.5924, |
| "step": 1669 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 3.647733194372069e-06, |
| "loss": 0.5578, |
| "step": 1670 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 3.6373110995310062e-06, |
| "loss": 0.6586, |
| "step": 1671 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 3.626889004689943e-06, |
| "loss": 0.5688, |
| "step": 1672 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 3.6164669098488796e-06, |
| "loss": 0.4522, |
| "step": 1673 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 3.606044815007817e-06, |
| "loss": 0.6976, |
| "step": 1674 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 3.595622720166754e-06, |
| "loss": 0.3733, |
| "step": 1675 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 3.585200625325691e-06, |
| "loss": 0.5843, |
| "step": 1676 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 3.5747785304846277e-06, |
| "loss": 0.5925, |
| "step": 1677 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 3.5643564356435644e-06, |
| "loss": 0.4837, |
| "step": 1678 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 3.553934340802502e-06, |
| "loss": 0.4926, |
| "step": 1679 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 3.5435122459614386e-06, |
| "loss": 0.4393, |
| "step": 1680 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 3.5330901511203753e-06, |
| "loss": 0.6351, |
| "step": 1681 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 3.5226680562793124e-06, |
| "loss": 0.6526, |
| "step": 1682 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 3.512245961438249e-06, |
| "loss": 0.7369, |
| "step": 1683 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 3.5018238665971867e-06, |
| "loss": 0.5773, |
| "step": 1684 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 3.4914017717561233e-06, |
| "loss": 0.7258, |
| "step": 1685 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 3.48097967691506e-06, |
| "loss": 0.6264, |
| "step": 1686 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 3.470557582073997e-06, |
| "loss": 0.627, |
| "step": 1687 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 3.460135487232934e-06, |
| "loss": 0.4603, |
| "step": 1688 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 3.4497133923918714e-06, |
| "loss": 0.571, |
| "step": 1689 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 3.439291297550808e-06, |
| "loss": 0.5288, |
| "step": 1690 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 3.4288692027097448e-06, |
| "loss": 0.5435, |
| "step": 1691 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 3.418447107868682e-06, |
| "loss": 0.6104, |
| "step": 1692 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 3.4080250130276186e-06, |
| "loss": 0.5522, |
| "step": 1693 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 3.3976029181865557e-06, |
| "loss": 0.5264, |
| "step": 1694 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 3.387180823345493e-06, |
| "loss": 0.7212, |
| "step": 1695 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 3.3767587285044295e-06, |
| "loss": 0.5408, |
| "step": 1696 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 3.3663366336633666e-06, |
| "loss": 0.5482, |
| "step": 1697 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 3.3559145388223038e-06, |
| "loss": 0.4867, |
| "step": 1698 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 3.3454924439812404e-06, |
| "loss": 0.6006, |
| "step": 1699 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 3.3350703491401776e-06, |
| "loss": 0.6798, |
| "step": 1700 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 3.3246482542991143e-06, |
| "loss": 0.4708, |
| "step": 1701 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 3.314226159458052e-06, |
| "loss": 0.759, |
| "step": 1702 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 3.3038040646169885e-06, |
| "loss": 0.436, |
| "step": 1703 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 3.293381969775925e-06, |
| "loss": 0.6385, |
| "step": 1704 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 3.2829598749348623e-06, |
| "loss": 0.6309, |
| "step": 1705 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 3.272537780093799e-06, |
| "loss": 0.6622, |
| "step": 1706 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 3.2621156852527357e-06, |
| "loss": 0.5741, |
| "step": 1707 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 3.2516935904116732e-06, |
| "loss": 0.528, |
| "step": 1708 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 3.24127149557061e-06, |
| "loss": 0.5451, |
| "step": 1709 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 3.230849400729547e-06, |
| "loss": 0.4386, |
| "step": 1710 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 3.2204273058884837e-06, |
| "loss": 0.4772, |
| "step": 1711 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 3.2100052110474204e-06, |
| "loss": 0.5936, |
| "step": 1712 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 3.199583116206358e-06, |
| "loss": 0.5729, |
| "step": 1713 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 3.1891610213652947e-06, |
| "loss": 0.5988, |
| "step": 1714 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 3.1787389265242318e-06, |
| "loss": 0.5455, |
| "step": 1715 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 3.1683168316831685e-06, |
| "loss": 0.6271, |
| "step": 1716 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 3.157894736842105e-06, |
| "loss": 0.5517, |
| "step": 1717 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 3.1474726420010427e-06, |
| "loss": 0.5897, |
| "step": 1718 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 3.1370505471599794e-06, |
| "loss": 0.6368, |
| "step": 1719 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 3.126628452318916e-06, |
| "loss": 0.4669, |
| "step": 1720 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 3.116206357477853e-06, |
| "loss": 0.6623, |
| "step": 1721 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 3.10578426263679e-06, |
| "loss": 0.5739, |
| "step": 1722 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 3.0953621677957274e-06, |
| "loss": 0.5866, |
| "step": 1723 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 3.084940072954664e-06, |
| "loss": 0.4443, |
| "step": 1724 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 3.074517978113601e-06, |
| "loss": 0.8309, |
| "step": 1725 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 3.064095883272538e-06, |
| "loss": 0.5985, |
| "step": 1726 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 3.053673788431475e-06, |
| "loss": 0.5278, |
| "step": 1727 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 3.043251693590412e-06, |
| "loss": 0.5418, |
| "step": 1728 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 3.032829598749349e-06, |
| "loss": 0.5745, |
| "step": 1729 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 3.0224075039082856e-06, |
| "loss": 0.6004, |
| "step": 1730 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 3.011985409067223e-06, |
| "loss": 0.6654, |
| "step": 1731 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 3.00156331422616e-06, |
| "loss": 0.6163, |
| "step": 1732 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 2.9911412193850965e-06, |
| "loss": 0.5957, |
| "step": 1733 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 2.9807191245440336e-06, |
| "loss": 0.5443, |
| "step": 1734 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 2.9702970297029703e-06, |
| "loss": 0.4866, |
| "step": 1735 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 2.959874934861908e-06, |
| "loss": 0.5523, |
| "step": 1736 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 2.9494528400208445e-06, |
| "loss": 0.4601, |
| "step": 1737 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 2.9390307451797812e-06, |
| "loss": 0.4797, |
| "step": 1738 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 2.9286086503387184e-06, |
| "loss": 0.5734, |
| "step": 1739 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 2.918186555497655e-06, |
| "loss": 0.5403, |
| "step": 1740 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 2.9077644606565926e-06, |
| "loss": 0.5428, |
| "step": 1741 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 2.8973423658155293e-06, |
| "loss": 0.6137, |
| "step": 1742 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 2.886920270974466e-06, |
| "loss": 0.5775, |
| "step": 1743 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 2.876498176133403e-06, |
| "loss": 0.6051, |
| "step": 1744 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 2.8660760812923398e-06, |
| "loss": 0.6475, |
| "step": 1745 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 2.8556539864512765e-06, |
| "loss": 0.4947, |
| "step": 1746 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 2.845231891610214e-06, |
| "loss": 0.5435, |
| "step": 1747 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 2.8348097967691507e-06, |
| "loss": 0.6496, |
| "step": 1748 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 2.824387701928088e-06, |
| "loss": 0.6912, |
| "step": 1749 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 2.8139656070870245e-06, |
| "loss": 0.5153, |
| "step": 1750 |
| }, |
| { |
| "epoch": 2.6, |
| "eval_loss": 1.4344613552093506, |
| "eval_runtime": 0.6841, |
| "eval_samples_per_second": 356.664, |
| "eval_steps_per_second": 45.314, |
| "step": 1750 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 2.8035435122459616e-06, |
| "loss": 0.5702, |
| "step": 1751 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 2.7931214174048988e-06, |
| "loss": 0.5376, |
| "step": 1752 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 2.7826993225638355e-06, |
| "loss": 0.6134, |
| "step": 1753 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 2.7722772277227726e-06, |
| "loss": 0.5489, |
| "step": 1754 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 2.7618551328817097e-06, |
| "loss": 0.5557, |
| "step": 1755 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 2.7514330380406464e-06, |
| "loss": 0.7084, |
| "step": 1756 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 2.7410109431995835e-06, |
| "loss": 0.4808, |
| "step": 1757 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 2.73058884835852e-06, |
| "loss": 0.4808, |
| "step": 1758 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 2.720166753517457e-06, |
| "loss": 0.5798, |
| "step": 1759 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 2.7097446586763944e-06, |
| "loss": 0.6533, |
| "step": 1760 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 2.699322563835331e-06, |
| "loss": 0.6561, |
| "step": 1761 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 2.6889004689942682e-06, |
| "loss": 0.5985, |
| "step": 1762 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 2.678478374153205e-06, |
| "loss": 0.6139, |
| "step": 1763 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 2.6680562793121416e-06, |
| "loss": 0.5598, |
| "step": 1764 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 2.657634184471079e-06, |
| "loss": 0.5569, |
| "step": 1765 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 2.647212089630016e-06, |
| "loss": 0.4172, |
| "step": 1766 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 2.636789994788953e-06, |
| "loss": 0.5894, |
| "step": 1767 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 2.6263678999478897e-06, |
| "loss": 0.5695, |
| "step": 1768 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 2.6159458051068264e-06, |
| "loss": 0.5858, |
| "step": 1769 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 2.605523710265764e-06, |
| "loss": 0.6265, |
| "step": 1770 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 2.5951016154247006e-06, |
| "loss": 0.4789, |
| "step": 1771 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 2.5846795205836373e-06, |
| "loss": 0.4668, |
| "step": 1772 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 2.5742574257425744e-06, |
| "loss": 0.4899, |
| "step": 1773 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 2.563835330901511e-06, |
| "loss": 0.636, |
| "step": 1774 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 2.5534132360604486e-06, |
| "loss": 0.7126, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 2.5429911412193853e-06, |
| "loss": 0.5875, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 2.532569046378322e-06, |
| "loss": 0.4471, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 2.522146951537259e-06, |
| "loss": 0.6958, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 2.511724856696196e-06, |
| "loss": 0.6529, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 2.5013027618551334e-06, |
| "loss": 0.5361, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 2.49088066701407e-06, |
| "loss": 0.4742, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 2.480458572173007e-06, |
| "loss": 0.5766, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 2.470036477331944e-06, |
| "loss": 0.529, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 2.459614382490881e-06, |
| "loss": 0.5732, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 2.4491922876498177e-06, |
| "loss": 0.7092, |
| "step": 1785 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 2.438770192808755e-06, |
| "loss": 0.5066, |
| "step": 1786 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 2.428348097967692e-06, |
| "loss": 0.6981, |
| "step": 1787 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 2.4179260031266286e-06, |
| "loss": 0.5282, |
| "step": 1788 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 2.4075039082855657e-06, |
| "loss": 0.5561, |
| "step": 1789 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 2.3970818134445024e-06, |
| "loss": 0.4699, |
| "step": 1790 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 2.3866597186034396e-06, |
| "loss": 0.5713, |
| "step": 1791 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 2.3762376237623762e-06, |
| "loss": 0.5086, |
| "step": 1792 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 2.3658155289213134e-06, |
| "loss": 0.8841, |
| "step": 1793 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 2.3553934340802505e-06, |
| "loss": 0.6792, |
| "step": 1794 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 2.344971339239187e-06, |
| "loss": 0.6206, |
| "step": 1795 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 2.3345492443981243e-06, |
| "loss": 0.7345, |
| "step": 1796 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 2.324127149557061e-06, |
| "loss": 0.5052, |
| "step": 1797 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 2.313705054715998e-06, |
| "loss": 0.5066, |
| "step": 1798 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 2.3032829598749352e-06, |
| "loss": 0.5445, |
| "step": 1799 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 2.292860865033872e-06, |
| "loss": 0.4033, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 2.282438770192809e-06, |
| "loss": 0.5, |
| "step": 1801 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 2.2720166753517457e-06, |
| "loss": 0.5898, |
| "step": 1802 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 2.261594580510683e-06, |
| "loss": 0.7819, |
| "step": 1803 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 2.25117248566962e-06, |
| "loss": 0.5184, |
| "step": 1804 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 2.2407503908285567e-06, |
| "loss": 0.4091, |
| "step": 1805 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 2.2303282959874938e-06, |
| "loss": 0.5811, |
| "step": 1806 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 2.2199062011464305e-06, |
| "loss": 0.5912, |
| "step": 1807 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 2.2094841063053676e-06, |
| "loss": 0.6045, |
| "step": 1808 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 2.1990620114643043e-06, |
| "loss": 0.5767, |
| "step": 1809 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 2.1886399166232414e-06, |
| "loss": 0.4764, |
| "step": 1810 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 2.1782178217821785e-06, |
| "loss": 0.4454, |
| "step": 1811 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 2.167795726941115e-06, |
| "loss": 0.5807, |
| "step": 1812 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 2.1573736321000523e-06, |
| "loss": 0.5435, |
| "step": 1813 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 2.146951537258989e-06, |
| "loss": 0.4746, |
| "step": 1814 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 2.136529442417926e-06, |
| "loss": 0.5002, |
| "step": 1815 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 2.1261073475768632e-06, |
| "loss": 0.5045, |
| "step": 1816 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 2.1156852527358004e-06, |
| "loss": 0.4686, |
| "step": 1817 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 2.105263157894737e-06, |
| "loss": 0.5116, |
| "step": 1818 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 2.0948410630536738e-06, |
| "loss": 0.5624, |
| "step": 1819 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 2.084418968212611e-06, |
| "loss": 0.5412, |
| "step": 1820 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 2.073996873371548e-06, |
| "loss": 0.5513, |
| "step": 1821 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 2.063574778530485e-06, |
| "loss": 0.7242, |
| "step": 1822 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 2.053152683689422e-06, |
| "loss": 0.5097, |
| "step": 1823 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 2.0427305888483585e-06, |
| "loss": 0.6423, |
| "step": 1824 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 2.0323084940072956e-06, |
| "loss": 0.7055, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 2.0218863991662327e-06, |
| "loss": 0.4196, |
| "step": 1826 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 2.0114643043251694e-06, |
| "loss": 0.5106, |
| "step": 1827 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 2.0010422094841065e-06, |
| "loss": 0.6064, |
| "step": 1828 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 1.9906201146430437e-06, |
| "loss": 0.511, |
| "step": 1829 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 1.9801980198019803e-06, |
| "loss": 0.52, |
| "step": 1830 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 1.969775924960917e-06, |
| "loss": 0.575, |
| "step": 1831 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 1.959353830119854e-06, |
| "loss": 0.5916, |
| "step": 1832 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 1.9489317352787913e-06, |
| "loss": 0.5505, |
| "step": 1833 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 1.9385096404377284e-06, |
| "loss": 0.8142, |
| "step": 1834 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 1.928087545596665e-06, |
| "loss": 0.5447, |
| "step": 1835 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 1.9176654507556018e-06, |
| "loss": 0.4267, |
| "step": 1836 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 1.907243355914539e-06, |
| "loss": 0.681, |
| "step": 1837 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 1.896821261073476e-06, |
| "loss": 0.5375, |
| "step": 1838 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 1.886399166232413e-06, |
| "loss": 0.5282, |
| "step": 1839 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 1.8759770713913496e-06, |
| "loss": 0.7037, |
| "step": 1840 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 1.8655549765502867e-06, |
| "loss": 0.621, |
| "step": 1841 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 1.8551328817092236e-06, |
| "loss": 0.5827, |
| "step": 1842 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 1.8447107868681608e-06, |
| "loss": 0.4925, |
| "step": 1843 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 1.8342886920270974e-06, |
| "loss": 0.5638, |
| "step": 1844 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 1.8238665971860346e-06, |
| "loss": 0.6055, |
| "step": 1845 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 1.8134445023449715e-06, |
| "loss": 0.5512, |
| "step": 1846 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 1.8030224075039086e-06, |
| "loss": 0.6097, |
| "step": 1847 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 1.7926003126628455e-06, |
| "loss": 0.5256, |
| "step": 1848 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 1.7821782178217822e-06, |
| "loss": 0.5355, |
| "step": 1849 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 1.7717561229807193e-06, |
| "loss": 0.5782, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 1.7613340281396562e-06, |
| "loss": 0.5097, |
| "step": 1851 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 1.7509119332985933e-06, |
| "loss": 0.4361, |
| "step": 1852 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 1.74048983845753e-06, |
| "loss": 0.6183, |
| "step": 1853 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 1.730067743616467e-06, |
| "loss": 0.4774, |
| "step": 1854 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 1.719645648775404e-06, |
| "loss": 0.5586, |
| "step": 1855 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 1.709223553934341e-06, |
| "loss": 0.5146, |
| "step": 1856 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 1.6988014590932779e-06, |
| "loss": 0.5886, |
| "step": 1857 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 1.6883793642522148e-06, |
| "loss": 0.5711, |
| "step": 1858 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 1.6779572694111519e-06, |
| "loss": 0.5242, |
| "step": 1859 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 1.6675351745700888e-06, |
| "loss": 0.571, |
| "step": 1860 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 1.657113079729026e-06, |
| "loss": 0.4609, |
| "step": 1861 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 1.6466909848879626e-06, |
| "loss": 0.5, |
| "step": 1862 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 1.6362688900468995e-06, |
| "loss": 0.6818, |
| "step": 1863 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 1.6258467952058366e-06, |
| "loss": 0.4666, |
| "step": 1864 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 1.6154247003647735e-06, |
| "loss": 0.5301, |
| "step": 1865 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 1.6050026055237102e-06, |
| "loss": 0.4877, |
| "step": 1866 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 1.5945805106826473e-06, |
| "loss": 0.4592, |
| "step": 1867 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 1.5841584158415842e-06, |
| "loss": 0.6341, |
| "step": 1868 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 1.5737363210005214e-06, |
| "loss": 0.429, |
| "step": 1869 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 1.563314226159458e-06, |
| "loss": 0.6416, |
| "step": 1870 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 1.552892131318395e-06, |
| "loss": 0.5914, |
| "step": 1871 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 1.542470036477332e-06, |
| "loss": 0.5367, |
| "step": 1872 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 1.532047941636269e-06, |
| "loss": 0.6263, |
| "step": 1873 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 1.521625846795206e-06, |
| "loss": 0.6064, |
| "step": 1874 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 1.5112037519541428e-06, |
| "loss": 0.6656, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.79, |
| "eval_loss": 1.4329994916915894, |
| "eval_runtime": 0.6835, |
| "eval_samples_per_second": 356.997, |
| "eval_steps_per_second": 45.356, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 1.50078165711308e-06, |
| "loss": 0.5228, |
| "step": 1876 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 1.4903595622720168e-06, |
| "loss": 0.607, |
| "step": 1877 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 1.479937467430954e-06, |
| "loss": 0.6114, |
| "step": 1878 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 1.4695153725898906e-06, |
| "loss": 0.5437, |
| "step": 1879 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 1.4590932777488275e-06, |
| "loss": 0.5683, |
| "step": 1880 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 1.4486711829077646e-06, |
| "loss": 0.5363, |
| "step": 1881 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 1.4382490880667015e-06, |
| "loss": 0.5422, |
| "step": 1882 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 1.4278269932256382e-06, |
| "loss": 0.6043, |
| "step": 1883 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 1.4174048983845754e-06, |
| "loss": 0.4592, |
| "step": 1884 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 1.4069828035435123e-06, |
| "loss": 0.694, |
| "step": 1885 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 1.3965607087024494e-06, |
| "loss": 0.4755, |
| "step": 1886 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 1.3861386138613863e-06, |
| "loss": 0.6365, |
| "step": 1887 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.3757165190203232e-06, |
| "loss": 0.4961, |
| "step": 1888 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.36529442417926e-06, |
| "loss": 0.593, |
| "step": 1889 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.3548723293381972e-06, |
| "loss": 0.5515, |
| "step": 1890 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.3444502344971341e-06, |
| "loss": 0.6097, |
| "step": 1891 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.3340281396560708e-06, |
| "loss": 0.5223, |
| "step": 1892 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.323606044815008e-06, |
| "loss": 0.5084, |
| "step": 1893 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.3131839499739448e-06, |
| "loss": 0.4278, |
| "step": 1894 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.302761855132882e-06, |
| "loss": 0.5073, |
| "step": 1895 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.2923397602918186e-06, |
| "loss": 0.5202, |
| "step": 1896 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.2819176654507556e-06, |
| "loss": 0.6624, |
| "step": 1897 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.2714955706096927e-06, |
| "loss": 0.5346, |
| "step": 1898 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.2610734757686296e-06, |
| "loss": 0.8463, |
| "step": 1899 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.2506513809275667e-06, |
| "loss": 0.5435, |
| "step": 1900 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.2402292860865036e-06, |
| "loss": 0.6065, |
| "step": 1901 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.2298071912454405e-06, |
| "loss": 0.5813, |
| "step": 1902 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.2193850964043774e-06, |
| "loss": 0.451, |
| "step": 1903 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.2089630015633143e-06, |
| "loss": 0.6216, |
| "step": 1904 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.1985409067222512e-06, |
| "loss": 0.7343, |
| "step": 1905 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.1881188118811881e-06, |
| "loss": 0.5359, |
| "step": 1906 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.1776967170401252e-06, |
| "loss": 0.7486, |
| "step": 1907 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.1672746221990621e-06, |
| "loss": 0.4705, |
| "step": 1908 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.156852527357999e-06, |
| "loss": 0.5463, |
| "step": 1909 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.146430432516936e-06, |
| "loss": 0.4879, |
| "step": 1910 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.1360083376758729e-06, |
| "loss": 0.5123, |
| "step": 1911 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.12558624283481e-06, |
| "loss": 0.5135, |
| "step": 1912 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.1151641479937469e-06, |
| "loss": 0.4328, |
| "step": 1913 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.1047420531526838e-06, |
| "loss": 0.5401, |
| "step": 1914 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.0943199583116207e-06, |
| "loss": 0.5942, |
| "step": 1915 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.0838978634705576e-06, |
| "loss": 0.4718, |
| "step": 1916 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.0734757686294945e-06, |
| "loss": 0.578, |
| "step": 1917 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.0630536737884316e-06, |
| "loss": 0.6242, |
| "step": 1918 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.0526315789473685e-06, |
| "loss": 0.6435, |
| "step": 1919 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.0422094841063054e-06, |
| "loss": 0.5521, |
| "step": 1920 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.0317873892652426e-06, |
| "loss": 0.5445, |
| "step": 1921 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.0213652944241792e-06, |
| "loss": 0.4296, |
| "step": 1922 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.0109431995831164e-06, |
| "loss": 0.5042, |
| "step": 1923 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.0005211047420533e-06, |
| "loss": 0.7633, |
| "step": 1924 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 9.900990099009902e-07, |
| "loss": 0.6108, |
| "step": 1925 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 9.79676915059927e-07, |
| "loss": 0.646, |
| "step": 1926 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 9.692548202188642e-07, |
| "loss": 0.5247, |
| "step": 1927 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 9.588327253778009e-07, |
| "loss": 0.533, |
| "step": 1928 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.48410630536738e-07, |
| "loss": 0.5962, |
| "step": 1929 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.379885356956748e-07, |
| "loss": 0.7111, |
| "step": 1930 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.275664408546118e-07, |
| "loss": 0.5513, |
| "step": 1931 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.171443460135487e-07, |
| "loss": 0.426, |
| "step": 1932 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.067222511724857e-07, |
| "loss": 0.6282, |
| "step": 1933 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.963001563314227e-07, |
| "loss": 0.5791, |
| "step": 1934 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.858780614903597e-07, |
| "loss": 0.5285, |
| "step": 1935 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.754559666492967e-07, |
| "loss": 0.5627, |
| "step": 1936 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.650338718082335e-07, |
| "loss": 0.7554, |
| "step": 1937 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.546117769671705e-07, |
| "loss": 0.5481, |
| "step": 1938 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.441896821261074e-07, |
| "loss": 0.6376, |
| "step": 1939 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.337675872850444e-07, |
| "loss": 0.4177, |
| "step": 1940 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.233454924439813e-07, |
| "loss": 0.5453, |
| "step": 1941 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 8.129233976029183e-07, |
| "loss": 0.5542, |
| "step": 1942 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 8.025013027618551e-07, |
| "loss": 0.4512, |
| "step": 1943 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.920792079207921e-07, |
| "loss": 0.5229, |
| "step": 1944 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.81657113079729e-07, |
| "loss": 0.6864, |
| "step": 1945 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.71235018238666e-07, |
| "loss": 0.5412, |
| "step": 1946 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.60812923397603e-07, |
| "loss": 0.5141, |
| "step": 1947 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 7.5039082855654e-07, |
| "loss": 0.5687, |
| "step": 1948 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.39968733715477e-07, |
| "loss": 0.6329, |
| "step": 1949 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.295466388744138e-07, |
| "loss": 0.5664, |
| "step": 1950 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.191245440333508e-07, |
| "loss": 0.5928, |
| "step": 1951 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 7.087024491922877e-07, |
| "loss": 0.6811, |
| "step": 1952 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 6.982803543512247e-07, |
| "loss": 0.5385, |
| "step": 1953 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 6.878582595101616e-07, |
| "loss": 0.5689, |
| "step": 1954 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 6.774361646690986e-07, |
| "loss": 0.5895, |
| "step": 1955 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 6.670140698280354e-07, |
| "loss": 0.6174, |
| "step": 1956 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 6.565919749869724e-07, |
| "loss": 0.6659, |
| "step": 1957 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 6.461698801459093e-07, |
| "loss": 0.4894, |
| "step": 1958 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 6.357477853048463e-07, |
| "loss": 0.4542, |
| "step": 1959 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 6.253256904637833e-07, |
| "loss": 0.5413, |
| "step": 1960 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 6.149035956227203e-07, |
| "loss": 0.6968, |
| "step": 1961 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 6.044815007816572e-07, |
| "loss": 0.5405, |
| "step": 1962 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 5.940594059405941e-07, |
| "loss": 0.7684, |
| "step": 1963 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 5.836373110995311e-07, |
| "loss": 0.4515, |
| "step": 1964 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 5.73215216258468e-07, |
| "loss": 0.5659, |
| "step": 1965 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 5.62793121417405e-07, |
| "loss": 0.4828, |
| "step": 1966 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 5.523710265763419e-07, |
| "loss": 0.627, |
| "step": 1967 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 5.419489317352788e-07, |
| "loss": 0.4728, |
| "step": 1968 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 5.315268368942158e-07, |
| "loss": 0.7635, |
| "step": 1969 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 5.211047420531527e-07, |
| "loss": 0.6551, |
| "step": 1970 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 5.106826472120896e-07, |
| "loss": 0.5136, |
| "step": 1971 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 5.002605523710266e-07, |
| "loss": 0.6456, |
| "step": 1972 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 4.898384575299635e-07, |
| "loss": 0.6743, |
| "step": 1973 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 4.794163626889004e-07, |
| "loss": 0.4168, |
| "step": 1974 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 4.689942678478374e-07, |
| "loss": 0.685, |
| "step": 1975 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 4.5857217300677436e-07, |
| "loss": 0.4732, |
| "step": 1976 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 4.4815007816571137e-07, |
| "loss": 0.5129, |
| "step": 1977 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 4.3772798332464833e-07, |
| "loss": 0.5885, |
| "step": 1978 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 4.2730588848358524e-07, |
| "loss": 0.6191, |
| "step": 1979 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 4.168837936425222e-07, |
| "loss": 0.6379, |
| "step": 1980 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 4.0646169880145915e-07, |
| "loss": 0.5286, |
| "step": 1981 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 3.9603960396039606e-07, |
| "loss": 0.6491, |
| "step": 1982 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 3.85617509119333e-07, |
| "loss": 0.426, |
| "step": 1983 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 3.7519541427827e-07, |
| "loss": 0.6008, |
| "step": 1984 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 3.647733194372069e-07, |
| "loss": 0.5416, |
| "step": 1985 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 3.5435122459614384e-07, |
| "loss": 0.6618, |
| "step": 1986 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 3.439291297550808e-07, |
| "loss": 0.5346, |
| "step": 1987 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 3.335070349140177e-07, |
| "loss": 0.5715, |
| "step": 1988 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 3.2308494007295466e-07, |
| "loss": 0.619, |
| "step": 1989 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 3.1266284523189167e-07, |
| "loss": 0.5217, |
| "step": 1990 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 3.022407503908286e-07, |
| "loss": 0.5005, |
| "step": 1991 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 2.9181865554976554e-07, |
| "loss": 0.4824, |
| "step": 1992 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 2.813965607087025e-07, |
| "loss": 0.5146, |
| "step": 1993 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 2.709744658676394e-07, |
| "loss": 0.6188, |
| "step": 1994 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 2.6055237102657636e-07, |
| "loss": 0.6423, |
| "step": 1995 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 2.501302761855133e-07, |
| "loss": 0.6654, |
| "step": 1996 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 2.397081813444502e-07, |
| "loss": 0.619, |
| "step": 1997 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 2.2928608650338718e-07, |
| "loss": 0.6527, |
| "step": 1998 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 2.1886399166232417e-07, |
| "loss": 0.615, |
| "step": 1999 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 2.084418968212611e-07, |
| "loss": 0.4173, |
| "step": 2000 |
| }, |
| { |
| "epoch": 2.97, |
| "eval_loss": 1.431810975074768, |
| "eval_runtime": 0.682, |
| "eval_samples_per_second": 357.793, |
| "eval_steps_per_second": 45.457, |
| "step": 2000 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2019, |
| "num_train_epochs": 3, |
| "save_steps": 250, |
| "total_flos": 2.254373375705088e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |