{
  "best_metric": 3.7452244758605957,
  "best_model_checkpoint": "bird_sounds_classification/checkpoint-2770",
  "epoch": 10.0,
  "global_step": 2770,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 1.0000000000000001e-07, "loss": 3.8957, "step": 1},
    {"epoch": 0.01, "learning_rate": 2.0000000000000002e-07, "loss": 3.921, "step": 2},
    {"epoch": 0.01, "learning_rate": 3.0000000000000004e-07, "loss": 3.9181, "step": 3},
    {"epoch": 0.01, "learning_rate": 4.0000000000000003e-07, "loss": 3.9141, "step": 4},
    {"epoch": 0.02, "learning_rate": 5.000000000000001e-07, "loss": 3.9233, "step": 5},
    {"epoch": 0.02, "learning_rate": 6.000000000000001e-07, "loss": 3.9152, "step": 6},
    {"epoch": 0.03, "learning_rate": 7.000000000000001e-07, "loss": 3.9263, "step": 7},
    {"epoch": 0.03, "learning_rate": 8.000000000000001e-07, "loss": 3.9305, "step": 8},
    {"epoch": 0.03, "learning_rate": 9.000000000000001e-07, "loss": 3.9151, "step": 9},
    {"epoch": 0.04, "learning_rate": 1.0000000000000002e-06, "loss": 3.8992, "step": 10},
    {"epoch": 0.04, "learning_rate": 1.1e-06, "loss": 3.9155, "step": 11},
    {"epoch": 0.04, "learning_rate": 1.2000000000000002e-06, "loss": 3.9264, "step": 12},
    {"epoch": 0.05, "learning_rate": 1.3e-06, "loss": 3.8954, "step": 13},
    {"epoch": 0.05, "learning_rate": 1.4000000000000001e-06, "loss": 3.9049, "step": 14},
    {"epoch": 0.05, "learning_rate": 1.5e-06, "loss": 3.9178, "step": 15},
    {"epoch": 0.06, "learning_rate": 1.6000000000000001e-06, "loss": 3.9094, "step": 16},
    {"epoch": 0.06, "learning_rate": 1.7000000000000002e-06, "loss": 3.9141, "step": 17},
    {"epoch": 0.06, "learning_rate": 1.8000000000000001e-06, "loss": 3.8963, "step": 18},
    {"epoch": 0.07, "learning_rate": 1.9000000000000002e-06, "loss": 3.9003, "step": 19},
    {"epoch": 0.07, "learning_rate": 2.0000000000000003e-06, "loss": 3.9276, "step": 20},
    {"epoch": 0.08, "learning_rate": 2.1000000000000002e-06, "loss": 3.9261, "step": 21},
    {"epoch": 0.08, "learning_rate": 2.2e-06, "loss": 3.9129, "step": 22},
    {"epoch": 0.08, "learning_rate": 2.3000000000000004e-06, "loss": 3.9343, "step": 23},
    {"epoch": 0.09, "learning_rate": 2.4000000000000003e-06, "loss": 3.987, "step": 24},
    {"epoch": 0.09, "learning_rate": 2.5e-06, "loss": 3.9166, "step": 25},
    {"epoch": 0.09, "learning_rate": 2.6e-06, "loss": 3.9105, "step": 26},
    {"epoch": 0.1, "learning_rate": 2.7000000000000004e-06, "loss": 3.9047, "step": 27},
    {"epoch": 0.1, "learning_rate": 2.8000000000000003e-06, "loss": 3.9042, "step": 28},
    {"epoch": 0.1, "learning_rate": 2.9e-06, "loss": 3.9153, "step": 29},
    {"epoch": 0.11, "learning_rate": 3e-06, "loss": 3.9159, "step": 30},
    {"epoch": 0.11, "learning_rate": 3.1000000000000004e-06, "loss": 3.9023, "step": 31},
    {"epoch": 0.12, "learning_rate": 3.2000000000000003e-06, "loss": 3.8961, "step": 32},
    {"epoch": 0.12, "learning_rate": 3.3000000000000006e-06, "loss": 3.8725, "step": 33},
    {"epoch": 0.12, "learning_rate": 3.4000000000000005e-06, "loss": 3.8763, "step": 34},
    {"epoch": 0.13, "learning_rate": 3.5e-06, "loss": 3.9041, "step": 35},
    {"epoch": 0.13, "learning_rate": 3.6000000000000003e-06, "loss": 3.8972, "step": 36},
    {"epoch": 0.13, "learning_rate": 3.7e-06, "loss": 3.9123, "step": 37},
    {"epoch": 0.14, "learning_rate": 3.8000000000000005e-06, "loss": 3.8862, "step": 38},
    {"epoch": 0.14, "learning_rate": 3.900000000000001e-06, "loss": 3.9151, "step": 39},
    {"epoch": 0.14, "learning_rate": 4.000000000000001e-06, "loss": 3.9086, "step": 40},
    {"epoch": 0.15, "learning_rate": 4.1e-06, "loss": 3.8992, "step": 41},
    {"epoch": 0.15, "learning_rate": 4.2000000000000004e-06, "loss": 3.8783, "step": 42},
    {"epoch": 0.16, "learning_rate": 4.3e-06, "loss": 3.9076, "step": 43},
    {"epoch": 0.16, "learning_rate": 4.4e-06, "loss": 3.9, "step": 44},
    {"epoch": 0.16, "learning_rate": 4.5e-06, "loss": 3.9286, "step": 45},
    {"epoch": 0.17, "learning_rate": 4.600000000000001e-06, "loss": 3.9123, "step": 46},
    {"epoch": 0.17, "learning_rate": 4.7e-06, "loss": 3.8866, "step": 47},
    {"epoch": 0.17, "learning_rate": 4.800000000000001e-06, "loss": 3.8716, "step": 48},
    {"epoch": 0.18, "learning_rate": 4.9000000000000005e-06, "loss": 3.9158, "step": 49},
    {"epoch": 0.18, "learning_rate": 5e-06, "loss": 3.9134, "step": 50},
    {"epoch": 0.18, "learning_rate": 4.998161764705883e-06, "loss": 3.9056, "step": 51},
    {"epoch": 0.19, "learning_rate": 4.996323529411765e-06, "loss": 3.9211, "step": 52},
    {"epoch": 0.19, "learning_rate": 4.994485294117647e-06, "loss": 3.9112, "step": 53},
    {"epoch": 0.19, "learning_rate": 4.99264705882353e-06, "loss": 3.9093, "step": 54},
    {"epoch": 0.2, "learning_rate": 4.990808823529413e-06, "loss": 3.9111, "step": 55},
    {"epoch": 0.2, "learning_rate": 4.988970588235294e-06, "loss": 3.9031, "step": 56},
    {"epoch": 0.21, "learning_rate": 4.987132352941177e-06, "loss": 3.8199, "step": 57},
    {"epoch": 0.21, "learning_rate": 4.9852941176470596e-06, "loss": 3.9104, "step": 58},
    {"epoch": 0.21, "learning_rate": 4.983455882352941e-06, "loss": 3.9171, "step": 59},
    {"epoch": 0.22, "learning_rate": 4.981617647058824e-06, "loss": 3.9245, "step": 60},
    {"epoch": 0.22, "learning_rate": 4.9797794117647065e-06, "loss": 3.9053, "step": 61},
    {"epoch": 0.22, "learning_rate": 4.977941176470588e-06, "loss": 3.9025, "step": 62},
    {"epoch": 0.23, "learning_rate": 4.976102941176471e-06, "loss": 3.9206, "step": 63},
    {"epoch": 0.23, "learning_rate": 4.9742647058823535e-06, "loss": 3.9126, "step": 64},
    {"epoch": 0.23, "learning_rate": 4.972426470588236e-06, "loss": 3.9147, "step": 65},
    {"epoch": 0.24, "learning_rate": 4.970588235294118e-06, "loss": 3.9052, "step": 66},
    {"epoch": 0.24, "learning_rate": 4.9687500000000005e-06, "loss": 3.9048, "step": 67},
    {"epoch": 0.25, "learning_rate": 4.966911764705883e-06, "loss": 3.8963, "step": 68},
    {"epoch": 0.25, "learning_rate": 4.965073529411765e-06, "loss": 3.9229, "step": 69},
    {"epoch": 0.25, "learning_rate": 4.9632352941176475e-06, "loss": 3.9114, "step": 70},
    {"epoch": 0.26, "learning_rate": 4.96139705882353e-06, "loss": 3.917, "step": 71},
    {"epoch": 0.26, "learning_rate": 4.959558823529413e-06, "loss": 3.9059, "step": 72},
    {"epoch": 0.26, "learning_rate": 4.9577205882352944e-06, "loss": 3.9141, "step": 73},
    {"epoch": 0.27, "learning_rate": 4.955882352941177e-06, "loss": 3.9358, "step": 74},
    {"epoch": 0.27, "learning_rate": 4.95404411764706e-06, "loss": 3.9199, "step": 75},
    {"epoch": 0.27, "learning_rate": 4.952205882352941e-06, "loss": 3.9629, "step": 76},
    {"epoch": 0.28, "learning_rate": 4.950367647058824e-06, "loss": 3.9253, "step": 77},
    {"epoch": 0.28, "learning_rate": 4.948529411764707e-06, "loss": 3.9178, "step": 78},
    {"epoch": 0.29, "learning_rate": 4.946691176470589e-06, "loss": 3.8573, "step": 79},
    {"epoch": 0.29, "learning_rate": 4.944852941176471e-06, "loss": 3.923, "step": 80},
    {"epoch": 0.29, "learning_rate": 4.943014705882354e-06, "loss": 3.9113, "step": 81},
    {"epoch": 0.3, "learning_rate": 4.941176470588236e-06, "loss": 3.9223, "step": 82},
    {"epoch": 0.3, "learning_rate": 4.939338235294118e-06, "loss": 3.9211, "step": 83},
    {"epoch": 0.3, "learning_rate": 4.937500000000001e-06, "loss": 3.9393, "step": 84},
    {"epoch": 0.31, "learning_rate": 4.935661764705883e-06, "loss": 3.9066, "step": 85},
    {"epoch": 0.31, "learning_rate": 4.933823529411765e-06, "loss": 3.9651, "step": 86},
    {"epoch": 0.31, "learning_rate": 4.9319852941176476e-06, "loss": 3.9318, "step": 87},
    {"epoch": 0.32, "learning_rate": 4.93014705882353e-06, "loss": 3.8814, "step": 88},
    {"epoch": 0.32, "learning_rate": 4.928308823529413e-06, "loss": 3.9313, "step": 89},
    {"epoch": 0.32, "learning_rate": 4.9264705882352945e-06, "loss": 3.9193, "step": 90},
    {"epoch": 0.33, "learning_rate": 4.924632352941177e-06, "loss": 3.9002, "step": 91},
    {"epoch": 0.33, "learning_rate": 4.92279411764706e-06, "loss": 3.9175, "step": 92},
    {"epoch": 0.34, "learning_rate": 4.9209558823529415e-06, "loss": 3.9267, "step": 93},
    {"epoch": 0.34, "learning_rate": 4.919117647058823e-06, "loss": 3.9336, "step": 94},
    {"epoch": 0.34, "learning_rate": 4.917279411764706e-06, "loss": 3.887, "step": 95},
    {"epoch": 0.35, "learning_rate": 4.9154411764705885e-06, "loss": 3.9319, "step": 96},
    {"epoch": 0.35, "learning_rate": 4.913602941176471e-06, "loss": 3.8965, "step": 97},
    {"epoch": 0.35, "learning_rate": 4.911764705882353e-06, "loss": 3.9333, "step": 98},
    {"epoch": 0.36, "learning_rate": 4.9099264705882355e-06, "loss": 3.8966, "step": 99},
    {"epoch": 0.36, "learning_rate": 4.908088235294118e-06, "loss": 3.9095, "step": 100},
    {"epoch": 0.36, "learning_rate": 4.90625e-06, "loss": 3.9153, "step": 101},
    {"epoch": 0.37, "learning_rate": 4.9044117647058824e-06, "loss": 3.8886, "step": 102},
    {"epoch": 0.37, "learning_rate": 4.902573529411765e-06, "loss": 3.925, "step": 103},
    {"epoch": 0.38, "learning_rate": 4.900735294117648e-06, "loss": 3.9202, "step": 104},
    {"epoch": 0.38, "learning_rate": 4.898897058823529e-06, "loss": 3.9166, "step": 105},
    {"epoch": 0.38, "learning_rate": 4.897058823529412e-06, "loss": 3.8953, "step": 106},
    {"epoch": 0.39, "learning_rate": 4.895220588235295e-06, "loss": 3.9142, "step": 107},
    {"epoch": 0.39, "learning_rate": 4.893382352941176e-06, "loss": 3.9326, "step": 108},
    {"epoch": 0.39, "learning_rate": 4.891544117647059e-06, "loss": 3.9234, "step": 109},
    {"epoch": 0.4, "learning_rate": 4.889705882352942e-06, "loss": 3.9074, "step": 110},
    {"epoch": 0.4, "learning_rate": 4.887867647058823e-06, "loss": 3.9124, "step": 111},
    {"epoch": 0.4, "learning_rate": 4.886029411764706e-06, "loss": 3.9224, "step": 112},
    {"epoch": 0.41, "learning_rate": 4.8841911764705886e-06, "loss": 3.9149, "step": 113},
    {"epoch": 0.41, "learning_rate": 4.882352941176471e-06, "loss": 3.902, "step": 114},
    {"epoch": 0.42, "learning_rate": 4.880514705882353e-06, "loss": 3.8917, "step": 115},
    {"epoch": 0.42, "learning_rate": 4.8786764705882355e-06, "loss": 3.9184, "step": 116},
    {"epoch": 0.42, "learning_rate": 4.876838235294118e-06, "loss": 3.9209, "step": 117},
    {"epoch": 0.43, "learning_rate": 4.875e-06, "loss": 3.9136, "step": 118},
    {"epoch": 0.43, "learning_rate": 4.8731617647058825e-06, "loss": 3.9409, "step": 119},
    {"epoch": 0.43, "learning_rate": 4.871323529411765e-06, "loss": 3.9229, "step": 120},
    {"epoch": 0.44, "learning_rate": 4.869485294117648e-06, "loss": 3.9351, "step": 121},
    {"epoch": 0.44, "learning_rate": 4.8676470588235295e-06, "loss": 3.9235, "step": 122},
    {"epoch": 0.44, "learning_rate": 4.865808823529412e-06, "loss": 3.9185, "step": 123},
    {"epoch": 0.45, "learning_rate": 4.863970588235295e-06, "loss": 3.9277, "step": 124},
    {"epoch": 0.45, "learning_rate": 4.8621323529411765e-06, "loss": 3.9142, "step": 125},
    {"epoch": 0.45, "learning_rate": 4.860294117647059e-06, "loss": 3.9086, "step": 126},
    {"epoch": 0.46, "learning_rate": 4.858455882352942e-06, "loss": 3.9022, "step": 127},
    {"epoch": 0.46, "learning_rate": 4.856617647058824e-06, "loss": 3.8916, "step": 128},
    {"epoch": 0.47, "learning_rate": 4.854779411764706e-06, "loss": 3.8947, "step": 129},
    {"epoch": 0.47, "learning_rate": 4.852941176470589e-06, "loss": 3.9271, "step": 130},
    {"epoch": 0.47, "learning_rate": 4.851102941176471e-06, "loss": 3.9083, "step": 131},
    {"epoch": 0.48, "learning_rate": 4.849264705882353e-06, "loss": 3.8508, "step": 132},
    {"epoch": 0.48, "learning_rate": 4.847426470588236e-06, "loss": 3.8974, "step": 133},
    {"epoch": 0.48, "learning_rate": 4.845588235294118e-06, "loss": 3.9153, "step": 134},
    {"epoch": 0.49, "learning_rate": 4.84375e-06, "loss": 3.9164, "step": 135},
    {"epoch": 0.49, "learning_rate": 4.841911764705883e-06, "loss": 3.922, "step": 136},
    {"epoch": 0.49, "learning_rate": 4.840073529411765e-06, "loss": 3.9043, "step": 137},
    {"epoch": 0.5, "learning_rate": 4.838235294117648e-06, "loss": 3.9602, "step": 138},
    {"epoch": 0.5, "learning_rate": 4.83639705882353e-06, "loss": 3.9293, "step": 139},
    {"epoch": 0.51, "learning_rate": 4.834558823529412e-06, "loss": 3.9465, "step": 140},
    {"epoch": 0.51, "learning_rate": 4.832720588235295e-06, "loss": 3.9197, "step": 141},
    {"epoch": 0.51, "learning_rate": 4.8308823529411766e-06, "loss": 3.9176, "step": 142},
    {"epoch": 0.52, "learning_rate": 4.829044117647059e-06, "loss": 3.9099, "step": 143},
    {"epoch": 0.52, "learning_rate": 4.827205882352942e-06, "loss": 3.9161, "step": 144},
    {"epoch": 0.52, "learning_rate": 4.825367647058824e-06, "loss": 3.9185, "step": 145},
    {"epoch": 0.53, "learning_rate": 4.823529411764706e-06, "loss": 3.9115, "step": 146},
    {"epoch": 0.53, "learning_rate": 4.821691176470589e-06, "loss": 3.9147, "step": 147},
    {"epoch": 0.53, "learning_rate": 4.819852941176471e-06, "loss": 3.9376, "step": 148},
    {"epoch": 0.54, "learning_rate": 4.818014705882353e-06, "loss": 3.915, "step": 149},
    {"epoch": 0.54, "learning_rate": 4.816176470588236e-06, "loss": 3.9177, "step": 150},
    {"epoch": 0.55, "learning_rate": 4.814338235294118e-06, "loss": 3.9417, "step": 151},
    {"epoch": 0.55, "learning_rate": 4.8125e-06, "loss": 3.8834, "step": 152},
    {"epoch": 0.55, "learning_rate": 4.810661764705883e-06, "loss": 3.912, "step": 153},
    {"epoch": 0.56, "learning_rate": 4.808823529411765e-06, "loss": 3.9026, "step": 154},
    {"epoch": 0.56, "learning_rate": 4.806985294117648e-06, "loss": 3.9035, "step": 155},
    {"epoch": 0.56, "learning_rate": 4.80514705882353e-06, "loss": 3.9025, "step": 156},
    {"epoch": 0.57, "learning_rate": 4.803308823529412e-06, "loss": 3.8928, "step": 157},
    {"epoch": 0.57, "learning_rate": 4.801470588235295e-06, "loss": 3.9144, "step": 158},
    {"epoch": 0.57, "learning_rate": 4.799632352941177e-06, "loss": 3.9156, "step": 159},
    {"epoch": 0.58, "learning_rate": 4.797794117647059e-06, "loss": 3.9111, "step": 160},
    {"epoch": 0.58, "learning_rate": 4.795955882352942e-06, "loss": 3.9151, "step": 161},
    {"epoch": 0.58, "learning_rate": 4.7941176470588245e-06, "loss": 3.8999, "step": 162},
    {"epoch": 0.59, "learning_rate": 4.792279411764706e-06, "loss": 3.9154, "step": 163},
    {"epoch": 0.59, "learning_rate": 4.790441176470589e-06, "loss": 3.899, "step": 164},
    {"epoch": 0.6, "learning_rate": 4.7886029411764714e-06, "loss": 3.9119, "step": 165},
    {"epoch": 0.6, "learning_rate": 4.786764705882353e-06, "loss": 3.8925, "step": 166},
    {"epoch": 0.6, "learning_rate": 4.784926470588236e-06, "loss": 3.9321, "step": 167},
    {"epoch": 0.61, "learning_rate": 4.783088235294118e-06, "loss": 3.909, "step": 168},
    {"epoch": 0.61, "learning_rate": 4.781250000000001e-06, "loss": 3.9066, "step": 169},
    {"epoch": 0.61, "learning_rate": 4.779411764705883e-06, "loss": 3.9001, "step": 170},
    {"epoch": 0.62, "learning_rate": 4.777573529411765e-06, "loss": 3.9022, "step": 171},
    {"epoch": 0.62, "learning_rate": 4.775735294117648e-06, "loss": 3.9078, "step": 172},
    {"epoch": 0.62, "learning_rate": 4.77389705882353e-06, "loss": 3.9196, "step": 173},
    {"epoch": 0.63, "learning_rate": 4.772058823529412e-06, "loss": 3.8968, "step": 174},
    {"epoch": 0.63, "learning_rate": 4.770220588235295e-06, "loss": 3.9011, "step": 175},
    {"epoch": 0.64, "learning_rate": 4.768382352941177e-06, "loss": 3.9063, "step": 176},
    {"epoch": 0.64, "learning_rate": 4.766544117647059e-06, "loss": 3.9207, "step": 177},
    {"epoch": 0.64, "learning_rate": 4.764705882352941e-06, "loss": 3.9207, "step": 178},
    {"epoch": 0.65, "learning_rate": 4.762867647058824e-06, "loss": 3.9065, "step": 179},
    {"epoch": 0.65, "learning_rate": 4.761029411764706e-06, "loss": 3.9008, "step": 180},
    {"epoch": 0.65, "learning_rate": 4.759191176470588e-06, "loss": 3.9283, "step": 181},
    {"epoch": 0.66, "learning_rate": 4.757352941176471e-06, "loss": 3.9035, "step": 182},
    {"epoch": 0.66, "learning_rate": 4.755514705882353e-06, "loss": 3.9002, "step": 183},
    {"epoch": 0.66, "learning_rate": 4.753676470588235e-06, "loss": 3.8927, "step": 184},
    {"epoch": 0.67, "learning_rate": 4.751838235294118e-06, "loss": 3.9271, "step": 185},
    {"epoch": 0.67, "learning_rate": 4.75e-06, "loss": 3.9176, "step": 186},
    {"epoch": 0.68, "learning_rate": 4.748161764705883e-06, "loss": 3.9195, "step": 187},
    {"epoch": 0.68, "learning_rate": 4.746323529411765e-06, "loss": 3.91, "step": 188},
    {"epoch": 0.68, "learning_rate": 4.744485294117647e-06, "loss": 3.8993, "step": 189},
    {"epoch": 0.69, "learning_rate": 4.74264705882353e-06, "loss": 3.8949, "step": 190},
    {"epoch": 0.69, "learning_rate": 4.740808823529412e-06, "loss": 3.8935, "step": 191},
    {"epoch": 0.69, "learning_rate": 4.738970588235294e-06, "loss": 3.8936, "step": 192},
    {"epoch": 0.7, "learning_rate": 4.737132352941177e-06, "loss": 3.8571, "step": 193},
    {"epoch": 0.7, "learning_rate": 4.7352941176470594e-06, "loss": 3.9071, "step": 194},
    {"epoch": 0.7, "learning_rate": 4.733455882352941e-06, "loss": 3.9012, "step": 195},
    {"epoch": 0.71, "learning_rate": 4.731617647058824e-06, "loss": 3.9317, "step": 196},
    {"epoch": 0.71, "learning_rate": 4.729779411764706e-06, "loss": 3.8995, "step": 197},
    {"epoch": 0.71, "learning_rate": 4.727941176470588e-06, "loss": 3.9174, "step": 198},
    {"epoch": 0.72, "learning_rate": 4.726102941176471e-06, "loss": 3.8955, "step": 199},
    {"epoch": 0.72, "learning_rate": 4.724264705882353e-06, "loss": 3.9231, "step": 200},
    {"epoch": 0.73, "learning_rate": 4.722426470588235e-06, "loss": 3.936, "step": 201},
    {"epoch": 0.73, "learning_rate": 4.720588235294118e-06, "loss": 3.9145, "step": 202},
    {"epoch": 0.73, "learning_rate": 4.71875e-06, "loss": 3.9248, "step": 203},
    {"epoch": 0.74, "learning_rate": 4.716911764705883e-06, "loss": 3.9302, "step": 204},
    {"epoch": 0.74, "learning_rate": 4.715073529411765e-06, "loss": 3.9142, "step": 205},
    {"epoch": 0.74, "learning_rate": 4.713235294117647e-06, "loss": 3.9368, "step": 206},
    {"epoch": 0.75, "learning_rate": 4.71139705882353e-06, "loss": 3.9272, "step": 207},
    {"epoch": 0.75, "learning_rate": 4.709558823529412e-06, "loss": 3.9115, "step": 208},
    {"epoch": 0.75, "learning_rate": 4.707720588235294e-06, "loss": 3.9266, "step": 209},
    {"epoch": 0.76, "learning_rate": 4.705882352941177e-06, "loss": 3.9063, "step": 210},
    {"epoch": 0.76, "learning_rate": 4.7040441176470595e-06, "loss": 3.8973, "step": 211},
    {"epoch": 0.77, "learning_rate": 4.702205882352941e-06, "loss": 3.9274, "step": 212},
    {"epoch": 0.77, "learning_rate": 4.700367647058824e-06, "loss": 3.9189, "step": 213},
    {"epoch": 0.77, "learning_rate": 4.6985294117647065e-06, "loss": 3.91, "step": 214},
    {"epoch": 0.78, "learning_rate": 4.696691176470588e-06, "loss": 3.9023, "step": 215},
    {"epoch": 0.78, "learning_rate": 4.694852941176471e-06, "loss": 3.9333, "step": 216},
    {"epoch": 0.78, "learning_rate": 4.6930147058823535e-06, "loss": 3.9163, "step": 217},
    {"epoch": 0.79, "learning_rate": 4.691176470588236e-06, "loss": 3.9132, "step": 218},
    {"epoch": 0.79, "learning_rate": 4.689338235294118e-06, "loss": 3.8849, "step": 219},
    {"epoch": 0.79, "learning_rate": 4.6875000000000004e-06, "loss": 3.9059, "step": 220},
    {"epoch": 0.8, "learning_rate": 4.685661764705883e-06, "loss": 3.9018, "step": 221},
    {"epoch": 0.8, "learning_rate": 4.683823529411765e-06, "loss": 3.901, "step": 222},
    {"epoch": 0.81, "learning_rate": 4.681985294117647e-06, "loss": 3.9107, "step": 223},
    {"epoch": 0.81, "learning_rate": 4.68014705882353e-06, "loss": 3.8986, "step": 224},
    {"epoch": 0.81, "learning_rate": 4.678308823529412e-06, "loss": 3.9104, "step": 225},
    {"epoch": 0.82, "learning_rate": 4.676470588235294e-06, "loss": 3.9185, "step": 226},
    {"epoch": 0.82, "learning_rate": 4.674632352941177e-06, "loss": 3.8912, "step": 227},
    {"epoch": 0.82, "learning_rate": 4.67279411764706e-06, "loss": 3.9225, "step": 228},
    {"epoch": 0.83, "learning_rate": 4.670955882352941e-06, "loss": 3.9269, "step": 229},
    {"epoch": 0.83, "learning_rate": 4.669117647058824e-06, "loss": 3.9236, "step": 230},
    {"epoch": 0.83, "learning_rate": 4.6672794117647066e-06, "loss": 3.8892, "step": 231},
    {"epoch": 0.84, "learning_rate": 4.665441176470588e-06, "loss": 3.9083, "step": 232},
    {"epoch": 0.84, "learning_rate": 4.663602941176471e-06, "loss": 3.9088, "step": 233},
    {"epoch": 0.84, "learning_rate": 4.6617647058823535e-06, "loss": 3.9078, "step": 234},
    {"epoch": 0.85, "learning_rate": 4.659926470588236e-06, "loss": 3.9213, "step": 235},
    {"epoch": 0.85, "learning_rate": 4.658088235294118e-06, "loss": 3.9141, "step": 236},
    {"epoch": 0.86, "learning_rate": 4.6562500000000005e-06, "loss": 3.8859, "step": 237},
    {"epoch": 0.86, "learning_rate": 4.654411764705883e-06, "loss": 3.906, "step": 238},
    {"epoch": 0.86, "learning_rate": 4.652573529411765e-06, "loss": 3.8916, "step": 239},
    {"epoch": 0.87, "learning_rate": 4.6507352941176475e-06, "loss": 3.9085, "step": 240},
    {"epoch": 0.87, "learning_rate": 4.64889705882353e-06, "loss": 3.9039, "step": 241},
    {"epoch": 0.87, "learning_rate": 4.647058823529412e-06, "loss": 3.9172, "step": 242},
    {"epoch": 0.88, "learning_rate": 4.6452205882352945e-06, "loss": 3.8915, "step": 243},
    {"epoch": 0.88, "learning_rate": 4.643382352941177e-06, "loss": 3.8961, "step": 244},
    {"epoch": 0.88, "learning_rate": 4.64154411764706e-06, "loss": 3.8966, "step": 245},
    {"epoch": 0.89, "learning_rate": 4.6397058823529414e-06, "loss": 3.9654, "step": 246},
    {"epoch": 0.89, "learning_rate": 4.637867647058824e-06, "loss": 3.9111, "step": 247},
    {"epoch": 0.9, "learning_rate": 4.636029411764707e-06, "loss": 3.928, "step": 248},
    {"epoch": 0.9, "learning_rate": 4.634191176470588e-06, "loss": 3.9172, "step": 249},
    {"epoch": 0.9, "learning_rate": 4.632352941176471e-06, "loss": 3.9005, "step": 250},
    {"epoch": 0.91, "learning_rate": 4.630514705882354e-06, "loss": 3.9194, "step": 251},
    {"epoch": 0.91, "learning_rate": 4.628676470588236e-06, "loss": 3.8748, "step": 252},
    {"epoch": 0.91, "learning_rate": 4.626838235294118e-06, "loss": 3.911, "step": 253},
    {"epoch": 0.92, "learning_rate": 4.625000000000001e-06, "loss": 3.9159, "step": 254},
    {"epoch": 0.92, "learning_rate": 4.623161764705883e-06, "loss": 3.8986, "step": 255},
    {"epoch": 0.92, "learning_rate": 4.621323529411765e-06, "loss": 3.9252, "step": 256},
    {"epoch": 0.93, "learning_rate": 4.619485294117648e-06, "loss": 3.9239, "step": 257},
    {"epoch": 0.93, "learning_rate": 4.61764705882353e-06, "loss": 3.8983, "step": 258},
    {"epoch": 0.94, "learning_rate": 4.615808823529413e-06, "loss": 3.9108, "step": 259},
    {"epoch": 0.94, "learning_rate": 4.6139705882352946e-06, "loss": 3.9053, "step": 260},
    {"epoch": 0.94, "learning_rate": 4.612132352941177e-06, "loss": 3.9081, "step": 261},
    {"epoch": 0.95, "learning_rate": 4.61029411764706e-06, "loss": 3.9596, "step": 262},
    {"epoch": 0.95, "learning_rate": 4.6084558823529415e-06, "loss": 3.8736, "step": 263},
    {"epoch": 0.95, "learning_rate": 4.606617647058823e-06, "loss": 3.9105, "step": 264},
    {"epoch": 0.96, "learning_rate": 4.604779411764706e-06, "loss": 3.8659, "step": 265},
    {"epoch": 0.96, "learning_rate": 4.6029411764705885e-06, "loss": 3.8871, "step": 266},
    {"epoch": 0.96, "learning_rate": 4.601102941176471e-06, "loss": 3.9245, "step": 267},
    {"epoch": 0.97, "learning_rate": 4.599264705882353e-06, "loss": 3.9141, "step": 268},
    {"epoch": 0.97, "learning_rate": 4.5974264705882355e-06, "loss": 3.913, "step": 269},
    {"epoch": 0.97, "learning_rate": 4.595588235294118e-06, "loss": 3.9259, "step": 270},
    {"epoch": 0.98, "learning_rate": 4.59375e-06, "loss": 3.917, "step": 271},
    {"epoch": 0.98, "learning_rate": 4.5919117647058825e-06, "loss": 3.9239, "step": 272},
    {"epoch": 0.99, "learning_rate": 4.590073529411765e-06, "loss": 3.9198, "step": 273},
    {"epoch": 0.99, "learning_rate": 4.588235294117647e-06, "loss": 3.9101, "step": 274},
    {"epoch": 0.99, "learning_rate": 4.5863970588235294e-06, "loss": 3.9076, "step": 275},
    {"epoch": 1.0, "learning_rate": 4.584558823529412e-06, "loss": 3.9136, "step": 276},
    {"epoch": 1.0, "learning_rate": 4.582720588235295e-06, "loss": 3.9107, "step": 277},
    {"epoch": 1.0, "eval_accuracy": 0.02702702702702703, "eval_loss": 3.9059979915618896, "eval_runtime": 142.1807, "eval_samples_per_second": 2.602, "eval_steps_per_second": 0.654, "step": 277},
    {"epoch": 1.0, "learning_rate": 4.580882352941176e-06, "loss": 3.9043, "step": 278},
    {"epoch": 1.01, "learning_rate": 4.579044117647059e-06, "loss": 3.919, "step": 279},
    {"epoch": 1.01, "learning_rate": 4.577205882352942e-06, "loss": 3.9638, "step": 280},
    {"epoch": 1.01, "learning_rate": 4.575367647058823e-06, "loss": 3.8705, "step": 281},
    {"epoch": 1.02, "learning_rate": 4.573529411764706e-06, "loss": 3.8757, "step": 282},
    {"epoch": 1.02, "learning_rate": 4.571691176470589e-06, "loss": 3.8841, "step": 283},
    {"epoch": 1.03, "learning_rate": 4.569852941176471e-06, "loss": 3.9085, "step": 284},
    {"epoch": 1.03, "learning_rate": 4.568014705882353e-06, "loss": 3.8842, "step": 285},
    {"epoch": 1.03, "learning_rate": 4.5661764705882356e-06, "loss": 3.9095, "step": 286},
    {"epoch": 1.04, "learning_rate": 4.564338235294118e-06, "loss": 3.9127, "step": 287},
    {"epoch": 1.04, "learning_rate": 4.5625e-06, "loss": 3.8955, "step": 288},
    {"epoch": 1.04, "learning_rate": 4.5606617647058825e-06, "loss": 3.9008, "step": 289},
    {"epoch": 1.05, "learning_rate": 4.558823529411765e-06, "loss": 3.881, "step": 290},
    {"epoch": 1.05, "learning_rate": 4.556985294117648e-06, "loss": 3.8898, "step": 291},
    {"epoch": 1.05, "learning_rate": 4.5551470588235295e-06, "loss": 3.924, "step": 292},
    {"epoch": 1.06, "learning_rate": 4.553308823529412e-06, "loss": 3.9063, "step": 293},
    {"epoch": 1.06, "learning_rate": 4.551470588235295e-06, "loss": 3.9213, "step": 294},
    {"epoch": 1.06, "learning_rate": 4.5496323529411765e-06, "loss": 3.9123, "step": 295},
    {"epoch": 1.07, "learning_rate": 4.547794117647059e-06, "loss": 3.867, "step": 296},
    {"epoch": 1.07, "learning_rate": 4.545955882352942e-06, "loss": 3.8986, "step": 297},
    {"epoch": 1.08, "learning_rate": 4.5441176470588235e-06, "loss": 3.935, "step": 298},
    {"epoch": 1.08, "learning_rate": 4.542279411764706e-06, "loss": 3.8983, "step": 299},
    {"epoch": 1.08, "learning_rate": 4.540441176470589e-06, "loss": 3.9206, "step": 300},
    {"epoch": 1.09, "learning_rate": 4.538602941176471e-06, "loss": 3.9229, "step": 301},
    {"epoch": 1.09, "learning_rate": 4.536764705882353e-06, "loss": 3.8912, "step": 302},
    {"epoch": 1.09, "learning_rate": 4.534926470588236e-06, "loss": 3.9187, "step": 303},
    {"epoch": 1.1, "learning_rate": 4.533088235294118e-06, "loss": 3.9085, "step": 304},
    {"epoch": 1.1, "learning_rate": 4.53125e-06, "loss": 3.9166, "step": 305},
    {"epoch": 1.1, "learning_rate": 4.529411764705883e-06, "loss": 3.9341, "step": 306},
    {"epoch": 1.11, "learning_rate": 4.527573529411765e-06, "loss": 3.9027, "step": 307},
    {"epoch": 1.11, "learning_rate": 4.525735294117648e-06, "loss": 3.8721, "step": 308},
    {"epoch": 1.12, "learning_rate": 4.52389705882353e-06, "loss": 3.9293, "step": 309},
    {"epoch": 1.12, "learning_rate": 4.522058823529412e-06, "loss": 3.9006, "step": 310},
    {"epoch": 1.12, "learning_rate": 4.520220588235295e-06, "loss": 3.8877, "step": 311},
    {"epoch": 1.13, "learning_rate": 4.518382352941177e-06, "loss": 3.919, "step": 312},
    {"epoch": 1.13, "learning_rate": 4.516544117647059e-06, "loss": 3.9037, "step": 313},
    {"epoch": 1.13, "learning_rate": 4.514705882352942e-06, "loss": 3.9116, "step": 314},
    {"epoch": 1.14, "learning_rate": 4.5128676470588236e-06, "loss": 3.8164, "step": 315},
    {"epoch": 1.14, "learning_rate": 4.511029411764706e-06, "loss": 3.9438, "step": 316},
    {"epoch": 1.14, "learning_rate": 4.509191176470589e-06, "loss": 3.9129, "step": 317},
    {"epoch": 1.15, "learning_rate": 4.507352941176471e-06, "loss": 3.9021, "step": 318},
    {"epoch": 1.15, "learning_rate": 4.505514705882353e-06, "loss": 3.9306, "step": 319},
    {"epoch": 1.16, "learning_rate": 4.503676470588236e-06, "loss": 3.9272, "step": 320},
    {"epoch": 1.16, "learning_rate": 4.501838235294118e-06, "loss": 3.9075, "step": 321},
    {"epoch": 1.16, "learning_rate": 4.5e-06, "loss": 3.8948, "step": 322},
    {"epoch": 1.17, "learning_rate": 4.498161764705883e-06, "loss": 3.9115, "step": 323},
    {"epoch": 1.17, "learning_rate": 4.496323529411765e-06, "loss": 3.9039, "step": 324},
    {"epoch": 1.17, "learning_rate": 4.494485294117648e-06, "loss": 3.8994, "step": 325},
    {"epoch": 1.18, "learning_rate": 4.49264705882353e-06, "loss": 3.9024, "step": 326},
    {"epoch": 1.18, "learning_rate": 4.490808823529412e-06, "loss": 3.8941, "step": 327},
    {"epoch": 1.18, "learning_rate": 4.488970588235295e-06, "loss": 3.9055, "step": 328},
    {"epoch": 1.19, "learning_rate": 4.487132352941177e-06, "loss": 3.9013, "step": 329},
    {"epoch": 1.19, "learning_rate": 4.485294117647059e-06, "loss": 3.8317, "step": 330},
    {"epoch": 1.19, "learning_rate": 4.483455882352942e-06, "loss": 3.9012, "step": 331},
    {"epoch": 1.2, "learning_rate": 4.4816176470588245e-06, "loss": 3.9125, "step": 332},
    {"epoch": 1.2, "learning_rate": 4.479779411764706e-06, "loss": 3.9104, "step": 333},
    {"epoch": 1.21, "learning_rate": 4.477941176470589e-06, "loss": 3.9224, "step": 334},
    {"epoch": 1.21, "learning_rate": 4.4761029411764715e-06, "loss": 3.9048, "step": 335},
    {"epoch": 1.21, "learning_rate": 4.474264705882353e-06, "loss": 3.9096, "step": 336},
    {"epoch": 1.22, "learning_rate": 4.472426470588236e-06, "loss": 3.9211, "step": 337},
    {"epoch": 1.22, "learning_rate": 4.4705882352941184e-06, "loss": 3.9193, "step": 338},
    {"epoch": 1.22, "learning_rate": 4.46875e-06, "loss": 3.9223, "step": 339},
    {"epoch": 1.23, "learning_rate": 4.466911764705883e-06, "loss": 3.923, "step": 340},
    {"epoch": 1.23, "learning_rate": 4.465073529411765e-06, "loss": 3.8889, "step": 341},
    {"epoch": 1.23, "learning_rate": 4.463235294117648e-06, "loss": 3.9045, "step": 342},
    {"epoch": 1.24, "learning_rate": 4.46139705882353e-06, "loss": 3.8967, "step": 343},
    {"epoch": 1.24, "learning_rate": 4.459558823529412e-06, "loss": 3.9626, "step": 344},
    {"epoch": 1.25, "learning_rate": 4.457720588235295e-06, "loss": 3.9064, "step": 345},
    {"epoch": 1.25, "learning_rate": 4.455882352941177e-06, "loss": 3.877, "step": 346},
    {"epoch": 1.25, "learning_rate": 4.454044117647059e-06, "loss": 3.8755, "step": 347},
    {"epoch": 1.26, "learning_rate": 4.452205882352941e-06, "loss": 3.9059, "step": 348},
    {"epoch": 1.26, "learning_rate": 4.450367647058824e-06, "loss": 3.8842, "step": 349},
    {"epoch": 1.26, "learning_rate": 4.448529411764706e-06, "loss": 3.9051, "step": 350},
    {"epoch": 1.27, "learning_rate": 4.446691176470588e-06, "loss": 3.9528, "step": 351},
    {"epoch": 1.27, "learning_rate": 4.444852941176471e-06, "loss": 3.9053, "step": 352},
    {"epoch": 1.27, "learning_rate": 4.443014705882353e-06, "loss": 3.9222, "step": 353},
    {"epoch": 1.28, "learning_rate": 4.441176470588235e-06, "loss": 3.8908, "step": 354},
    {"epoch": 1.28, "learning_rate": 4.439338235294118e-06, "loss": 3.8965, "step": 355},
    {"epoch": 1.29, "learning_rate": 4.4375e-06, "loss": 3.8937, "step": 356},
    {"epoch": 1.29, "learning_rate": 4.435661764705883e-06, "loss": 3.9123, "step": 357},
    {"epoch": 1.29, "learning_rate": 4.433823529411765e-06, "loss": 3.8119, "step": 358},
    {"epoch": 1.3, "learning_rate": 4.431985294117647e-06, "loss": 3.9067, "step": 359},
    {"epoch": 1.3, "learning_rate": 4.43014705882353e-06, "loss": 3.8936, "step": 360},
    {"epoch": 1.3, "learning_rate": 4.428308823529412e-06, "loss": 3.9095, "step": 361},
    {"epoch": 1.31, "learning_rate": 4.426470588235294e-06, "loss": 3.9131, "step": 362},
    {"epoch": 1.31, "learning_rate": 4.424632352941177e-06, "loss": 3.9189, "step": 363},
    {"epoch": 1.31, "learning_rate": 4.422794117647059e-06, "loss": 3.9028, "step": 364},
    {"epoch": 1.32, "learning_rate": 4.420955882352941e-06, "loss": 3.911, "step": 365},
    {"epoch": 1.32, "learning_rate": 4.419117647058824e-06, "loss": 3.8861, "step": 366},
    {"epoch": 1.32, "learning_rate": 4.4172794117647064e-06, "loss": 3.9194, "step": 367},
    {"epoch": 1.33, "learning_rate": 4.415441176470588e-06, "loss": 3.8888, "step": 368},
    {"epoch": 1.33, "learning_rate": 4.413602941176471e-06, "loss": 3.9209, "step": 369},
    {"epoch": 1.34, "learning_rate": 4.411764705882353e-06, "loss": 3.8949, "step": 370},
    {"epoch": 1.34, "learning_rate": 4.409926470588235e-06, "loss": 3.8525, "step": 371},
    {"epoch": 1.34, "learning_rate": 4.408088235294118e-06, "loss": 3.9011, "step": 372},
    {"epoch": 1.35, "learning_rate": 4.40625e-06, "loss": 3.9155, "step": 373},
    {"epoch": 1.35, "learning_rate": 4.404411764705883e-06, "loss": 3.9018, "step": 374},
    {"epoch": 1.35, "learning_rate": 4.402573529411765e-06, "loss": 3.9371, "step": 375},
    {"epoch": 1.36, "learning_rate": 4.400735294117647e-06, "loss": 4.0396, "step": 376},
    {"epoch": 1.36, "learning_rate": 4.39889705882353e-06, "loss": 3.9046, "step": 377},
    {"epoch": 1.36, "learning_rate": 4.397058823529412e-06, "loss": 3.9383, "step": 378},
    {"epoch": 1.37, "learning_rate": 4.395220588235294e-06, "loss": 3.9031, "step": 379},
    {"epoch": 1.37, "learning_rate": 4.393382352941177e-06, "loss": 3.9034, "step": 380},
    {"epoch": 1.38, "learning_rate": 4.3915441176470595e-06, "loss": 3.9138, "step": 381},
    {"epoch": 1.38, "learning_rate": 4.389705882352941e-06, "loss": 3.8821, "step": 382},
    {"epoch": 1.38, "learning_rate": 4.387867647058824e-06, "loss": 3.9014, "step": 383},
    {"epoch": 1.39, "learning_rate": 4.3860294117647065e-06, "loss": 3.9251, "step": 384},
    {"epoch": 1.39, "learning_rate": 4.384191176470588e-06, "loss": 3.9485, "step": 385},
    {"epoch": 1.39, "learning_rate": 4.382352941176471e-06, "loss": 3.8887, "step": 386},
    {"epoch": 1.4, "learning_rate": 4.3805147058823535e-06, "loss": 3.8645, "step": 387},
    {"epoch": 1.4, "learning_rate": 4.378676470588235e-06, "loss": 3.8943, "step": 388},
    {"epoch": 1.4, "learning_rate": 4.376838235294118e-06, "loss": 3.9062, "step": 389},
    {"epoch": 1.41, "learning_rate": 4.3750000000000005e-06, "loss": 3.894, "step": 390},
    {"epoch": 1.41, "learning_rate": 4.373161764705883e-06, "loss": 3.8975, "step": 391},
    {"epoch": 1.42, "learning_rate": 4.371323529411765e-06, "loss": 3.9143, "step": 392},
    {"epoch": 1.42, "learning_rate": 4.3694852941176474e-06, "loss": 3.9158, "step": 393},
    {"epoch": 1.42, "learning_rate": 4.36764705882353e-06, "loss": 3.914, "step": 394},
    {"epoch": 1.43, "learning_rate": 4.365808823529412e-06, "loss": 3.8992, "step": 395},
    {"epoch": 1.43, "learning_rate": 4.363970588235294e-06, "loss": 3.8928, "step": 396},
    {"epoch": 1.43, "learning_rate": 4.362132352941177e-06, "loss": 3.9955, "step": 397},
    {"epoch": 1.44, "learning_rate": 4.36029411764706e-06, "loss": 3.8875, "step": 398},
    {"epoch": 1.44, "learning_rate": 4.358455882352941e-06, "loss": 3.9393, "step": 399},
    {"epoch": 1.44, "learning_rate": 4.356617647058824e-06, "loss": 3.9529, "step": 400},
    {"epoch": 1.45, "learning_rate": 4.354779411764707e-06, "loss": 3.8982, "step": 401},
    {"epoch": 1.45, "learning_rate": 4.352941176470588e-06, "loss": 3.8867, "step": 402},
    {"epoch": 1.45, "learning_rate": 4.351102941176471e-06, "loss": 3.921, "step": 403},
    {"epoch": 1.46, "learning_rate": 4.349264705882354e-06, "loss": 3.9074, "step": 404},
    {"epoch": 1.46, "learning_rate": 4.347426470588235e-06, "loss": 3.9156, "step": 405},
    {"epoch": 1.47, "learning_rate": 4.345588235294118e-06, "loss": 3.8489, "step": 406},
    {"epoch": 1.47, "learning_rate": 4.3437500000000006e-06, "loss": 3.8925, "step": 407},
    {"epoch": 1.47, "learning_rate": 4.341911764705883e-06, "loss": 3.8975, "step": 408},
    {"epoch": 1.48, "learning_rate": 4.340073529411765e-06, "loss": 3.9195, "step": 409},
    {"epoch": 1.48, "learning_rate": 4.3382352941176475e-06, "loss": 3.9293, "step": 410},
    {"epoch": 1.48, "learning_rate": 4.33639705882353e-06, "loss": 3.8872, "step": 411},
    {"epoch": 1.49, "learning_rate": 4.334558823529412e-06, "loss": 3.9238, "step": 412},
    {"epoch": 1.49, "learning_rate": 4.3327205882352945e-06, "loss": 3.8997, "step": 413},
    {"epoch": 1.49, "learning_rate": 4.330882352941177e-06, "loss": 3.8918, "step": 414},
    {"epoch": 1.5, "learning_rate": 4.32904411764706e-06, "loss": 3.9261, "step": 415},
    {"epoch": 1.5, "learning_rate": 4.3272058823529415e-06, "loss": 3.8922, "step": 416},
    {"epoch": 1.51, "learning_rate": 4.325367647058824e-06, "loss": 3.8748, "step": 417},
    {"epoch": 1.51, "learning_rate": 4.323529411764707e-06, "loss": 3.886, "step": 418},
    {"epoch": 1.51, "learning_rate": 4.3216911764705884e-06, "loss": 3.906, "step": 419},
    {"epoch": 1.52, "learning_rate": 4.319852941176471e-06, "loss": 3.8752, "step": 420},
    {"epoch": 1.52, "learning_rate": 4.318014705882354e-06, "loss": 3.8896, "step": 421},
    {"epoch": 1.52, "learning_rate": 4.316176470588236e-06, "loss": 3.9088, "step": 422},
    {"epoch": 1.53, "learning_rate": 4.314338235294118e-06, "loss": 3.8822, "step": 423},
    {"epoch": 1.53, "learning_rate": 4.312500000000001e-06, "loss": 3.8848, "step": 424},
    {"epoch": 1.53, "learning_rate": 4.310661764705883e-06, "loss": 3.8989, "step": 425},
    {"epoch": 1.54, "learning_rate": 4.308823529411765e-06, "loss": 3.9137, "step": 426},
    {"epoch": 1.54, "learning_rate": 4.306985294117648e-06, "loss": 3.9107, "step": 427},
    {"epoch": 1.55, "learning_rate": 4.30514705882353e-06, "loss": 3.8984, "step": 428},
    {"epoch": 1.55, "learning_rate": 4.303308823529412e-06, "loss": 3.9207, "step": 429},
    {"epoch": 1.55, "learning_rate": 4.301470588235295e-06, "loss": 3.9043, "step": 430},
    {"epoch": 1.56, "learning_rate": 4.299632352941177e-06, "loss": 3.8906, "step": 431},
    {"epoch": 1.56, "learning_rate": 4.29779411764706e-06, "loss": 3.9199, "step": 432},
    {"epoch": 1.56, "learning_rate": 4.2959558823529416e-06, "loss": 3.8849, "step": 433},
    {"epoch": 1.57, "learning_rate": 4.294117647058823e-06, "loss": 3.9311, "step": 434},
    {"epoch": 1.57, "learning_rate": 4.292279411764706e-06, "loss": 3.9074, "step": 435},
    {"epoch": 1.57, "learning_rate": 4.2904411764705885e-06, "loss": 3.8945, "step": 436},
    {"epoch": 1.58, "learning_rate": 4.28860294117647e-06, "loss": 3.9328, "step": 437},
    {"epoch": 1.58, "learning_rate": 4.286764705882353e-06, "loss": 3.8798, "step": 438},
    {"epoch": 1.58, "learning_rate": 4.2849264705882355e-06, "loss": 3.9068, "step": 439},
    {"epoch": 1.59, "learning_rate": 4.283088235294118e-06, "loss": 3.9126, "step": 440},
    {"epoch": 1.59, "learning_rate": 4.28125e-06, "loss": 3.8877, "step": 441},
    {"epoch": 1.6, "learning_rate": 4.2794117647058825e-06, "loss": 3.9105, "step": 442},
    {"epoch": 1.6, "learning_rate": 4.277573529411765e-06, "loss": 3.8941, "step": 443},
    {"epoch": 1.6, "learning_rate": 4.275735294117647e-06, "loss": 3.9122, "step": 444},
    {"epoch": 1.61, "learning_rate": 4.2738970588235295e-06, "loss": 3.8897, "step": 445},
    {"epoch": 1.61, "learning_rate": 4.272058823529412e-06, "loss": 3.9235, "step": 446},
    {"epoch": 1.61, "learning_rate": 4.270220588235295e-06, "loss": 3.9015, "step": 447},
    {"epoch": 1.62, "learning_rate": 4.2683823529411764e-06, "loss": 3.9018, "step": 448},
    {"epoch": 1.62, "learning_rate": 4.266544117647059e-06, "loss": 3.8936, "step": 449},
    {"epoch": 1.62, "learning_rate": 4.264705882352942e-06, "loss": 3.8925, "step": 450},
    {"epoch": 1.63, "learning_rate": 4.262867647058823e-06, "loss": 3.8914, "step": 451},
    {"epoch": 1.63, "learning_rate": 4.261029411764706e-06, "loss": 3.895, "step": 452},
    {"epoch": 1.64, "learning_rate": 4.259191176470589e-06, "loss": 3.907, "step": 453},
    {"epoch": 1.64, "learning_rate": 4.25735294117647e-06, "loss": 3.9336, "step": 454},
    {"epoch": 1.64, "learning_rate": 4.255514705882353e-06, "loss": 3.9143, "step": 455},
    {"epoch": 1.65, "learning_rate": 4.253676470588236e-06, "loss": 3.9045, "step": 456},
    {"epoch": 1.65, "learning_rate": 4.251838235294118e-06, "loss": 3.8867, "step": 457},
    {"epoch": 1.65, "learning_rate": 4.25e-06, "loss": 3.8994, "step": 458},
    {"epoch": 1.66, "learning_rate": 4.2481617647058826e-06, "loss": 3.9215, "step": 459},
    {"epoch": 1.66, "learning_rate": 4.246323529411765e-06, "loss": 3.9025, "step": 460},
    {"epoch": 1.66, "learning_rate": 4.244485294117647e-06, "loss": 3.9166, "step": 461},
    {"epoch": 1.67, "learning_rate": 4.2426470588235295e-06, "loss": 3.9022, "step": 462},
    {"epoch": 1.67, "learning_rate": 4.240808823529412e-06, "loss": 3.9368, "step": 463},
    {"epoch": 1.68, "learning_rate": 4.238970588235295e-06, "loss": 3.9049, "step": 464},
    {"epoch": 1.68, "learning_rate": 4.2371323529411765e-06, "loss": 3.8937, "step": 465},
    {"epoch": 1.68, "learning_rate": 4.235294117647059e-06, "loss": 3.9108, "step": 466},
    {"epoch": 1.69, "learning_rate": 4.233455882352942e-06, "loss": 3.8961, "step": 467},
    {"epoch": 1.69, "learning_rate": 4.2316176470588235e-06, "loss": 3.8642, "step": 468},
    {"epoch": 1.69, "learning_rate": 4.229779411764706e-06, "loss": 3.9033, "step": 469},
    {"epoch": 1.7, "learning_rate": 4.227941176470589e-06, "loss": 3.8794, "step": 470},
    {"epoch": 1.7, "learning_rate": 4.226102941176471e-06, "loss": 3.9012, "step": 471},
    {"epoch": 1.7, "learning_rate": 4.224264705882353e-06, "loss": 3.9066, "step": 472},
    {"epoch": 1.71, "learning_rate": 4.222426470588236e-06, "loss": 3.895, "step": 473},
    {"epoch": 1.71, "learning_rate": 4.220588235294118e-06, "loss": 3.9152, "step": 474},
    {"epoch": 1.71, "learning_rate": 4.21875e-06, "loss": 3.8955, "step": 475},
    {"epoch": 1.72, "learning_rate": 4.216911764705883e-06, "loss": 3.8859, "step": 476},
    {"epoch": 1.72, "learning_rate": 4.215073529411765e-06, "loss": 3.9166, "step": 477},
    {"epoch": 1.73, "learning_rate": 4.213235294117647e-06, "loss": 3.9213, "step": 478},
    {"epoch": 1.73, "learning_rate": 4.21139705882353e-06, "loss": 3.8926, "step": 479},
    {"epoch": 1.73, "learning_rate": 4.209558823529412e-06, "loss": 3.9371, "step": 480},
    {"epoch": 1.74, "learning_rate": 4.207720588235295e-06, "loss": 3.8835, "step": 481},
    {"epoch": 1.74, "learning_rate": 4.205882352941177e-06, "loss": 3.9003, "step": 482},
    {"epoch": 1.74, "learning_rate": 4.204044117647059e-06, "loss": 3.9181, "step": 483},
    {"epoch": 1.75, "learning_rate": 4.202205882352942e-06, "loss": 3.892, "step": 484},
    {"epoch": 1.75, "learning_rate": 4.200367647058824e-06, "loss": 3.8665, "step": 485},
    {"epoch": 1.75, "learning_rate": 4.198529411764706e-06, "loss": 3.9085, "step": 486},
    {"epoch": 1.76, "learning_rate": 4.196691176470589e-06, "loss": 3.8802, "step": 487},
    {"epoch": 1.76, "learning_rate": 4.194852941176471e-06, "loss": 3.9198, "step": 488},
    {"epoch": 1.77, "learning_rate": 4.193014705882353e-06, "loss": 3.9018, "step": 489},
    {"epoch": 1.77, "learning_rate": 4.191176470588236e-06, "loss": 3.9065, "step": 490},
    {"epoch": 1.77, "learning_rate": 4.189338235294118e-06, "loss": 3.9043, "step": 491},
    {"epoch": 1.78, "learning_rate": 4.1875e-06, "loss": 3.9025, "step": 492},
    {"epoch": 1.78, "learning_rate": 4.185661764705883e-06, "loss": 3.8515, "step": 493},
    {"epoch": 1.78, "learning_rate": 4.183823529411765e-06, "loss": 3.8925, "step": 494},
    {"epoch": 1.79, "learning_rate": 4.181985294117647e-06, "loss": 3.9417, "step": 495},
    {"epoch": 1.79, "learning_rate": 4.18014705882353e-06, "loss": 3.9228, "step": 496},
    {"epoch": 1.79, "learning_rate": 4.178308823529412e-06, "loss": 3.9237, "step": 497},
    {"epoch": 1.8, "learning_rate": 4.176470588235295e-06, "loss": 3.9108, "step": 498},
    {"epoch": 1.8, "learning_rate": 4.174632352941177e-06, "loss": 3.9109, "step": 499},
    {"epoch": 1.81, "learning_rate": 4.172794117647059e-06, "loss": 3.8823, "step": 500},
    {"epoch": 1.81, "learning_rate": 4.170955882352942e-06, "loss": 3.914, "step": 501},
    {"epoch": 1.81, "learning_rate": 4.169117647058824e-06, "loss": 3.8861, "step": 502},
    {"epoch": 1.82, "learning_rate": 4.167279411764706e-06, "loss": 3.9059, "step": 503},
    {"epoch": 1.82, "learning_rate": 4.165441176470589e-06, "loss": 3.8935, "step": 504},
    {"epoch": 1.82, "learning_rate": 4.1636029411764715e-06, "loss": 3.9075, "step": 505},
    {"epoch": 1.83, "learning_rate": 4.161764705882353e-06, "loss": 3.9218, "step": 506},
    {"epoch": 1.83, "learning_rate": 4.159926470588236e-06, "loss": 3.9114, "step": 507},
    {"epoch": 1.83, "learning_rate": 4.1580882352941185e-06, "loss": 3.8665, "step": 508},
    {"epoch": 1.84, "learning_rate": 4.15625e-06, "loss": 3.8972, "step": 509},
    {"epoch": 1.84, "learning_rate": 4.154411764705883e-06, "loss": 3.9366, "step": 510},
    {"epoch": 1.84, "learning_rate": 4.1525735294117654e-06, "loss": 3.9204, "step": 511},
    {"epoch": 1.85, "learning_rate": 4.150735294117648e-06, "loss": 3.903, "step": 512},
    {"epoch": 1.85, "learning_rate": 4.14889705882353e-06, "loss": 3.9079, "step": 513},
    {"epoch": 1.86, "learning_rate": 4.147058823529412e-06, "loss": 3.9246, "step": 514},
    {"epoch": 1.86, "learning_rate": 4.145220588235295e-06, "loss": 3.9133, "step": 515},
    {"epoch": 1.86, "learning_rate": 4.143382352941177e-06, "loss": 3.9275, "step": 516},
    {"epoch": 1.87, "learning_rate": 4.141544117647059e-06, "loss": 3.8852, "step": 517},
    {"epoch": 1.87, "learning_rate": 4.139705882352941e-06, "loss": 3.8748, "step": 518},
    {"epoch": 1.87, "learning_rate": 4.137867647058824e-06, "loss": 3.9109, "step": 519},
    {"epoch": 1.88, "learning_rate": 4.136029411764706e-06, "loss": 3.9106, "step": 520},
    {"epoch": 1.88, "learning_rate": 4.134191176470588e-06, "loss": 3.8687, "step": 521},
    {"epoch": 1.88, "learning_rate": 4.132352941176471e-06, "loss": 3.9115, "step": 522},
    {"epoch": 1.89, "learning_rate": 4.130514705882353e-06, "loss": 3.9291, "step": 523},
    {"epoch": 1.89, "learning_rate": 4.128676470588235e-06, "loss": 3.891, "step": 524},
    {"epoch": 1.9, "learning_rate": 4.126838235294118e-06, "loss": 3.9054, "step": 525},
    {"epoch": 1.9, "learning_rate": 4.125e-06, "loss": 3.8861, "step": 526},
    {"epoch": 1.9, "learning_rate": 4.123161764705882e-06, "loss": 3.883, "step": 527},
    {"epoch": 1.91, "learning_rate": 4.121323529411765e-06, "loss": 3.9183, "step": 528},
    {"epoch": 1.91, "learning_rate": 4.119485294117647e-06, "loss": 3.8538, "step": 529},
    {"epoch": 1.91, "learning_rate": 4.11764705882353e-06, "loss": 3.9181, "step": 530},
    {"epoch": 1.92, "learning_rate": 4.115808823529412e-06, "loss": 3.9074, "step": 531},
    {"epoch": 1.92, "learning_rate": 4.113970588235294e-06, "loss": 3.859, "step": 532},
    {"epoch": 1.92, "learning_rate": 4.112132352941177e-06, "loss": 3.8948, "step": 533},
    {"epoch": 1.93, "learning_rate": 4.110294117647059e-06, "loss": 3.8735, "step": 534},
    {"epoch": 1.93, "learning_rate": 4.108455882352941e-06, "loss": 3.9044, "step": 535},
    {"epoch": 1.94, "learning_rate": 4.106617647058824e-06, "loss": 3.9045, "step": 536},
    {"epoch": 1.94, "learning_rate": 4.1047794117647065e-06, "loss": 3.8887, "step": 537},
    {"epoch": 1.94, "learning_rate": 4.102941176470588e-06, "loss": 3.8561, "step": 538},
    {"epoch": 1.95, "learning_rate": 4.101102941176471e-06, "loss": 3.8849, "step": 539},
    {"epoch": 1.95, "learning_rate": 4.0992647058823534e-06, "loss": 3.9215, "step": 540},
    {"epoch": 1.95,
| "learning_rate": 4.097426470588235e-06, |
| "loss": 3.916, |
| "step": 541 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 4.095588235294118e-06, |
| "loss": 3.8836, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 4.09375e-06, |
| "loss": 3.881, |
| "step": 543 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 4.091911764705882e-06, |
| "loss": 3.8885, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 4.090073529411765e-06, |
| "loss": 3.9141, |
| "step": 545 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 4.088235294117647e-06, |
| "loss": 3.9221, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 4.08639705882353e-06, |
| "loss": 3.9106, |
| "step": 547 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 4.084558823529412e-06, |
| "loss": 3.8529, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 4.082720588235294e-06, |
| "loss": 3.853, |
| "step": 549 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 4.080882352941177e-06, |
| "loss": 3.9045, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 4.079044117647059e-06, |
| "loss": 3.9129, |
| "step": 551 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 4.077205882352941e-06, |
| "loss": 3.8988, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 4.075367647058824e-06, |
| "loss": 3.9316, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 4.0735294117647065e-06, |
| "loss": 3.9005, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.0, |
| "eval_accuracy": 0.04594594594594595, |
| "eval_loss": 3.8943464756011963, |
| "eval_runtime": 141.8971, |
| "eval_samples_per_second": 2.608, |
| "eval_steps_per_second": 0.655, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 4.071691176470588e-06, |
| "loss": 3.8755, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 4.069852941176471e-06, |
| "loss": 3.9586, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 4.0680147058823535e-06, |
| "loss": 3.8969, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 4.066176470588235e-06, |
| "loss": 3.9125, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 4.064338235294118e-06, |
| "loss": 3.8738, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 4.0625000000000005e-06, |
| "loss": 3.9892, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 4.060661764705883e-06, |
| "loss": 3.9066, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 4.058823529411765e-06, |
| "loss": 3.8973, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 4.0569852941176475e-06, |
| "loss": 3.9196, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.05514705882353e-06, |
| "loss": 3.878, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.053308823529412e-06, |
| "loss": 3.9476, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.0514705882352944e-06, |
| "loss": 3.8901, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.049632352941177e-06, |
| "loss": 3.927, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.047794117647059e-06, |
| "loss": 3.8822, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.045955882352941e-06, |
| "loss": 3.938, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.044117647058824e-06, |
| "loss": 3.8705, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.042279411764707e-06, |
| "loss": 3.8969, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.040441176470588e-06, |
| "loss": 3.8945, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.038602941176471e-06, |
| "loss": 3.9055, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.036764705882354e-06, |
| "loss": 3.8952, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.034926470588235e-06, |
| "loss": 3.9069, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.033088235294118e-06, |
| "loss": 3.9034, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.031250000000001e-06, |
| "loss": 3.8944, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.029411764705883e-06, |
| "loss": 3.8567, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.027573529411765e-06, |
| "loss": 3.883, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.0257352941176476e-06, |
| "loss": 3.8884, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.02389705882353e-06, |
| "loss": 3.9006, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.022058823529412e-06, |
| "loss": 3.8631, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.0202205882352945e-06, |
| "loss": 3.9281, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.018382352941177e-06, |
| "loss": 3.9136, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.016544117647059e-06, |
| "loss": 3.8971, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.0147058823529415e-06, |
| "loss": 3.8815, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.012867647058824e-06, |
| "loss": 3.8956, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.011029411764707e-06, |
| "loss": 3.9125, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.0091911764705885e-06, |
| "loss": 3.9272, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.007352941176471e-06, |
| "loss": 3.9132, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.005514705882354e-06, |
| "loss": 3.92, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 4.0036764705882355e-06, |
| "loss": 3.8869, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 4.001838235294118e-06, |
| "loss": 3.9242, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 3.9256, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.998161764705883e-06, |
| "loss": 3.9462, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.996323529411765e-06, |
| "loss": 3.8908, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.994485294117648e-06, |
| "loss": 3.9261, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.99264705882353e-06, |
| "loss": 3.9065, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.990808823529412e-06, |
| "loss": 3.8781, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.988970588235295e-06, |
| "loss": 3.8783, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.987132352941177e-06, |
| "loss": 3.8928, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.98529411764706e-06, |
| "loss": 3.8868, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.983455882352942e-06, |
| "loss": 3.8967, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.981617647058823e-06, |
| "loss": 3.851, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.979779411764706e-06, |
| "loss": 3.8998, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.9779411764705886e-06, |
| "loss": 3.9114, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.97610294117647e-06, |
| "loss": 3.8974, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.974264705882353e-06, |
| "loss": 3.914, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.9724264705882355e-06, |
| "loss": 3.9001, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.970588235294118e-06, |
| "loss": 3.8892, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.96875e-06, |
| "loss": 3.9047, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.9669117647058825e-06, |
| "loss": 3.8536, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.965073529411765e-06, |
| "loss": 3.9175, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.963235294117647e-06, |
| "loss": 3.8781, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.9613970588235295e-06, |
| "loss": 3.8895, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.959558823529412e-06, |
| "loss": 3.8261, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.957720588235294e-06, |
| "loss": 3.887, |
| "step": 617 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.9558823529411765e-06, |
| "loss": 3.8918, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.954044117647059e-06, |
| "loss": 3.9152, |
| "step": 619 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.952205882352942e-06, |
| "loss": 3.8806, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.9503676470588234e-06, |
| "loss": 3.896, |
| "step": 621 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.948529411764706e-06, |
| "loss": 3.866, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.946691176470589e-06, |
| "loss": 3.8914, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.94485294117647e-06, |
| "loss": 3.9126, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 3.943014705882353e-06, |
| "loss": 3.9088, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 3.941176470588236e-06, |
| "loss": 3.8809, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 3.939338235294118e-06, |
| "loss": 3.9812, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 3.9375e-06, |
| "loss": 3.8903, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 3.935661764705883e-06, |
| "loss": 3.8881, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 3.933823529411765e-06, |
| "loss": 3.8948, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 3.931985294117647e-06, |
| "loss": 3.9183, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 3.9301470588235296e-06, |
| "loss": 3.8933, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 3.928308823529412e-06, |
| "loss": 3.8459, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 3.926470588235295e-06, |
| "loss": 3.8873, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 3.9246323529411766e-06, |
| "loss": 3.9084, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 3.922794117647059e-06, |
| "loss": 3.9305, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 3.920955882352942e-06, |
| "loss": 3.8834, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 3.9191176470588235e-06, |
| "loss": 3.9081, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 3.917279411764706e-06, |
| "loss": 3.8891, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 3.915441176470589e-06, |
| "loss": 3.9156, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 3.9136029411764705e-06, |
| "loss": 3.8956, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 3.911764705882353e-06, |
| "loss": 3.9062, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 3.909926470588236e-06, |
| "loss": 3.852, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 3.908088235294118e-06, |
| "loss": 3.8811, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 3.90625e-06, |
| "loss": 3.8982, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 3.904411764705883e-06, |
| "loss": 3.8916, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 3.902573529411765e-06, |
| "loss": 3.8889, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 3.900735294117647e-06, |
| "loss": 3.8836, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 3.89889705882353e-06, |
| "loss": 3.8858, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 3.897058823529412e-06, |
| "loss": 3.9226, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 3.895220588235295e-06, |
| "loss": 3.88, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 3.893382352941177e-06, |
| "loss": 3.9019, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 3.891544117647059e-06, |
| "loss": 3.9161, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 3.889705882352942e-06, |
| "loss": 3.8321, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 3.887867647058824e-06, |
| "loss": 3.8858, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 3.886029411764706e-06, |
| "loss": 3.8875, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 3.884191176470589e-06, |
| "loss": 3.9128, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 3.882352941176471e-06, |
| "loss": 3.9009, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 3.880514705882353e-06, |
| "loss": 3.8546, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 3.878676470588236e-06, |
| "loss": 3.9097, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 3.876838235294118e-06, |
| "loss": 3.8824, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 3.875e-06, |
| "loss": 3.8953, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 3.873161764705883e-06, |
| "loss": 3.8983, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 3.871323529411765e-06, |
| "loss": 3.9264, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 3.869485294117647e-06, |
| "loss": 3.8979, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 3.86764705882353e-06, |
| "loss": 3.8801, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 3.865808823529412e-06, |
| "loss": 3.8174, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 3.863970588235295e-06, |
| "loss": 3.8782, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 3.862132352941177e-06, |
| "loss": 3.8441, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 3.860294117647059e-06, |
| "loss": 3.8826, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 3.858455882352942e-06, |
| "loss": 3.9378, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 3.856617647058824e-06, |
| "loss": 3.8864, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 3.854779411764706e-06, |
| "loss": 3.874, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 3.852941176470589e-06, |
| "loss": 3.9054, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 3.8511029411764715e-06, |
| "loss": 3.8911, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 3.849264705882353e-06, |
| "loss": 3.9021, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 3.847426470588236e-06, |
| "loss": 3.9301, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 3.8455882352941185e-06, |
| "loss": 3.9065, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 3.84375e-06, |
| "loss": 3.8838, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 3.841911764705883e-06, |
| "loss": 3.9251, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 3.8400735294117655e-06, |
| "loss": 3.8842, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 3.838235294117647e-06, |
| "loss": 3.8571, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 3.83639705882353e-06, |
| "loss": 3.8722, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 3.8345588235294124e-06, |
| "loss": 3.8869, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 3.832720588235295e-06, |
| "loss": 3.8968, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 3.830882352941177e-06, |
| "loss": 3.9054, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 3.829044117647059e-06, |
| "loss": 3.8092, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 3.827205882352941e-06, |
| "loss": 3.8676, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 3.825367647058824e-06, |
| "loss": 3.8615, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 3.8235294117647055e-06, |
| "loss": 3.8607, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 3.821691176470588e-06, |
| "loss": 3.8984, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 3.819852941176471e-06, |
| "loss": 3.9044, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 3.818014705882353e-06, |
| "loss": 3.9116, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 3.816176470588235e-06, |
| "loss": 3.8948, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 3.8143382352941177e-06, |
| "loss": 3.888, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 3.8125e-06, |
| "loss": 3.908, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 3.8106617647058825e-06, |
| "loss": 3.9121, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 3.8088235294117647e-06, |
| "loss": 3.883, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 3.8069852941176473e-06, |
| "loss": 3.918, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 3.8051470588235295e-06, |
| "loss": 3.8864, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 3.8033088235294117e-06, |
| "loss": 3.8558, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 3.8014705882352943e-06, |
| "loss": 3.909, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 3.7996323529411765e-06, |
| "loss": 3.8228, |
| "step": 703 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 3.797794117647059e-06, |
| "loss": 3.9212, |
| "step": 704 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 3.7959558823529413e-06, |
| "loss": 3.893, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 3.794117647058824e-06, |
| "loss": 3.8535, |
| "step": 706 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 3.792279411764706e-06, |
| "loss": 3.8973, |
| "step": 707 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 3.7904411764705882e-06, |
| "loss": 3.8784, |
| "step": 708 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 3.788602941176471e-06, |
| "loss": 3.8789, |
| "step": 709 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 3.786764705882353e-06, |
| "loss": 3.907, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 3.7849264705882356e-06, |
| "loss": 3.9111, |
| "step": 711 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 3.783088235294118e-06, |
| "loss": 3.8925, |
| "step": 712 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 3.78125e-06, |
| "loss": 3.9401, |
| "step": 713 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 3.7794117647058826e-06, |
| "loss": 3.8688, |
| "step": 714 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 3.777573529411765e-06, |
| "loss": 3.8858, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 3.7757352941176474e-06, |
| "loss": 3.9156, |
| "step": 716 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 3.7738970588235296e-06, |
| "loss": 3.9049, |
| "step": 717 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 3.772058823529412e-06, |
| "loss": 3.883, |
| "step": 718 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 3.7702205882352944e-06, |
| "loss": 3.8564, |
| "step": 719 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 3.7683823529411766e-06, |
| "loss": 3.8927, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 3.766544117647059e-06, |
| "loss": 3.8769, |
| "step": 721 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 3.7647058823529414e-06, |
| "loss": 3.9029, |
| "step": 722 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 3.762867647058824e-06, |
| "loss": 3.9064, |
| "step": 723 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 3.761029411764706e-06, |
| "loss": 3.9405, |
| "step": 724 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 3.7591911764705883e-06, |
| "loss": 3.8377, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 3.757352941176471e-06, |
| "loss": 3.9144, |
| "step": 726 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 3.755514705882353e-06, |
| "loss": 3.9066, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 3.7536764705882357e-06, |
| "loss": 3.857, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 3.751838235294118e-06, |
| "loss": 3.8759, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 3.8411, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 3.7481617647058827e-06, |
| "loss": 3.9467, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 3.746323529411765e-06, |
| "loss": 3.8648, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 3.7444852941176475e-06, |
| "loss": 3.8636, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 3.7426470588235297e-06, |
| "loss": 3.8303, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 3.7408088235294123e-06, |
| "loss": 3.9108, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 3.7389705882352945e-06, |
| "loss": 3.903, |
| "step": 736 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 3.7371323529411767e-06, |
| "loss": 3.8517, |
| "step": 737 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 3.7352941176470593e-06, |
| "loss": 3.9169, |
| "step": 738 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 3.7334558823529414e-06, |
| "loss": 3.8399, |
| "step": 739 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 3.731617647058824e-06, |
| "loss": 3.9108, |
| "step": 740 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 3.7297794117647062e-06, |
| "loss": 3.8493, |
| "step": 741 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 3.7279411764705884e-06, |
| "loss": 3.9021, |
| "step": 742 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 3.726102941176471e-06, |
| "loss": 3.9044, |
| "step": 743 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 3.724264705882353e-06, |
| "loss": 3.9299, |
| "step": 744 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 3.722426470588236e-06, |
| "loss": 3.9097, |
| "step": 745 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 3.720588235294118e-06, |
| "loss": 3.9244, |
| "step": 746 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 3.7187500000000006e-06, |
| "loss": 3.9088, |
| "step": 747 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 3.716911764705883e-06, |
| "loss": 3.8867, |
| "step": 748 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 3.715073529411765e-06, |
| "loss": 3.9152, |
| "step": 749 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 3.7132352941176476e-06, |
| "loss": 3.912, |
| "step": 750 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 3.7113970588235298e-06, |
| "loss": 3.8801, |
| "step": 751 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 3.7095588235294124e-06, |
| "loss": 3.8928, |
| "step": 752 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 3.7077205882352946e-06, |
| "loss": 3.9624, |
| "step": 753 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 3.7058823529411767e-06, |
| "loss": 3.9096, |
| "step": 754 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 3.7040441176470593e-06, |
| "loss": 3.8554, |
| "step": 755 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 3.7022058823529415e-06, |
| "loss": 3.9021, |
| "step": 756 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 3.700367647058824e-06, |
| "loss": 3.7969, |
| "step": 757 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.6985294117647063e-06, |
| "loss": 3.796, |
| "step": 758 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.696691176470589e-06, |
| "loss": 3.9867, |
| "step": 759 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.694852941176471e-06, |
| "loss": 3.8547, |
| "step": 760 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.6930147058823533e-06, |
| "loss": 3.9183, |
| "step": 761 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.691176470588236e-06, |
| "loss": 3.9144, |
| "step": 762 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.689338235294118e-06, |
| "loss": 3.8977, |
| "step": 763 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.6875000000000007e-06, |
| "loss": 3.8987, |
| "step": 764 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.685661764705883e-06, |
| "loss": 3.8994, |
| "step": 765 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 3.683823529411765e-06, |
| "loss": 3.8752, |
| "step": 766 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 3.6819852941176477e-06, |
| "loss": 3.8946, |
| "step": 767 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 3.68014705882353e-06, |
| "loss": 3.8058, |
| "step": 768 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 3.6783088235294125e-06, |
| "loss": 3.7745, |
| "step": 769 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 3.6764705882352946e-06, |
| "loss": 3.9245, |
| "step": 770 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 3.6746323529411772e-06, |
| "loss": 3.8975, |
| "step": 771 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 3.6727941176470594e-06, |
| "loss": 3.9134, |
| "step": 772 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 3.670955882352941e-06, |
| "loss": 3.8673, |
| "step": 773 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 3.6691176470588234e-06, |
| "loss": 3.8821, |
| "step": 774 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 3.667279411764706e-06, |
| "loss": 3.9071, |
| "step": 775 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 3.665441176470588e-06, |
| "loss": 3.919, |
| "step": 776 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 3.6636029411764708e-06, |
| "loss": 3.8902, |
| "step": 777 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 3.661764705882353e-06, |
| "loss": 3.9026, |
| "step": 778 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 3.6599264705882356e-06, |
| "loss": 3.8472, |
| "step": 779 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 3.6580882352941178e-06, |
| "loss": 3.9874, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 3.65625e-06, |
| "loss": 3.8773, |
| "step": 781 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 3.6544117647058825e-06, |
| "loss": 3.7648, |
| "step": 782 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 3.6525735294117647e-06, |
| "loss": 3.8999, |
| "step": 783 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 3.6507352941176473e-06, |
| "loss": 3.9231, |
| "step": 784 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 3.6488970588235295e-06, |
| "loss": 3.8574, |
| "step": 785 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 3.6470588235294117e-06, |
| "loss": 3.8892, |
| "step": 786 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 3.6452205882352943e-06, |
| "loss": 3.8979, |
| "step": 787 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 3.6433823529411765e-06, |
| "loss": 3.8968, |
| "step": 788 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 3.641544117647059e-06, |
| "loss": 3.8651, |
| "step": 789 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 3.6397058823529413e-06, |
| "loss": 3.8567, |
| "step": 790 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 3.6378676470588235e-06, |
| "loss": 3.9199, |
| "step": 791 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 3.636029411764706e-06, |
| "loss": 3.8299, |
| "step": 792 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 3.6341911764705883e-06, |
| "loss": 3.9038, |
| "step": 793 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 3.632352941176471e-06, |
| "loss": 3.9121, |
| "step": 794 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 3.630514705882353e-06, |
| "loss": 3.8915, |
| "step": 795 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 3.6286764705882357e-06, |
| "loss": 3.8963, |
| "step": 796 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 3.626838235294118e-06, |
| "loss": 3.7507, |
| "step": 797 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 3.625e-06, |
| "loss": 3.8768, |
| "step": 798 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 3.6231617647058826e-06, |
| "loss": 3.9103, |
| "step": 799 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 3.621323529411765e-06, |
| "loss": 3.9213, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 3.6194852941176474e-06, |
| "loss": 3.8851, |
| "step": 801 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 3.6176470588235296e-06, |
| "loss": 3.9026, |
| "step": 802 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 3.6158088235294118e-06, |
| "loss": 3.8877, |
| "step": 803 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 3.6139705882352944e-06, |
| "loss": 3.8842, |
| "step": 804 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 3.6121323529411766e-06, |
| "loss": 3.8703, |
| "step": 805 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 3.610294117647059e-06, |
| "loss": 3.8805, |
| "step": 806 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 3.6084558823529414e-06, |
| "loss": 3.8648, |
| "step": 807 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.606617647058824e-06, |
| "loss": 3.8896, |
| "step": 808 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.604779411764706e-06, |
| "loss": 3.8869, |
| "step": 809 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.6029411764705883e-06, |
| "loss": 3.8499, |
| "step": 810 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 3.601102941176471e-06, |
| "loss": 3.8922, |
| "step": 811 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 3.599264705882353e-06, |
| "loss": 3.8819, |
| "step": 812 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 3.5974264705882357e-06, |
| "loss": 3.8917, |
| "step": 813 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 3.595588235294118e-06, |
| "loss": 3.8994, |
| "step": 814 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 3.59375e-06, |
| "loss": 3.8913, |
| "step": 815 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 3.5919117647058827e-06, |
| "loss": 3.9107, |
| "step": 816 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 3.590073529411765e-06, |
| "loss": 3.9041, |
| "step": 817 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 3.5882352941176475e-06, |
| "loss": 3.8944, |
| "step": 818 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 3.5863970588235297e-06, |
| "loss": 3.8676, |
| "step": 819 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 3.5845588235294123e-06, |
| "loss": 3.9816, |
| "step": 820 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 3.5827205882352945e-06, |
| "loss": 3.8613, |
| "step": 821 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 3.5808823529411767e-06, |
| "loss": 3.903, |
| "step": 822 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 3.5790441176470593e-06, |
| "loss": 3.9405, |
| "step": 823 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 3.5772058823529415e-06, |
| "loss": 3.8746, |
| "step": 824 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 3.575367647058824e-06, |
| "loss": 3.9041, |
| "step": 825 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 3.5735294117647062e-06, |
| "loss": 3.8438, |
| "step": 826 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 3.5716911764705884e-06, |
| "loss": 3.8945, |
| "step": 827 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 3.569852941176471e-06, |
| "loss": 3.8888, |
| "step": 828 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 3.5680147058823532e-06, |
| "loss": 3.8727, |
| "step": 829 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 3.566176470588236e-06, |
| "loss": 3.7984, |
| "step": 830 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 3.564338235294118e-06, |
| "loss": 3.8543, |
| "step": 831 |
| }, |
| { |
| "epoch": 3.0, |
| "eval_accuracy": 0.07567567567567568, |
| "eval_loss": 3.8715946674346924, |
| "eval_runtime": 142.6452, |
| "eval_samples_per_second": 2.594, |
| "eval_steps_per_second": 0.652, |
| "step": 831 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 3.5625e-06, |
| "loss": 3.8845, |
| "step": 832 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 3.560661764705883e-06, |
| "loss": 3.8578, |
| "step": 833 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 3.558823529411765e-06, |
| "loss": 3.8816, |
| "step": 834 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 3.5569852941176476e-06, |
| "loss": 3.8997, |
| "step": 835 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 3.5551470588235298e-06, |
| "loss": 3.8693, |
| "step": 836 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 3.5533088235294124e-06, |
| "loss": 3.8912, |
| "step": 837 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 3.5514705882352946e-06, |
| "loss": 3.8171, |
| "step": 838 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 3.5496323529411768e-06, |
| "loss": 3.9086, |
| "step": 839 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 3.5477941176470594e-06, |
| "loss": 3.906, |
| "step": 840 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 3.5459558823529415e-06, |
| "loss": 3.8868, |
| "step": 841 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 3.544117647058824e-06, |
| "loss": 3.8806, |
| "step": 842 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 3.5422794117647063e-06, |
| "loss": 3.8998, |
| "step": 843 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 3.5404411764705885e-06, |
| "loss": 3.8707, |
| "step": 844 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 3.538602941176471e-06, |
| "loss": 3.8786, |
| "step": 845 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 3.5367647058823533e-06, |
| "loss": 3.8617, |
| "step": 846 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 3.534926470588236e-06, |
| "loss": 3.8815, |
| "step": 847 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 3.533088235294118e-06, |
| "loss": 3.917, |
| "step": 848 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 3.5312500000000007e-06, |
| "loss": 3.8588, |
| "step": 849 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 3.529411764705883e-06, |
| "loss": 3.8235, |
| "step": 850 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 3.527573529411765e-06, |
| "loss": 3.8774, |
| "step": 851 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 3.5257352941176477e-06, |
| "loss": 3.9158, |
| "step": 852 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 3.52389705882353e-06, |
| "loss": 3.8705, |
| "step": 853 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 3.5220588235294125e-06, |
| "loss": 3.8952, |
| "step": 854 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 3.5202205882352947e-06, |
| "loss": 3.8585, |
| "step": 855 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 3.518382352941177e-06, |
| "loss": 3.9241, |
| "step": 856 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 3.5165441176470594e-06, |
| "loss": 3.8826, |
| "step": 857 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 3.514705882352941e-06, |
| "loss": 3.8563, |
| "step": 858 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 3.5128676470588234e-06, |
| "loss": 3.8963, |
| "step": 859 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 3.511029411764706e-06, |
| "loss": 3.8512, |
| "step": 860 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 3.509191176470588e-06, |
| "loss": 3.8837, |
| "step": 861 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 3.507352941176471e-06, |
| "loss": 3.8747, |
| "step": 862 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 3.505514705882353e-06, |
| "loss": 3.8878, |
| "step": 863 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 3.503676470588235e-06, |
| "loss": 3.8969, |
| "step": 864 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 3.5018382352941178e-06, |
| "loss": 3.8841, |
| "step": 865 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 3.5e-06, |
| "loss": 3.8761, |
| "step": 866 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 3.4981617647058826e-06, |
| "loss": 3.8778, |
| "step": 867 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 3.4963235294117647e-06, |
| "loss": 3.8743, |
| "step": 868 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 3.4944852941176473e-06, |
| "loss": 3.9009, |
| "step": 869 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 3.4926470588235295e-06, |
| "loss": 3.904, |
| "step": 870 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 3.4908088235294117e-06, |
| "loss": 3.8567, |
| "step": 871 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 3.4889705882352943e-06, |
| "loss": 3.9257, |
| "step": 872 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 3.4871323529411765e-06, |
| "loss": 3.8183, |
| "step": 873 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 3.485294117647059e-06, |
| "loss": 3.8525, |
| "step": 874 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 3.4834558823529413e-06, |
| "loss": 3.7764, |
| "step": 875 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 3.4816176470588235e-06, |
| "loss": 3.8376, |
| "step": 876 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 3.479779411764706e-06, |
| "loss": 3.8778, |
| "step": 877 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 3.4779411764705883e-06, |
| "loss": 3.8971, |
| "step": 878 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 3.476102941176471e-06, |
| "loss": 3.9272, |
| "step": 879 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 3.474264705882353e-06, |
| "loss": 3.9807, |
| "step": 880 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 3.4724264705882352e-06, |
| "loss": 3.8619, |
| "step": 881 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 3.470588235294118e-06, |
| "loss": 3.926, |
| "step": 882 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 3.46875e-06, |
| "loss": 3.907, |
| "step": 883 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 3.4669117647058826e-06, |
| "loss": 3.786, |
| "step": 884 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 3.465073529411765e-06, |
| "loss": 3.8521, |
| "step": 885 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 3.4632352941176474e-06, |
| "loss": 3.8393, |
| "step": 886 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 3.4613970588235296e-06, |
| "loss": 3.9026, |
| "step": 887 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 3.459558823529412e-06, |
| "loss": 3.9556, |
| "step": 888 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 3.4577205882352944e-06, |
| "loss": 3.8306, |
| "step": 889 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 3.4558823529411766e-06, |
| "loss": 3.8948, |
| "step": 890 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 3.454044117647059e-06, |
| "loss": 3.8971, |
| "step": 891 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 3.4522058823529414e-06, |
| "loss": 3.9087, |
| "step": 892 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 3.4503676470588236e-06, |
| "loss": 3.837, |
| "step": 893 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 3.448529411764706e-06, |
| "loss": 3.8944, |
| "step": 894 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 3.4466911764705884e-06, |
| "loss": 3.8641, |
| "step": 895 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 3.444852941176471e-06, |
| "loss": 3.8963, |
| "step": 896 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 3.443014705882353e-06, |
| "loss": 3.8441, |
| "step": 897 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 3.4411764705882358e-06, |
| "loss": 3.874, |
| "step": 898 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 3.439338235294118e-06, |
| "loss": 3.9138, |
| "step": 899 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 3.4375e-06, |
| "loss": 3.7848, |
| "step": 900 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 3.4356617647058827e-06, |
| "loss": 3.8444, |
| "step": 901 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 3.433823529411765e-06, |
| "loss": 3.9121, |
| "step": 902 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 3.4319852941176475e-06, |
| "loss": 3.845, |
| "step": 903 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 3.4301470588235297e-06, |
| "loss": 3.8382, |
| "step": 904 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 3.428308823529412e-06, |
| "loss": 3.8495, |
| "step": 905 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 3.4264705882352945e-06, |
| "loss": 3.9002, |
| "step": 906 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 3.4246323529411767e-06, |
| "loss": 3.8547, |
| "step": 907 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 3.4227941176470593e-06, |
| "loss": 3.8445, |
| "step": 908 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 3.4209558823529415e-06, |
| "loss": 3.7814, |
| "step": 909 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 3.419117647058824e-06, |
| "loss": 3.88, |
| "step": 910 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 3.4172794117647063e-06, |
| "loss": 3.8275, |
| "step": 911 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 3.4154411764705884e-06, |
| "loss": 3.8685, |
| "step": 912 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 3.413602941176471e-06, |
| "loss": 3.8847, |
| "step": 913 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 3.4117647058823532e-06, |
| "loss": 3.8547, |
| "step": 914 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 3.409926470588236e-06, |
| "loss": 3.79, |
| "step": 915 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 3.408088235294118e-06, |
| "loss": 3.8259, |
| "step": 916 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 3.40625e-06, |
| "loss": 3.9019, |
| "step": 917 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 3.404411764705883e-06, |
| "loss": 3.8579, |
| "step": 918 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 3.402573529411765e-06, |
| "loss": 3.9169, |
| "step": 919 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 3.4007352941176476e-06, |
| "loss": 3.8456, |
| "step": 920 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 3.39889705882353e-06, |
| "loss": 3.892, |
| "step": 921 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 3.397058823529412e-06, |
| "loss": 3.8692, |
| "step": 922 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 3.3952205882352946e-06, |
| "loss": 3.8589, |
| "step": 923 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 3.3933823529411768e-06, |
| "loss": 3.9266, |
| "step": 924 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 3.3915441176470594e-06, |
| "loss": 3.8824, |
| "step": 925 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 3.3897058823529416e-06, |
| "loss": 3.8426, |
| "step": 926 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 3.387867647058824e-06, |
| "loss": 3.8007, |
| "step": 927 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 3.3860294117647063e-06, |
| "loss": 3.8145, |
| "step": 928 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 3.3841911764705885e-06, |
| "loss": 3.8853, |
| "step": 929 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 3.382352941176471e-06, |
| "loss": 3.8787, |
| "step": 930 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 3.3805147058823533e-06, |
| "loss": 3.8604, |
| "step": 931 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 3.378676470588236e-06, |
| "loss": 3.8188, |
| "step": 932 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 3.376838235294118e-06, |
| "loss": 3.8472, |
| "step": 933 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 3.3750000000000003e-06, |
| "loss": 3.749, |
| "step": 934 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 3.373161764705883e-06, |
| "loss": 3.9151, |
| "step": 935 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 3.371323529411765e-06, |
| "loss": 3.9068, |
| "step": 936 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 3.3694852941176477e-06, |
| "loss": 3.9025, |
| "step": 937 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 3.36764705882353e-06, |
| "loss": 3.8105, |
| "step": 938 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 3.3658088235294125e-06, |
| "loss": 3.8521, |
| "step": 939 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 3.3639705882352947e-06, |
| "loss": 3.9107, |
| "step": 940 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 3.362132352941177e-06, |
| "loss": 3.8213, |
| "step": 941 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 3.3602941176470595e-06, |
| "loss": 3.8693, |
| "step": 942 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 3.3584558823529412e-06, |
| "loss": 3.9231, |
| "step": 943 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 3.3566176470588234e-06, |
| "loss": 3.9613, |
| "step": 944 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 3.354779411764706e-06, |
| "loss": 3.8031, |
| "step": 945 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 3.352941176470588e-06, |
| "loss": 3.9291, |
| "step": 946 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 3.351102941176471e-06, |
| "loss": 3.9175, |
| "step": 947 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 3.349264705882353e-06, |
| "loss": 3.9518, |
| "step": 948 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 3.347426470588235e-06, |
| "loss": 3.8479, |
| "step": 949 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 3.3455882352941178e-06, |
| "loss": 3.8762, |
| "step": 950 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 3.34375e-06, |
| "loss": 3.8534, |
| "step": 951 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 3.3419117647058826e-06, |
| "loss": 3.8771, |
| "step": 952 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 3.3400735294117648e-06, |
| "loss": 3.8623, |
| "step": 953 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 3.338235294117647e-06, |
| "loss": 3.8889, |
| "step": 954 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 3.3363970588235295e-06, |
| "loss": 3.8368, |
| "step": 955 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 3.3345588235294117e-06, |
| "loss": 3.884, |
| "step": 956 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 3.3327205882352943e-06, |
| "loss": 3.8734, |
| "step": 957 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 3.3308823529411765e-06, |
| "loss": 3.7955, |
| "step": 958 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 3.329044117647059e-06, |
| "loss": 3.8188, |
| "step": 959 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 3.3272058823529413e-06, |
| "loss": 3.849, |
| "step": 960 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 3.3253676470588235e-06, |
| "loss": 3.863, |
| "step": 961 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 3.323529411764706e-06, |
| "loss": 3.9277, |
| "step": 962 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 3.3216911764705883e-06, |
| "loss": 3.793, |
| "step": 963 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 3.319852941176471e-06, |
| "loss": 3.8677, |
| "step": 964 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 3.318014705882353e-06, |
| "loss": 3.8413, |
| "step": 965 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 3.3161764705882353e-06, |
| "loss": 3.8306, |
| "step": 966 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 3.314338235294118e-06, |
| "loss": 3.8914, |
| "step": 967 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 3.3125e-06, |
| "loss": 3.8469, |
| "step": 968 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 3.3106617647058827e-06, |
| "loss": 3.84, |
| "step": 969 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 3.308823529411765e-06, |
| "loss": 3.8828, |
| "step": 970 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 3.3069852941176474e-06, |
| "loss": 3.9993, |
| "step": 971 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 3.3051470588235296e-06, |
| "loss": 3.915, |
| "step": 972 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 3.303308823529412e-06, |
| "loss": 3.7709, |
| "step": 973 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 3.3014705882352944e-06, |
| "loss": 3.8683, |
| "step": 974 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 3.2996323529411766e-06, |
| "loss": 3.9375, |
| "step": 975 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 3.2977941176470592e-06, |
| "loss": 3.9044, |
| "step": 976 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 3.2959558823529414e-06, |
| "loss": 3.9021, |
| "step": 977 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 3.2941176470588236e-06, |
| "loss": 3.8651, |
| "step": 978 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 3.292279411764706e-06, |
| "loss": 3.8722, |
| "step": 979 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 3.2904411764705884e-06, |
| "loss": 3.8855, |
| "step": 980 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 3.288602941176471e-06, |
| "loss": 3.861, |
| "step": 981 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 3.286764705882353e-06, |
| "loss": 3.8801, |
| "step": 982 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 3.2849264705882353e-06, |
| "loss": 3.8604, |
| "step": 983 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 3.283088235294118e-06, |
| "loss": 3.842, |
| "step": 984 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 3.28125e-06, |
| "loss": 3.8221, |
| "step": 985 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 3.2794117647058827e-06, |
| "loss": 3.8859, |
| "step": 986 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 3.277573529411765e-06, |
| "loss": 3.8611, |
| "step": 987 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 3.2757352941176475e-06, |
| "loss": 3.8207, |
| "step": 988 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 3.2738970588235297e-06, |
| "loss": 3.775, |
| "step": 989 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 3.272058823529412e-06, |
| "loss": 3.858, |
| "step": 990 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.2702205882352945e-06, |
| "loss": 3.8059, |
| "step": 991 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.2683823529411767e-06, |
| "loss": 3.8658, |
| "step": 992 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 3.2665441176470593e-06, |
| "loss": 3.8327, |
| "step": 993 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 3.2647058823529415e-06, |
| "loss": 3.897, |
| "step": 994 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 3.2628676470588237e-06, |
| "loss": 3.8465, |
| "step": 995 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.2610294117647063e-06, |
| "loss": 3.912, |
| "step": 996 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.2591911764705885e-06, |
| "loss": 3.8713, |
| "step": 997 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 3.257352941176471e-06, |
| "loss": 3.8196, |
| "step": 998 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.2555147058823532e-06, |
| "loss": 3.7875, |
| "step": 999 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.253676470588236e-06, |
| "loss": 3.8626, |
| "step": 1000 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 3.251838235294118e-06, |
| "loss": 3.8419, |
| "step": 1001 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 3.7857, |
| "step": 1002 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.248161764705883e-06, |
| "loss": 3.8541, |
| "step": 1003 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 3.246323529411765e-06, |
| "loss": 3.8291, |
| "step": 1004 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 3.2444852941176476e-06, |
| "loss": 3.7617, |
| "step": 1005 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 3.24264705882353e-06, |
| "loss": 3.9104, |
| "step": 1006 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.240808823529412e-06, |
| "loss": 3.8978, |
| "step": 1007 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.2389705882352946e-06, |
| "loss": 3.827, |
| "step": 1008 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 3.2371323529411768e-06, |
| "loss": 3.8974, |
| "step": 1009 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.2352941176470594e-06, |
| "loss": 3.8966, |
| "step": 1010 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.2334558823529416e-06, |
| "loss": 3.8008, |
| "step": 1011 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 3.231617647058824e-06, |
| "loss": 3.8636, |
| "step": 1012 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.2297794117647064e-06, |
| "loss": 3.8003, |
| "step": 1013 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.2279411764705885e-06, |
| "loss": 3.8171, |
| "step": 1014 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 3.226102941176471e-06, |
| "loss": 3.8971, |
| "step": 1015 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 3.2242647058823533e-06, |
| "loss": 3.8496, |
| "step": 1016 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 3.222426470588236e-06, |
| "loss": 3.8715, |
| "step": 1017 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.220588235294118e-06, |
| "loss": 3.9174, |
| "step": 1018 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.2187500000000003e-06, |
| "loss": 3.9274, |
| "step": 1019 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 3.216911764705883e-06, |
| "loss": 3.9359, |
| "step": 1020 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.215073529411765e-06, |
| "loss": 3.8354, |
| "step": 1021 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.2132352941176477e-06, |
| "loss": 3.9904, |
| "step": 1022 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 3.21139705882353e-06, |
| "loss": 3.8231, |
| "step": 1023 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.209558823529412e-06, |
| "loss": 3.8287, |
| "step": 1024 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.2077205882352947e-06, |
| "loss": 3.8522, |
| "step": 1025 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 3.205882352941177e-06, |
| "loss": 3.8832, |
| "step": 1026 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.2040441176470595e-06, |
| "loss": 3.8262, |
| "step": 1027 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.2022058823529412e-06, |
| "loss": 3.8892, |
| "step": 1028 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 3.2003676470588234e-06, |
| "loss": 3.8721, |
| "step": 1029 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 3.198529411764706e-06, |
| "loss": 3.8922, |
| "step": 1030 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 3.196691176470588e-06, |
| "loss": 3.8586, |
| "step": 1031 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.1948529411764704e-06, |
| "loss": 3.8284, |
| "step": 1032 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.193014705882353e-06, |
| "loss": 3.8622, |
| "step": 1033 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 3.191176470588235e-06, |
| "loss": 3.8174, |
| "step": 1034 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.189338235294118e-06, |
| "loss": 3.8811, |
| "step": 1035 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.1875e-06, |
| "loss": 3.8267, |
| "step": 1036 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 3.1856617647058826e-06, |
| "loss": 3.8518, |
| "step": 1037 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.1838235294117648e-06, |
| "loss": 3.8098, |
| "step": 1038 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.181985294117647e-06, |
| "loss": 3.8925, |
| "step": 1039 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 3.1801470588235296e-06, |
| "loss": 3.8589, |
| "step": 1040 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 3.1783088235294117e-06, |
| "loss": 3.8627, |
| "step": 1041 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 3.1764705882352943e-06, |
| "loss": 3.8578, |
| "step": 1042 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 3.1746323529411765e-06, |
| "loss": 3.927, |
| "step": 1043 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 3.1727941176470587e-06, |
| "loss": 3.8797, |
| "step": 1044 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 3.1709558823529413e-06, |
| "loss": 3.775, |
| "step": 1045 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 3.1691176470588235e-06, |
| "loss": 3.8828, |
| "step": 1046 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 3.167279411764706e-06, |
| "loss": 3.813, |
| "step": 1047 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 3.1654411764705883e-06, |
| "loss": 3.9459, |
| "step": 1048 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 3.163602941176471e-06, |
| "loss": 3.8703, |
| "step": 1049 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 3.161764705882353e-06, |
| "loss": 3.8377, |
| "step": 1050 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 3.1599264705882353e-06, |
| "loss": 3.7724, |
| "step": 1051 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 3.158088235294118e-06, |
| "loss": 3.9384, |
| "step": 1052 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 3.15625e-06, |
| "loss": 3.9083, |
| "step": 1053 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 3.1544117647058827e-06, |
| "loss": 3.858, |
| "step": 1054 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 3.152573529411765e-06, |
| "loss": 3.8304, |
| "step": 1055 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 3.150735294117647e-06, |
| "loss": 3.8262, |
| "step": 1056 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 3.1488970588235296e-06, |
| "loss": 3.7159, |
| "step": 1057 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 3.147058823529412e-06, |
| "loss": 3.8542, |
| "step": 1058 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 3.1452205882352944e-06, |
| "loss": 3.8638, |
| "step": 1059 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 3.1433823529411766e-06, |
| "loss": 3.9042, |
| "step": 1060 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 3.1415441176470592e-06, |
| "loss": 3.8701, |
| "step": 1061 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 3.1397058823529414e-06, |
| "loss": 3.7691, |
| "step": 1062 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 3.1378676470588236e-06, |
| "loss": 3.8731, |
| "step": 1063 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 3.136029411764706e-06, |
| "loss": 3.8722, |
| "step": 1064 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 3.1341911764705884e-06, |
| "loss": 3.8362, |
| "step": 1065 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 3.132352941176471e-06, |
| "loss": 3.9194, |
| "step": 1066 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 3.130514705882353e-06, |
| "loss": 3.8658, |
| "step": 1067 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 3.1286764705882354e-06, |
| "loss": 3.7822, |
| "step": 1068 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 3.126838235294118e-06, |
| "loss": 3.8979, |
| "step": 1069 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 3.125e-06, |
| "loss": 3.8638, |
| "step": 1070 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 3.1231617647058828e-06, |
| "loss": 3.8953, |
| "step": 1071 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 3.121323529411765e-06, |
| "loss": 3.8839, |
| "step": 1072 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 3.119485294117647e-06, |
| "loss": 3.9778, |
| "step": 1073 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 3.1176470588235297e-06, |
| "loss": 3.8903, |
| "step": 1074 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 3.115808823529412e-06, |
| "loss": 3.8579, |
| "step": 1075 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 3.1139705882352945e-06, |
| "loss": 3.7655, |
| "step": 1076 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 3.1121323529411767e-06, |
| "loss": 3.8569, |
| "step": 1077 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 3.1102941176470593e-06, |
| "loss": 3.7733, |
| "step": 1078 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 3.1084558823529415e-06, |
| "loss": 3.8975, |
| "step": 1079 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 3.1066176470588237e-06, |
| "loss": 3.9081, |
| "step": 1080 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 3.1047794117647063e-06, |
| "loss": 3.8688, |
| "step": 1081 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 3.1029411764705885e-06, |
| "loss": 3.8985, |
| "step": 1082 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 3.101102941176471e-06, |
| "loss": 3.8815, |
| "step": 1083 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 3.0992647058823533e-06, |
| "loss": 3.8982, |
| "step": 1084 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 3.0974264705882354e-06, |
| "loss": 3.8199, |
| "step": 1085 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 3.095588235294118e-06, |
| "loss": 3.8913, |
| "step": 1086 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 3.0937500000000002e-06, |
| "loss": 3.8682, |
| "step": 1087 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 3.091911764705883e-06, |
| "loss": 3.8735, |
| "step": 1088 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 3.090073529411765e-06, |
| "loss": 3.7854, |
| "step": 1089 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 3.0882352941176476e-06, |
| "loss": 3.893, |
| "step": 1090 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 3.08639705882353e-06, |
| "loss": 3.8769, |
| "step": 1091 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 3.084558823529412e-06, |
| "loss": 3.9361, |
| "step": 1092 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 3.0827205882352946e-06, |
| "loss": 3.7543, |
| "step": 1093 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 3.080882352941177e-06, |
| "loss": 3.8516, |
| "step": 1094 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 3.0790441176470594e-06, |
| "loss": 3.9239, |
| "step": 1095 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 3.0772058823529416e-06, |
| "loss": 3.8539, |
| "step": 1096 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 3.0753676470588238e-06, |
| "loss": 3.8585, |
| "step": 1097 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 3.0735294117647064e-06, |
| "loss": 3.9225, |
| "step": 1098 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 3.0716911764705886e-06, |
| "loss": 3.8298, |
| "step": 1099 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 3.069852941176471e-06, |
| "loss": 3.841, |
| "step": 1100 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 3.0680147058823534e-06, |
| "loss": 3.8988, |
| "step": 1101 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 3.066176470588236e-06, |
| "loss": 3.8548, |
| "step": 1102 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 3.064338235294118e-06, |
| "loss": 3.9095, |
| "step": 1103 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 3.0625000000000003e-06, |
| "loss": 3.8913, |
| "step": 1104 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 3.060661764705883e-06, |
| "loss": 3.9233, |
| "step": 1105 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 3.058823529411765e-06, |
| "loss": 3.8599, |
| "step": 1106 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 3.0569852941176477e-06, |
| "loss": 3.8367, |
| "step": 1107 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 3.05514705882353e-06, |
| "loss": 3.7901, |
| "step": 1108 |
| }, |
| { |
| "epoch": 4.0, |
| "eval_accuracy": 0.0945945945945946, |
| "eval_loss": 3.838702917098999, |
| "eval_runtime": 141.8389, |
| "eval_samples_per_second": 2.609, |
| "eval_steps_per_second": 0.656, |
| "step": 1108 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 3.053308823529412e-06, |
| "loss": 3.7794, |
| "step": 1109 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 3.0514705882352947e-06, |
| "loss": 3.8637, |
| "step": 1110 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 3.049632352941177e-06, |
| "loss": 3.7863, |
| "step": 1111 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 3.0477941176470595e-06, |
| "loss": 3.9245, |
| "step": 1112 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 3.0459558823529412e-06, |
| "loss": 3.8797, |
| "step": 1113 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 3.0441176470588234e-06, |
| "loss": 3.8391, |
| "step": 1114 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 3.042279411764706e-06, |
| "loss": 3.7441, |
| "step": 1115 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 3.0404411764705882e-06, |
| "loss": 3.8995, |
| "step": 1116 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 3.0386029411764704e-06, |
| "loss": 3.7882, |
| "step": 1117 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 3.036764705882353e-06, |
| "loss": 3.8309, |
| "step": 1118 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 3.034926470588235e-06, |
| "loss": 3.9182, |
| "step": 1119 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 3.033088235294118e-06, |
| "loss": 3.8266, |
| "step": 1120 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 3.03125e-06, |
| "loss": 3.833, |
| "step": 1121 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 3.0294117647058826e-06, |
| "loss": 3.8022, |
| "step": 1122 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 3.0275735294117648e-06, |
| "loss": 3.8981, |
| "step": 1123 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 3.025735294117647e-06, |
| "loss": 3.8455, |
| "step": 1124 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 3.0238970588235296e-06, |
| "loss": 3.9001, |
| "step": 1125 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 3.0220588235294118e-06, |
| "loss": 3.8753, |
| "step": 1126 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 3.0202205882352944e-06, |
| "loss": 3.8665, |
| "step": 1127 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 3.0183823529411765e-06, |
| "loss": 3.7868, |
| "step": 1128 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 3.0165441176470587e-06, |
| "loss": 3.8482, |
| "step": 1129 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 3.0147058823529413e-06, |
| "loss": 3.875, |
| "step": 1130 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 3.0128676470588235e-06, |
| "loss": 3.812, |
| "step": 1131 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 3.011029411764706e-06, |
| "loss": 3.926, |
| "step": 1132 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 3.0091911764705883e-06, |
| "loss": 3.805, |
| "step": 1133 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 3.0073529411764705e-06, |
| "loss": 3.86, |
| "step": 1134 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 3.005514705882353e-06, |
| "loss": 3.7903, |
| "step": 1135 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 3.0036764705882353e-06, |
| "loss": 3.8155, |
| "step": 1136 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 3.001838235294118e-06, |
| "loss": 3.8764, |
| "step": 1137 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 3e-06, |
| "loss": 3.8134, |
| "step": 1138 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 2.9981617647058827e-06, |
| "loss": 3.8877, |
| "step": 1139 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 2.996323529411765e-06, |
| "loss": 3.8153, |
| "step": 1140 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 2.994485294117647e-06, |
| "loss": 3.8429, |
| "step": 1141 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 2.9926470588235297e-06, |
| "loss": 3.8923, |
| "step": 1142 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 2.990808823529412e-06, |
| "loss": 3.8527, |
| "step": 1143 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 2.9889705882352944e-06, |
| "loss": 3.8033, |
| "step": 1144 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 2.9871323529411766e-06, |
| "loss": 3.8288, |
| "step": 1145 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 2.985294117647059e-06, |
| "loss": 3.8506, |
| "step": 1146 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 2.9834558823529414e-06, |
| "loss": 3.8286, |
| "step": 1147 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 2.9816176470588236e-06, |
| "loss": 3.8395, |
| "step": 1148 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 2.9797794117647062e-06, |
| "loss": 3.8023, |
| "step": 1149 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 2.9779411764705884e-06, |
| "loss": 3.8523, |
| "step": 1150 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 2.976102941176471e-06, |
| "loss": 3.7432, |
| "step": 1151 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 2.974264705882353e-06, |
| "loss": 3.8311, |
| "step": 1152 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 2.9724264705882354e-06, |
| "loss": 3.8631, |
| "step": 1153 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 2.970588235294118e-06, |
| "loss": 3.8679, |
| "step": 1154 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 2.96875e-06, |
| "loss": 3.8725, |
| "step": 1155 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 2.9669117647058828e-06, |
| "loss": 3.8268, |
| "step": 1156 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 2.965073529411765e-06, |
| "loss": 3.8411, |
| "step": 1157 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 2.963235294117647e-06, |
| "loss": 3.855, |
| "step": 1158 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 2.9613970588235297e-06, |
| "loss": 3.8503, |
| "step": 1159 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 2.959558823529412e-06, |
| "loss": 3.9597, |
| "step": 1160 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 2.9577205882352945e-06, |
| "loss": 3.9216, |
| "step": 1161 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 2.9558823529411767e-06, |
| "loss": 3.8542, |
| "step": 1162 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 2.9540441176470593e-06, |
| "loss": 3.8598, |
| "step": 1163 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 2.9522058823529415e-06, |
| "loss": 3.7886, |
| "step": 1164 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 2.9503676470588237e-06, |
| "loss": 3.871, |
| "step": 1165 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 2.9485294117647063e-06, |
| "loss": 3.7283, |
| "step": 1166 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 2.9466911764705885e-06, |
| "loss": 3.8953, |
| "step": 1167 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 2.944852941176471e-06, |
| "loss": 3.8464, |
| "step": 1168 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 2.9430147058823533e-06, |
| "loss": 3.9064, |
| "step": 1169 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 2.9411764705882355e-06, |
| "loss": 3.871, |
| "step": 1170 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 2.939338235294118e-06, |
| "loss": 3.9004, |
| "step": 1171 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 2.9375000000000003e-06, |
| "loss": 3.7616, |
| "step": 1172 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 2.935661764705883e-06, |
| "loss": 3.848, |
| "step": 1173 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 2.933823529411765e-06, |
| "loss": 3.8248, |
| "step": 1174 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 2.9319852941176472e-06, |
| "loss": 3.903, |
| "step": 1175 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 2.93014705882353e-06, |
| "loss": 3.8677, |
| "step": 1176 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 2.928308823529412e-06, |
| "loss": 3.9226, |
| "step": 1177 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 2.9264705882352946e-06, |
| "loss": 3.7422, |
| "step": 1178 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.924632352941177e-06, |
| "loss": 3.7978, |
| "step": 1179 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.9227941176470594e-06, |
| "loss": 3.8876, |
| "step": 1180 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.9209558823529416e-06, |
| "loss": 3.8557, |
| "step": 1181 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.9191176470588238e-06, |
| "loss": 3.8229, |
| "step": 1182 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.9172794117647064e-06, |
| "loss": 3.9248, |
| "step": 1183 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.9154411764705886e-06, |
| "loss": 3.9013, |
| "step": 1184 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.913602941176471e-06, |
| "loss": 3.851, |
| "step": 1185 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.9117647058823534e-06, |
| "loss": 3.9082, |
| "step": 1186 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.9099264705882355e-06, |
| "loss": 3.876, |
| "step": 1187 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.908088235294118e-06, |
| "loss": 3.8759, |
| "step": 1188 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.9062500000000003e-06, |
| "loss": 3.9206, |
| "step": 1189 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.904411764705883e-06, |
| "loss": 3.8957, |
| "step": 1190 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.902573529411765e-06, |
| "loss": 3.9239, |
| "step": 1191 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.9007352941176477e-06, |
| "loss": 3.9166, |
| "step": 1192 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.89889705882353e-06, |
| "loss": 3.8544, |
| "step": 1193 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.897058823529412e-06, |
| "loss": 3.894, |
| "step": 1194 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.8952205882352947e-06, |
| "loss": 3.8498, |
| "step": 1195 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.893382352941177e-06, |
| "loss": 3.8974, |
| "step": 1196 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.8915441176470595e-06, |
| "loss": 3.7968, |
| "step": 1197 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.8897058823529413e-06, |
| "loss": 3.7809, |
| "step": 1198 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 2.8878676470588234e-06, |
| "loss": 3.871, |
| "step": 1199 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 2.886029411764706e-06, |
| "loss": 3.8878, |
| "step": 1200 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.8841911764705882e-06, |
| "loss": 3.8607, |
| "step": 1201 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.8823529411764704e-06, |
| "loss": 3.8891, |
| "step": 1202 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.880514705882353e-06, |
| "loss": 3.8552, |
| "step": 1203 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.878676470588235e-06, |
| "loss": 3.834, |
| "step": 1204 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.876838235294118e-06, |
| "loss": 3.8468, |
| "step": 1205 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.875e-06, |
| "loss": 3.8964, |
| "step": 1206 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.873161764705882e-06, |
| "loss": 3.8691, |
| "step": 1207 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.871323529411765e-06, |
| "loss": 3.8125, |
| "step": 1208 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.869485294117647e-06, |
| "loss": 3.8795, |
| "step": 1209 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.8676470588235296e-06, |
| "loss": 3.8424, |
| "step": 1210 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.8658088235294118e-06, |
| "loss": 3.8193, |
| "step": 1211 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.8639705882352944e-06, |
| "loss": 3.8687, |
| "step": 1212 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.8621323529411766e-06, |
| "loss": 3.861, |
| "step": 1213 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.8602941176470587e-06, |
| "loss": 3.8656, |
| "step": 1214 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.8584558823529413e-06, |
| "loss": 3.9022, |
| "step": 1215 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.8566176470588235e-06, |
| "loss": 3.8321, |
| "step": 1216 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.854779411764706e-06, |
| "loss": 3.7633, |
| "step": 1217 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.8529411764705883e-06, |
| "loss": 3.8294, |
| "step": 1218 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.8511029411764705e-06, |
| "loss": 3.9509, |
| "step": 1219 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.849264705882353e-06, |
| "loss": 3.8795, |
| "step": 1220 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.8474264705882353e-06, |
| "loss": 3.842, |
| "step": 1221 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.845588235294118e-06, |
| "loss": 3.9093, |
| "step": 1222 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.84375e-06, |
| "loss": 3.8812, |
| "step": 1223 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.8419117647058823e-06, |
| "loss": 3.8946, |
| "step": 1224 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.840073529411765e-06, |
| "loss": 3.7789, |
| "step": 1225 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.838235294117647e-06, |
| "loss": 3.9029, |
| "step": 1226 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.8363970588235297e-06, |
| "loss": 3.8211, |
| "step": 1227 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.834558823529412e-06, |
| "loss": 3.8598, |
| "step": 1228 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.8327205882352945e-06, |
| "loss": 3.9756, |
| "step": 1229 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.8308823529411766e-06, |
| "loss": 3.9057, |
| "step": 1230 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.829044117647059e-06, |
| "loss": 3.7876, |
| "step": 1231 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.8272058823529414e-06, |
| "loss": 3.7212, |
| "step": 1232 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.8253676470588236e-06, |
| "loss": 3.8179, |
| "step": 1233 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.8235294117647062e-06, |
| "loss": 3.8163, |
| "step": 1234 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.8216911764705884e-06, |
| "loss": 3.9242, |
| "step": 1235 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.8198529411764706e-06, |
| "loss": 3.873, |
| "step": 1236 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.818014705882353e-06, |
| "loss": 3.8443, |
| "step": 1237 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.8161764705882354e-06, |
| "loss": 3.8935, |
| "step": 1238 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.814338235294118e-06, |
| "loss": 3.7905, |
| "step": 1239 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.8125e-06, |
| "loss": 3.8536, |
| "step": 1240 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.8106617647058828e-06, |
| "loss": 3.808, |
| "step": 1241 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.808823529411765e-06, |
| "loss": 3.9448, |
| "step": 1242 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.806985294117647e-06, |
| "loss": 3.8521, |
| "step": 1243 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.8051470588235298e-06, |
| "loss": 3.751, |
| "step": 1244 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.803308823529412e-06, |
| "loss": 3.884, |
| "step": 1245 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 2.8014705882352945e-06, |
| "loss": 3.8477, |
| "step": 1246 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 2.7996323529411767e-06, |
| "loss": 3.8389, |
| "step": 1247 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 2.797794117647059e-06, |
| "loss": 3.8672, |
| "step": 1248 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 2.7959558823529415e-06, |
| "loss": 3.8047, |
| "step": 1249 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 2.7941176470588237e-06, |
| "loss": 3.7926, |
| "step": 1250 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 2.7922794117647063e-06, |
| "loss": 3.8726, |
| "step": 1251 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 2.7904411764705885e-06, |
| "loss": 3.8393, |
| "step": 1252 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 2.788602941176471e-06, |
| "loss": 3.8778, |
| "step": 1253 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 2.7867647058823533e-06, |
| "loss": 3.8925, |
| "step": 1254 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 2.7849264705882355e-06, |
| "loss": 3.8154, |
| "step": 1255 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 2.783088235294118e-06, |
| "loss": 3.8656, |
| "step": 1256 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 2.7812500000000003e-06, |
| "loss": 3.778, |
| "step": 1257 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 2.779411764705883e-06, |
| "loss": 3.7332, |
| "step": 1258 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 2.777573529411765e-06, |
| "loss": 3.8294, |
| "step": 1259 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 2.7757352941176472e-06, |
| "loss": 3.7332, |
| "step": 1260 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 2.77389705882353e-06, |
| "loss": 3.9188, |
| "step": 1261 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 2.772058823529412e-06, |
| "loss": 3.968, |
| "step": 1262 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 2.7702205882352946e-06, |
| "loss": 3.7484, |
| "step": 1263 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 2.768382352941177e-06, |
| "loss": 3.8176, |
| "step": 1264 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 2.766544117647059e-06, |
| "loss": 3.889, |
| "step": 1265 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 2.7647058823529416e-06, |
| "loss": 3.7576, |
| "step": 1266 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 2.762867647058824e-06, |
| "loss": 3.9066, |
| "step": 1267 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 2.7610294117647064e-06, |
| "loss": 3.9179, |
| "step": 1268 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 2.7591911764705886e-06, |
| "loss": 3.8583, |
| "step": 1269 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 2.757352941176471e-06, |
| "loss": 3.9205, |
| "step": 1270 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 2.7555147058823534e-06, |
| "loss": 3.9177, |
| "step": 1271 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 2.7536764705882356e-06, |
| "loss": 3.7967, |
| "step": 1272 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 2.751838235294118e-06, |
| "loss": 3.8539, |
| "step": 1273 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 3.8079, |
| "step": 1274 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 2.748161764705883e-06, |
| "loss": 3.9461, |
| "step": 1275 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 2.746323529411765e-06, |
| "loss": 3.7719, |
| "step": 1276 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 2.7444852941176473e-06, |
| "loss": 3.8149, |
| "step": 1277 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 2.74264705882353e-06, |
| "loss": 3.9015, |
| "step": 1278 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.740808823529412e-06, |
| "loss": 3.81, |
| "step": 1279 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.7389705882352947e-06, |
| "loss": 3.7756, |
| "step": 1280 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.737132352941177e-06, |
| "loss": 3.9043, |
| "step": 1281 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.7352941176470595e-06, |
| "loss": 3.8091, |
| "step": 1282 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.7334558823529413e-06, |
| "loss": 3.8349, |
| "step": 1283 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.7316176470588235e-06, |
| "loss": 3.7652, |
| "step": 1284 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.7297794117647056e-06, |
| "loss": 3.8246, |
| "step": 1285 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.7279411764705883e-06, |
| "loss": 3.8528, |
| "step": 1286 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.7261029411764704e-06, |
| "loss": 3.7847, |
| "step": 1287 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.724264705882353e-06, |
| "loss": 3.9511, |
| "step": 1288 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.7224264705882352e-06, |
| "loss": 3.8301, |
| "step": 1289 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.720588235294118e-06, |
| "loss": 3.7516, |
| "step": 1290 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.71875e-06, |
| "loss": 3.8913, |
| "step": 1291 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.716911764705882e-06, |
| "loss": 3.9193, |
| "step": 1292 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.715073529411765e-06, |
| "loss": 3.9011, |
| "step": 1293 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.713235294117647e-06, |
| "loss": 3.8158, |
| "step": 1294 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.7113970588235296e-06, |
| "loss": 3.8093, |
| "step": 1295 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.7095588235294118e-06, |
| "loss": 3.8375, |
| "step": 1296 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.707720588235294e-06, |
| "loss": 3.904, |
| "step": 1297 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.7058823529411766e-06, |
| "loss": 3.7123, |
| "step": 1298 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.7040441176470588e-06, |
| "loss": 3.8847, |
| "step": 1299 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.7022058823529414e-06, |
| "loss": 3.7486, |
| "step": 1300 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.7003676470588235e-06, |
| "loss": 3.868, |
| "step": 1301 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.698529411764706e-06, |
| "loss": 3.9272, |
| "step": 1302 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.6966911764705883e-06, |
| "loss": 3.8874, |
| "step": 1303 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.6948529411764705e-06, |
| "loss": 3.8191, |
| "step": 1304 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.693014705882353e-06, |
| "loss": 3.8455, |
| "step": 1305 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.6911764705882353e-06, |
| "loss": 3.8703, |
| "step": 1306 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.689338235294118e-06, |
| "loss": 3.8898, |
| "step": 1307 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.6875e-06, |
| "loss": 3.6968, |
| "step": 1308 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.6856617647058823e-06, |
| "loss": 3.8408, |
| "step": 1309 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.683823529411765e-06, |
| "loss": 3.8895, |
| "step": 1310 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.681985294117647e-06, |
| "loss": 3.8222, |
| "step": 1311 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.6801470588235297e-06, |
| "loss": 3.9387, |
| "step": 1312 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.678308823529412e-06, |
| "loss": 3.7954, |
| "step": 1313 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.6764705882352945e-06, |
| "loss": 3.8852, |
| "step": 1314 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.6746323529411767e-06, |
| "loss": 3.8554, |
| "step": 1315 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.672794117647059e-06, |
| "loss": 3.7885, |
| "step": 1316 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.6709558823529415e-06, |
| "loss": 3.8982, |
| "step": 1317 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.6691176470588236e-06, |
| "loss": 3.8127, |
| "step": 1318 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.6672794117647062e-06, |
| "loss": 3.8841, |
| "step": 1319 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 2.6654411764705884e-06, |
| "loss": 3.7996, |
| "step": 1320 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 2.6636029411764706e-06, |
| "loss": 3.8403, |
| "step": 1321 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 2.6617647058823532e-06, |
| "loss": 3.9211, |
| "step": 1322 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 2.6599264705882354e-06, |
| "loss": 3.7752, |
| "step": 1323 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 2.658088235294118e-06, |
| "loss": 3.8452, |
| "step": 1324 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 2.65625e-06, |
| "loss": 3.8486, |
| "step": 1325 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 2.6544117647058824e-06, |
| "loss": 3.8544, |
| "step": 1326 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 2.652573529411765e-06, |
| "loss": 3.8022, |
| "step": 1327 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 2.650735294117647e-06, |
| "loss": 3.867, |
| "step": 1328 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 2.6488970588235298e-06, |
| "loss": 3.8698, |
| "step": 1329 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 2.647058823529412e-06, |
| "loss": 3.795, |
| "step": 1330 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 2.6452205882352946e-06, |
| "loss": 3.8937, |
| "step": 1331 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 2.6433823529411767e-06, |
| "loss": 3.8101, |
| "step": 1332 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 2.641544117647059e-06, |
| "loss": 3.8737, |
| "step": 1333 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 2.6397058823529415e-06, |
| "loss": 3.8933, |
| "step": 1334 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 2.6378676470588237e-06, |
| "loss": 3.8787, |
| "step": 1335 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 2.6360294117647063e-06, |
| "loss": 3.8354, |
| "step": 1336 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 2.6341911764705885e-06, |
| "loss": 3.7723, |
| "step": 1337 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 2.6323529411764707e-06, |
| "loss": 3.8334, |
| "step": 1338 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 2.6305147058823533e-06, |
| "loss": 3.8109, |
| "step": 1339 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 2.6286764705882355e-06, |
| "loss": 3.721, |
| "step": 1340 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 2.626838235294118e-06, |
| "loss": 3.9016, |
| "step": 1341 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 2.6250000000000003e-06, |
| "loss": 3.8424, |
| "step": 1342 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 2.623161764705883e-06, |
| "loss": 3.8434, |
| "step": 1343 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 2.621323529411765e-06, |
| "loss": 3.8004, |
| "step": 1344 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 2.6194852941176473e-06, |
| "loss": 3.8638, |
| "step": 1345 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 2.61764705882353e-06, |
| "loss": 3.8666, |
| "step": 1346 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 2.615808823529412e-06, |
| "loss": 3.8641, |
| "step": 1347 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 2.6139705882352947e-06, |
| "loss": 3.8302, |
| "step": 1348 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 2.612132352941177e-06, |
| "loss": 3.7778, |
| "step": 1349 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 2.610294117647059e-06, |
| "loss": 3.8042, |
| "step": 1350 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.6084558823529416e-06, |
| "loss": 3.7827, |
| "step": 1351 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.606617647058824e-06, |
| "loss": 3.8356, |
| "step": 1352 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.6047794117647064e-06, |
| "loss": 3.7598, |
| "step": 1353 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.6029411764705886e-06, |
| "loss": 3.8939, |
| "step": 1354 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.601102941176471e-06, |
| "loss": 3.8656, |
| "step": 1355 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 2.5992647058823534e-06, |
| "loss": 3.8924, |
| "step": 1356 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 2.5974264705882356e-06, |
| "loss": 3.8417, |
| "step": 1357 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 2.595588235294118e-06, |
| "loss": 3.8414, |
| "step": 1358 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 2.5937500000000004e-06, |
| "loss": 3.7218, |
| "step": 1359 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 2.591911764705883e-06, |
| "loss": 3.7599, |
| "step": 1360 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 2.590073529411765e-06, |
| "loss": 3.9119, |
| "step": 1361 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 2.5882352941176473e-06, |
| "loss": 3.8041, |
| "step": 1362 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 2.58639705882353e-06, |
| "loss": 3.777, |
| "step": 1363 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 2.584558823529412e-06, |
| "loss": 3.8667, |
| "step": 1364 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 2.5827205882352947e-06, |
| "loss": 3.8421, |
| "step": 1365 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 2.580882352941177e-06, |
| "loss": 3.8412, |
| "step": 1366 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.579044117647059e-06, |
| "loss": 3.8712, |
| "step": 1367 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.5772058823529413e-06, |
| "loss": 3.7757, |
| "step": 1368 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.5753676470588235e-06, |
| "loss": 3.7206, |
| "step": 1369 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 2.5735294117647057e-06, |
| "loss": 3.8781, |
| "step": 1370 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 2.5716911764705883e-06, |
| "loss": 3.9022, |
| "step": 1371 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 2.5698529411764704e-06, |
| "loss": 3.6748, |
| "step": 1372 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 2.568014705882353e-06, |
| "loss": 3.747, |
| "step": 1373 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 2.5661764705882352e-06, |
| "loss": 3.8702, |
| "step": 1374 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 2.5643382352941174e-06, |
| "loss": 3.7941, |
| "step": 1375 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 2.5625e-06, |
| "loss": 3.8689, |
| "step": 1376 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 2.5606617647058822e-06, |
| "loss": 3.7528, |
| "step": 1377 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 2.558823529411765e-06, |
| "loss": 3.7879, |
| "step": 1378 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 2.556985294117647e-06, |
| "loss": 3.842, |
| "step": 1379 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 2.5551470588235296e-06, |
| "loss": 3.7839, |
| "step": 1380 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.553308823529412e-06, |
| "loss": 3.8471, |
| "step": 1381 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.551470588235294e-06, |
| "loss": 3.7944, |
| "step": 1382 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.5496323529411766e-06, |
| "loss": 3.8677, |
| "step": 1383 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 2.5477941176470588e-06, |
| "loss": 3.8319, |
| "step": 1384 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 2.5459558823529414e-06, |
| "loss": 3.6766, |
| "step": 1385 |
| }, |
| { |
| "epoch": 5.0, |
| "eval_accuracy": 0.12162162162162163, |
| "eval_loss": 3.8140854835510254, |
| "eval_runtime": 141.8941, |
| "eval_samples_per_second": 2.608, |
| "eval_steps_per_second": 0.655, |
| "step": 1385 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 2.5441176470588236e-06, |
| "loss": 3.9093, |
| "step": 1386 |
| }, |
| { |
| "epoch": 5.01, |
| "learning_rate": 2.5422794117647057e-06, |
| "loss": 3.931, |
| "step": 1387 |
| }, |
| { |
| "epoch": 5.01, |
| "learning_rate": 2.5404411764705884e-06, |
| "loss": 3.7568, |
| "step": 1388 |
| }, |
| { |
| "epoch": 5.01, |
| "learning_rate": 2.5386029411764705e-06, |
| "loss": 3.8262, |
| "step": 1389 |
| }, |
| { |
| "epoch": 5.02, |
| "learning_rate": 2.536764705882353e-06, |
| "loss": 3.8426, |
| "step": 1390 |
| }, |
| { |
| "epoch": 5.02, |
| "learning_rate": 2.5349264705882353e-06, |
| "loss": 3.7376, |
| "step": 1391 |
| }, |
| { |
| "epoch": 5.03, |
| "learning_rate": 2.533088235294118e-06, |
| "loss": 3.8782, |
| "step": 1392 |
| }, |
| { |
| "epoch": 5.03, |
| "learning_rate": 2.53125e-06, |
| "loss": 3.7147, |
| "step": 1393 |
| }, |
| { |
| "epoch": 5.03, |
| "learning_rate": 2.5294117647058823e-06, |
| "loss": 3.7964, |
| "step": 1394 |
| }, |
| { |
| "epoch": 5.04, |
| "learning_rate": 2.527573529411765e-06, |
| "loss": 3.783, |
| "step": 1395 |
| }, |
| { |
| "epoch": 5.04, |
| "learning_rate": 2.525735294117647e-06, |
| "loss": 3.7448, |
| "step": 1396 |
| }, |
| { |
| "epoch": 5.04, |
| "learning_rate": 2.5238970588235297e-06, |
| "loss": 3.7199, |
| "step": 1397 |
| }, |
| { |
| "epoch": 5.05, |
| "learning_rate": 2.522058823529412e-06, |
| "loss": 3.7467, |
| "step": 1398 |
| }, |
| { |
| "epoch": 5.05, |
| "learning_rate": 2.520220588235294e-06, |
| "loss": 3.7637, |
| "step": 1399 |
| }, |
| { |
| "epoch": 5.05, |
| "learning_rate": 2.5183823529411767e-06, |
| "loss": 3.8261, |
| "step": 1400 |
| }, |
| { |
| "epoch": 5.06, |
| "learning_rate": 2.516544117647059e-06, |
| "loss": 3.8239, |
| "step": 1401 |
| }, |
| { |
| "epoch": 5.06, |
| "learning_rate": 2.5147058823529415e-06, |
| "loss": 3.816, |
| "step": 1402 |
| }, |
| { |
| "epoch": 5.06, |
| "learning_rate": 2.5128676470588236e-06, |
| "loss": 3.8442, |
| "step": 1403 |
| }, |
| { |
| "epoch": 5.07, |
| "learning_rate": 2.5110294117647063e-06, |
| "loss": 3.8801, |
| "step": 1404 |
| }, |
| { |
| "epoch": 5.07, |
| "learning_rate": 2.5091911764705884e-06, |
| "loss": 3.8965, |
| "step": 1405 |
| }, |
| { |
| "epoch": 5.08, |
| "learning_rate": 2.5073529411764706e-06, |
| "loss": 3.9093, |
| "step": 1406 |
| }, |
| { |
| "epoch": 5.08, |
| "learning_rate": 2.5055147058823532e-06, |
| "loss": 3.8445, |
| "step": 1407 |
| }, |
| { |
| "epoch": 5.08, |
| "learning_rate": 2.5036764705882354e-06, |
| "loss": 3.7602, |
| "step": 1408 |
| }, |
| { |
| "epoch": 5.09, |
| "learning_rate": 2.501838235294118e-06, |
| "loss": 3.8217, |
| "step": 1409 |
| }, |
| { |
| "epoch": 5.09, |
| "learning_rate": 2.5e-06, |
| "loss": 3.8104, |
| "step": 1410 |
| }, |
| { |
| "epoch": 5.09, |
| "learning_rate": 2.4981617647058824e-06, |
| "loss": 3.7757, |
| "step": 1411 |
| }, |
| { |
| "epoch": 5.1, |
| "learning_rate": 2.496323529411765e-06, |
| "loss": 3.7992, |
| "step": 1412 |
| }, |
| { |
| "epoch": 5.1, |
| "learning_rate": 2.494485294117647e-06, |
| "loss": 3.875, |
| "step": 1413 |
| }, |
| { |
| "epoch": 5.1, |
| "learning_rate": 2.4926470588235298e-06, |
| "loss": 3.927, |
| "step": 1414 |
| }, |
| { |
| "epoch": 5.11, |
| "learning_rate": 2.490808823529412e-06, |
| "loss": 3.8832, |
| "step": 1415 |
| }, |
| { |
| "epoch": 5.11, |
| "learning_rate": 2.488970588235294e-06, |
| "loss": 3.8145, |
| "step": 1416 |
| }, |
| { |
| "epoch": 5.12, |
| "learning_rate": 2.4871323529411768e-06, |
| "loss": 3.7506, |
| "step": 1417 |
| }, |
| { |
| "epoch": 5.12, |
| "learning_rate": 2.485294117647059e-06, |
| "loss": 3.9218, |
| "step": 1418 |
| }, |
| { |
| "epoch": 5.12, |
| "learning_rate": 2.4834558823529416e-06, |
| "loss": 3.8855, |
| "step": 1419 |
| }, |
| { |
| "epoch": 5.13, |
| "learning_rate": 2.4816176470588237e-06, |
| "loss": 3.8391, |
| "step": 1420 |
| }, |
| { |
| "epoch": 5.13, |
| "learning_rate": 2.4797794117647063e-06, |
| "loss": 3.8401, |
| "step": 1421 |
| }, |
| { |
| "epoch": 5.13, |
| "learning_rate": 2.4779411764705885e-06, |
| "loss": 3.7426, |
| "step": 1422 |
| }, |
| { |
| "epoch": 5.14, |
| "learning_rate": 2.4761029411764707e-06, |
| "loss": 3.7924, |
| "step": 1423 |
| }, |
| { |
| "epoch": 5.14, |
| "learning_rate": 2.4742647058823533e-06, |
| "loss": 3.8432, |
| "step": 1424 |
| }, |
| { |
| "epoch": 5.14, |
| "learning_rate": 2.4724264705882355e-06, |
| "loss": 3.7874, |
| "step": 1425 |
| }, |
| { |
| "epoch": 5.15, |
| "learning_rate": 2.470588235294118e-06, |
| "loss": 3.9072, |
| "step": 1426 |
| }, |
| { |
| "epoch": 5.15, |
| "learning_rate": 2.4687500000000003e-06, |
| "loss": 3.7645, |
| "step": 1427 |
| }, |
| { |
| "epoch": 5.16, |
| "learning_rate": 2.4669117647058825e-06, |
| "loss": 3.6841, |
| "step": 1428 |
| }, |
| { |
| "epoch": 5.16, |
| "learning_rate": 2.465073529411765e-06, |
| "loss": 3.8278, |
| "step": 1429 |
| }, |
| { |
| "epoch": 5.16, |
| "learning_rate": 2.4632352941176473e-06, |
| "loss": 3.8375, |
| "step": 1430 |
| }, |
| { |
| "epoch": 5.17, |
| "learning_rate": 2.46139705882353e-06, |
| "loss": 3.826, |
| "step": 1431 |
| }, |
| { |
| "epoch": 5.17, |
| "learning_rate": 2.4595588235294116e-06, |
| "loss": 3.8361, |
| "step": 1432 |
| }, |
| { |
| "epoch": 5.17, |
| "learning_rate": 2.4577205882352942e-06, |
| "loss": 3.7711, |
| "step": 1433 |
| }, |
| { |
| "epoch": 5.18, |
| "learning_rate": 2.4558823529411764e-06, |
| "loss": 3.8912, |
| "step": 1434 |
| }, |
| { |
| "epoch": 5.18, |
| "learning_rate": 2.454044117647059e-06, |
| "loss": 3.7994, |
| "step": 1435 |
| }, |
| { |
| "epoch": 5.18, |
| "learning_rate": 2.4522058823529412e-06, |
| "loss": 3.861, |
| "step": 1436 |
| }, |
| { |
| "epoch": 5.19, |
| "learning_rate": 2.450367647058824e-06, |
| "loss": 3.8326, |
| "step": 1437 |
| }, |
| { |
| "epoch": 5.19, |
| "learning_rate": 2.448529411764706e-06, |
| "loss": 3.8962, |
| "step": 1438 |
| }, |
| { |
| "epoch": 5.19, |
| "learning_rate": 2.446691176470588e-06, |
| "loss": 3.8763, |
| "step": 1439 |
| }, |
| { |
| "epoch": 5.2, |
| "learning_rate": 2.444852941176471e-06, |
| "loss": 3.8224, |
| "step": 1440 |
| }, |
| { |
| "epoch": 5.2, |
| "learning_rate": 2.443014705882353e-06, |
| "loss": 3.7163, |
| "step": 1441 |
| }, |
| { |
| "epoch": 5.21, |
| "learning_rate": 2.4411764705882356e-06, |
| "loss": 3.663, |
| "step": 1442 |
| }, |
| { |
| "epoch": 5.21, |
| "learning_rate": 2.4393382352941178e-06, |
| "loss": 3.7742, |
| "step": 1443 |
| }, |
| { |
| "epoch": 5.21, |
| "learning_rate": 2.4375e-06, |
| "loss": 3.7776, |
| "step": 1444 |
| }, |
| { |
| "epoch": 5.22, |
| "learning_rate": 2.4356617647058826e-06, |
| "loss": 3.7575, |
| "step": 1445 |
| }, |
| { |
| "epoch": 5.22, |
| "learning_rate": 2.4338235294117647e-06, |
| "loss": 3.7288, |
| "step": 1446 |
| }, |
| { |
| "epoch": 5.22, |
| "learning_rate": 2.4319852941176474e-06, |
| "loss": 3.7924, |
| "step": 1447 |
| }, |
| { |
| "epoch": 5.23, |
| "learning_rate": 2.4301470588235295e-06, |
| "loss": 3.7177, |
| "step": 1448 |
| }, |
| { |
| "epoch": 5.23, |
| "learning_rate": 2.428308823529412e-06, |
| "loss": 3.7735, |
| "step": 1449 |
| }, |
| { |
| "epoch": 5.23, |
| "learning_rate": 2.4264705882352943e-06, |
| "loss": 3.7613, |
| "step": 1450 |
| }, |
| { |
| "epoch": 5.24, |
| "learning_rate": 2.4246323529411765e-06, |
| "loss": 3.6932, |
| "step": 1451 |
| }, |
| { |
| "epoch": 5.24, |
| "learning_rate": 2.422794117647059e-06, |
| "loss": 3.9268, |
| "step": 1452 |
| }, |
| { |
| "epoch": 5.25, |
| "learning_rate": 2.4209558823529413e-06, |
| "loss": 3.8574, |
| "step": 1453 |
| }, |
| { |
| "epoch": 5.25, |
| "learning_rate": 2.419117647058824e-06, |
| "loss": 3.8107, |
| "step": 1454 |
| }, |
| { |
| "epoch": 5.25, |
| "learning_rate": 2.417279411764706e-06, |
| "loss": 3.8143, |
| "step": 1455 |
| }, |
| { |
| "epoch": 5.26, |
| "learning_rate": 2.4154411764705883e-06, |
| "loss": 3.8233, |
| "step": 1456 |
| }, |
| { |
| "epoch": 5.26, |
| "learning_rate": 2.413602941176471e-06, |
| "loss": 3.8765, |
| "step": 1457 |
| }, |
| { |
| "epoch": 5.26, |
| "learning_rate": 2.411764705882353e-06, |
| "loss": 3.842, |
| "step": 1458 |
| }, |
| { |
| "epoch": 5.27, |
| "learning_rate": 2.4099264705882357e-06, |
| "loss": 3.7893, |
| "step": 1459 |
| }, |
| { |
| "epoch": 5.27, |
| "learning_rate": 2.408088235294118e-06, |
| "loss": 3.8639, |
| "step": 1460 |
| }, |
| { |
| "epoch": 5.27, |
| "learning_rate": 2.40625e-06, |
| "loss": 3.837, |
| "step": 1461 |
| }, |
| { |
| "epoch": 5.28, |
| "learning_rate": 2.4044117647058827e-06, |
| "loss": 3.8223, |
| "step": 1462 |
| }, |
| { |
| "epoch": 5.28, |
| "learning_rate": 2.402573529411765e-06, |
| "loss": 3.7547, |
| "step": 1463 |
| }, |
| { |
| "epoch": 5.29, |
| "learning_rate": 2.4007352941176474e-06, |
| "loss": 3.8397, |
| "step": 1464 |
| }, |
| { |
| "epoch": 5.29, |
| "learning_rate": 2.3988970588235296e-06, |
| "loss": 3.8184, |
| "step": 1465 |
| }, |
| { |
| "epoch": 5.29, |
| "learning_rate": 2.3970588235294122e-06, |
| "loss": 3.8426, |
| "step": 1466 |
| }, |
| { |
| "epoch": 5.3, |
| "learning_rate": 2.3952205882352944e-06, |
| "loss": 3.7062, |
| "step": 1467 |
| }, |
| { |
| "epoch": 5.3, |
| "learning_rate": 2.3933823529411766e-06, |
| "loss": 3.811, |
| "step": 1468 |
| }, |
| { |
| "epoch": 5.3, |
| "learning_rate": 2.391544117647059e-06, |
| "loss": 3.8753, |
| "step": 1469 |
| }, |
| { |
| "epoch": 5.31, |
| "learning_rate": 2.3897058823529414e-06, |
| "loss": 3.7554, |
| "step": 1470 |
| }, |
| { |
| "epoch": 5.31, |
| "learning_rate": 2.387867647058824e-06, |
| "loss": 3.8772, |
| "step": 1471 |
| }, |
| { |
| "epoch": 5.31, |
| "learning_rate": 2.386029411764706e-06, |
| "loss": 3.7707, |
| "step": 1472 |
| }, |
| { |
| "epoch": 5.32, |
| "learning_rate": 2.3841911764705884e-06, |
| "loss": 3.7515, |
| "step": 1473 |
| }, |
| { |
| "epoch": 5.32, |
| "learning_rate": 2.3823529411764705e-06, |
| "loss": 3.8449, |
| "step": 1474 |
| }, |
| { |
| "epoch": 5.32, |
| "learning_rate": 2.380514705882353e-06, |
| "loss": 3.7729, |
| "step": 1475 |
| }, |
| { |
| "epoch": 5.33, |
| "learning_rate": 2.3786764705882353e-06, |
| "loss": 3.8959, |
| "step": 1476 |
| }, |
| { |
| "epoch": 5.33, |
| "learning_rate": 2.3768382352941175e-06, |
| "loss": 3.8706, |
| "step": 1477 |
| }, |
| { |
| "epoch": 5.34, |
| "learning_rate": 2.375e-06, |
| "loss": 3.8376, |
| "step": 1478 |
| }, |
| { |
| "epoch": 5.34, |
| "learning_rate": 2.3731617647058823e-06, |
| "loss": 3.8748, |
| "step": 1479 |
| }, |
| { |
| "epoch": 5.34, |
| "learning_rate": 2.371323529411765e-06, |
| "loss": 3.8586, |
| "step": 1480 |
| }, |
| { |
| "epoch": 5.35, |
| "learning_rate": 2.369485294117647e-06, |
| "loss": 3.7315, |
| "step": 1481 |
| }, |
| { |
| "epoch": 5.35, |
| "learning_rate": 2.3676470588235297e-06, |
| "loss": 3.8433, |
| "step": 1482 |
| }, |
| { |
| "epoch": 5.35, |
| "learning_rate": 2.365808823529412e-06, |
| "loss": 3.8453, |
| "step": 1483 |
| }, |
| { |
| "epoch": 5.36, |
| "learning_rate": 2.363970588235294e-06, |
| "loss": 3.9179, |
| "step": 1484 |
| }, |
| { |
| "epoch": 5.36, |
| "learning_rate": 2.3621323529411767e-06, |
| "loss": 3.7572, |
| "step": 1485 |
| }, |
| { |
| "epoch": 5.36, |
| "learning_rate": 2.360294117647059e-06, |
| "loss": 3.8036, |
| "step": 1486 |
| }, |
| { |
| "epoch": 5.37, |
| "learning_rate": 2.3584558823529415e-06, |
| "loss": 3.9434, |
| "step": 1487 |
| }, |
| { |
| "epoch": 5.37, |
| "learning_rate": 2.3566176470588237e-06, |
| "loss": 3.7737, |
| "step": 1488 |
| }, |
| { |
| "epoch": 5.38, |
| "learning_rate": 2.354779411764706e-06, |
| "loss": 3.8794, |
| "step": 1489 |
| }, |
| { |
| "epoch": 5.38, |
| "learning_rate": 2.3529411764705885e-06, |
| "loss": 3.7338, |
| "step": 1490 |
| }, |
| { |
| "epoch": 5.38, |
| "learning_rate": 2.3511029411764706e-06, |
| "loss": 3.7767, |
| "step": 1491 |
| }, |
| { |
| "epoch": 5.39, |
| "learning_rate": 2.3492647058823532e-06, |
| "loss": 3.8121, |
| "step": 1492 |
| }, |
| { |
| "epoch": 5.39, |
| "learning_rate": 2.3474264705882354e-06, |
| "loss": 3.8633, |
| "step": 1493 |
| }, |
| { |
| "epoch": 5.39, |
| "learning_rate": 2.345588235294118e-06, |
| "loss": 3.8826, |
| "step": 1494 |
| }, |
| { |
| "epoch": 5.4, |
| "learning_rate": 2.3437500000000002e-06, |
| "loss": 3.734, |
| "step": 1495 |
| }, |
| { |
| "epoch": 5.4, |
| "learning_rate": 2.3419117647058824e-06, |
| "loss": 3.7519, |
| "step": 1496 |
| }, |
| { |
| "epoch": 5.4, |
| "learning_rate": 2.340073529411765e-06, |
| "loss": 3.8873, |
| "step": 1497 |
| }, |
| { |
| "epoch": 5.41, |
| "learning_rate": 2.338235294117647e-06, |
| "loss": 3.8564, |
| "step": 1498 |
| }, |
| { |
| "epoch": 5.41, |
| "learning_rate": 2.33639705882353e-06, |
| "loss": 3.7875, |
| "step": 1499 |
| }, |
| { |
| "epoch": 5.42, |
| "learning_rate": 2.334558823529412e-06, |
| "loss": 3.8389, |
| "step": 1500 |
| }, |
| { |
| "epoch": 5.42, |
| "learning_rate": 2.332720588235294e-06, |
| "loss": 3.8298, |
| "step": 1501 |
| }, |
| { |
| "epoch": 5.42, |
| "learning_rate": 2.3308823529411768e-06, |
| "loss": 3.8263, |
| "step": 1502 |
| }, |
| { |
| "epoch": 5.43, |
| "learning_rate": 2.329044117647059e-06, |
| "loss": 3.8655, |
| "step": 1503 |
| }, |
| { |
| "epoch": 5.43, |
| "learning_rate": 2.3272058823529416e-06, |
| "loss": 3.7766, |
| "step": 1504 |
| }, |
| { |
| "epoch": 5.43, |
| "learning_rate": 2.3253676470588237e-06, |
| "loss": 3.7153, |
| "step": 1505 |
| }, |
| { |
| "epoch": 5.44, |
| "learning_rate": 2.323529411764706e-06, |
| "loss": 3.753, |
| "step": 1506 |
| }, |
| { |
| "epoch": 5.44, |
| "learning_rate": 2.3216911764705885e-06, |
| "loss": 3.8044, |
| "step": 1507 |
| }, |
| { |
| "epoch": 5.44, |
| "learning_rate": 2.3198529411764707e-06, |
| "loss": 3.7462, |
| "step": 1508 |
| }, |
| { |
| "epoch": 5.45, |
| "learning_rate": 2.3180147058823533e-06, |
| "loss": 3.7945, |
| "step": 1509 |
| }, |
| { |
| "epoch": 5.45, |
| "learning_rate": 2.3161764705882355e-06, |
| "loss": 3.8137, |
| "step": 1510 |
| }, |
| { |
| "epoch": 5.45, |
| "learning_rate": 2.314338235294118e-06, |
| "loss": 3.8258, |
| "step": 1511 |
| }, |
| { |
| "epoch": 5.46, |
| "learning_rate": 2.3125000000000003e-06, |
| "loss": 3.851, |
| "step": 1512 |
| }, |
| { |
| "epoch": 5.46, |
| "learning_rate": 2.3106617647058825e-06, |
| "loss": 3.8219, |
| "step": 1513 |
| }, |
| { |
| "epoch": 5.47, |
| "learning_rate": 2.308823529411765e-06, |
| "loss": 3.7904, |
| "step": 1514 |
| }, |
| { |
| "epoch": 5.47, |
| "learning_rate": 2.3069852941176473e-06, |
| "loss": 3.8711, |
| "step": 1515 |
| }, |
| { |
| "epoch": 5.47, |
| "learning_rate": 2.30514705882353e-06, |
| "loss": 3.7317, |
| "step": 1516 |
| }, |
| { |
| "epoch": 5.48, |
| "learning_rate": 2.3033088235294116e-06, |
| "loss": 3.8045, |
| "step": 1517 |
| }, |
| { |
| "epoch": 5.48, |
| "learning_rate": 2.3014705882352943e-06, |
| "loss": 3.7574, |
| "step": 1518 |
| }, |
| { |
| "epoch": 5.48, |
| "learning_rate": 2.2996323529411764e-06, |
| "loss": 3.7033, |
| "step": 1519 |
| }, |
| { |
| "epoch": 5.49, |
| "learning_rate": 2.297794117647059e-06, |
| "loss": 3.8548, |
| "step": 1520 |
| }, |
| { |
| "epoch": 5.49, |
| "learning_rate": 2.2959558823529412e-06, |
| "loss": 3.851, |
| "step": 1521 |
| }, |
| { |
| "epoch": 5.49, |
| "learning_rate": 2.2941176470588234e-06, |
| "loss": 3.8846, |
| "step": 1522 |
| }, |
| { |
| "epoch": 5.5, |
| "learning_rate": 2.292279411764706e-06, |
| "loss": 3.8431, |
| "step": 1523 |
| }, |
| { |
| "epoch": 5.5, |
| "learning_rate": 2.290441176470588e-06, |
| "loss": 3.7661, |
| "step": 1524 |
| }, |
| { |
| "epoch": 5.51, |
| "learning_rate": 2.288602941176471e-06, |
| "loss": 3.8581, |
| "step": 1525 |
| }, |
| { |
| "epoch": 5.51, |
| "learning_rate": 2.286764705882353e-06, |
| "loss": 3.7818, |
| "step": 1526 |
| }, |
| { |
| "epoch": 5.51, |
| "learning_rate": 2.2849264705882356e-06, |
| "loss": 3.7983, |
| "step": 1527 |
| }, |
| { |
| "epoch": 5.52, |
| "learning_rate": 2.2830882352941178e-06, |
| "loss": 3.838, |
| "step": 1528 |
| }, |
| { |
| "epoch": 5.52, |
| "learning_rate": 2.28125e-06, |
| "loss": 3.709, |
| "step": 1529 |
| }, |
| { |
| "epoch": 5.52, |
| "learning_rate": 2.2794117647058826e-06, |
| "loss": 3.8734, |
| "step": 1530 |
| }, |
| { |
| "epoch": 5.53, |
| "learning_rate": 2.2775735294117648e-06, |
| "loss": 3.7633, |
| "step": 1531 |
| }, |
| { |
| "epoch": 5.53, |
| "learning_rate": 2.2757352941176474e-06, |
| "loss": 3.8374, |
| "step": 1532 |
| }, |
| { |
| "epoch": 5.53, |
| "learning_rate": 2.2738970588235296e-06, |
| "loss": 3.8527, |
| "step": 1533 |
| }, |
| { |
| "epoch": 5.54, |
| "learning_rate": 2.2720588235294117e-06, |
| "loss": 3.7979, |
| "step": 1534 |
| }, |
| { |
| "epoch": 5.54, |
| "learning_rate": 2.2702205882352943e-06, |
| "loss": 3.9243, |
| "step": 1535 |
| }, |
| { |
| "epoch": 5.55, |
| "learning_rate": 2.2683823529411765e-06, |
| "loss": 3.9123, |
| "step": 1536 |
| }, |
| { |
| "epoch": 5.55, |
| "learning_rate": 2.266544117647059e-06, |
| "loss": 3.8441, |
| "step": 1537 |
| }, |
| { |
| "epoch": 5.55, |
| "learning_rate": 2.2647058823529413e-06, |
| "loss": 3.6624, |
| "step": 1538 |
| }, |
| { |
| "epoch": 5.56, |
| "learning_rate": 2.262867647058824e-06, |
| "loss": 3.8357, |
| "step": 1539 |
| }, |
| { |
| "epoch": 5.56, |
| "learning_rate": 2.261029411764706e-06, |
| "loss": 3.7374, |
| "step": 1540 |
| }, |
| { |
| "epoch": 5.56, |
| "learning_rate": 2.2591911764705883e-06, |
| "loss": 3.8683, |
| "step": 1541 |
| }, |
| { |
| "epoch": 5.57, |
| "learning_rate": 2.257352941176471e-06, |
| "loss": 3.8525, |
| "step": 1542 |
| }, |
| { |
| "epoch": 5.57, |
| "learning_rate": 2.255514705882353e-06, |
| "loss": 3.8723, |
| "step": 1543 |
| }, |
| { |
| "epoch": 5.57, |
| "learning_rate": 2.2536764705882357e-06, |
| "loss": 3.8028, |
| "step": 1544 |
| }, |
| { |
| "epoch": 5.58, |
| "learning_rate": 2.251838235294118e-06, |
| "loss": 3.7724, |
| "step": 1545 |
| }, |
| { |
| "epoch": 5.58, |
| "learning_rate": 2.25e-06, |
| "loss": 3.7953, |
| "step": 1546 |
| }, |
| { |
| "epoch": 5.58, |
| "learning_rate": 2.2481617647058827e-06, |
| "loss": 3.8992, |
| "step": 1547 |
| }, |
| { |
| "epoch": 5.59, |
| "learning_rate": 2.246323529411765e-06, |
| "loss": 3.7464, |
| "step": 1548 |
| }, |
| { |
| "epoch": 5.59, |
| "learning_rate": 2.2444852941176475e-06, |
| "loss": 3.7829, |
| "step": 1549 |
| }, |
| { |
| "epoch": 5.6, |
| "learning_rate": 2.2426470588235296e-06, |
| "loss": 3.7783, |
| "step": 1550 |
| }, |
| { |
| "epoch": 5.6, |
| "learning_rate": 2.2408088235294122e-06, |
| "loss": 3.7639, |
| "step": 1551 |
| }, |
| { |
| "epoch": 5.6, |
| "learning_rate": 2.2389705882352944e-06, |
| "loss": 3.8702, |
| "step": 1552 |
| }, |
| { |
| "epoch": 5.61, |
| "learning_rate": 2.2371323529411766e-06, |
| "loss": 3.8081, |
| "step": 1553 |
| }, |
| { |
| "epoch": 5.61, |
| "learning_rate": 2.2352941176470592e-06, |
| "loss": 3.732, |
| "step": 1554 |
| }, |
| { |
| "epoch": 5.61, |
| "learning_rate": 2.2334558823529414e-06, |
| "loss": 3.8951, |
| "step": 1555 |
| }, |
| { |
| "epoch": 5.62, |
| "learning_rate": 2.231617647058824e-06, |
| "loss": 3.8567, |
| "step": 1556 |
| }, |
| { |
| "epoch": 5.62, |
| "learning_rate": 2.229779411764706e-06, |
| "loss": 3.9217, |
| "step": 1557 |
| }, |
| { |
| "epoch": 5.62, |
| "learning_rate": 2.2279411764705884e-06, |
| "loss": 3.8078, |
| "step": 1558 |
| }, |
| { |
| "epoch": 5.63, |
| "learning_rate": 2.2261029411764706e-06, |
| "loss": 3.8843, |
| "step": 1559 |
| }, |
| { |
| "epoch": 5.63, |
| "learning_rate": 2.224264705882353e-06, |
| "loss": 3.7829, |
| "step": 1560 |
| }, |
| { |
| "epoch": 5.64, |
| "learning_rate": 2.2224264705882354e-06, |
| "loss": 3.7859, |
| "step": 1561 |
| }, |
| { |
| "epoch": 5.64, |
| "learning_rate": 2.2205882352941175e-06, |
| "loss": 3.7096, |
| "step": 1562 |
| }, |
| { |
| "epoch": 5.64, |
| "learning_rate": 2.21875e-06, |
| "loss": 3.7558, |
| "step": 1563 |
| }, |
| { |
| "epoch": 5.65, |
| "learning_rate": 2.2169117647058823e-06, |
| "loss": 3.7685, |
| "step": 1564 |
| }, |
| { |
| "epoch": 5.65, |
| "learning_rate": 2.215073529411765e-06, |
| "loss": 3.8789, |
| "step": 1565 |
| }, |
| { |
| "epoch": 5.65, |
| "learning_rate": 2.213235294117647e-06, |
| "loss": 3.8418, |
| "step": 1566 |
| }, |
| { |
| "epoch": 5.66, |
| "learning_rate": 2.2113970588235293e-06, |
| "loss": 3.7614, |
| "step": 1567 |
| }, |
| { |
| "epoch": 5.66, |
| "learning_rate": 2.209558823529412e-06, |
| "loss": 3.8484, |
| "step": 1568 |
| }, |
| { |
| "epoch": 5.66, |
| "learning_rate": 2.207720588235294e-06, |
| "loss": 3.8733, |
| "step": 1569 |
| }, |
| { |
| "epoch": 5.67, |
| "learning_rate": 2.2058823529411767e-06, |
| "loss": 3.91, |
| "step": 1570 |
| }, |
| { |
| "epoch": 5.67, |
| "learning_rate": 2.204044117647059e-06, |
| "loss": 3.7435, |
| "step": 1571 |
| }, |
| { |
| "epoch": 5.68, |
| "learning_rate": 2.2022058823529415e-06, |
| "loss": 3.7899, |
| "step": 1572 |
| }, |
| { |
| "epoch": 5.68, |
| "learning_rate": 2.2003676470588237e-06, |
| "loss": 3.8131, |
| "step": 1573 |
| }, |
| { |
| "epoch": 5.68, |
| "learning_rate": 2.198529411764706e-06, |
| "loss": 3.8623, |
| "step": 1574 |
| }, |
| { |
| "epoch": 5.69, |
| "learning_rate": 2.1966911764705885e-06, |
| "loss": 3.9623, |
| "step": 1575 |
| }, |
| { |
| "epoch": 5.69, |
| "learning_rate": 2.1948529411764707e-06, |
| "loss": 3.8614, |
| "step": 1576 |
| }, |
| { |
| "epoch": 5.69, |
| "learning_rate": 2.1930147058823533e-06, |
| "loss": 3.8395, |
| "step": 1577 |
| }, |
| { |
| "epoch": 5.7, |
| "learning_rate": 2.1911764705882354e-06, |
| "loss": 3.8705, |
| "step": 1578 |
| }, |
| { |
| "epoch": 5.7, |
| "learning_rate": 2.1893382352941176e-06, |
| "loss": 3.8814, |
| "step": 1579 |
| }, |
| { |
| "epoch": 5.7, |
| "learning_rate": 2.1875000000000002e-06, |
| "loss": 3.7294, |
| "step": 1580 |
| }, |
| { |
| "epoch": 5.71, |
| "learning_rate": 2.1856617647058824e-06, |
| "loss": 3.7987, |
| "step": 1581 |
| }, |
| { |
| "epoch": 5.71, |
| "learning_rate": 2.183823529411765e-06, |
| "loss": 3.9202, |
| "step": 1582 |
| }, |
| { |
| "epoch": 5.71, |
| "learning_rate": 2.181985294117647e-06, |
| "loss": 3.8419, |
| "step": 1583 |
| }, |
| { |
| "epoch": 5.72, |
| "learning_rate": 2.18014705882353e-06, |
| "loss": 3.8676, |
| "step": 1584 |
| }, |
| { |
| "epoch": 5.72, |
| "learning_rate": 2.178308823529412e-06, |
| "loss": 3.9422, |
| "step": 1585 |
| }, |
| { |
| "epoch": 5.73, |
| "learning_rate": 2.176470588235294e-06, |
| "loss": 3.7685, |
| "step": 1586 |
| }, |
| { |
| "epoch": 5.73, |
| "learning_rate": 2.174632352941177e-06, |
| "loss": 3.8497, |
| "step": 1587 |
| }, |
| { |
| "epoch": 5.73, |
| "learning_rate": 2.172794117647059e-06, |
| "loss": 3.9639, |
| "step": 1588 |
| }, |
| { |
| "epoch": 5.74, |
| "learning_rate": 2.1709558823529416e-06, |
| "loss": 3.8535, |
| "step": 1589 |
| }, |
| { |
| "epoch": 5.74, |
| "learning_rate": 2.1691176470588238e-06, |
| "loss": 3.8169, |
| "step": 1590 |
| }, |
| { |
| "epoch": 5.74, |
| "learning_rate": 2.167279411764706e-06, |
| "loss": 3.7593, |
| "step": 1591 |
| }, |
| { |
| "epoch": 5.75, |
| "learning_rate": 2.1654411764705886e-06, |
| "loss": 3.7814, |
| "step": 1592 |
| }, |
| { |
| "epoch": 5.75, |
| "learning_rate": 2.1636029411764707e-06, |
| "loss": 3.795, |
| "step": 1593 |
| }, |
| { |
| "epoch": 5.75, |
| "learning_rate": 2.1617647058823533e-06, |
| "loss": 3.8272, |
| "step": 1594 |
| }, |
| { |
| "epoch": 5.76, |
| "learning_rate": 2.1599264705882355e-06, |
| "loss": 3.8347, |
| "step": 1595 |
| }, |
| { |
| "epoch": 5.76, |
| "learning_rate": 2.158088235294118e-06, |
| "loss": 3.8092, |
| "step": 1596 |
| }, |
| { |
| "epoch": 5.77, |
| "learning_rate": 2.1562500000000003e-06, |
| "loss": 3.881, |
| "step": 1597 |
| }, |
| { |
| "epoch": 5.77, |
| "learning_rate": 2.1544117647058825e-06, |
| "loss": 3.8513, |
| "step": 1598 |
| }, |
| { |
| "epoch": 5.77, |
| "learning_rate": 2.152573529411765e-06, |
| "loss": 3.8728, |
| "step": 1599 |
| }, |
| { |
| "epoch": 5.78, |
| "learning_rate": 2.1507352941176473e-06, |
| "loss": 3.803, |
| "step": 1600 |
| }, |
| { |
| "epoch": 5.78, |
| "learning_rate": 2.14889705882353e-06, |
| "loss": 3.8366, |
| "step": 1601 |
| }, |
| { |
| "epoch": 5.78, |
| "learning_rate": 2.1470588235294117e-06, |
| "loss": 3.9716, |
| "step": 1602 |
| }, |
| { |
| "epoch": 5.79, |
| "learning_rate": 2.1452205882352943e-06, |
| "loss": 3.7949, |
| "step": 1603 |
| }, |
| { |
| "epoch": 5.79, |
| "learning_rate": 2.1433823529411765e-06, |
| "loss": 3.9464, |
| "step": 1604 |
| }, |
| { |
| "epoch": 5.79, |
| "learning_rate": 2.141544117647059e-06, |
| "loss": 3.7243, |
| "step": 1605 |
| }, |
| { |
| "epoch": 5.8, |
| "learning_rate": 2.1397058823529412e-06, |
| "loss": 3.8275, |
| "step": 1606 |
| }, |
| { |
| "epoch": 5.8, |
| "learning_rate": 2.1378676470588234e-06, |
| "loss": 3.8628, |
| "step": 1607 |
| }, |
| { |
| "epoch": 5.81, |
| "learning_rate": 2.136029411764706e-06, |
| "loss": 3.7648, |
| "step": 1608 |
| }, |
| { |
| "epoch": 5.81, |
| "learning_rate": 2.1341911764705882e-06, |
| "loss": 3.8267, |
| "step": 1609 |
| }, |
| { |
| "epoch": 5.81, |
| "learning_rate": 2.132352941176471e-06, |
| "loss": 3.9208, |
| "step": 1610 |
| }, |
| { |
| "epoch": 5.82, |
| "learning_rate": 2.130514705882353e-06, |
| "loss": 3.741, |
| "step": 1611 |
| }, |
| { |
| "epoch": 5.82, |
| "learning_rate": 2.128676470588235e-06, |
| "loss": 3.8741, |
| "step": 1612 |
| }, |
| { |
| "epoch": 5.82, |
| "learning_rate": 2.126838235294118e-06, |
| "loss": 3.8353, |
| "step": 1613 |
| }, |
| { |
| "epoch": 5.83, |
| "learning_rate": 2.125e-06, |
| "loss": 3.7605, |
| "step": 1614 |
| }, |
| { |
| "epoch": 5.83, |
| "learning_rate": 2.1231617647058826e-06, |
| "loss": 3.8771, |
| "step": 1615 |
| }, |
| { |
| "epoch": 5.83, |
| "learning_rate": 2.1213235294117648e-06, |
| "loss": 3.7695, |
| "step": 1616 |
| }, |
| { |
| "epoch": 5.84, |
| "learning_rate": 2.1194852941176474e-06, |
| "loss": 3.7435, |
| "step": 1617 |
| }, |
| { |
| "epoch": 5.84, |
| "learning_rate": 2.1176470588235296e-06, |
| "loss": 3.8416, |
| "step": 1618 |
| }, |
| { |
| "epoch": 5.84, |
| "learning_rate": 2.1158088235294117e-06, |
| "loss": 3.7762, |
| "step": 1619 |
| }, |
| { |
| "epoch": 5.85, |
| "learning_rate": 2.1139705882352944e-06, |
| "loss": 3.8753, |
| "step": 1620 |
| }, |
| { |
| "epoch": 5.85, |
| "learning_rate": 2.1121323529411765e-06, |
| "loss": 3.8463, |
| "step": 1621 |
| }, |
| { |
| "epoch": 5.86, |
| "learning_rate": 2.110294117647059e-06, |
| "loss": 3.7864, |
| "step": 1622 |
| }, |
| { |
| "epoch": 5.86, |
| "learning_rate": 2.1084558823529413e-06, |
| "loss": 3.8474, |
| "step": 1623 |
| }, |
| { |
| "epoch": 5.86, |
| "learning_rate": 2.1066176470588235e-06, |
| "loss": 3.8671, |
| "step": 1624 |
| }, |
| { |
| "epoch": 5.87, |
| "learning_rate": 2.104779411764706e-06, |
| "loss": 3.8384, |
| "step": 1625 |
| }, |
| { |
| "epoch": 5.87, |
| "learning_rate": 2.1029411764705883e-06, |
| "loss": 3.7705, |
| "step": 1626 |
| }, |
| { |
| "epoch": 5.87, |
| "learning_rate": 2.101102941176471e-06, |
| "loss": 3.8266, |
| "step": 1627 |
| }, |
| { |
| "epoch": 5.88, |
| "learning_rate": 2.099264705882353e-06, |
| "loss": 3.8695, |
| "step": 1628 |
| }, |
| { |
| "epoch": 5.88, |
| "learning_rate": 2.0974264705882357e-06, |
| "loss": 3.7394, |
| "step": 1629 |
| }, |
| { |
| "epoch": 5.88, |
| "learning_rate": 2.095588235294118e-06, |
| "loss": 3.8757, |
| "step": 1630 |
| }, |
| { |
| "epoch": 5.89, |
| "learning_rate": 2.09375e-06, |
| "loss": 3.8877, |
| "step": 1631 |
| }, |
| { |
| "epoch": 5.89, |
| "learning_rate": 2.0919117647058827e-06, |
| "loss": 3.9241, |
| "step": 1632 |
| }, |
| { |
| "epoch": 5.9, |
| "learning_rate": 2.090073529411765e-06, |
| "loss": 3.9198, |
| "step": 1633 |
| }, |
| { |
| "epoch": 5.9, |
| "learning_rate": 2.0882352941176475e-06, |
| "loss": 3.8002, |
| "step": 1634 |
| }, |
| { |
| "epoch": 5.9, |
| "learning_rate": 2.0863970588235297e-06, |
| "loss": 3.8908, |
| "step": 1635 |
| }, |
| { |
| "epoch": 5.91, |
| "learning_rate": 2.084558823529412e-06, |
| "loss": 3.7073, |
| "step": 1636 |
| }, |
| { |
| "epoch": 5.91, |
| "learning_rate": 2.0827205882352944e-06, |
| "loss": 3.8732, |
| "step": 1637 |
| }, |
| { |
| "epoch": 5.91, |
| "learning_rate": 2.0808823529411766e-06, |
| "loss": 3.8899, |
| "step": 1638 |
| }, |
| { |
| "epoch": 5.92, |
| "learning_rate": 2.0790441176470592e-06, |
| "loss": 3.8572, |
| "step": 1639 |
| }, |
| { |
| "epoch": 5.92, |
| "learning_rate": 2.0772058823529414e-06, |
| "loss": 3.8527, |
| "step": 1640 |
| }, |
| { |
| "epoch": 5.92, |
| "learning_rate": 2.075367647058824e-06, |
| "loss": 3.8417, |
| "step": 1641 |
| }, |
| { |
| "epoch": 5.93, |
| "learning_rate": 2.073529411764706e-06, |
| "loss": 3.8419, |
| "step": 1642 |
| }, |
| { |
| "epoch": 5.93, |
| "learning_rate": 2.0716911764705884e-06, |
| "loss": 3.907, |
| "step": 1643 |
| }, |
| { |
| "epoch": 5.94, |
| "learning_rate": 2.0698529411764706e-06, |
| "loss": 3.8896, |
| "step": 1644 |
| }, |
| { |
| "epoch": 5.94, |
| "learning_rate": 2.068014705882353e-06, |
| "loss": 3.7503, |
| "step": 1645 |
| }, |
| { |
| "epoch": 5.94, |
| "learning_rate": 2.0661764705882354e-06, |
| "loss": 3.8586, |
| "step": 1646 |
| }, |
| { |
| "epoch": 5.95, |
| "learning_rate": 2.0643382352941176e-06, |
| "loss": 3.9173, |
| "step": 1647 |
| }, |
| { |
| "epoch": 5.95, |
| "learning_rate": 2.0625e-06, |
| "loss": 3.7068, |
| "step": 1648 |
| }, |
| { |
| "epoch": 5.95, |
| "learning_rate": 2.0606617647058823e-06, |
| "loss": 3.8009, |
| "step": 1649 |
| }, |
| { |
| "epoch": 5.96, |
| "learning_rate": 2.058823529411765e-06, |
| "loss": 3.8202, |
| "step": 1650 |
| }, |
| { |
| "epoch": 5.96, |
| "learning_rate": 2.056985294117647e-06, |
| "loss": 3.7903, |
| "step": 1651 |
| }, |
| { |
| "epoch": 5.96, |
| "learning_rate": 2.0551470588235293e-06, |
| "loss": 3.8178, |
| "step": 1652 |
| }, |
| { |
| "epoch": 5.97, |
| "learning_rate": 2.053308823529412e-06, |
| "loss": 3.8199, |
| "step": 1653 |
| }, |
| { |
| "epoch": 5.97, |
| "learning_rate": 2.051470588235294e-06, |
| "loss": 3.8116, |
| "step": 1654 |
| }, |
| { |
| "epoch": 5.97, |
| "learning_rate": 2.0496323529411767e-06, |
| "loss": 3.8428, |
| "step": 1655 |
| }, |
| { |
| "epoch": 5.98, |
| "learning_rate": 2.047794117647059e-06, |
| "loss": 3.8184, |
| "step": 1656 |
| }, |
| { |
| "epoch": 5.98, |
| "learning_rate": 2.045955882352941e-06, |
| "loss": 3.8504, |
| "step": 1657 |
| }, |
| { |
| "epoch": 5.99, |
| "learning_rate": 2.0441176470588237e-06, |
| "loss": 3.8529, |
| "step": 1658 |
| }, |
| { |
| "epoch": 5.99, |
| "learning_rate": 2.042279411764706e-06, |
| "loss": 3.7947, |
| "step": 1659 |
| }, |
| { |
| "epoch": 5.99, |
| "learning_rate": 2.0404411764705885e-06, |
| "loss": 3.8466, |
| "step": 1660 |
| }, |
| { |
| "epoch": 6.0, |
| "learning_rate": 2.0386029411764707e-06, |
| "loss": 3.8571, |
| "step": 1661 |
| }, |
| { |
| "epoch": 6.0, |
| "learning_rate": 2.0367647058823533e-06, |
| "loss": 3.9425, |
| "step": 1662 |
| }, |
| { |
| "epoch": 6.0, |
| "eval_accuracy": 0.14054054054054055, |
| "eval_loss": 3.786846876144409, |
| "eval_runtime": 142.725, |
| "eval_samples_per_second": 2.592, |
| "eval_steps_per_second": 0.652, |
| "step": 1662 |
| }, |
| { |
| "epoch": 6.0, |
| "learning_rate": 2.0349264705882355e-06, |
| "loss": 3.8003, |
| "step": 1663 |
| }, |
| { |
| "epoch": 6.01, |
| "learning_rate": 2.0330882352941176e-06, |
| "loss": 3.7846, |
| "step": 1664 |
| }, |
| { |
| "epoch": 6.01, |
| "learning_rate": 2.0312500000000002e-06, |
| "loss": 3.8741, |
| "step": 1665 |
| }, |
| { |
| "epoch": 6.01, |
| "learning_rate": 2.0294117647058824e-06, |
| "loss": 3.8627, |
| "step": 1666 |
| }, |
| { |
| "epoch": 6.02, |
| "learning_rate": 2.027573529411765e-06, |
| "loss": 3.8545, |
| "step": 1667 |
| }, |
| { |
| "epoch": 6.02, |
| "learning_rate": 2.0257352941176472e-06, |
| "loss": 3.8277, |
| "step": 1668 |
| }, |
| { |
| "epoch": 6.03, |
| "learning_rate": 2.0238970588235294e-06, |
| "loss": 3.8624, |
| "step": 1669 |
| }, |
| { |
| "epoch": 6.03, |
| "learning_rate": 2.022058823529412e-06, |
| "loss": 3.8949, |
| "step": 1670 |
| }, |
| { |
| "epoch": 6.03, |
| "learning_rate": 2.020220588235294e-06, |
| "loss": 3.8579, |
| "step": 1671 |
| }, |
| { |
| "epoch": 6.04, |
| "learning_rate": 2.018382352941177e-06, |
| "loss": 3.7085, |
| "step": 1672 |
| }, |
| { |
| "epoch": 6.04, |
| "learning_rate": 2.016544117647059e-06, |
| "loss": 3.6744, |
| "step": 1673 |
| }, |
| { |
| "epoch": 6.04, |
| "learning_rate": 2.0147058823529416e-06, |
| "loss": 3.8076, |
| "step": 1674 |
| }, |
| { |
| "epoch": 6.05, |
| "learning_rate": 2.0128676470588238e-06, |
| "loss": 3.6488, |
| "step": 1675 |
| }, |
| { |
| "epoch": 6.05, |
| "learning_rate": 2.011029411764706e-06, |
| "loss": 3.8435, |
| "step": 1676 |
| }, |
| { |
| "epoch": 6.05, |
| "learning_rate": 2.0091911764705886e-06, |
| "loss": 3.787, |
| "step": 1677 |
| }, |
| { |
| "epoch": 6.06, |
| "learning_rate": 2.0073529411764708e-06, |
| "loss": 3.8646, |
| "step": 1678 |
| }, |
| { |
| "epoch": 6.06, |
| "learning_rate": 2.0055147058823534e-06, |
| "loss": 3.7707, |
| "step": 1679 |
| }, |
| { |
| "epoch": 6.06, |
| "learning_rate": 2.0036764705882355e-06, |
| "loss": 3.7303, |
| "step": 1680 |
| }, |
| { |
| "epoch": 6.07, |
| "learning_rate": 2.0018382352941177e-06, |
| "loss": 3.7749, |
| "step": 1681 |
| }, |
| { |
| "epoch": 6.07, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 3.8464, |
| "step": 1682 |
| }, |
| { |
| "epoch": 6.08, |
| "learning_rate": 1.9981617647058825e-06, |
| "loss": 3.8474, |
| "step": 1683 |
| }, |
| { |
| "epoch": 6.08, |
| "learning_rate": 1.996323529411765e-06, |
| "loss": 3.8286, |
| "step": 1684 |
| }, |
| { |
| "epoch": 6.08, |
| "learning_rate": 1.9944852941176473e-06, |
| "loss": 3.8651, |
| "step": 1685 |
| }, |
| { |
| "epoch": 6.09, |
| "learning_rate": 1.99264705882353e-06, |
| "loss": 3.88, |
| "step": 1686 |
| }, |
| { |
| "epoch": 6.09, |
| "learning_rate": 1.9908088235294117e-06, |
| "loss": 3.8216, |
| "step": 1687 |
| }, |
| { |
| "epoch": 6.09, |
| "learning_rate": 1.9889705882352943e-06, |
| "loss": 3.7991, |
| "step": 1688 |
| }, |
| { |
| "epoch": 6.1, |
| "learning_rate": 1.9871323529411765e-06, |
| "loss": 3.8404, |
| "step": 1689 |
| }, |
| { |
| "epoch": 6.1, |
| "learning_rate": 1.985294117647059e-06, |
| "loss": 3.7851, |
| "step": 1690 |
| }, |
| { |
| "epoch": 6.1, |
| "learning_rate": 1.9834558823529413e-06, |
| "loss": 3.835, |
| "step": 1691 |
| }, |
| { |
| "epoch": 6.11, |
| "learning_rate": 1.9816176470588234e-06, |
| "loss": 3.836, |
| "step": 1692 |
| }, |
| { |
| "epoch": 6.11, |
| "learning_rate": 1.979779411764706e-06, |
| "loss": 3.6747, |
| "step": 1693 |
| }, |
| { |
| "epoch": 6.12, |
| "learning_rate": 1.9779411764705882e-06, |
| "loss": 3.7738, |
| "step": 1694 |
| }, |
| { |
| "epoch": 6.12, |
| "learning_rate": 1.976102941176471e-06, |
| "loss": 3.7974, |
| "step": 1695 |
| }, |
| { |
| "epoch": 6.12, |
| "learning_rate": 1.974264705882353e-06, |
| "loss": 3.9257, |
| "step": 1696 |
| }, |
| { |
| "epoch": 6.13, |
| "learning_rate": 1.972426470588235e-06, |
| "loss": 3.7793, |
| "step": 1697 |
| }, |
| { |
| "epoch": 6.13, |
| "learning_rate": 1.970588235294118e-06, |
| "loss": 3.89, |
| "step": 1698 |
| }, |
| { |
| "epoch": 6.13, |
| "learning_rate": 1.96875e-06, |
| "loss": 3.992, |
| "step": 1699 |
| }, |
| { |
| "epoch": 6.14, |
| "learning_rate": 1.9669117647058826e-06, |
| "loss": 3.8987, |
| "step": 1700 |
| }, |
| { |
| "epoch": 6.14, |
| "learning_rate": 1.9650735294117648e-06, |
| "loss": 3.894, |
| "step": 1701 |
| }, |
| { |
| "epoch": 6.14, |
| "learning_rate": 1.9632352941176474e-06, |
| "loss": 3.8464, |
| "step": 1702 |
| }, |
| { |
| "epoch": 6.15, |
| "learning_rate": 1.9613970588235296e-06, |
| "loss": 3.8553, |
| "step": 1703 |
| }, |
| { |
| "epoch": 6.15, |
| "learning_rate": 1.9595588235294118e-06, |
| "loss": 3.864, |
| "step": 1704 |
| }, |
| { |
| "epoch": 6.16, |
| "learning_rate": 1.9577205882352944e-06, |
| "loss": 3.858, |
| "step": 1705 |
| }, |
| { |
| "epoch": 6.16, |
| "learning_rate": 1.9558823529411766e-06, |
| "loss": 3.8453, |
| "step": 1706 |
| }, |
| { |
| "epoch": 6.16, |
| "learning_rate": 1.954044117647059e-06, |
| "loss": 3.8767, |
| "step": 1707 |
| }, |
| { |
| "epoch": 6.17, |
| "learning_rate": 1.9522058823529413e-06, |
| "loss": 3.9418, |
| "step": 1708 |
| }, |
| { |
| "epoch": 6.17, |
| "learning_rate": 1.9503676470588235e-06, |
| "loss": 3.8135, |
| "step": 1709 |
| }, |
| { |
| "epoch": 6.17, |
| "learning_rate": 1.948529411764706e-06, |
| "loss": 3.7664, |
| "step": 1710 |
| }, |
| { |
| "epoch": 6.18, |
| "learning_rate": 1.9466911764705883e-06, |
| "loss": 3.8857, |
| "step": 1711 |
| }, |
| { |
| "epoch": 6.18, |
| "learning_rate": 1.944852941176471e-06, |
| "loss": 3.8334, |
| "step": 1712 |
| }, |
| { |
| "epoch": 6.18, |
| "learning_rate": 1.943014705882353e-06, |
| "loss": 3.8144, |
| "step": 1713 |
| }, |
| { |
| "epoch": 6.19, |
| "learning_rate": 1.9411764705882353e-06, |
| "loss": 3.8831, |
| "step": 1714 |
| }, |
| { |
| "epoch": 6.19, |
| "learning_rate": 1.939338235294118e-06, |
| "loss": 3.8263, |
| "step": 1715 |
| }, |
| { |
| "epoch": 6.19, |
| "learning_rate": 1.9375e-06, |
| "loss": 3.7624, |
| "step": 1716 |
| }, |
| { |
| "epoch": 6.2, |
| "learning_rate": 1.9356617647058827e-06, |
| "loss": 3.884, |
| "step": 1717 |
| }, |
| { |
| "epoch": 6.2, |
| "learning_rate": 1.933823529411765e-06, |
| "loss": 3.6998, |
| "step": 1718 |
| }, |
| { |
| "epoch": 6.21, |
| "learning_rate": 1.9319852941176475e-06, |
| "loss": 3.8494, |
| "step": 1719 |
| }, |
| { |
| "epoch": 6.21, |
| "learning_rate": 1.9301470588235297e-06, |
| "loss": 3.8094, |
| "step": 1720 |
| }, |
| { |
| "epoch": 6.21, |
| "learning_rate": 1.928308823529412e-06, |
| "loss": 3.9022, |
| "step": 1721 |
| }, |
| { |
| "epoch": 6.22, |
| "learning_rate": 1.9264705882352945e-06, |
| "loss": 3.8381, |
| "step": 1722 |
| }, |
| { |
| "epoch": 6.22, |
| "learning_rate": 1.9246323529411766e-06, |
| "loss": 3.751, |
| "step": 1723 |
| }, |
| { |
| "epoch": 6.22, |
| "learning_rate": 1.9227941176470592e-06, |
| "loss": 3.8755, |
| "step": 1724 |
| }, |
| { |
| "epoch": 6.23, |
| "learning_rate": 1.9209558823529414e-06, |
| "loss": 3.7505, |
| "step": 1725 |
| }, |
| { |
| "epoch": 6.23, |
| "learning_rate": 1.9191176470588236e-06, |
| "loss": 3.7719, |
| "step": 1726 |
| }, |
| { |
| "epoch": 6.23, |
| "learning_rate": 1.9172794117647062e-06, |
| "loss": 3.712, |
| "step": 1727 |
| }, |
| { |
| "epoch": 6.24, |
| "learning_rate": 1.9154411764705884e-06, |
| "loss": 3.8366, |
| "step": 1728 |
| }, |
| { |
| "epoch": 6.24, |
| "learning_rate": 1.9136029411764706e-06, |
| "loss": 3.8508, |
| "step": 1729 |
| }, |
| { |
| "epoch": 6.25, |
| "learning_rate": 1.9117647058823528e-06, |
| "loss": 3.7747, |
| "step": 1730 |
| }, |
| { |
| "epoch": 6.25, |
| "learning_rate": 1.9099264705882354e-06, |
| "loss": 3.8291, |
| "step": 1731 |
| }, |
| { |
| "epoch": 6.25, |
| "learning_rate": 1.9080882352941176e-06, |
| "loss": 3.8345, |
| "step": 1732 |
| }, |
| { |
| "epoch": 6.26, |
| "learning_rate": 1.90625e-06, |
| "loss": 3.8688, |
| "step": 1733 |
| }, |
| { |
| "epoch": 6.26, |
| "learning_rate": 1.9044117647058824e-06, |
| "loss": 3.9339, |
| "step": 1734 |
| }, |
| { |
| "epoch": 6.26, |
| "learning_rate": 1.9025735294117648e-06, |
| "loss": 3.8923, |
| "step": 1735 |
| }, |
| { |
| "epoch": 6.27, |
| "learning_rate": 1.9007352941176471e-06, |
| "loss": 3.8169, |
| "step": 1736 |
| }, |
| { |
| "epoch": 6.27, |
| "learning_rate": 1.8988970588235295e-06, |
| "loss": 3.7106, |
| "step": 1737 |
| }, |
| { |
| "epoch": 6.27, |
| "learning_rate": 1.897058823529412e-06, |
| "loss": 3.8349, |
| "step": 1738 |
| }, |
| { |
| "epoch": 6.28, |
| "learning_rate": 1.8952205882352941e-06, |
| "loss": 3.9044, |
| "step": 1739 |
| }, |
| { |
| "epoch": 6.28, |
| "learning_rate": 1.8933823529411765e-06, |
| "loss": 3.8336, |
| "step": 1740 |
| }, |
| { |
| "epoch": 6.29, |
| "learning_rate": 1.891544117647059e-06, |
| "loss": 3.7836, |
| "step": 1741 |
| }, |
| { |
| "epoch": 6.29, |
| "learning_rate": 1.8897058823529413e-06, |
| "loss": 3.8564, |
| "step": 1742 |
| }, |
| { |
| "epoch": 6.29, |
| "learning_rate": 1.8878676470588237e-06, |
| "loss": 3.7502, |
| "step": 1743 |
| }, |
| { |
| "epoch": 6.3, |
| "learning_rate": 1.886029411764706e-06, |
| "loss": 3.7814, |
| "step": 1744 |
| }, |
| { |
| "epoch": 6.3, |
| "learning_rate": 1.8841911764705883e-06, |
| "loss": 3.8226, |
| "step": 1745 |
| }, |
| { |
| "epoch": 6.3, |
| "learning_rate": 1.8823529411764707e-06, |
| "loss": 3.7096, |
| "step": 1746 |
| }, |
| { |
| "epoch": 6.31, |
| "learning_rate": 1.880514705882353e-06, |
| "loss": 3.8493, |
| "step": 1747 |
| }, |
| { |
| "epoch": 6.31, |
| "learning_rate": 1.8786764705882355e-06, |
| "loss": 3.7681, |
| "step": 1748 |
| }, |
| { |
| "epoch": 6.31, |
| "learning_rate": 1.8768382352941179e-06, |
| "loss": 3.8937, |
| "step": 1749 |
| }, |
| { |
| "epoch": 6.32, |
| "learning_rate": 1.8750000000000003e-06, |
| "loss": 3.7546, |
| "step": 1750 |
| }, |
| { |
| "epoch": 6.32, |
| "learning_rate": 1.8731617647058824e-06, |
| "loss": 3.8569, |
| "step": 1751 |
| }, |
| { |
| "epoch": 6.32, |
| "learning_rate": 1.8713235294117648e-06, |
| "loss": 3.7643, |
| "step": 1752 |
| }, |
| { |
| "epoch": 6.33, |
| "learning_rate": 1.8694852941176472e-06, |
| "loss": 3.8164, |
| "step": 1753 |
| }, |
| { |
| "epoch": 6.33, |
| "learning_rate": 1.8676470588235296e-06, |
| "loss": 3.8028, |
| "step": 1754 |
| }, |
| { |
| "epoch": 6.34, |
| "learning_rate": 1.865808823529412e-06, |
| "loss": 3.7994, |
| "step": 1755 |
| }, |
| { |
| "epoch": 6.34, |
| "learning_rate": 1.8639705882352942e-06, |
| "loss": 3.6638, |
| "step": 1756 |
| }, |
| { |
| "epoch": 6.34, |
| "learning_rate": 1.8621323529411766e-06, |
| "loss": 3.8002, |
| "step": 1757 |
| }, |
| { |
| "epoch": 6.35, |
| "learning_rate": 1.860294117647059e-06, |
| "loss": 3.8703, |
| "step": 1758 |
| }, |
| { |
| "epoch": 6.35, |
| "learning_rate": 1.8584558823529414e-06, |
| "loss": 3.8682, |
| "step": 1759 |
| }, |
| { |
| "epoch": 6.35, |
| "learning_rate": 1.8566176470588238e-06, |
| "loss": 3.9009, |
| "step": 1760 |
| }, |
| { |
| "epoch": 6.36, |
| "learning_rate": 1.8547794117647062e-06, |
| "loss": 3.7888, |
| "step": 1761 |
| }, |
| { |
| "epoch": 6.36, |
| "learning_rate": 1.8529411764705884e-06, |
| "loss": 3.7294, |
| "step": 1762 |
| }, |
| { |
| "epoch": 6.36, |
| "learning_rate": 1.8511029411764708e-06, |
| "loss": 3.872, |
| "step": 1763 |
| }, |
| { |
| "epoch": 6.37, |
| "learning_rate": 1.8492647058823532e-06, |
| "loss": 3.861, |
| "step": 1764 |
| }, |
| { |
| "epoch": 6.37, |
| "learning_rate": 1.8474264705882356e-06, |
| "loss": 3.8371, |
| "step": 1765 |
| }, |
| { |
| "epoch": 6.38, |
| "learning_rate": 1.845588235294118e-06, |
| "loss": 3.8096, |
| "step": 1766 |
| }, |
| { |
| "epoch": 6.38, |
| "learning_rate": 1.8437500000000003e-06, |
| "loss": 3.7628, |
| "step": 1767 |
| }, |
| { |
| "epoch": 6.38, |
| "learning_rate": 1.8419117647058825e-06, |
| "loss": 3.6311, |
| "step": 1768 |
| }, |
| { |
| "epoch": 6.39, |
| "learning_rate": 1.840073529411765e-06, |
| "loss": 3.7765, |
| "step": 1769 |
| }, |
| { |
| "epoch": 6.39, |
| "learning_rate": 1.8382352941176473e-06, |
| "loss": 3.7715, |
| "step": 1770 |
| }, |
| { |
| "epoch": 6.39, |
| "learning_rate": 1.8363970588235297e-06, |
| "loss": 3.8914, |
| "step": 1771 |
| }, |
| { |
| "epoch": 6.4, |
| "learning_rate": 1.8345588235294117e-06, |
| "loss": 3.8217, |
| "step": 1772 |
| }, |
| { |
| "epoch": 6.4, |
| "learning_rate": 1.832720588235294e-06, |
| "loss": 3.8074, |
| "step": 1773 |
| }, |
| { |
| "epoch": 6.4, |
| "learning_rate": 1.8308823529411765e-06, |
| "loss": 3.7807, |
| "step": 1774 |
| }, |
| { |
| "epoch": 6.41, |
| "learning_rate": 1.8290441176470589e-06, |
| "loss": 3.7765, |
| "step": 1775 |
| }, |
| { |
| "epoch": 6.41, |
| "learning_rate": 1.8272058823529413e-06, |
| "loss": 3.8895, |
| "step": 1776 |
| }, |
| { |
| "epoch": 6.42, |
| "learning_rate": 1.8253676470588237e-06, |
| "loss": 3.9043, |
| "step": 1777 |
| }, |
| { |
| "epoch": 6.42, |
| "learning_rate": 1.8235294117647058e-06, |
| "loss": 3.7651, |
| "step": 1778 |
| }, |
| { |
| "epoch": 6.42, |
| "learning_rate": 1.8216911764705882e-06, |
| "loss": 3.8798, |
| "step": 1779 |
| }, |
| { |
| "epoch": 6.43, |
| "learning_rate": 1.8198529411764706e-06, |
| "loss": 3.7807, |
| "step": 1780 |
| }, |
| { |
| "epoch": 6.43, |
| "learning_rate": 1.818014705882353e-06, |
| "loss": 3.8245, |
| "step": 1781 |
| }, |
| { |
| "epoch": 6.43, |
| "learning_rate": 1.8161764705882354e-06, |
| "loss": 3.8564, |
| "step": 1782 |
| }, |
| { |
| "epoch": 6.44, |
| "learning_rate": 1.8143382352941178e-06, |
| "loss": 3.9185, |
| "step": 1783 |
| }, |
| { |
| "epoch": 6.44, |
| "learning_rate": 1.8125e-06, |
| "loss": 3.9712, |
| "step": 1784 |
| }, |
| { |
| "epoch": 6.44, |
| "learning_rate": 1.8106617647058824e-06, |
| "loss": 3.771, |
| "step": 1785 |
| }, |
| { |
| "epoch": 6.45, |
| "learning_rate": 1.8088235294117648e-06, |
| "loss": 3.7597, |
| "step": 1786 |
| }, |
| { |
| "epoch": 6.45, |
| "learning_rate": 1.8069852941176472e-06, |
| "loss": 3.949, |
| "step": 1787 |
| }, |
| { |
| "epoch": 6.45, |
| "learning_rate": 1.8051470588235296e-06, |
| "loss": 3.746, |
| "step": 1788 |
| }, |
| { |
| "epoch": 6.46, |
| "learning_rate": 1.803308823529412e-06, |
| "loss": 3.7631, |
| "step": 1789 |
| }, |
| { |
| "epoch": 6.46, |
| "learning_rate": 1.8014705882352942e-06, |
| "loss": 3.6724, |
| "step": 1790 |
| }, |
| { |
| "epoch": 6.47, |
| "learning_rate": 1.7996323529411766e-06, |
| "loss": 3.8181, |
| "step": 1791 |
| }, |
| { |
| "epoch": 6.47, |
| "learning_rate": 1.797794117647059e-06, |
| "loss": 3.6771, |
| "step": 1792 |
| }, |
| { |
| "epoch": 6.47, |
| "learning_rate": 1.7959558823529414e-06, |
| "loss": 3.8023, |
| "step": 1793 |
| }, |
| { |
| "epoch": 6.48, |
| "learning_rate": 1.7941176470588238e-06, |
| "loss": 3.6396, |
| "step": 1794 |
| }, |
| { |
| "epoch": 6.48, |
| "learning_rate": 1.7922794117647061e-06, |
| "loss": 3.8385, |
| "step": 1795 |
| }, |
| { |
| "epoch": 6.48, |
| "learning_rate": 1.7904411764705883e-06, |
| "loss": 3.859, |
| "step": 1796 |
| }, |
| { |
| "epoch": 6.49, |
| "learning_rate": 1.7886029411764707e-06, |
| "loss": 3.8541, |
| "step": 1797 |
| }, |
| { |
| "epoch": 6.49, |
| "learning_rate": 1.7867647058823531e-06, |
| "loss": 3.8321, |
| "step": 1798 |
| }, |
| { |
| "epoch": 6.49, |
| "learning_rate": 1.7849264705882355e-06, |
| "loss": 3.8423, |
| "step": 1799 |
| }, |
| { |
| "epoch": 6.5, |
| "learning_rate": 1.783088235294118e-06, |
| "loss": 3.7461, |
| "step": 1800 |
| }, |
| { |
| "epoch": 6.5, |
| "learning_rate": 1.78125e-06, |
| "loss": 3.8433, |
| "step": 1801 |
| }, |
| { |
| "epoch": 6.51, |
| "learning_rate": 1.7794117647058825e-06, |
| "loss": 3.7245, |
| "step": 1802 |
| }, |
| { |
| "epoch": 6.51, |
| "learning_rate": 1.7775735294117649e-06, |
| "loss": 3.8481, |
| "step": 1803 |
| }, |
| { |
| "epoch": 6.51, |
| "learning_rate": 1.7757352941176473e-06, |
| "loss": 3.7244, |
| "step": 1804 |
| }, |
| { |
| "epoch": 6.52, |
| "learning_rate": 1.7738970588235297e-06, |
| "loss": 3.8453, |
| "step": 1805 |
| }, |
| { |
| "epoch": 6.52, |
| "learning_rate": 1.772058823529412e-06, |
| "loss": 3.8145, |
| "step": 1806 |
| }, |
| { |
| "epoch": 6.52, |
| "learning_rate": 1.7702205882352943e-06, |
| "loss": 3.7138, |
| "step": 1807 |
| }, |
| { |
| "epoch": 6.53, |
| "learning_rate": 1.7683823529411767e-06, |
| "loss": 3.7176, |
| "step": 1808 |
| }, |
| { |
| "epoch": 6.53, |
| "learning_rate": 1.766544117647059e-06, |
| "loss": 3.7195, |
| "step": 1809 |
| }, |
| { |
| "epoch": 6.53, |
| "learning_rate": 1.7647058823529414e-06, |
| "loss": 3.8094, |
| "step": 1810 |
| }, |
| { |
| "epoch": 6.54, |
| "learning_rate": 1.7628676470588238e-06, |
| "loss": 3.7769, |
| "step": 1811 |
| }, |
| { |
| "epoch": 6.54, |
| "learning_rate": 1.7610294117647062e-06, |
| "loss": 3.8893, |
| "step": 1812 |
| }, |
| { |
| "epoch": 6.55, |
| "learning_rate": 1.7591911764705884e-06, |
| "loss": 3.8972, |
| "step": 1813 |
| }, |
| { |
| "epoch": 6.55, |
| "learning_rate": 1.7573529411764706e-06, |
| "loss": 3.7307, |
| "step": 1814 |
| }, |
| { |
| "epoch": 6.55, |
| "learning_rate": 1.755514705882353e-06, |
| "loss": 3.8761, |
| "step": 1815 |
| }, |
| { |
| "epoch": 6.56, |
| "learning_rate": 1.7536764705882354e-06, |
| "loss": 3.8384, |
| "step": 1816 |
| }, |
| { |
| "epoch": 6.56, |
| "learning_rate": 1.7518382352941176e-06, |
| "loss": 3.8063, |
| "step": 1817 |
| }, |
| { |
| "epoch": 6.56, |
| "learning_rate": 1.75e-06, |
| "loss": 3.8318, |
| "step": 1818 |
| }, |
| { |
| "epoch": 6.57, |
| "learning_rate": 1.7481617647058824e-06, |
| "loss": 3.7415, |
| "step": 1819 |
| }, |
| { |
| "epoch": 6.57, |
| "learning_rate": 1.7463235294117648e-06, |
| "loss": 3.7832, |
| "step": 1820 |
| }, |
| { |
| "epoch": 6.57, |
| "learning_rate": 1.7444852941176472e-06, |
| "loss": 3.7597, |
| "step": 1821 |
| }, |
| { |
| "epoch": 6.58, |
| "learning_rate": 1.7426470588235296e-06, |
| "loss": 3.7841, |
| "step": 1822 |
| }, |
| { |
| "epoch": 6.58, |
| "learning_rate": 1.7408088235294117e-06, |
| "loss": 3.8429, |
| "step": 1823 |
| }, |
| { |
| "epoch": 6.58, |
| "learning_rate": 1.7389705882352941e-06, |
| "loss": 3.7655, |
| "step": 1824 |
| }, |
| { |
| "epoch": 6.59, |
| "learning_rate": 1.7371323529411765e-06, |
| "loss": 3.8168, |
| "step": 1825 |
| }, |
| { |
| "epoch": 6.59, |
| "learning_rate": 1.735294117647059e-06, |
| "loss": 3.7262, |
| "step": 1826 |
| }, |
| { |
| "epoch": 6.6, |
| "learning_rate": 1.7334558823529413e-06, |
| "loss": 3.8554, |
| "step": 1827 |
| }, |
| { |
| "epoch": 6.6, |
| "learning_rate": 1.7316176470588237e-06, |
| "loss": 3.8066, |
| "step": 1828 |
| }, |
| { |
| "epoch": 6.6, |
| "learning_rate": 1.729779411764706e-06, |
| "loss": 3.6789, |
| "step": 1829 |
| }, |
| { |
| "epoch": 6.61, |
| "learning_rate": 1.7279411764705883e-06, |
| "loss": 3.7543, |
| "step": 1830 |
| }, |
| { |
| "epoch": 6.61, |
| "learning_rate": 1.7261029411764707e-06, |
| "loss": 3.7824, |
| "step": 1831 |
| }, |
| { |
| "epoch": 6.61, |
| "learning_rate": 1.724264705882353e-06, |
| "loss": 3.6629, |
| "step": 1832 |
| }, |
| { |
| "epoch": 6.62, |
| "learning_rate": 1.7224264705882355e-06, |
| "loss": 3.7854, |
| "step": 1833 |
| }, |
| { |
| "epoch": 6.62, |
| "learning_rate": 1.7205882352941179e-06, |
| "loss": 3.7961, |
| "step": 1834 |
| }, |
| { |
| "epoch": 6.62, |
| "learning_rate": 1.71875e-06, |
| "loss": 3.7546, |
| "step": 1835 |
| }, |
| { |
| "epoch": 6.63, |
| "learning_rate": 1.7169117647058825e-06, |
| "loss": 3.8099, |
| "step": 1836 |
| }, |
| { |
| "epoch": 6.63, |
| "learning_rate": 1.7150735294117649e-06, |
| "loss": 3.7741, |
| "step": 1837 |
| }, |
| { |
| "epoch": 6.64, |
| "learning_rate": 1.7132352941176472e-06, |
| "loss": 3.693, |
| "step": 1838 |
| }, |
| { |
| "epoch": 6.64, |
| "learning_rate": 1.7113970588235296e-06, |
| "loss": 3.7291, |
| "step": 1839 |
| }, |
| { |
| "epoch": 6.64, |
| "learning_rate": 1.709558823529412e-06, |
| "loss": 3.903, |
| "step": 1840 |
| }, |
| { |
| "epoch": 6.65, |
| "learning_rate": 1.7077205882352942e-06, |
| "loss": 3.8132, |
| "step": 1841 |
| }, |
| { |
| "epoch": 6.65, |
| "learning_rate": 1.7058823529411766e-06, |
| "loss": 3.6792, |
| "step": 1842 |
| }, |
| { |
| "epoch": 6.65, |
| "learning_rate": 1.704044117647059e-06, |
| "loss": 3.8166, |
| "step": 1843 |
| }, |
| { |
| "epoch": 6.66, |
| "learning_rate": 1.7022058823529414e-06, |
| "loss": 3.8264, |
| "step": 1844 |
| }, |
| { |
| "epoch": 6.66, |
| "learning_rate": 1.7003676470588238e-06, |
| "loss": 3.689, |
| "step": 1845 |
| }, |
| { |
| "epoch": 6.66, |
| "learning_rate": 1.698529411764706e-06, |
| "loss": 3.7161, |
| "step": 1846 |
| }, |
| { |
| "epoch": 6.67, |
| "learning_rate": 1.6966911764705884e-06, |
| "loss": 3.7997, |
| "step": 1847 |
| }, |
| { |
| "epoch": 6.67, |
| "learning_rate": 1.6948529411764708e-06, |
| "loss": 3.6926, |
| "step": 1848 |
| }, |
| { |
| "epoch": 6.68, |
| "learning_rate": 1.6930147058823532e-06, |
| "loss": 3.7508, |
| "step": 1849 |
| }, |
| { |
| "epoch": 6.68, |
| "learning_rate": 1.6911764705882356e-06, |
| "loss": 3.7327, |
| "step": 1850 |
| }, |
| { |
| "epoch": 6.68, |
| "learning_rate": 1.689338235294118e-06, |
| "loss": 3.6809, |
| "step": 1851 |
| }, |
| { |
| "epoch": 6.69, |
| "learning_rate": 1.6875000000000001e-06, |
| "loss": 3.794, |
| "step": 1852 |
| }, |
| { |
| "epoch": 6.69, |
| "learning_rate": 1.6856617647058825e-06, |
| "loss": 3.7229, |
| "step": 1853 |
| }, |
| { |
| "epoch": 6.69, |
| "learning_rate": 1.683823529411765e-06, |
| "loss": 3.8143, |
| "step": 1854 |
| }, |
| { |
| "epoch": 6.7, |
| "learning_rate": 1.6819852941176473e-06, |
| "loss": 3.8186, |
| "step": 1855 |
| }, |
| { |
| "epoch": 6.7, |
| "learning_rate": 1.6801470588235297e-06, |
| "loss": 3.8055, |
| "step": 1856 |
| }, |
| { |
| "epoch": 6.7, |
| "learning_rate": 1.6783088235294117e-06, |
| "loss": 3.8658, |
| "step": 1857 |
| }, |
| { |
| "epoch": 6.71, |
| "learning_rate": 1.676470588235294e-06, |
| "loss": 3.7594, |
| "step": 1858 |
| }, |
| { |
| "epoch": 6.71, |
| "learning_rate": 1.6746323529411765e-06, |
| "loss": 3.7909, |
| "step": 1859 |
| }, |
| { |
| "epoch": 6.71, |
| "learning_rate": 1.6727941176470589e-06, |
| "loss": 3.6839, |
| "step": 1860 |
| }, |
| { |
| "epoch": 6.72, |
| "learning_rate": 1.6709558823529413e-06, |
| "loss": 3.6973, |
| "step": 1861 |
| }, |
| { |
| "epoch": 6.72, |
| "learning_rate": 1.6691176470588235e-06, |
| "loss": 3.7935, |
| "step": 1862 |
| }, |
| { |
| "epoch": 6.73, |
| "learning_rate": 1.6672794117647059e-06, |
| "loss": 3.8034, |
| "step": 1863 |
| }, |
| { |
| "epoch": 6.73, |
| "learning_rate": 1.6654411764705883e-06, |
| "loss": 3.8299, |
| "step": 1864 |
| }, |
| { |
| "epoch": 6.73, |
| "learning_rate": 1.6636029411764707e-06, |
| "loss": 3.8731, |
| "step": 1865 |
| }, |
| { |
| "epoch": 6.74, |
| "learning_rate": 1.661764705882353e-06, |
| "loss": 3.8029, |
| "step": 1866 |
| }, |
| { |
| "epoch": 6.74, |
| "learning_rate": 1.6599264705882354e-06, |
| "loss": 3.859, |
| "step": 1867 |
| }, |
| { |
| "epoch": 6.74, |
| "learning_rate": 1.6580882352941176e-06, |
| "loss": 3.7978, |
| "step": 1868 |
| }, |
| { |
| "epoch": 6.75, |
| "learning_rate": 1.65625e-06, |
| "loss": 3.817, |
| "step": 1869 |
| }, |
| { |
| "epoch": 6.75, |
| "learning_rate": 1.6544117647058824e-06, |
| "loss": 3.7914, |
| "step": 1870 |
| }, |
| { |
| "epoch": 6.75, |
| "learning_rate": 1.6525735294117648e-06, |
| "loss": 3.742, |
| "step": 1871 |
| }, |
| { |
| "epoch": 6.76, |
| "learning_rate": 1.6507352941176472e-06, |
| "loss": 3.8804, |
| "step": 1872 |
| }, |
| { |
| "epoch": 6.76, |
| "learning_rate": 1.6488970588235296e-06, |
| "loss": 3.7164, |
| "step": 1873 |
| }, |
| { |
| "epoch": 6.77, |
| "learning_rate": 1.6470588235294118e-06, |
| "loss": 3.8845, |
| "step": 1874 |
| }, |
| { |
| "epoch": 6.77, |
| "learning_rate": 1.6452205882352942e-06, |
| "loss": 3.77, |
| "step": 1875 |
| }, |
| { |
| "epoch": 6.77, |
| "learning_rate": 1.6433823529411766e-06, |
| "loss": 3.7618, |
| "step": 1876 |
| }, |
| { |
| "epoch": 6.78, |
| "learning_rate": 1.641544117647059e-06, |
| "loss": 3.8907, |
| "step": 1877 |
| }, |
| { |
| "epoch": 6.78, |
| "learning_rate": 1.6397058823529414e-06, |
| "loss": 3.7796, |
| "step": 1878 |
| }, |
| { |
| "epoch": 6.78, |
| "learning_rate": 1.6378676470588238e-06, |
| "loss": 3.8757, |
| "step": 1879 |
| }, |
| { |
| "epoch": 6.79, |
| "learning_rate": 1.636029411764706e-06, |
| "loss": 3.8229, |
| "step": 1880 |
| }, |
| { |
| "epoch": 6.79, |
| "learning_rate": 1.6341911764705883e-06, |
| "loss": 3.7782, |
| "step": 1881 |
| }, |
| { |
| "epoch": 6.79, |
| "learning_rate": 1.6323529411764707e-06, |
| "loss": 3.7979, |
| "step": 1882 |
| }, |
| { |
| "epoch": 6.8, |
| "learning_rate": 1.6305147058823531e-06, |
| "loss": 3.6994, |
| "step": 1883 |
| }, |
| { |
| "epoch": 6.8, |
| "learning_rate": 1.6286764705882355e-06, |
| "loss": 3.7792, |
| "step": 1884 |
| }, |
| { |
| "epoch": 6.81, |
| "learning_rate": 1.626838235294118e-06, |
| "loss": 3.8666, |
| "step": 1885 |
| }, |
| { |
| "epoch": 6.81, |
| "learning_rate": 1.6250000000000001e-06, |
| "loss": 3.7292, |
| "step": 1886 |
| }, |
| { |
| "epoch": 6.81, |
| "learning_rate": 1.6231617647058825e-06, |
| "loss": 3.7613, |
| "step": 1887 |
| }, |
| { |
| "epoch": 6.82, |
| "learning_rate": 1.621323529411765e-06, |
| "loss": 3.7594, |
| "step": 1888 |
| }, |
| { |
| "epoch": 6.82, |
| "learning_rate": 1.6194852941176473e-06, |
| "loss": 3.7984, |
| "step": 1889 |
| }, |
| { |
| "epoch": 6.82, |
| "learning_rate": 1.6176470588235297e-06, |
| "loss": 3.7251, |
| "step": 1890 |
| }, |
| { |
| "epoch": 6.83, |
| "learning_rate": 1.615808823529412e-06, |
| "loss": 3.7351, |
| "step": 1891 |
| }, |
| { |
| "epoch": 6.83, |
| "learning_rate": 1.6139705882352943e-06, |
| "loss": 3.745, |
| "step": 1892 |
| }, |
| { |
| "epoch": 6.83, |
| "learning_rate": 1.6121323529411767e-06, |
| "loss": 3.6524, |
| "step": 1893 |
| }, |
| { |
| "epoch": 6.84, |
| "learning_rate": 1.610294117647059e-06, |
| "loss": 3.8404, |
| "step": 1894 |
| }, |
| { |
| "epoch": 6.84, |
| "learning_rate": 1.6084558823529415e-06, |
| "loss": 3.7618, |
| "step": 1895 |
| }, |
| { |
| "epoch": 6.84, |
| "learning_rate": 1.6066176470588239e-06, |
| "loss": 3.7798, |
| "step": 1896 |
| }, |
| { |
| "epoch": 6.85, |
| "learning_rate": 1.604779411764706e-06, |
| "loss": 3.8866, |
| "step": 1897 |
| }, |
| { |
| "epoch": 6.85, |
| "learning_rate": 1.6029411764705884e-06, |
| "loss": 3.7678, |
| "step": 1898 |
| }, |
| { |
| "epoch": 6.86, |
| "learning_rate": 1.6011029411764706e-06, |
| "loss": 3.7194, |
| "step": 1899 |
| }, |
| { |
| "epoch": 6.86, |
| "learning_rate": 1.599264705882353e-06, |
| "loss": 3.8635, |
| "step": 1900 |
| }, |
| { |
| "epoch": 6.86, |
| "learning_rate": 1.5974264705882352e-06, |
| "loss": 3.8417, |
| "step": 1901 |
| }, |
| { |
| "epoch": 6.87, |
| "learning_rate": 1.5955882352941176e-06, |
| "loss": 3.7795, |
| "step": 1902 |
| }, |
| { |
| "epoch": 6.87, |
| "learning_rate": 1.59375e-06, |
| "loss": 3.8634, |
| "step": 1903 |
| }, |
| { |
| "epoch": 6.87, |
| "learning_rate": 1.5919117647058824e-06, |
| "loss": 3.8006, |
| "step": 1904 |
| }, |
| { |
| "epoch": 6.88, |
| "learning_rate": 1.5900735294117648e-06, |
| "loss": 3.8424, |
| "step": 1905 |
| }, |
| { |
| "epoch": 6.88, |
| "learning_rate": 1.5882352941176472e-06, |
| "loss": 3.8695, |
| "step": 1906 |
| }, |
| { |
| "epoch": 6.88, |
| "learning_rate": 1.5863970588235294e-06, |
| "loss": 3.7717, |
| "step": 1907 |
| }, |
| { |
| "epoch": 6.89, |
| "learning_rate": 1.5845588235294118e-06, |
| "loss": 3.7287, |
| "step": 1908 |
| }, |
| { |
| "epoch": 6.89, |
| "learning_rate": 1.5827205882352941e-06, |
| "loss": 3.762, |
| "step": 1909 |
| }, |
| { |
| "epoch": 6.9, |
| "learning_rate": 1.5808823529411765e-06, |
| "loss": 3.7972, |
| "step": 1910 |
| }, |
| { |
| "epoch": 6.9, |
| "learning_rate": 1.579044117647059e-06, |
| "loss": 3.8528, |
| "step": 1911 |
| }, |
| { |
| "epoch": 6.9, |
| "learning_rate": 1.5772058823529413e-06, |
| "loss": 3.8372, |
| "step": 1912 |
| }, |
| { |
| "epoch": 6.91, |
| "learning_rate": 1.5753676470588235e-06, |
| "loss": 3.7855, |
| "step": 1913 |
| }, |
| { |
| "epoch": 6.91, |
| "learning_rate": 1.573529411764706e-06, |
| "loss": 3.8558, |
| "step": 1914 |
| }, |
| { |
| "epoch": 6.91, |
| "learning_rate": 1.5716911764705883e-06, |
| "loss": 3.8172, |
| "step": 1915 |
| }, |
| { |
| "epoch": 6.92, |
| "learning_rate": 1.5698529411764707e-06, |
| "loss": 3.7989, |
| "step": 1916 |
| }, |
| { |
| "epoch": 6.92, |
| "learning_rate": 1.568014705882353e-06, |
| "loss": 3.7558, |
| "step": 1917 |
| }, |
| { |
| "epoch": 6.92, |
| "learning_rate": 1.5661764705882355e-06, |
| "loss": 3.7079, |
| "step": 1918 |
| }, |
| { |
| "epoch": 6.93, |
| "learning_rate": 1.5643382352941177e-06, |
| "loss": 3.9108, |
| "step": 1919 |
| }, |
| { |
| "epoch": 6.93, |
| "learning_rate": 1.5625e-06, |
| "loss": 3.7567, |
| "step": 1920 |
| }, |
| { |
| "epoch": 6.94, |
| "learning_rate": 1.5606617647058825e-06, |
| "loss": 3.6723, |
| "step": 1921 |
| }, |
| { |
| "epoch": 6.94, |
| "learning_rate": 1.5588235294117649e-06, |
| "loss": 3.7365, |
| "step": 1922 |
| }, |
| { |
| "epoch": 6.94, |
| "learning_rate": 1.5569852941176473e-06, |
| "loss": 3.9026, |
| "step": 1923 |
| }, |
| { |
| "epoch": 6.95, |
| "learning_rate": 1.5551470588235297e-06, |
| "loss": 3.7582, |
| "step": 1924 |
| }, |
| { |
| "epoch": 6.95, |
| "learning_rate": 1.5533088235294118e-06, |
| "loss": 3.7406, |
| "step": 1925 |
| }, |
| { |
| "epoch": 6.95, |
| "learning_rate": 1.5514705882352942e-06, |
| "loss": 3.8318, |
| "step": 1926 |
| }, |
| { |
| "epoch": 6.96, |
| "learning_rate": 1.5496323529411766e-06, |
| "loss": 3.8049, |
| "step": 1927 |
| }, |
| { |
| "epoch": 6.96, |
| "learning_rate": 1.547794117647059e-06, |
| "loss": 3.7983, |
| "step": 1928 |
| }, |
| { |
| "epoch": 6.96, |
| "learning_rate": 1.5459558823529414e-06, |
| "loss": 3.8789, |
| "step": 1929 |
| }, |
| { |
| "epoch": 6.97, |
| "learning_rate": 1.5441176470588238e-06, |
| "loss": 3.8081, |
| "step": 1930 |
| }, |
| { |
| "epoch": 6.97, |
| "learning_rate": 1.542279411764706e-06, |
| "loss": 3.7304, |
| "step": 1931 |
| }, |
| { |
| "epoch": 6.97, |
| "learning_rate": 1.5404411764705884e-06, |
| "loss": 3.8131, |
| "step": 1932 |
| }, |
| { |
| "epoch": 6.98, |
| "learning_rate": 1.5386029411764708e-06, |
| "loss": 3.7591, |
| "step": 1933 |
| }, |
| { |
| "epoch": 6.98, |
| "learning_rate": 1.5367647058823532e-06, |
| "loss": 3.8297, |
| "step": 1934 |
| }, |
| { |
| "epoch": 6.99, |
| "learning_rate": 1.5349264705882356e-06, |
| "loss": 3.7684, |
| "step": 1935 |
| }, |
| { |
| "epoch": 6.99, |
| "learning_rate": 1.533088235294118e-06, |
| "loss": 3.8732, |
| "step": 1936 |
| }, |
| { |
| "epoch": 6.99, |
| "learning_rate": 1.5312500000000002e-06, |
| "loss": 3.7739, |
| "step": 1937 |
| }, |
| { |
| "epoch": 7.0, |
| "learning_rate": 1.5294117647058826e-06, |
| "loss": 3.8292, |
| "step": 1938 |
| }, |
| { |
| "epoch": 7.0, |
| "learning_rate": 1.527573529411765e-06, |
| "loss": 3.7581, |
| "step": 1939 |
| }, |
| { |
| "epoch": 7.0, |
| "eval_accuracy": 0.16216216216216217, |
| "eval_loss": 3.774104595184326, |
| "eval_runtime": 142.8751, |
| "eval_samples_per_second": 2.59, |
| "eval_steps_per_second": 0.651, |
| "step": 1939 |
| }, |
| { |
| "epoch": 7.0, |
| "learning_rate": 1.5257352941176473e-06, |
| "loss": 3.7403, |
| "step": 1940 |
| }, |
| { |
| "epoch": 7.01, |
| "learning_rate": 1.5238970588235297e-06, |
| "loss": 3.8248, |
| "step": 1941 |
| }, |
| { |
| "epoch": 7.01, |
| "learning_rate": 1.5220588235294117e-06, |
| "loss": 3.6871, |
| "step": 1942 |
| }, |
| { |
| "epoch": 7.01, |
| "learning_rate": 1.5202205882352941e-06, |
| "loss": 3.8357, |
| "step": 1943 |
| }, |
| { |
| "epoch": 7.02, |
| "learning_rate": 1.5183823529411765e-06, |
| "loss": 3.8075, |
| "step": 1944 |
| }, |
| { |
| "epoch": 7.02, |
| "learning_rate": 1.516544117647059e-06, |
| "loss": 3.7796, |
| "step": 1945 |
| }, |
| { |
| "epoch": 7.03, |
| "learning_rate": 1.5147058823529413e-06, |
| "loss": 3.7146, |
| "step": 1946 |
| }, |
| { |
| "epoch": 7.03, |
| "learning_rate": 1.5128676470588235e-06, |
| "loss": 3.8713, |
| "step": 1947 |
| }, |
| { |
| "epoch": 7.03, |
| "learning_rate": 1.5110294117647059e-06, |
| "loss": 3.876, |
| "step": 1948 |
| }, |
| { |
| "epoch": 7.04, |
| "learning_rate": 1.5091911764705883e-06, |
| "loss": 3.8634, |
| "step": 1949 |
| }, |
| { |
| "epoch": 7.04, |
| "learning_rate": 1.5073529411764707e-06, |
| "loss": 3.8102, |
| "step": 1950 |
| }, |
| { |
| "epoch": 7.04, |
| "learning_rate": 1.505514705882353e-06, |
| "loss": 3.7924, |
| "step": 1951 |
| }, |
| { |
| "epoch": 7.05, |
| "learning_rate": 1.5036764705882352e-06, |
| "loss": 3.7546, |
| "step": 1952 |
| }, |
| { |
| "epoch": 7.05, |
| "learning_rate": 1.5018382352941176e-06, |
| "loss": 3.8252, |
| "step": 1953 |
| }, |
| { |
| "epoch": 7.05, |
| "learning_rate": 1.5e-06, |
| "loss": 3.8057, |
| "step": 1954 |
| }, |
| { |
| "epoch": 7.06, |
| "learning_rate": 1.4981617647058824e-06, |
| "loss": 3.8298, |
| "step": 1955 |
| }, |
| { |
| "epoch": 7.06, |
| "learning_rate": 1.4963235294117648e-06, |
| "loss": 3.7478, |
| "step": 1956 |
| }, |
| { |
| "epoch": 7.06, |
| "learning_rate": 1.4944852941176472e-06, |
| "loss": 3.737, |
| "step": 1957 |
| }, |
| { |
| "epoch": 7.07, |
| "learning_rate": 1.4926470588235294e-06, |
| "loss": 3.7556, |
| "step": 1958 |
| }, |
| { |
| "epoch": 7.07, |
| "learning_rate": 1.4908088235294118e-06, |
| "loss": 3.8264, |
| "step": 1959 |
| }, |
| { |
| "epoch": 7.08, |
| "learning_rate": 1.4889705882352942e-06, |
| "loss": 3.7312, |
| "step": 1960 |
| }, |
| { |
| "epoch": 7.08, |
| "learning_rate": 1.4871323529411766e-06, |
| "loss": 3.796, |
| "step": 1961 |
| }, |
| { |
| "epoch": 7.08, |
| "learning_rate": 1.485294117647059e-06, |
| "loss": 3.8793, |
| "step": 1962 |
| }, |
| { |
| "epoch": 7.09, |
| "learning_rate": 1.4834558823529414e-06, |
| "loss": 3.7915, |
| "step": 1963 |
| }, |
| { |
| "epoch": 7.09, |
| "learning_rate": 1.4816176470588236e-06, |
| "loss": 3.754, |
| "step": 1964 |
| }, |
| { |
| "epoch": 7.09, |
| "learning_rate": 1.479779411764706e-06, |
| "loss": 3.8069, |
| "step": 1965 |
| }, |
| { |
| "epoch": 7.1, |
| "learning_rate": 1.4779411764705884e-06, |
| "loss": 3.7583, |
| "step": 1966 |
| }, |
| { |
| "epoch": 7.1, |
| "learning_rate": 1.4761029411764708e-06, |
| "loss": 3.7031, |
| "step": 1967 |
| }, |
| { |
| "epoch": 7.1, |
| "learning_rate": 1.4742647058823532e-06, |
| "loss": 3.7798, |
| "step": 1968 |
| }, |
| { |
| "epoch": 7.11, |
| "learning_rate": 1.4724264705882355e-06, |
| "loss": 3.8637, |
| "step": 1969 |
| }, |
| { |
| "epoch": 7.11, |
| "learning_rate": 1.4705882352941177e-06, |
| "loss": 3.7951, |
| "step": 1970 |
| }, |
| { |
| "epoch": 7.12, |
| "learning_rate": 1.4687500000000001e-06, |
| "loss": 3.8379, |
| "step": 1971 |
| }, |
| { |
| "epoch": 7.12, |
| "learning_rate": 1.4669117647058825e-06, |
| "loss": 3.6724, |
| "step": 1972 |
| }, |
| { |
| "epoch": 7.12, |
| "learning_rate": 1.465073529411765e-06, |
| "loss": 3.7265, |
| "step": 1973 |
| }, |
| { |
| "epoch": 7.13, |
| "learning_rate": 1.4632352941176473e-06, |
| "loss": 3.5929, |
| "step": 1974 |
| }, |
| { |
| "epoch": 7.13, |
| "learning_rate": 1.4613970588235297e-06, |
| "loss": 3.791, |
| "step": 1975 |
| }, |
| { |
| "epoch": 7.13, |
| "learning_rate": 1.4595588235294119e-06, |
| "loss": 3.8131, |
| "step": 1976 |
| }, |
| { |
| "epoch": 7.14, |
| "learning_rate": 1.4577205882352943e-06, |
| "loss": 3.838, |
| "step": 1977 |
| }, |
| { |
| "epoch": 7.14, |
| "learning_rate": 1.4558823529411767e-06, |
| "loss": 3.857, |
| "step": 1978 |
| }, |
| { |
| "epoch": 7.14, |
| "learning_rate": 1.454044117647059e-06, |
| "loss": 3.7491, |
| "step": 1979 |
| }, |
| { |
| "epoch": 7.15, |
| "learning_rate": 1.4522058823529415e-06, |
| "loss": 3.5938, |
| "step": 1980 |
| }, |
| { |
| "epoch": 7.15, |
| "learning_rate": 1.4503676470588239e-06, |
| "loss": 3.8188, |
| "step": 1981 |
| }, |
| { |
| "epoch": 7.16, |
| "learning_rate": 1.448529411764706e-06, |
| "loss": 3.8125, |
| "step": 1982 |
| }, |
| { |
| "epoch": 7.16, |
| "learning_rate": 1.4466911764705884e-06, |
| "loss": 3.8759, |
| "step": 1983 |
| }, |
| { |
| "epoch": 7.16, |
| "learning_rate": 1.4448529411764706e-06, |
| "loss": 3.8634, |
| "step": 1984 |
| }, |
| { |
| "epoch": 7.17, |
| "learning_rate": 1.443014705882353e-06, |
| "loss": 3.8508, |
| "step": 1985 |
| }, |
| { |
| "epoch": 7.17, |
| "learning_rate": 1.4411764705882352e-06, |
| "loss": 3.7074, |
| "step": 1986 |
| }, |
| { |
| "epoch": 7.17, |
| "learning_rate": 1.4393382352941176e-06, |
| "loss": 3.864, |
| "step": 1987 |
| }, |
| { |
| "epoch": 7.18, |
| "learning_rate": 1.4375e-06, |
| "loss": 3.7429, |
| "step": 1988 |
| }, |
| { |
| "epoch": 7.18, |
| "learning_rate": 1.4356617647058824e-06, |
| "loss": 3.7337, |
| "step": 1989 |
| }, |
| { |
| "epoch": 7.18, |
| "learning_rate": 1.4338235294117648e-06, |
| "loss": 3.8944, |
| "step": 1990 |
| }, |
| { |
| "epoch": 7.19, |
| "learning_rate": 1.4319852941176472e-06, |
| "loss": 3.7586, |
| "step": 1991 |
| }, |
| { |
| "epoch": 7.19, |
| "learning_rate": 1.4301470588235294e-06, |
| "loss": 3.8116, |
| "step": 1992 |
| }, |
| { |
| "epoch": 7.19, |
| "learning_rate": 1.4283088235294118e-06, |
| "loss": 3.8157, |
| "step": 1993 |
| }, |
| { |
| "epoch": 7.2, |
| "learning_rate": 1.4264705882352942e-06, |
| "loss": 3.8172, |
| "step": 1994 |
| }, |
| { |
| "epoch": 7.2, |
| "learning_rate": 1.4246323529411766e-06, |
| "loss": 3.7848, |
| "step": 1995 |
| }, |
| { |
| "epoch": 7.21, |
| "learning_rate": 1.422794117647059e-06, |
| "loss": 3.9113, |
| "step": 1996 |
| }, |
| { |
| "epoch": 7.21, |
| "learning_rate": 1.4209558823529411e-06, |
| "loss": 3.7809, |
| "step": 1997 |
| }, |
| { |
| "epoch": 7.21, |
| "learning_rate": 1.4191176470588235e-06, |
| "loss": 3.7451, |
| "step": 1998 |
| }, |
| { |
| "epoch": 7.22, |
| "learning_rate": 1.417279411764706e-06, |
| "loss": 3.8775, |
| "step": 1999 |
| }, |
| { |
| "epoch": 7.22, |
| "learning_rate": 1.4154411764705883e-06, |
| "loss": 3.8819, |
| "step": 2000 |
| }, |
| { |
| "epoch": 7.22, |
| "learning_rate": 1.4136029411764707e-06, |
| "loss": 3.8487, |
| "step": 2001 |
| }, |
| { |
| "epoch": 7.23, |
| "learning_rate": 1.4117647058823531e-06, |
| "loss": 3.72, |
| "step": 2002 |
| }, |
| { |
| "epoch": 7.23, |
| "learning_rate": 1.4099264705882353e-06, |
| "loss": 3.8203, |
| "step": 2003 |
| }, |
| { |
| "epoch": 7.23, |
| "learning_rate": 1.4080882352941177e-06, |
| "loss": 3.7654, |
| "step": 2004 |
| }, |
| { |
| "epoch": 7.24, |
| "learning_rate": 1.40625e-06, |
| "loss": 3.8784, |
| "step": 2005 |
| }, |
| { |
| "epoch": 7.24, |
| "learning_rate": 1.4044117647058825e-06, |
| "loss": 3.8024, |
| "step": 2006 |
| }, |
| { |
| "epoch": 7.25, |
| "learning_rate": 1.4025735294117649e-06, |
| "loss": 3.7868, |
| "step": 2007 |
| }, |
| { |
| "epoch": 7.25, |
| "learning_rate": 1.4007352941176473e-06, |
| "loss": 3.7785, |
| "step": 2008 |
| }, |
| { |
| "epoch": 7.25, |
| "learning_rate": 1.3988970588235295e-06, |
| "loss": 3.918, |
| "step": 2009 |
| }, |
| { |
| "epoch": 7.26, |
| "learning_rate": 1.3970588235294119e-06, |
| "loss": 3.6327, |
| "step": 2010 |
| }, |
| { |
| "epoch": 7.26, |
| "learning_rate": 1.3952205882352942e-06, |
| "loss": 3.7786, |
| "step": 2011 |
| }, |
| { |
| "epoch": 7.26, |
| "learning_rate": 1.3933823529411766e-06, |
| "loss": 3.7522, |
| "step": 2012 |
| }, |
| { |
| "epoch": 7.27, |
| "learning_rate": 1.391544117647059e-06, |
| "loss": 3.8688, |
| "step": 2013 |
| }, |
| { |
| "epoch": 7.27, |
| "learning_rate": 1.3897058823529414e-06, |
| "loss": 3.8575, |
| "step": 2014 |
| }, |
| { |
| "epoch": 7.27, |
| "learning_rate": 1.3878676470588236e-06, |
| "loss": 3.8429, |
| "step": 2015 |
| }, |
| { |
| "epoch": 7.28, |
| "learning_rate": 1.386029411764706e-06, |
| "loss": 3.8384, |
| "step": 2016 |
| }, |
| { |
| "epoch": 7.28, |
| "learning_rate": 1.3841911764705884e-06, |
| "loss": 3.8098, |
| "step": 2017 |
| }, |
| { |
| "epoch": 7.29, |
| "learning_rate": 1.3823529411764708e-06, |
| "loss": 3.8119, |
| "step": 2018 |
| }, |
| { |
| "epoch": 7.29, |
| "learning_rate": 1.3805147058823532e-06, |
| "loss": 3.8167, |
| "step": 2019 |
| }, |
| { |
| "epoch": 7.29, |
| "learning_rate": 1.3786764705882356e-06, |
| "loss": 3.7727, |
| "step": 2020 |
| }, |
| { |
| "epoch": 7.3, |
| "learning_rate": 1.3768382352941178e-06, |
| "loss": 3.8718, |
| "step": 2021 |
| }, |
| { |
| "epoch": 7.3, |
| "learning_rate": 1.3750000000000002e-06, |
| "loss": 3.798, |
| "step": 2022 |
| }, |
| { |
| "epoch": 7.3, |
| "learning_rate": 1.3731617647058826e-06, |
| "loss": 3.7045, |
| "step": 2023 |
| }, |
| { |
| "epoch": 7.31, |
| "learning_rate": 1.371323529411765e-06, |
| "loss": 3.8145, |
| "step": 2024 |
| }, |
| { |
| "epoch": 7.31, |
| "learning_rate": 1.3694852941176474e-06, |
| "loss": 3.7804, |
| "step": 2025 |
| }, |
| { |
| "epoch": 7.31, |
| "learning_rate": 1.3676470588235298e-06, |
| "loss": 3.9408, |
| "step": 2026 |
| }, |
| { |
| "epoch": 7.32, |
| "learning_rate": 1.3658088235294117e-06, |
| "loss": 3.7253, |
| "step": 2027 |
| }, |
| { |
| "epoch": 7.32, |
| "learning_rate": 1.3639705882352941e-06, |
| "loss": 3.7543, |
| "step": 2028 |
| }, |
| { |
| "epoch": 7.32, |
| "learning_rate": 1.3621323529411765e-06, |
| "loss": 3.8434, |
| "step": 2029 |
| }, |
| { |
| "epoch": 7.33, |
| "learning_rate": 1.360294117647059e-06, |
| "loss": 3.8461, |
| "step": 2030 |
| }, |
| { |
| "epoch": 7.33, |
| "learning_rate": 1.358455882352941e-06, |
| "loss": 3.7705, |
| "step": 2031 |
| }, |
| { |
| "epoch": 7.34, |
| "learning_rate": 1.3566176470588235e-06, |
| "loss": 3.9134, |
| "step": 2032 |
| }, |
| { |
| "epoch": 7.34, |
| "learning_rate": 1.3547794117647059e-06, |
| "loss": 3.8107, |
| "step": 2033 |
| }, |
| { |
| "epoch": 7.34, |
| "learning_rate": 1.3529411764705883e-06, |
| "loss": 3.8778, |
| "step": 2034 |
| }, |
| { |
| "epoch": 7.35, |
| "learning_rate": 1.3511029411764707e-06, |
| "loss": 3.6885, |
| "step": 2035 |
| }, |
| { |
| "epoch": 7.35, |
| "learning_rate": 1.349264705882353e-06, |
| "loss": 3.8994, |
| "step": 2036 |
| }, |
| { |
| "epoch": 7.35, |
| "learning_rate": 1.3474264705882353e-06, |
| "loss": 3.8841, |
| "step": 2037 |
| }, |
| { |
| "epoch": 7.36, |
| "learning_rate": 1.3455882352941177e-06, |
| "loss": 3.7173, |
| "step": 2038 |
| }, |
| { |
| "epoch": 7.36, |
| "learning_rate": 1.34375e-06, |
| "loss": 3.8302, |
| "step": 2039 |
| }, |
| { |
| "epoch": 7.36, |
| "learning_rate": 1.3419117647058824e-06, |
| "loss": 3.9526, |
| "step": 2040 |
| }, |
| { |
| "epoch": 7.37, |
| "learning_rate": 1.3400735294117648e-06, |
| "loss": 3.7484, |
| "step": 2041 |
| }, |
| { |
| "epoch": 7.37, |
| "learning_rate": 1.3382352941176472e-06, |
| "loss": 3.8131, |
| "step": 2042 |
| }, |
| { |
| "epoch": 7.38, |
| "learning_rate": 1.3363970588235294e-06, |
| "loss": 3.8062, |
| "step": 2043 |
| }, |
| { |
| "epoch": 7.38, |
| "learning_rate": 1.3345588235294118e-06, |
| "loss": 3.7704, |
| "step": 2044 |
| }, |
| { |
| "epoch": 7.38, |
| "learning_rate": 1.3327205882352942e-06, |
| "loss": 3.7688, |
| "step": 2045 |
| }, |
| { |
| "epoch": 7.39, |
| "learning_rate": 1.3308823529411766e-06, |
| "loss": 3.6819, |
| "step": 2046 |
| }, |
| { |
| "epoch": 7.39, |
| "learning_rate": 1.329044117647059e-06, |
| "loss": 3.7821, |
| "step": 2047 |
| }, |
| { |
| "epoch": 7.39, |
| "learning_rate": 1.3272058823529412e-06, |
| "loss": 3.8495, |
| "step": 2048 |
| }, |
| { |
| "epoch": 7.4, |
| "learning_rate": 1.3253676470588236e-06, |
| "loss": 3.7767, |
| "step": 2049 |
| }, |
| { |
| "epoch": 7.4, |
| "learning_rate": 1.323529411764706e-06, |
| "loss": 3.8434, |
| "step": 2050 |
| }, |
| { |
| "epoch": 7.4, |
| "learning_rate": 1.3216911764705884e-06, |
| "loss": 3.714, |
| "step": 2051 |
| }, |
| { |
| "epoch": 7.41, |
| "learning_rate": 1.3198529411764708e-06, |
| "loss": 3.7876, |
| "step": 2052 |
| }, |
| { |
| "epoch": 7.41, |
| "learning_rate": 1.3180147058823532e-06, |
| "loss": 3.7688, |
| "step": 2053 |
| }, |
| { |
| "epoch": 7.42, |
| "learning_rate": 1.3161764705882353e-06, |
| "loss": 3.8039, |
| "step": 2054 |
| }, |
| { |
| "epoch": 7.42, |
| "learning_rate": 1.3143382352941177e-06, |
| "loss": 3.7814, |
| "step": 2055 |
| }, |
| { |
| "epoch": 7.42, |
| "learning_rate": 1.3125000000000001e-06, |
| "loss": 3.8708, |
| "step": 2056 |
| }, |
| { |
| "epoch": 7.43, |
| "learning_rate": 1.3106617647058825e-06, |
| "loss": 3.7506, |
| "step": 2057 |
| }, |
| { |
| "epoch": 7.43, |
| "learning_rate": 1.308823529411765e-06, |
| "loss": 3.8229, |
| "step": 2058 |
| }, |
| { |
| "epoch": 7.43, |
| "learning_rate": 1.3069852941176473e-06, |
| "loss": 3.84, |
| "step": 2059 |
| }, |
| { |
| "epoch": 7.44, |
| "learning_rate": 1.3051470588235295e-06, |
| "loss": 3.8934, |
| "step": 2060 |
| }, |
| { |
| "epoch": 7.44, |
| "learning_rate": 1.303308823529412e-06, |
| "loss": 3.7734, |
| "step": 2061 |
| }, |
| { |
| "epoch": 7.44, |
| "learning_rate": 1.3014705882352943e-06, |
| "loss": 3.7292, |
| "step": 2062 |
| }, |
| { |
| "epoch": 7.45, |
| "learning_rate": 1.2996323529411767e-06, |
| "loss": 3.7871, |
| "step": 2063 |
| }, |
| { |
| "epoch": 7.45, |
| "learning_rate": 1.297794117647059e-06, |
| "loss": 3.7663, |
| "step": 2064 |
| }, |
| { |
| "epoch": 7.45, |
| "learning_rate": 1.2959558823529415e-06, |
| "loss": 3.706, |
| "step": 2065 |
| }, |
| { |
| "epoch": 7.46, |
| "learning_rate": 1.2941176470588237e-06, |
| "loss": 3.8397, |
| "step": 2066 |
| }, |
| { |
| "epoch": 7.46, |
| "learning_rate": 1.292279411764706e-06, |
| "loss": 3.8575, |
| "step": 2067 |
| }, |
| { |
| "epoch": 7.47, |
| "learning_rate": 1.2904411764705885e-06, |
| "loss": 3.6974, |
| "step": 2068 |
| }, |
| { |
| "epoch": 7.47, |
| "learning_rate": 1.2886029411764706e-06, |
| "loss": 3.8456, |
| "step": 2069 |
| }, |
| { |
| "epoch": 7.47, |
| "learning_rate": 1.2867647058823528e-06, |
| "loss": 3.8241, |
| "step": 2070 |
| }, |
| { |
| "epoch": 7.48, |
| "learning_rate": 1.2849264705882352e-06, |
| "loss": 3.7675, |
| "step": 2071 |
| }, |
| { |
| "epoch": 7.48, |
| "learning_rate": 1.2830882352941176e-06, |
| "loss": 3.6908, |
| "step": 2072 |
| }, |
| { |
| "epoch": 7.48, |
| "learning_rate": 1.28125e-06, |
| "loss": 3.8484, |
| "step": 2073 |
| }, |
| { |
| "epoch": 7.49, |
| "learning_rate": 1.2794117647058824e-06, |
| "loss": 3.7201, |
| "step": 2074 |
| }, |
| { |
| "epoch": 7.49, |
| "learning_rate": 1.2775735294117648e-06, |
| "loss": 3.7842, |
| "step": 2075 |
| }, |
| { |
| "epoch": 7.49, |
| "learning_rate": 1.275735294117647e-06, |
| "loss": 3.702, |
| "step": 2076 |
| }, |
| { |
| "epoch": 7.5, |
| "learning_rate": 1.2738970588235294e-06, |
| "loss": 3.781, |
| "step": 2077 |
| }, |
| { |
| "epoch": 7.5, |
| "learning_rate": 1.2720588235294118e-06, |
| "loss": 3.8236, |
| "step": 2078 |
| }, |
| { |
| "epoch": 7.51, |
| "learning_rate": 1.2702205882352942e-06, |
| "loss": 3.8398, |
| "step": 2079 |
| }, |
| { |
| "epoch": 7.51, |
| "learning_rate": 1.2683823529411766e-06, |
| "loss": 3.8886, |
| "step": 2080 |
| }, |
| { |
| "epoch": 7.51, |
| "learning_rate": 1.266544117647059e-06, |
| "loss": 3.6874, |
| "step": 2081 |
| }, |
| { |
| "epoch": 7.52, |
| "learning_rate": 1.2647058823529412e-06, |
| "loss": 3.7717, |
| "step": 2082 |
| }, |
| { |
| "epoch": 7.52, |
| "learning_rate": 1.2628676470588235e-06, |
| "loss": 3.7724, |
| "step": 2083 |
| }, |
| { |
| "epoch": 7.52, |
| "learning_rate": 1.261029411764706e-06, |
| "loss": 3.7912, |
| "step": 2084 |
| }, |
| { |
| "epoch": 7.53, |
| "learning_rate": 1.2591911764705883e-06, |
| "loss": 3.8082, |
| "step": 2085 |
| }, |
| { |
| "epoch": 7.53, |
| "learning_rate": 1.2573529411764707e-06, |
| "loss": 3.7173, |
| "step": 2086 |
| }, |
| { |
| "epoch": 7.53, |
| "learning_rate": 1.2555147058823531e-06, |
| "loss": 3.761, |
| "step": 2087 |
| }, |
| { |
| "epoch": 7.54, |
| "learning_rate": 1.2536764705882353e-06, |
| "loss": 3.7925, |
| "step": 2088 |
| }, |
| { |
| "epoch": 7.54, |
| "learning_rate": 1.2518382352941177e-06, |
| "loss": 3.7401, |
| "step": 2089 |
| }, |
| { |
| "epoch": 7.55, |
| "learning_rate": 1.25e-06, |
| "loss": 3.8769, |
| "step": 2090 |
| }, |
| { |
| "epoch": 7.55, |
| "learning_rate": 1.2481617647058825e-06, |
| "loss": 3.9441, |
| "step": 2091 |
| }, |
| { |
| "epoch": 7.55, |
| "learning_rate": 1.2463235294117649e-06, |
| "loss": 3.7706, |
| "step": 2092 |
| }, |
| { |
| "epoch": 7.56, |
| "learning_rate": 1.244485294117647e-06, |
| "loss": 3.7737, |
| "step": 2093 |
| }, |
| { |
| "epoch": 7.56, |
| "learning_rate": 1.2426470588235295e-06, |
| "loss": 3.7552, |
| "step": 2094 |
| }, |
| { |
| "epoch": 7.56, |
| "learning_rate": 1.2408088235294119e-06, |
| "loss": 3.7735, |
| "step": 2095 |
| }, |
| { |
| "epoch": 7.57, |
| "learning_rate": 1.2389705882352943e-06, |
| "loss": 3.8378, |
| "step": 2096 |
| }, |
| { |
| "epoch": 7.57, |
| "learning_rate": 1.2371323529411767e-06, |
| "loss": 3.6474, |
| "step": 2097 |
| }, |
| { |
| "epoch": 7.57, |
| "learning_rate": 1.235294117647059e-06, |
| "loss": 3.8589, |
| "step": 2098 |
| }, |
| { |
| "epoch": 7.58, |
| "learning_rate": 1.2334558823529412e-06, |
| "loss": 3.6263, |
| "step": 2099 |
| }, |
| { |
| "epoch": 7.58, |
| "learning_rate": 1.2316176470588236e-06, |
| "loss": 3.8027, |
| "step": 2100 |
| }, |
| { |
| "epoch": 7.58, |
| "learning_rate": 1.2297794117647058e-06, |
| "loss": 3.796, |
| "step": 2101 |
| }, |
| { |
| "epoch": 7.59, |
| "learning_rate": 1.2279411764705882e-06, |
| "loss": 3.7207, |
| "step": 2102 |
| }, |
| { |
| "epoch": 7.59, |
| "learning_rate": 1.2261029411764706e-06, |
| "loss": 3.8075, |
| "step": 2103 |
| }, |
| { |
| "epoch": 7.6, |
| "learning_rate": 1.224264705882353e-06, |
| "loss": 3.8761, |
| "step": 2104 |
| }, |
| { |
| "epoch": 7.6, |
| "learning_rate": 1.2224264705882354e-06, |
| "loss": 3.7404, |
| "step": 2105 |
| }, |
| { |
| "epoch": 7.6, |
| "learning_rate": 1.2205882352941178e-06, |
| "loss": 3.8922, |
| "step": 2106 |
| }, |
| { |
| "epoch": 7.61, |
| "learning_rate": 1.21875e-06, |
| "loss": 3.8768, |
| "step": 2107 |
| }, |
| { |
| "epoch": 7.61, |
| "learning_rate": 1.2169117647058824e-06, |
| "loss": 3.6092, |
| "step": 2108 |
| }, |
| { |
| "epoch": 7.61, |
| "learning_rate": 1.2150735294117648e-06, |
| "loss": 3.6752, |
| "step": 2109 |
| }, |
| { |
| "epoch": 7.62, |
| "learning_rate": 1.2132352941176472e-06, |
| "loss": 3.8158, |
| "step": 2110 |
| }, |
| { |
| "epoch": 7.62, |
| "learning_rate": 1.2113970588235296e-06, |
| "loss": 3.7398, |
| "step": 2111 |
| }, |
| { |
| "epoch": 7.62, |
| "learning_rate": 1.209558823529412e-06, |
| "loss": 3.8378, |
| "step": 2112 |
| }, |
| { |
| "epoch": 7.63, |
| "learning_rate": 1.2077205882352941e-06, |
| "loss": 3.708, |
| "step": 2113 |
| }, |
| { |
| "epoch": 7.63, |
| "learning_rate": 1.2058823529411765e-06, |
| "loss": 3.7681, |
| "step": 2114 |
| }, |
| { |
| "epoch": 7.64, |
| "learning_rate": 1.204044117647059e-06, |
| "loss": 3.8254, |
| "step": 2115 |
| }, |
| { |
| "epoch": 7.64, |
| "learning_rate": 1.2022058823529413e-06, |
| "loss": 3.7685, |
| "step": 2116 |
| }, |
| { |
| "epoch": 7.64, |
| "learning_rate": 1.2003676470588237e-06, |
| "loss": 3.6942, |
| "step": 2117 |
| }, |
| { |
| "epoch": 7.65, |
| "learning_rate": 1.1985294117647061e-06, |
| "loss": 3.92, |
| "step": 2118 |
| }, |
| { |
| "epoch": 7.65, |
| "learning_rate": 1.1966911764705883e-06, |
| "loss": 3.694, |
| "step": 2119 |
| }, |
| { |
| "epoch": 7.65, |
| "learning_rate": 1.1948529411764707e-06, |
| "loss": 3.7566, |
| "step": 2120 |
| }, |
| { |
| "epoch": 7.66, |
| "learning_rate": 1.193014705882353e-06, |
| "loss": 3.7929, |
| "step": 2121 |
| }, |
| { |
| "epoch": 7.66, |
| "learning_rate": 1.1911764705882353e-06, |
| "loss": 3.809, |
| "step": 2122 |
| }, |
| { |
| "epoch": 7.66, |
| "learning_rate": 1.1893382352941177e-06, |
| "loss": 3.6985, |
| "step": 2123 |
| }, |
| { |
| "epoch": 7.67, |
| "learning_rate": 1.1875e-06, |
| "loss": 3.7969, |
| "step": 2124 |
| }, |
| { |
| "epoch": 7.67, |
| "learning_rate": 1.1856617647058825e-06, |
| "loss": 3.8082, |
| "step": 2125 |
| }, |
| { |
| "epoch": 7.68, |
| "learning_rate": 1.1838235294117649e-06, |
| "loss": 3.8358, |
| "step": 2126 |
| }, |
| { |
| "epoch": 7.68, |
| "learning_rate": 1.181985294117647e-06, |
| "loss": 3.7409, |
| "step": 2127 |
| }, |
| { |
| "epoch": 7.68, |
| "learning_rate": 1.1801470588235294e-06, |
| "loss": 3.6596, |
| "step": 2128 |
| }, |
| { |
| "epoch": 7.69, |
| "learning_rate": 1.1783088235294118e-06, |
| "loss": 3.7785, |
| "step": 2129 |
| }, |
| { |
| "epoch": 7.69, |
| "learning_rate": 1.1764705882352942e-06, |
| "loss": 3.814, |
| "step": 2130 |
| }, |
| { |
| "epoch": 7.69, |
| "learning_rate": 1.1746323529411766e-06, |
| "loss": 3.866, |
| "step": 2131 |
| }, |
| { |
| "epoch": 7.7, |
| "learning_rate": 1.172794117647059e-06, |
| "loss": 3.7406, |
| "step": 2132 |
| }, |
| { |
| "epoch": 7.7, |
| "learning_rate": 1.1709558823529412e-06, |
| "loss": 3.6683, |
| "step": 2133 |
| }, |
| { |
| "epoch": 7.7, |
| "learning_rate": 1.1691176470588236e-06, |
| "loss": 3.6596, |
| "step": 2134 |
| }, |
| { |
| "epoch": 7.71, |
| "learning_rate": 1.167279411764706e-06, |
| "loss": 3.7248, |
| "step": 2135 |
| }, |
| { |
| "epoch": 7.71, |
| "learning_rate": 1.1654411764705884e-06, |
| "loss": 3.8138, |
| "step": 2136 |
| }, |
| { |
| "epoch": 7.71, |
| "learning_rate": 1.1636029411764708e-06, |
| "loss": 3.8308, |
| "step": 2137 |
| }, |
| { |
| "epoch": 7.72, |
| "learning_rate": 1.161764705882353e-06, |
| "loss": 3.8092, |
| "step": 2138 |
| }, |
| { |
| "epoch": 7.72, |
| "learning_rate": 1.1599264705882354e-06, |
| "loss": 3.7331, |
| "step": 2139 |
| }, |
| { |
| "epoch": 7.73, |
| "learning_rate": 1.1580882352941178e-06, |
| "loss": 3.7359, |
| "step": 2140 |
| }, |
| { |
| "epoch": 7.73, |
| "learning_rate": 1.1562500000000002e-06, |
| "loss": 3.8491, |
| "step": 2141 |
| }, |
| { |
| "epoch": 7.73, |
| "learning_rate": 1.1544117647058825e-06, |
| "loss": 3.8642, |
| "step": 2142 |
| }, |
| { |
| "epoch": 7.74, |
| "learning_rate": 1.152573529411765e-06, |
| "loss": 3.6513, |
| "step": 2143 |
| }, |
| { |
| "epoch": 7.74, |
| "learning_rate": 1.1507352941176471e-06, |
| "loss": 3.9139, |
| "step": 2144 |
| }, |
| { |
| "epoch": 7.74, |
| "learning_rate": 1.1488970588235295e-06, |
| "loss": 3.6864, |
| "step": 2145 |
| }, |
| { |
| "epoch": 7.75, |
| "learning_rate": 1.1470588235294117e-06, |
| "loss": 3.7701, |
| "step": 2146 |
| }, |
| { |
| "epoch": 7.75, |
| "learning_rate": 1.145220588235294e-06, |
| "loss": 3.689, |
| "step": 2147 |
| }, |
| { |
| "epoch": 7.75, |
| "learning_rate": 1.1433823529411765e-06, |
| "loss": 3.7826, |
| "step": 2148 |
| }, |
| { |
| "epoch": 7.76, |
| "learning_rate": 1.1415441176470589e-06, |
| "loss": 3.8122, |
| "step": 2149 |
| }, |
| { |
| "epoch": 7.76, |
| "learning_rate": 1.1397058823529413e-06, |
| "loss": 3.7883, |
| "step": 2150 |
| }, |
| { |
| "epoch": 7.77, |
| "learning_rate": 1.1378676470588237e-06, |
| "loss": 3.8785, |
| "step": 2151 |
| }, |
| { |
| "epoch": 7.77, |
| "learning_rate": 1.1360294117647059e-06, |
| "loss": 3.9005, |
| "step": 2152 |
| }, |
| { |
| "epoch": 7.77, |
| "learning_rate": 1.1341911764705883e-06, |
| "loss": 3.869, |
| "step": 2153 |
| }, |
| { |
| "epoch": 7.78, |
| "learning_rate": 1.1323529411764707e-06, |
| "loss": 3.88, |
| "step": 2154 |
| }, |
| { |
| "epoch": 7.78, |
| "learning_rate": 1.130514705882353e-06, |
| "loss": 3.7647, |
| "step": 2155 |
| }, |
| { |
| "epoch": 7.78, |
| "learning_rate": 1.1286764705882354e-06, |
| "loss": 3.7608, |
| "step": 2156 |
| }, |
| { |
| "epoch": 7.79, |
| "learning_rate": 1.1268382352941178e-06, |
| "loss": 3.8569, |
| "step": 2157 |
| }, |
| { |
| "epoch": 7.79, |
| "learning_rate": 1.125e-06, |
| "loss": 3.8418, |
| "step": 2158 |
| }, |
| { |
| "epoch": 7.79, |
| "learning_rate": 1.1231617647058824e-06, |
| "loss": 3.8406, |
| "step": 2159 |
| }, |
| { |
| "epoch": 7.8, |
| "learning_rate": 1.1213235294117648e-06, |
| "loss": 3.7298, |
| "step": 2160 |
| }, |
| { |
| "epoch": 7.8, |
| "learning_rate": 1.1194852941176472e-06, |
| "loss": 3.7855, |
| "step": 2161 |
| }, |
| { |
| "epoch": 7.81, |
| "learning_rate": 1.1176470588235296e-06, |
| "loss": 3.7927, |
| "step": 2162 |
| }, |
| { |
| "epoch": 7.81, |
| "learning_rate": 1.115808823529412e-06, |
| "loss": 3.6819, |
| "step": 2163 |
| }, |
| { |
| "epoch": 7.81, |
| "learning_rate": 1.1139705882352942e-06, |
| "loss": 3.8876, |
| "step": 2164 |
| }, |
| { |
| "epoch": 7.82, |
| "learning_rate": 1.1121323529411766e-06, |
| "loss": 3.9123, |
| "step": 2165 |
| }, |
| { |
| "epoch": 7.82, |
| "learning_rate": 1.1102941176470588e-06, |
| "loss": 3.7854, |
| "step": 2166 |
| }, |
| { |
| "epoch": 7.82, |
| "learning_rate": 1.1084558823529412e-06, |
| "loss": 3.883, |
| "step": 2167 |
| }, |
| { |
| "epoch": 7.83, |
| "learning_rate": 1.1066176470588236e-06, |
| "loss": 3.71, |
| "step": 2168 |
| }, |
| { |
| "epoch": 7.83, |
| "learning_rate": 1.104779411764706e-06, |
| "loss": 3.7341, |
| "step": 2169 |
| }, |
| { |
| "epoch": 7.83, |
| "learning_rate": 1.1029411764705884e-06, |
| "loss": 3.7001, |
| "step": 2170 |
| }, |
| { |
| "epoch": 7.84, |
| "learning_rate": 1.1011029411764707e-06, |
| "loss": 3.6824, |
| "step": 2171 |
| }, |
| { |
| "epoch": 7.84, |
| "learning_rate": 1.099264705882353e-06, |
| "loss": 3.7421, |
| "step": 2172 |
| }, |
| { |
| "epoch": 7.84, |
| "learning_rate": 1.0974264705882353e-06, |
| "loss": 3.7325, |
| "step": 2173 |
| }, |
| { |
| "epoch": 7.85, |
| "learning_rate": 1.0955882352941177e-06, |
| "loss": 3.851, |
| "step": 2174 |
| }, |
| { |
| "epoch": 7.85, |
| "learning_rate": 1.0937500000000001e-06, |
| "loss": 3.772, |
| "step": 2175 |
| }, |
| { |
| "epoch": 7.86, |
| "learning_rate": 1.0919117647058825e-06, |
| "loss": 3.8535, |
| "step": 2176 |
| }, |
| { |
| "epoch": 7.86, |
| "learning_rate": 1.090073529411765e-06, |
| "loss": 3.8309, |
| "step": 2177 |
| }, |
| { |
| "epoch": 7.86, |
| "learning_rate": 1.088235294117647e-06, |
| "loss": 3.6384, |
| "step": 2178 |
| }, |
| { |
| "epoch": 7.87, |
| "learning_rate": 1.0863970588235295e-06, |
| "loss": 3.6046, |
| "step": 2179 |
| }, |
| { |
| "epoch": 7.87, |
| "learning_rate": 1.0845588235294119e-06, |
| "loss": 3.8592, |
| "step": 2180 |
| }, |
| { |
| "epoch": 7.87, |
| "learning_rate": 1.0827205882352943e-06, |
| "loss": 3.6961, |
| "step": 2181 |
| }, |
| { |
| "epoch": 7.88, |
| "learning_rate": 1.0808823529411767e-06, |
| "loss": 3.7922, |
| "step": 2182 |
| }, |
| { |
| "epoch": 7.88, |
| "learning_rate": 1.079044117647059e-06, |
| "loss": 3.7968, |
| "step": 2183 |
| }, |
| { |
| "epoch": 7.88, |
| "learning_rate": 1.0772058823529413e-06, |
| "loss": 3.8131, |
| "step": 2184 |
| }, |
| { |
| "epoch": 7.89, |
| "learning_rate": 1.0753676470588236e-06, |
| "loss": 3.801, |
| "step": 2185 |
| }, |
| { |
| "epoch": 7.89, |
| "learning_rate": 1.0735294117647058e-06, |
| "loss": 3.6458, |
| "step": 2186 |
| }, |
| { |
| "epoch": 7.9, |
| "learning_rate": 1.0716911764705882e-06, |
| "loss": 3.6855, |
| "step": 2187 |
| }, |
| { |
| "epoch": 7.9, |
| "learning_rate": 1.0698529411764706e-06, |
| "loss": 3.8189, |
| "step": 2188 |
| }, |
| { |
| "epoch": 7.9, |
| "learning_rate": 1.068014705882353e-06, |
| "loss": 3.7964, |
| "step": 2189 |
| }, |
| { |
| "epoch": 7.91, |
| "learning_rate": 1.0661764705882354e-06, |
| "loss": 3.6956, |
| "step": 2190 |
| }, |
| { |
| "epoch": 7.91, |
| "learning_rate": 1.0643382352941176e-06, |
| "loss": 3.8703, |
| "step": 2191 |
| }, |
| { |
| "epoch": 7.91, |
| "learning_rate": 1.0625e-06, |
| "loss": 3.7828, |
| "step": 2192 |
| }, |
| { |
| "epoch": 7.92, |
| "learning_rate": 1.0606617647058824e-06, |
| "loss": 3.6857, |
| "step": 2193 |
| }, |
| { |
| "epoch": 7.92, |
| "learning_rate": 1.0588235294117648e-06, |
| "loss": 3.7364, |
| "step": 2194 |
| }, |
| { |
| "epoch": 7.92, |
| "learning_rate": 1.0569852941176472e-06, |
| "loss": 3.8068, |
| "step": 2195 |
| }, |
| { |
| "epoch": 7.93, |
| "learning_rate": 1.0551470588235296e-06, |
| "loss": 3.8961, |
| "step": 2196 |
| }, |
| { |
| "epoch": 7.93, |
| "learning_rate": 1.0533088235294118e-06, |
| "loss": 3.7794, |
| "step": 2197 |
| }, |
| { |
| "epoch": 7.94, |
| "learning_rate": 1.0514705882352942e-06, |
| "loss": 3.7591, |
| "step": 2198 |
| }, |
| { |
| "epoch": 7.94, |
| "learning_rate": 1.0496323529411765e-06, |
| "loss": 3.7285, |
| "step": 2199 |
| }, |
| { |
| "epoch": 7.94, |
| "learning_rate": 1.047794117647059e-06, |
| "loss": 3.8174, |
| "step": 2200 |
| }, |
| { |
| "epoch": 7.95, |
| "learning_rate": 1.0459558823529413e-06, |
| "loss": 3.8515, |
| "step": 2201 |
| }, |
| { |
| "epoch": 7.95, |
| "learning_rate": 1.0441176470588237e-06, |
| "loss": 3.7553, |
| "step": 2202 |
| }, |
| { |
| "epoch": 7.95, |
| "learning_rate": 1.042279411764706e-06, |
| "loss": 3.5999, |
| "step": 2203 |
| }, |
| { |
| "epoch": 7.96, |
| "learning_rate": 1.0404411764705883e-06, |
| "loss": 3.7718, |
| "step": 2204 |
| }, |
| { |
| "epoch": 7.96, |
| "learning_rate": 1.0386029411764707e-06, |
| "loss": 3.6309, |
| "step": 2205 |
| }, |
| { |
| "epoch": 7.96, |
| "learning_rate": 1.036764705882353e-06, |
| "loss": 3.7269, |
| "step": 2206 |
| }, |
| { |
| "epoch": 7.97, |
| "learning_rate": 1.0349264705882353e-06, |
| "loss": 3.8262, |
| "step": 2207 |
| }, |
| { |
| "epoch": 7.97, |
| "learning_rate": 1.0330882352941177e-06, |
| "loss": 3.6814, |
| "step": 2208 |
| }, |
| { |
| "epoch": 7.97, |
| "learning_rate": 1.03125e-06, |
| "loss": 3.8087, |
| "step": 2209 |
| }, |
| { |
| "epoch": 7.98, |
| "learning_rate": 1.0294117647058825e-06, |
| "loss": 3.6475, |
| "step": 2210 |
| }, |
| { |
| "epoch": 7.98, |
| "learning_rate": 1.0275735294117647e-06, |
| "loss": 3.8227, |
| "step": 2211 |
| }, |
| { |
| "epoch": 7.99, |
| "learning_rate": 1.025735294117647e-06, |
| "loss": 3.757, |
| "step": 2212 |
| }, |
| { |
| "epoch": 7.99, |
| "learning_rate": 1.0238970588235294e-06, |
| "loss": 3.7494, |
| "step": 2213 |
| }, |
| { |
| "epoch": 7.99, |
| "learning_rate": 1.0220588235294118e-06, |
| "loss": 3.7782, |
| "step": 2214 |
| }, |
| { |
| "epoch": 8.0, |
| "learning_rate": 1.0202205882352942e-06, |
| "loss": 3.7041, |
| "step": 2215 |
| }, |
| { |
| "epoch": 8.0, |
| "learning_rate": 1.0183823529411766e-06, |
| "loss": 3.8817, |
| "step": 2216 |
| }, |
| { |
| "epoch": 8.0, |
| "eval_accuracy": 0.15675675675675677, |
| "eval_loss": 3.7593867778778076, |
| "eval_runtime": 144.6016, |
| "eval_samples_per_second": 2.559, |
| "eval_steps_per_second": 0.643, |
| "step": 2216 |
| }, |
| { |
| "epoch": 8.0, |
| "learning_rate": 1.0165441176470588e-06, |
| "loss": 3.7311, |
| "step": 2217 |
| }, |
| { |
| "epoch": 8.01, |
| "learning_rate": 1.0147058823529412e-06, |
| "loss": 3.8138, |
| "step": 2218 |
| }, |
| { |
| "epoch": 8.01, |
| "learning_rate": 1.0128676470588236e-06, |
| "loss": 3.6592, |
| "step": 2219 |
| }, |
| { |
| "epoch": 8.01, |
| "learning_rate": 1.011029411764706e-06, |
| "loss": 3.8356, |
| "step": 2220 |
| }, |
| { |
| "epoch": 8.02, |
| "learning_rate": 1.0091911764705884e-06, |
| "loss": 3.7156, |
| "step": 2221 |
| }, |
| { |
| "epoch": 8.02, |
| "learning_rate": 1.0073529411764708e-06, |
| "loss": 3.7526, |
| "step": 2222 |
| }, |
| { |
| "epoch": 8.03, |
| "learning_rate": 1.005514705882353e-06, |
| "loss": 3.7126, |
| "step": 2223 |
| }, |
| { |
| "epoch": 8.03, |
| "learning_rate": 1.0036764705882354e-06, |
| "loss": 3.723, |
| "step": 2224 |
| }, |
| { |
| "epoch": 8.03, |
| "learning_rate": 1.0018382352941178e-06, |
| "loss": 3.8471, |
| "step": 2225 |
| }, |
| { |
| "epoch": 8.04, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 3.869, |
| "step": 2226 |
| }, |
| { |
| "epoch": 8.04, |
| "learning_rate": 9.981617647058826e-07, |
| "loss": 3.7312, |
| "step": 2227 |
| }, |
| { |
| "epoch": 8.04, |
| "learning_rate": 9.96323529411765e-07, |
| "loss": 3.7501, |
| "step": 2228 |
| }, |
| { |
| "epoch": 8.05, |
| "learning_rate": 9.944852941176471e-07, |
| "loss": 3.7984, |
| "step": 2229 |
| }, |
| { |
| "epoch": 8.05, |
| "learning_rate": 9.926470588235295e-07, |
| "loss": 3.793, |
| "step": 2230 |
| }, |
| { |
| "epoch": 8.05, |
| "learning_rate": 9.908088235294117e-07, |
| "loss": 3.8679, |
| "step": 2231 |
| }, |
| { |
| "epoch": 8.06, |
| "learning_rate": 9.889705882352941e-07, |
| "loss": 3.7919, |
| "step": 2232 |
| }, |
| { |
| "epoch": 8.06, |
| "learning_rate": 9.871323529411765e-07, |
| "loss": 3.7891, |
| "step": 2233 |
| }, |
| { |
| "epoch": 8.06, |
| "learning_rate": 9.85294117647059e-07, |
| "loss": 3.841, |
| "step": 2234 |
| }, |
| { |
| "epoch": 8.07, |
| "learning_rate": 9.834558823529413e-07, |
| "loss": 3.8562, |
| "step": 2235 |
| }, |
| { |
| "epoch": 8.07, |
| "learning_rate": 9.816176470588237e-07, |
| "loss": 3.9092, |
| "step": 2236 |
| }, |
| { |
| "epoch": 8.08, |
| "learning_rate": 9.797794117647059e-07, |
| "loss": 3.9066, |
| "step": 2237 |
| }, |
| { |
| "epoch": 8.08, |
| "learning_rate": 9.779411764705883e-07, |
| "loss": 3.8177, |
| "step": 2238 |
| }, |
| { |
| "epoch": 8.08, |
| "learning_rate": 9.761029411764707e-07, |
| "loss": 3.8291, |
| "step": 2239 |
| }, |
| { |
| "epoch": 8.09, |
| "learning_rate": 9.74264705882353e-07, |
| "loss": 3.7232, |
| "step": 2240 |
| }, |
| { |
| "epoch": 8.09, |
| "learning_rate": 9.724264705882355e-07, |
| "loss": 3.7228, |
| "step": 2241 |
| }, |
| { |
| "epoch": 8.09, |
| "learning_rate": 9.705882352941176e-07, |
| "loss": 3.8503, |
| "step": 2242 |
| }, |
| { |
| "epoch": 8.1, |
| "learning_rate": 9.6875e-07, |
| "loss": 3.7716, |
| "step": 2243 |
| }, |
| { |
| "epoch": 8.1, |
| "learning_rate": 9.669117647058824e-07, |
| "loss": 3.8207, |
| "step": 2244 |
| }, |
| { |
| "epoch": 8.1, |
| "learning_rate": 9.650735294117648e-07, |
| "loss": 3.7668, |
| "step": 2245 |
| }, |
| { |
| "epoch": 8.11, |
| "learning_rate": 9.632352941176472e-07, |
| "loss": 3.7285, |
| "step": 2246 |
| }, |
| { |
| "epoch": 8.11, |
| "learning_rate": 9.613970588235296e-07, |
| "loss": 4.0027, |
| "step": 2247 |
| }, |
| { |
| "epoch": 8.12, |
| "learning_rate": 9.595588235294118e-07, |
| "loss": 3.7481, |
| "step": 2248 |
| }, |
| { |
| "epoch": 8.12, |
| "learning_rate": 9.577205882352942e-07, |
| "loss": 3.9156, |
| "step": 2249 |
| }, |
| { |
| "epoch": 8.12, |
| "learning_rate": 9.558823529411764e-07, |
| "loss": 3.6292, |
| "step": 2250 |
| }, |
| { |
| "epoch": 8.13, |
| "learning_rate": 9.540441176470588e-07, |
| "loss": 3.7208, |
| "step": 2251 |
| }, |
| { |
| "epoch": 8.13, |
| "learning_rate": 9.522058823529412e-07, |
| "loss": 3.825, |
| "step": 2252 |
| }, |
| { |
| "epoch": 8.13, |
| "learning_rate": 9.503676470588236e-07, |
| "loss": 3.8504, |
| "step": 2253 |
| }, |
| { |
| "epoch": 8.14, |
| "learning_rate": 9.48529411764706e-07, |
| "loss": 3.7816, |
| "step": 2254 |
| }, |
| { |
| "epoch": 8.14, |
| "learning_rate": 9.466911764705883e-07, |
| "loss": 3.7958, |
| "step": 2255 |
| }, |
| { |
| "epoch": 8.14, |
| "learning_rate": 9.448529411764707e-07, |
| "loss": 3.7548, |
| "step": 2256 |
| }, |
| { |
| "epoch": 8.15, |
| "learning_rate": 9.43014705882353e-07, |
| "loss": 3.6554, |
| "step": 2257 |
| }, |
| { |
| "epoch": 8.15, |
| "learning_rate": 9.411764705882353e-07, |
| "loss": 3.8942, |
| "step": 2258 |
| }, |
| { |
| "epoch": 8.16, |
| "learning_rate": 9.393382352941177e-07, |
| "loss": 3.647, |
| "step": 2259 |
| }, |
| { |
| "epoch": 8.16, |
| "learning_rate": 9.375000000000001e-07, |
| "loss": 3.7699, |
| "step": 2260 |
| }, |
| { |
| "epoch": 8.16, |
| "learning_rate": 9.356617647058824e-07, |
| "loss": 3.7737, |
| "step": 2261 |
| }, |
| { |
| "epoch": 8.17, |
| "learning_rate": 9.338235294117648e-07, |
| "loss": 3.7753, |
| "step": 2262 |
| }, |
| { |
| "epoch": 8.17, |
| "learning_rate": 9.319852941176471e-07, |
| "loss": 3.8769, |
| "step": 2263 |
| }, |
| { |
| "epoch": 8.17, |
| "learning_rate": 9.301470588235295e-07, |
| "loss": 3.8848, |
| "step": 2264 |
| }, |
| { |
| "epoch": 8.18, |
| "learning_rate": 9.283088235294119e-07, |
| "loss": 3.748, |
| "step": 2265 |
| }, |
| { |
| "epoch": 8.18, |
| "learning_rate": 9.264705882352942e-07, |
| "loss": 3.8326, |
| "step": 2266 |
| }, |
| { |
| "epoch": 8.18, |
| "learning_rate": 9.246323529411766e-07, |
| "loss": 3.8675, |
| "step": 2267 |
| }, |
| { |
| "epoch": 8.19, |
| "learning_rate": 9.22794117647059e-07, |
| "loss": 3.8514, |
| "step": 2268 |
| }, |
| { |
| "epoch": 8.19, |
| "learning_rate": 9.209558823529413e-07, |
| "loss": 3.7658, |
| "step": 2269 |
| }, |
| { |
| "epoch": 8.19, |
| "learning_rate": 9.191176470588237e-07, |
| "loss": 3.7384, |
| "step": 2270 |
| }, |
| { |
| "epoch": 8.2, |
| "learning_rate": 9.172794117647058e-07, |
| "loss": 3.7817, |
| "step": 2271 |
| }, |
| { |
| "epoch": 8.2, |
| "learning_rate": 9.154411764705882e-07, |
| "loss": 3.76, |
| "step": 2272 |
| }, |
| { |
| "epoch": 8.21, |
| "learning_rate": 9.136029411764706e-07, |
| "loss": 3.8039, |
| "step": 2273 |
| }, |
| { |
| "epoch": 8.21, |
| "learning_rate": 9.117647058823529e-07, |
| "loss": 3.7519, |
| "step": 2274 |
| }, |
| { |
| "epoch": 8.21, |
| "learning_rate": 9.099264705882353e-07, |
| "loss": 3.8345, |
| "step": 2275 |
| }, |
| { |
| "epoch": 8.22, |
| "learning_rate": 9.080882352941177e-07, |
| "loss": 3.8411, |
| "step": 2276 |
| }, |
| { |
| "epoch": 8.22, |
| "learning_rate": 9.0625e-07, |
| "loss": 3.6126, |
| "step": 2277 |
| }, |
| { |
| "epoch": 8.22, |
| "learning_rate": 9.044117647058824e-07, |
| "loss": 3.851, |
| "step": 2278 |
| }, |
| { |
| "epoch": 8.23, |
| "learning_rate": 9.025735294117648e-07, |
| "loss": 3.7382, |
| "step": 2279 |
| }, |
| { |
| "epoch": 8.23, |
| "learning_rate": 9.007352941176471e-07, |
| "loss": 3.726, |
| "step": 2280 |
| }, |
| { |
| "epoch": 8.23, |
| "learning_rate": 8.988970588235295e-07, |
| "loss": 3.6642, |
| "step": 2281 |
| }, |
| { |
| "epoch": 8.24, |
| "learning_rate": 8.970588235294119e-07, |
| "loss": 3.7449, |
| "step": 2282 |
| }, |
| { |
| "epoch": 8.24, |
| "learning_rate": 8.952205882352942e-07, |
| "loss": 3.6771, |
| "step": 2283 |
| }, |
| { |
| "epoch": 8.25, |
| "learning_rate": 8.933823529411766e-07, |
| "loss": 3.749, |
| "step": 2284 |
| }, |
| { |
| "epoch": 8.25, |
| "learning_rate": 8.91544117647059e-07, |
| "loss": 3.7423, |
| "step": 2285 |
| }, |
| { |
| "epoch": 8.25, |
| "learning_rate": 8.897058823529412e-07, |
| "loss": 3.8984, |
| "step": 2286 |
| }, |
| { |
| "epoch": 8.26, |
| "learning_rate": 8.878676470588236e-07, |
| "loss": 3.8241, |
| "step": 2287 |
| }, |
| { |
| "epoch": 8.26, |
| "learning_rate": 8.86029411764706e-07, |
| "loss": 3.7171, |
| "step": 2288 |
| }, |
| { |
| "epoch": 8.26, |
| "learning_rate": 8.841911764705883e-07, |
| "loss": 3.7772, |
| "step": 2289 |
| }, |
| { |
| "epoch": 8.27, |
| "learning_rate": 8.823529411764707e-07, |
| "loss": 3.7561, |
| "step": 2290 |
| }, |
| { |
| "epoch": 8.27, |
| "learning_rate": 8.805147058823531e-07, |
| "loss": 3.8011, |
| "step": 2291 |
| }, |
| { |
| "epoch": 8.27, |
| "learning_rate": 8.786764705882353e-07, |
| "loss": 3.7904, |
| "step": 2292 |
| }, |
| { |
| "epoch": 8.28, |
| "learning_rate": 8.768382352941177e-07, |
| "loss": 3.7924, |
| "step": 2293 |
| }, |
| { |
| "epoch": 8.28, |
| "learning_rate": 8.75e-07, |
| "loss": 3.6614, |
| "step": 2294 |
| }, |
| { |
| "epoch": 8.29, |
| "learning_rate": 8.731617647058824e-07, |
| "loss": 3.8004, |
| "step": 2295 |
| }, |
| { |
| "epoch": 8.29, |
| "learning_rate": 8.713235294117648e-07, |
| "loss": 3.7227, |
| "step": 2296 |
| }, |
| { |
| "epoch": 8.29, |
| "learning_rate": 8.694852941176471e-07, |
| "loss": 3.8628, |
| "step": 2297 |
| }, |
| { |
| "epoch": 8.3, |
| "learning_rate": 8.676470588235295e-07, |
| "loss": 3.84, |
| "step": 2298 |
| }, |
| { |
| "epoch": 8.3, |
| "learning_rate": 8.658088235294119e-07, |
| "loss": 3.8566, |
| "step": 2299 |
| }, |
| { |
| "epoch": 8.3, |
| "learning_rate": 8.639705882352941e-07, |
| "loss": 3.8929, |
| "step": 2300 |
| }, |
| { |
| "epoch": 8.31, |
| "learning_rate": 8.621323529411765e-07, |
| "loss": 3.8101, |
| "step": 2301 |
| }, |
| { |
| "epoch": 8.31, |
| "learning_rate": 8.602941176470589e-07, |
| "loss": 3.7256, |
| "step": 2302 |
| }, |
| { |
| "epoch": 8.31, |
| "learning_rate": 8.584558823529412e-07, |
| "loss": 3.9177, |
| "step": 2303 |
| }, |
| { |
| "epoch": 8.32, |
| "learning_rate": 8.566176470588236e-07, |
| "loss": 3.8506, |
| "step": 2304 |
| }, |
| { |
| "epoch": 8.32, |
| "learning_rate": 8.54779411764706e-07, |
| "loss": 3.8089, |
| "step": 2305 |
| }, |
| { |
| "epoch": 8.32, |
| "learning_rate": 8.529411764705883e-07, |
| "loss": 3.8267, |
| "step": 2306 |
| }, |
| { |
| "epoch": 8.33, |
| "learning_rate": 8.511029411764707e-07, |
| "loss": 3.8108, |
| "step": 2307 |
| }, |
| { |
| "epoch": 8.33, |
| "learning_rate": 8.49264705882353e-07, |
| "loss": 3.8277, |
| "step": 2308 |
| }, |
| { |
| "epoch": 8.34, |
| "learning_rate": 8.474264705882354e-07, |
| "loss": 3.7748, |
| "step": 2309 |
| }, |
| { |
| "epoch": 8.34, |
| "learning_rate": 8.455882352941178e-07, |
| "loss": 3.7916, |
| "step": 2310 |
| }, |
| { |
| "epoch": 8.34, |
| "learning_rate": 8.437500000000001e-07, |
| "loss": 3.7391, |
| "step": 2311 |
| }, |
| { |
| "epoch": 8.35, |
| "learning_rate": 8.419117647058825e-07, |
| "loss": 3.8705, |
| "step": 2312 |
| }, |
| { |
| "epoch": 8.35, |
| "learning_rate": 8.400735294117649e-07, |
| "loss": 3.7975, |
| "step": 2313 |
| }, |
| { |
| "epoch": 8.35, |
| "learning_rate": 8.38235294117647e-07, |
| "loss": 3.6619, |
| "step": 2314 |
| }, |
| { |
| "epoch": 8.36, |
| "learning_rate": 8.363970588235294e-07, |
| "loss": 3.7262, |
| "step": 2315 |
| }, |
| { |
| "epoch": 8.36, |
| "learning_rate": 8.345588235294117e-07, |
| "loss": 3.7448, |
| "step": 2316 |
| }, |
| { |
| "epoch": 8.36, |
| "learning_rate": 8.327205882352941e-07, |
| "loss": 3.6764, |
| "step": 2317 |
| }, |
| { |
| "epoch": 8.37, |
| "learning_rate": 8.308823529411765e-07, |
| "loss": 3.8003, |
| "step": 2318 |
| }, |
| { |
| "epoch": 8.37, |
| "learning_rate": 8.290441176470588e-07, |
| "loss": 3.658, |
| "step": 2319 |
| }, |
| { |
| "epoch": 8.38, |
| "learning_rate": 8.272058823529412e-07, |
| "loss": 3.8074, |
| "step": 2320 |
| }, |
| { |
| "epoch": 8.38, |
| "learning_rate": 8.253676470588236e-07, |
| "loss": 3.7665, |
| "step": 2321 |
| }, |
| { |
| "epoch": 8.38, |
| "learning_rate": 8.235294117647059e-07, |
| "loss": 3.8429, |
| "step": 2322 |
| }, |
| { |
| "epoch": 8.39, |
| "learning_rate": 8.216911764705883e-07, |
| "loss": 3.891, |
| "step": 2323 |
| }, |
| { |
| "epoch": 8.39, |
| "learning_rate": 8.198529411764707e-07, |
| "loss": 3.8151, |
| "step": 2324 |
| }, |
| { |
| "epoch": 8.39, |
| "learning_rate": 8.18014705882353e-07, |
| "loss": 3.8866, |
| "step": 2325 |
| }, |
| { |
| "epoch": 8.4, |
| "learning_rate": 8.161764705882354e-07, |
| "loss": 3.7529, |
| "step": 2326 |
| }, |
| { |
| "epoch": 8.4, |
| "learning_rate": 8.143382352941178e-07, |
| "loss": 3.8941, |
| "step": 2327 |
| }, |
| { |
| "epoch": 8.4, |
| "learning_rate": 8.125000000000001e-07, |
| "loss": 3.8118, |
| "step": 2328 |
| }, |
| { |
| "epoch": 8.41, |
| "learning_rate": 8.106617647058825e-07, |
| "loss": 3.7765, |
| "step": 2329 |
| }, |
| { |
| "epoch": 8.41, |
| "learning_rate": 8.088235294117648e-07, |
| "loss": 3.8259, |
| "step": 2330 |
| }, |
| { |
| "epoch": 8.42, |
| "learning_rate": 8.069852941176471e-07, |
| "loss": 3.8988, |
| "step": 2331 |
| }, |
| { |
| "epoch": 8.42, |
| "learning_rate": 8.051470588235295e-07, |
| "loss": 3.7111, |
| "step": 2332 |
| }, |
| { |
| "epoch": 8.42, |
| "learning_rate": 8.033088235294119e-07, |
| "loss": 3.7895, |
| "step": 2333 |
| }, |
| { |
| "epoch": 8.43, |
| "learning_rate": 8.014705882352942e-07, |
| "loss": 3.8168, |
| "step": 2334 |
| }, |
| { |
| "epoch": 8.43, |
| "learning_rate": 7.996323529411765e-07, |
| "loss": 3.6895, |
| "step": 2335 |
| }, |
| { |
| "epoch": 8.43, |
| "learning_rate": 7.977941176470588e-07, |
| "loss": 3.705, |
| "step": 2336 |
| }, |
| { |
| "epoch": 8.44, |
| "learning_rate": 7.959558823529412e-07, |
| "loss": 3.7424, |
| "step": 2337 |
| }, |
| { |
| "epoch": 8.44, |
| "learning_rate": 7.941176470588236e-07, |
| "loss": 3.6825, |
| "step": 2338 |
| }, |
| { |
| "epoch": 8.44, |
| "learning_rate": 7.922794117647059e-07, |
| "loss": 3.6312, |
| "step": 2339 |
| }, |
| { |
| "epoch": 8.45, |
| "learning_rate": 7.904411764705883e-07, |
| "loss": 3.6848, |
| "step": 2340 |
| }, |
| { |
| "epoch": 8.45, |
| "learning_rate": 7.886029411764707e-07, |
| "loss": 3.8568, |
| "step": 2341 |
| }, |
| { |
| "epoch": 8.45, |
| "learning_rate": 7.86764705882353e-07, |
| "loss": 3.7696, |
| "step": 2342 |
| }, |
| { |
| "epoch": 8.46, |
| "learning_rate": 7.849264705882354e-07, |
| "loss": 3.7791, |
| "step": 2343 |
| }, |
| { |
| "epoch": 8.46, |
| "learning_rate": 7.830882352941177e-07, |
| "loss": 3.7782, |
| "step": 2344 |
| }, |
| { |
| "epoch": 8.47, |
| "learning_rate": 7.8125e-07, |
| "loss": 3.6817, |
| "step": 2345 |
| }, |
| { |
| "epoch": 8.47, |
| "learning_rate": 7.794117647058824e-07, |
| "loss": 3.7526, |
| "step": 2346 |
| }, |
| { |
| "epoch": 8.47, |
| "learning_rate": 7.775735294117648e-07, |
| "loss": 3.6692, |
| "step": 2347 |
| }, |
| { |
| "epoch": 8.48, |
| "learning_rate": 7.757352941176471e-07, |
| "loss": 3.7771, |
| "step": 2348 |
| }, |
| { |
| "epoch": 8.48, |
| "learning_rate": 7.738970588235295e-07, |
| "loss": 3.7641, |
| "step": 2349 |
| }, |
| { |
| "epoch": 8.48, |
| "learning_rate": 7.720588235294119e-07, |
| "loss": 3.7224, |
| "step": 2350 |
| }, |
| { |
| "epoch": 8.49, |
| "learning_rate": 7.702205882352942e-07, |
| "loss": 3.9045, |
| "step": 2351 |
| }, |
| { |
| "epoch": 8.49, |
| "learning_rate": 7.683823529411766e-07, |
| "loss": 3.6777, |
| "step": 2352 |
| }, |
| { |
| "epoch": 8.49, |
| "learning_rate": 7.66544117647059e-07, |
| "loss": 3.7719, |
| "step": 2353 |
| }, |
| { |
| "epoch": 8.5, |
| "learning_rate": 7.647058823529413e-07, |
| "loss": 3.7168, |
| "step": 2354 |
| }, |
| { |
| "epoch": 8.5, |
| "learning_rate": 7.628676470588237e-07, |
| "loss": 3.8028, |
| "step": 2355 |
| }, |
| { |
| "epoch": 8.51, |
| "learning_rate": 7.610294117647059e-07, |
| "loss": 3.8534, |
| "step": 2356 |
| }, |
| { |
| "epoch": 8.51, |
| "learning_rate": 7.591911764705883e-07, |
| "loss": 3.6416, |
| "step": 2357 |
| }, |
| { |
| "epoch": 8.51, |
| "learning_rate": 7.573529411764706e-07, |
| "loss": 3.7058, |
| "step": 2358 |
| }, |
| { |
| "epoch": 8.52, |
| "learning_rate": 7.555147058823529e-07, |
| "loss": 3.9277, |
| "step": 2359 |
| }, |
| { |
| "epoch": 8.52, |
| "learning_rate": 7.536764705882353e-07, |
| "loss": 3.8308, |
| "step": 2360 |
| }, |
| { |
| "epoch": 8.52, |
| "learning_rate": 7.518382352941176e-07, |
| "loss": 3.7528, |
| "step": 2361 |
| }, |
| { |
| "epoch": 8.53, |
| "learning_rate": 7.5e-07, |
| "loss": 3.6644, |
| "step": 2362 |
| }, |
| { |
| "epoch": 8.53, |
| "learning_rate": 7.481617647058824e-07, |
| "loss": 3.8808, |
| "step": 2363 |
| }, |
| { |
| "epoch": 8.53, |
| "learning_rate": 7.463235294117647e-07, |
| "loss": 3.8774, |
| "step": 2364 |
| }, |
| { |
| "epoch": 8.54, |
| "learning_rate": 7.444852941176471e-07, |
| "loss": 3.8571, |
| "step": 2365 |
| }, |
| { |
| "epoch": 8.54, |
| "learning_rate": 7.426470588235295e-07, |
| "loss": 3.6985, |
| "step": 2366 |
| }, |
| { |
| "epoch": 8.55, |
| "learning_rate": 7.408088235294118e-07, |
| "loss": 3.7546, |
| "step": 2367 |
| }, |
| { |
| "epoch": 8.55, |
| "learning_rate": 7.389705882352942e-07, |
| "loss": 3.7884, |
| "step": 2368 |
| }, |
| { |
| "epoch": 8.55, |
| "learning_rate": 7.371323529411766e-07, |
| "loss": 3.8495, |
| "step": 2369 |
| }, |
| { |
| "epoch": 8.56, |
| "learning_rate": 7.352941176470589e-07, |
| "loss": 3.8278, |
| "step": 2370 |
| }, |
| { |
| "epoch": 8.56, |
| "learning_rate": 7.334558823529413e-07, |
| "loss": 3.8158, |
| "step": 2371 |
| }, |
| { |
| "epoch": 8.56, |
| "learning_rate": 7.316176470588237e-07, |
| "loss": 3.7087, |
| "step": 2372 |
| }, |
| { |
| "epoch": 8.57, |
| "learning_rate": 7.297794117647059e-07, |
| "loss": 3.7061, |
| "step": 2373 |
| }, |
| { |
| "epoch": 8.57, |
| "learning_rate": 7.279411764705883e-07, |
| "loss": 3.7877, |
| "step": 2374 |
| }, |
| { |
| "epoch": 8.57, |
| "learning_rate": 7.261029411764707e-07, |
| "loss": 3.7624, |
| "step": 2375 |
| }, |
| { |
| "epoch": 8.58, |
| "learning_rate": 7.24264705882353e-07, |
| "loss": 3.6838, |
| "step": 2376 |
| }, |
| { |
| "epoch": 8.58, |
| "learning_rate": 7.224264705882353e-07, |
| "loss": 3.6878, |
| "step": 2377 |
| }, |
| { |
| "epoch": 8.58, |
| "learning_rate": 7.205882352941176e-07, |
| "loss": 3.6837, |
| "step": 2378 |
| }, |
| { |
| "epoch": 8.59, |
| "learning_rate": 7.1875e-07, |
| "loss": 3.6522, |
| "step": 2379 |
| }, |
| { |
| "epoch": 8.59, |
| "learning_rate": 7.169117647058824e-07, |
| "loss": 3.8438, |
| "step": 2380 |
| }, |
| { |
| "epoch": 8.6, |
| "learning_rate": 7.150735294117647e-07, |
| "loss": 3.7046, |
| "step": 2381 |
| }, |
| { |
| "epoch": 8.6, |
| "learning_rate": 7.132352941176471e-07, |
| "loss": 3.8354, |
| "step": 2382 |
| }, |
| { |
| "epoch": 8.6, |
| "learning_rate": 7.113970588235295e-07, |
| "loss": 3.7745, |
| "step": 2383 |
| }, |
| { |
| "epoch": 8.61, |
| "learning_rate": 7.095588235294118e-07, |
| "loss": 3.879, |
| "step": 2384 |
| }, |
| { |
| "epoch": 8.61, |
| "learning_rate": 7.077205882352942e-07, |
| "loss": 3.8688, |
| "step": 2385 |
| }, |
| { |
| "epoch": 8.61, |
| "learning_rate": 7.058823529411766e-07, |
| "loss": 3.9224, |
| "step": 2386 |
| }, |
| { |
| "epoch": 8.62, |
| "learning_rate": 7.040441176470588e-07, |
| "loss": 3.7479, |
| "step": 2387 |
| }, |
| { |
| "epoch": 8.62, |
| "learning_rate": 7.022058823529412e-07, |
| "loss": 3.7001, |
| "step": 2388 |
| }, |
| { |
| "epoch": 8.62, |
| "learning_rate": 7.003676470588236e-07, |
| "loss": 3.8452, |
| "step": 2389 |
| }, |
| { |
| "epoch": 8.63, |
| "learning_rate": 6.985294117647059e-07, |
| "loss": 3.7145, |
| "step": 2390 |
| }, |
| { |
| "epoch": 8.63, |
| "learning_rate": 6.966911764705883e-07, |
| "loss": 3.6996, |
| "step": 2391 |
| }, |
| { |
| "epoch": 8.64, |
| "learning_rate": 6.948529411764707e-07, |
| "loss": 3.6727, |
| "step": 2392 |
| }, |
| { |
| "epoch": 8.64, |
| "learning_rate": 6.93014705882353e-07, |
| "loss": 3.7747, |
| "step": 2393 |
| }, |
| { |
| "epoch": 8.64, |
| "learning_rate": 6.911764705882354e-07, |
| "loss": 3.7433, |
| "step": 2394 |
| }, |
| { |
| "epoch": 8.65, |
| "learning_rate": 6.893382352941178e-07, |
| "loss": 3.8303, |
| "step": 2395 |
| }, |
| { |
| "epoch": 8.65, |
| "learning_rate": 6.875000000000001e-07, |
| "loss": 3.6821, |
| "step": 2396 |
| }, |
| { |
| "epoch": 8.65, |
| "learning_rate": 6.856617647058825e-07, |
| "loss": 3.648, |
| "step": 2397 |
| }, |
| { |
| "epoch": 8.66, |
| "learning_rate": 6.838235294117649e-07, |
| "loss": 3.7825, |
| "step": 2398 |
| }, |
| { |
| "epoch": 8.66, |
| "learning_rate": 6.819852941176471e-07, |
| "loss": 3.7121, |
| "step": 2399 |
| }, |
| { |
| "epoch": 8.66, |
| "learning_rate": 6.801470588235295e-07, |
| "loss": 3.7382, |
| "step": 2400 |
| }, |
| { |
| "epoch": 8.67, |
| "learning_rate": 6.783088235294117e-07, |
| "loss": 3.7332, |
| "step": 2401 |
| }, |
| { |
| "epoch": 8.67, |
| "learning_rate": 6.764705882352941e-07, |
| "loss": 3.8153, |
| "step": 2402 |
| }, |
| { |
| "epoch": 8.68, |
| "learning_rate": 6.746323529411765e-07, |
| "loss": 3.9339, |
| "step": 2403 |
| }, |
| { |
| "epoch": 8.68, |
| "learning_rate": 6.727941176470588e-07, |
| "loss": 3.6879, |
| "step": 2404 |
| }, |
| { |
| "epoch": 8.68, |
| "learning_rate": 6.709558823529412e-07, |
| "loss": 3.7747, |
| "step": 2405 |
| }, |
| { |
| "epoch": 8.69, |
| "learning_rate": 6.691176470588236e-07, |
| "loss": 3.795, |
| "step": 2406 |
| }, |
| { |
| "epoch": 8.69, |
| "learning_rate": 6.672794117647059e-07, |
| "loss": 3.8234, |
| "step": 2407 |
| }, |
| { |
| "epoch": 8.69, |
| "learning_rate": 6.654411764705883e-07, |
| "loss": 3.8175, |
| "step": 2408 |
| }, |
| { |
| "epoch": 8.7, |
| "learning_rate": 6.636029411764706e-07, |
| "loss": 3.7169, |
| "step": 2409 |
| }, |
| { |
| "epoch": 8.7, |
| "learning_rate": 6.61764705882353e-07, |
| "loss": 3.7274, |
| "step": 2410 |
| }, |
| { |
| "epoch": 8.7, |
| "learning_rate": 6.599264705882354e-07, |
| "loss": 3.7801, |
| "step": 2411 |
| }, |
| { |
| "epoch": 8.71, |
| "learning_rate": 6.580882352941177e-07, |
| "loss": 3.8364, |
| "step": 2412 |
| }, |
| { |
| "epoch": 8.71, |
| "learning_rate": 6.562500000000001e-07, |
| "loss": 3.872, |
| "step": 2413 |
| }, |
| { |
| "epoch": 8.71, |
| "learning_rate": 6.544117647058825e-07, |
| "loss": 3.8348, |
| "step": 2414 |
| }, |
| { |
| "epoch": 8.72, |
| "learning_rate": 6.525735294117648e-07, |
| "loss": 3.8543, |
| "step": 2415 |
| }, |
| { |
| "epoch": 8.72, |
| "learning_rate": 6.507352941176472e-07, |
| "loss": 3.8511, |
| "step": 2416 |
| }, |
| { |
| "epoch": 8.73, |
| "learning_rate": 6.488970588235295e-07, |
| "loss": 3.82, |
| "step": 2417 |
| }, |
| { |
| "epoch": 8.73, |
| "learning_rate": 6.470588235294118e-07, |
| "loss": 3.8348, |
| "step": 2418 |
| }, |
| { |
| "epoch": 8.73, |
| "learning_rate": 6.452205882352942e-07, |
| "loss": 3.6814, |
| "step": 2419 |
| }, |
| { |
| "epoch": 8.74, |
| "learning_rate": 6.433823529411764e-07, |
| "loss": 3.7721, |
| "step": 2420 |
| }, |
| { |
| "epoch": 8.74, |
| "learning_rate": 6.415441176470588e-07, |
| "loss": 3.6613, |
| "step": 2421 |
| }, |
| { |
| "epoch": 8.74, |
| "learning_rate": 6.397058823529412e-07, |
| "loss": 3.7546, |
| "step": 2422 |
| }, |
| { |
| "epoch": 8.75, |
| "learning_rate": 6.378676470588235e-07, |
| "loss": 3.8473, |
| "step": 2423 |
| }, |
| { |
| "epoch": 8.75, |
| "learning_rate": 6.360294117647059e-07, |
| "loss": 3.7451, |
| "step": 2424 |
| }, |
| { |
| "epoch": 8.75, |
| "learning_rate": 6.341911764705883e-07, |
| "loss": 3.6373, |
| "step": 2425 |
| }, |
| { |
| "epoch": 8.76, |
| "learning_rate": 6.323529411764706e-07, |
| "loss": 3.7275, |
| "step": 2426 |
| }, |
| { |
| "epoch": 8.76, |
| "learning_rate": 6.30514705882353e-07, |
| "loss": 3.743, |
| "step": 2427 |
| }, |
| { |
| "epoch": 8.77, |
| "learning_rate": 6.286764705882354e-07, |
| "loss": 3.8529, |
| "step": 2428 |
| }, |
| { |
| "epoch": 8.77, |
| "learning_rate": 6.268382352941177e-07, |
| "loss": 3.7222, |
| "step": 2429 |
| }, |
| { |
| "epoch": 8.77, |
| "learning_rate": 6.25e-07, |
| "loss": 3.7621, |
| "step": 2430 |
| }, |
| { |
| "epoch": 8.78, |
| "learning_rate": 6.231617647058824e-07, |
| "loss": 3.755, |
| "step": 2431 |
| }, |
| { |
| "epoch": 8.78, |
| "learning_rate": 6.213235294117647e-07, |
| "loss": 3.8091, |
| "step": 2432 |
| }, |
| { |
| "epoch": 8.78, |
| "learning_rate": 6.194852941176471e-07, |
| "loss": 3.8066, |
| "step": 2433 |
| }, |
| { |
| "epoch": 8.79, |
| "learning_rate": 6.176470588235295e-07, |
| "loss": 3.6961, |
| "step": 2434 |
| }, |
| { |
| "epoch": 8.79, |
| "learning_rate": 6.158088235294118e-07, |
| "loss": 3.878, |
| "step": 2435 |
| }, |
| { |
| "epoch": 8.79, |
| "learning_rate": 6.139705882352941e-07, |
| "loss": 3.7094, |
| "step": 2436 |
| }, |
| { |
| "epoch": 8.8, |
| "learning_rate": 6.121323529411765e-07, |
| "loss": 3.8669, |
| "step": 2437 |
| }, |
| { |
| "epoch": 8.8, |
| "learning_rate": 6.102941176470589e-07, |
| "loss": 3.7921, |
| "step": 2438 |
| }, |
| { |
| "epoch": 8.81, |
| "learning_rate": 6.084558823529412e-07, |
| "loss": 3.7094, |
| "step": 2439 |
| }, |
| { |
| "epoch": 8.81, |
| "learning_rate": 6.066176470588236e-07, |
| "loss": 3.622, |
| "step": 2440 |
| }, |
| { |
| "epoch": 8.81, |
| "learning_rate": 6.04779411764706e-07, |
| "loss": 3.7612, |
| "step": 2441 |
| }, |
| { |
| "epoch": 8.82, |
| "learning_rate": 6.029411764705883e-07, |
| "loss": 3.8429, |
| "step": 2442 |
| }, |
| { |
| "epoch": 8.82, |
| "learning_rate": 6.011029411764707e-07, |
| "loss": 3.7919, |
| "step": 2443 |
| }, |
| { |
| "epoch": 8.82, |
| "learning_rate": 5.992647058823531e-07, |
| "loss": 3.7504, |
| "step": 2444 |
| }, |
| { |
| "epoch": 8.83, |
| "learning_rate": 5.974264705882353e-07, |
| "loss": 3.845, |
| "step": 2445 |
| }, |
| { |
| "epoch": 8.83, |
| "learning_rate": 5.955882352941176e-07, |
| "loss": 3.9041, |
| "step": 2446 |
| }, |
| { |
| "epoch": 8.83, |
| "learning_rate": 5.9375e-07, |
| "loss": 3.7744, |
| "step": 2447 |
| }, |
| { |
| "epoch": 8.84, |
| "learning_rate": 5.919117647058824e-07, |
| "loss": 3.7116, |
| "step": 2448 |
| }, |
| { |
| "epoch": 8.84, |
| "learning_rate": 5.900735294117647e-07, |
| "loss": 3.8122, |
| "step": 2449 |
| }, |
| { |
| "epoch": 8.84, |
| "learning_rate": 5.882352941176471e-07, |
| "loss": 3.8511, |
| "step": 2450 |
| }, |
| { |
| "epoch": 8.85, |
| "learning_rate": 5.863970588235295e-07, |
| "loss": 3.7406, |
| "step": 2451 |
| }, |
| { |
| "epoch": 8.85, |
| "learning_rate": 5.845588235294118e-07, |
| "loss": 3.9195, |
| "step": 2452 |
| }, |
| { |
| "epoch": 8.86, |
| "learning_rate": 5.827205882352942e-07, |
| "loss": 3.9183, |
| "step": 2453 |
| }, |
| { |
| "epoch": 8.86, |
| "learning_rate": 5.808823529411765e-07, |
| "loss": 3.8855, |
| "step": 2454 |
| }, |
| { |
| "epoch": 8.86, |
| "learning_rate": 5.790441176470589e-07, |
| "loss": 3.7123, |
| "step": 2455 |
| }, |
| { |
| "epoch": 8.87, |
| "learning_rate": 5.772058823529413e-07, |
| "loss": 3.7221, |
| "step": 2456 |
| }, |
| { |
| "epoch": 8.87, |
| "learning_rate": 5.753676470588236e-07, |
| "loss": 3.794, |
| "step": 2457 |
| }, |
| { |
| "epoch": 8.87, |
| "learning_rate": 5.735294117647059e-07, |
| "loss": 3.7516, |
| "step": 2458 |
| }, |
| { |
| "epoch": 8.88, |
| "learning_rate": 5.716911764705882e-07, |
| "loss": 3.8284, |
| "step": 2459 |
| }, |
| { |
| "epoch": 8.88, |
| "learning_rate": 5.698529411764706e-07, |
| "loss": 3.7762, |
| "step": 2460 |
| }, |
| { |
| "epoch": 8.88, |
| "learning_rate": 5.680147058823529e-07, |
| "loss": 3.7962, |
| "step": 2461 |
| }, |
| { |
| "epoch": 8.89, |
| "learning_rate": 5.661764705882353e-07, |
| "loss": 3.8181, |
| "step": 2462 |
| }, |
| { |
| "epoch": 8.89, |
| "learning_rate": 5.643382352941177e-07, |
| "loss": 3.6142, |
| "step": 2463 |
| }, |
| { |
| "epoch": 8.9, |
| "learning_rate": 5.625e-07, |
| "loss": 3.924, |
| "step": 2464 |
| }, |
| { |
| "epoch": 8.9, |
| "learning_rate": 5.606617647058824e-07, |
| "loss": 3.7873, |
| "step": 2465 |
| }, |
| { |
| "epoch": 8.9, |
| "learning_rate": 5.588235294117648e-07, |
| "loss": 3.6543, |
| "step": 2466 |
| }, |
| { |
| "epoch": 8.91, |
| "learning_rate": 5.569852941176471e-07, |
| "loss": 3.696, |
| "step": 2467 |
| }, |
| { |
| "epoch": 8.91, |
| "learning_rate": 5.551470588235294e-07, |
| "loss": 3.7413, |
| "step": 2468 |
| }, |
| { |
| "epoch": 8.91, |
| "learning_rate": 5.533088235294118e-07, |
| "loss": 3.8391, |
| "step": 2469 |
| }, |
| { |
| "epoch": 8.92, |
| "learning_rate": 5.514705882352942e-07, |
| "loss": 3.7809, |
| "step": 2470 |
| }, |
| { |
| "epoch": 8.92, |
| "learning_rate": 5.496323529411765e-07, |
| "loss": 3.674, |
| "step": 2471 |
| }, |
| { |
| "epoch": 8.92, |
| "learning_rate": 5.477941176470589e-07, |
| "loss": 3.7189, |
| "step": 2472 |
| }, |
| { |
| "epoch": 8.93, |
| "learning_rate": 5.459558823529413e-07, |
| "loss": 3.7992, |
| "step": 2473 |
| }, |
| { |
| "epoch": 8.93, |
| "learning_rate": 5.441176470588235e-07, |
| "loss": 3.7355, |
| "step": 2474 |
| }, |
| { |
| "epoch": 8.94, |
| "learning_rate": 5.422794117647059e-07, |
| "loss": 3.8828, |
| "step": 2475 |
| }, |
| { |
| "epoch": 8.94, |
| "learning_rate": 5.404411764705883e-07, |
| "loss": 3.6782, |
| "step": 2476 |
| }, |
| { |
| "epoch": 8.94, |
| "learning_rate": 5.386029411764706e-07, |
| "loss": 3.6093, |
| "step": 2477 |
| }, |
| { |
| "epoch": 8.95, |
| "learning_rate": 5.367647058823529e-07, |
| "loss": 3.691, |
| "step": 2478 |
| }, |
| { |
| "epoch": 8.95, |
| "learning_rate": 5.349264705882353e-07, |
| "loss": 3.6619, |
| "step": 2479 |
| }, |
| { |
| "epoch": 8.95, |
| "learning_rate": 5.330882352941177e-07, |
| "loss": 3.8944, |
| "step": 2480 |
| }, |
| { |
| "epoch": 8.96, |
| "learning_rate": 5.3125e-07, |
| "loss": 3.8312, |
| "step": 2481 |
| }, |
| { |
| "epoch": 8.96, |
| "learning_rate": 5.294117647058824e-07, |
| "loss": 3.7363, |
| "step": 2482 |
| }, |
| { |
| "epoch": 8.96, |
| "learning_rate": 5.275735294117648e-07, |
| "loss": 3.5276, |
| "step": 2483 |
| }, |
| { |
| "epoch": 8.97, |
| "learning_rate": 5.257352941176471e-07, |
| "loss": 3.8608, |
| "step": 2484 |
| }, |
| { |
| "epoch": 8.97, |
| "learning_rate": 5.238970588235295e-07, |
| "loss": 3.8745, |
| "step": 2485 |
| }, |
| { |
| "epoch": 8.97, |
| "learning_rate": 5.220588235294119e-07, |
| "loss": 3.7958, |
| "step": 2486 |
| }, |
| { |
| "epoch": 8.98, |
| "learning_rate": 5.202205882352942e-07, |
| "loss": 3.8531, |
| "step": 2487 |
| }, |
| { |
| "epoch": 8.98, |
| "learning_rate": 5.183823529411766e-07, |
| "loss": 3.7045, |
| "step": 2488 |
| }, |
| { |
| "epoch": 8.99, |
| "learning_rate": 5.165441176470588e-07, |
| "loss": 3.8388, |
| "step": 2489 |
| }, |
| { |
| "epoch": 8.99, |
| "learning_rate": 5.147058823529412e-07, |
| "loss": 3.8649, |
| "step": 2490 |
| }, |
| { |
| "epoch": 8.99, |
| "learning_rate": 5.128676470588235e-07, |
| "loss": 3.9795, |
| "step": 2491 |
| }, |
| { |
| "epoch": 9.0, |
| "learning_rate": 5.110294117647059e-07, |
| "loss": 3.6293, |
| "step": 2492 |
| }, |
| { |
| "epoch": 9.0, |
| "learning_rate": 5.091911764705883e-07, |
| "loss": 3.8258, |
| "step": 2493 |
| }, |
| { |
| "epoch": 9.0, |
| "eval_accuracy": 0.16756756756756758, |
| "eval_loss": 3.747398614883423, |
| "eval_runtime": 142.6563, |
| "eval_samples_per_second": 2.594, |
| "eval_steps_per_second": 0.652, |
| "step": 2493 |
| }, |
| { |
| "epoch": 9.0, |
| "learning_rate": 5.073529411764706e-07, |
| "loss": 3.7742, |
| "step": 2494 |
| }, |
| { |
| "epoch": 9.01, |
| "learning_rate": 5.05514705882353e-07, |
| "loss": 3.7896, |
| "step": 2495 |
| }, |
| { |
| "epoch": 9.01, |
| "learning_rate": 5.036764705882354e-07, |
| "loss": 3.767, |
| "step": 2496 |
| }, |
| { |
| "epoch": 9.01, |
| "learning_rate": 5.018382352941177e-07, |
| "loss": 3.6757, |
| "step": 2497 |
| }, |
| { |
| "epoch": 9.02, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 3.7333, |
| "step": 2498 |
| }, |
| { |
| "epoch": 9.02, |
| "learning_rate": 4.981617647058825e-07, |
| "loss": 3.7456, |
| "step": 2499 |
| }, |
| { |
| "epoch": 9.03, |
| "learning_rate": 4.963235294117648e-07, |
| "loss": 3.7664, |
| "step": 2500 |
| }, |
| { |
| "epoch": 9.03, |
| "learning_rate": 4.944852941176471e-07, |
| "loss": 3.6546, |
| "step": 2501 |
| }, |
| { |
| "epoch": 9.03, |
| "learning_rate": 4.926470588235295e-07, |
| "loss": 3.7102, |
| "step": 2502 |
| }, |
| { |
| "epoch": 9.04, |
| "learning_rate": 4.908088235294118e-07, |
| "loss": 3.8283, |
| "step": 2503 |
| }, |
| { |
| "epoch": 9.04, |
| "learning_rate": 4.889705882352941e-07, |
| "loss": 3.7896, |
| "step": 2504 |
| }, |
| { |
| "epoch": 9.04, |
| "learning_rate": 4.871323529411765e-07, |
| "loss": 3.8111, |
| "step": 2505 |
| }, |
| { |
| "epoch": 9.05, |
| "learning_rate": 4.852941176470588e-07, |
| "loss": 3.6198, |
| "step": 2506 |
| }, |
| { |
| "epoch": 9.05, |
| "learning_rate": 4.834558823529412e-07, |
| "loss": 3.7278, |
| "step": 2507 |
| }, |
| { |
| "epoch": 9.05, |
| "learning_rate": 4.816176470588236e-07, |
| "loss": 3.7027, |
| "step": 2508 |
| }, |
| { |
| "epoch": 9.06, |
| "learning_rate": 4.797794117647059e-07, |
| "loss": 3.8305, |
| "step": 2509 |
| }, |
| { |
| "epoch": 9.06, |
| "learning_rate": 4.779411764705882e-07, |
| "loss": 3.7641, |
| "step": 2510 |
| }, |
| { |
| "epoch": 9.06, |
| "learning_rate": 4.761029411764706e-07, |
| "loss": 3.6554, |
| "step": 2511 |
| }, |
| { |
| "epoch": 9.07, |
| "learning_rate": 4.74264705882353e-07, |
| "loss": 3.7905, |
| "step": 2512 |
| }, |
| { |
| "epoch": 9.07, |
| "learning_rate": 4.7242647058823533e-07, |
| "loss": 3.5754, |
| "step": 2513 |
| }, |
| { |
| "epoch": 9.08, |
| "learning_rate": 4.7058823529411767e-07, |
| "loss": 3.8388, |
| "step": 2514 |
| }, |
| { |
| "epoch": 9.08, |
| "learning_rate": 4.6875000000000006e-07, |
| "loss": 3.8282, |
| "step": 2515 |
| }, |
| { |
| "epoch": 9.08, |
| "learning_rate": 4.669117647058824e-07, |
| "loss": 3.8828, |
| "step": 2516 |
| }, |
| { |
| "epoch": 9.09, |
| "learning_rate": 4.6507352941176475e-07, |
| "loss": 3.7438, |
| "step": 2517 |
| }, |
| { |
| "epoch": 9.09, |
| "learning_rate": 4.632352941176471e-07, |
| "loss": 3.8017, |
| "step": 2518 |
| }, |
| { |
| "epoch": 9.09, |
| "learning_rate": 4.613970588235295e-07, |
| "loss": 3.7509, |
| "step": 2519 |
| }, |
| { |
| "epoch": 9.1, |
| "learning_rate": 4.5955882352941183e-07, |
| "loss": 3.8022, |
| "step": 2520 |
| }, |
| { |
| "epoch": 9.1, |
| "learning_rate": 4.577205882352941e-07, |
| "loss": 3.7026, |
| "step": 2521 |
| }, |
| { |
| "epoch": 9.1, |
| "learning_rate": 4.5588235294117646e-07, |
| "loss": 3.7332, |
| "step": 2522 |
| }, |
| { |
| "epoch": 9.11, |
| "learning_rate": 4.5404411764705886e-07, |
| "loss": 3.7656, |
| "step": 2523 |
| }, |
| { |
| "epoch": 9.11, |
| "learning_rate": 4.522058823529412e-07, |
| "loss": 3.8702, |
| "step": 2524 |
| }, |
| { |
| "epoch": 9.12, |
| "learning_rate": 4.5036764705882354e-07, |
| "loss": 3.8981, |
| "step": 2525 |
| }, |
| { |
| "epoch": 9.12, |
| "learning_rate": 4.4852941176470594e-07, |
| "loss": 3.7603, |
| "step": 2526 |
| }, |
| { |
| "epoch": 9.12, |
| "learning_rate": 4.466911764705883e-07, |
| "loss": 3.6499, |
| "step": 2527 |
| }, |
| { |
| "epoch": 9.13, |
| "learning_rate": 4.448529411764706e-07, |
| "loss": 3.7807, |
| "step": 2528 |
| }, |
| { |
| "epoch": 9.13, |
| "learning_rate": 4.43014705882353e-07, |
| "loss": 3.7718, |
| "step": 2529 |
| }, |
| { |
| "epoch": 9.13, |
| "learning_rate": 4.4117647058823536e-07, |
| "loss": 3.7782, |
| "step": 2530 |
| }, |
| { |
| "epoch": 9.14, |
| "learning_rate": 4.3933823529411765e-07, |
| "loss": 3.8509, |
| "step": 2531 |
| }, |
| { |
| "epoch": 9.14, |
| "learning_rate": 4.375e-07, |
| "loss": 3.8103, |
| "step": 2532 |
| }, |
| { |
| "epoch": 9.14, |
| "learning_rate": 4.356617647058824e-07, |
| "loss": 3.6662, |
| "step": 2533 |
| }, |
| { |
| "epoch": 9.15, |
| "learning_rate": 4.3382352941176473e-07, |
| "loss": 3.6027, |
| "step": 2534 |
| }, |
| { |
| "epoch": 9.15, |
| "learning_rate": 4.319852941176471e-07, |
| "loss": 3.6919, |
| "step": 2535 |
| }, |
| { |
| "epoch": 9.16, |
| "learning_rate": 4.3014705882352947e-07, |
| "loss": 3.7634, |
| "step": 2536 |
| }, |
| { |
| "epoch": 9.16, |
| "learning_rate": 4.283088235294118e-07, |
| "loss": 3.7289, |
| "step": 2537 |
| }, |
| { |
| "epoch": 9.16, |
| "learning_rate": 4.2647058823529415e-07, |
| "loss": 3.7774, |
| "step": 2538 |
| }, |
| { |
| "epoch": 9.17, |
| "learning_rate": 4.246323529411765e-07, |
| "loss": 3.7781, |
| "step": 2539 |
| }, |
| { |
| "epoch": 9.17, |
| "learning_rate": 4.227941176470589e-07, |
| "loss": 3.8441, |
| "step": 2540 |
| }, |
| { |
| "epoch": 9.17, |
| "learning_rate": 4.2095588235294123e-07, |
| "loss": 3.6877, |
| "step": 2541 |
| }, |
| { |
| "epoch": 9.18, |
| "learning_rate": 4.191176470588235e-07, |
| "loss": 3.8197, |
| "step": 2542 |
| }, |
| { |
| "epoch": 9.18, |
| "learning_rate": 4.1727941176470587e-07, |
| "loss": 3.7825, |
| "step": 2543 |
| }, |
| { |
| "epoch": 9.18, |
| "learning_rate": 4.1544117647058826e-07, |
| "loss": 3.7872, |
| "step": 2544 |
| }, |
| { |
| "epoch": 9.19, |
| "learning_rate": 4.136029411764706e-07, |
| "loss": 3.7787, |
| "step": 2545 |
| }, |
| { |
| "epoch": 9.19, |
| "learning_rate": 4.1176470588235295e-07, |
| "loss": 3.7984, |
| "step": 2546 |
| }, |
| { |
| "epoch": 9.19, |
| "learning_rate": 4.0992647058823534e-07, |
| "loss": 3.7131, |
| "step": 2547 |
| }, |
| { |
| "epoch": 9.2, |
| "learning_rate": 4.080882352941177e-07, |
| "loss": 3.7869, |
| "step": 2548 |
| }, |
| { |
| "epoch": 9.2, |
| "learning_rate": 4.0625000000000003e-07, |
| "loss": 3.8407, |
| "step": 2549 |
| }, |
| { |
| "epoch": 9.21, |
| "learning_rate": 4.044117647058824e-07, |
| "loss": 3.8945, |
| "step": 2550 |
| }, |
| { |
| "epoch": 9.21, |
| "learning_rate": 4.0257352941176477e-07, |
| "loss": 3.697, |
| "step": 2551 |
| }, |
| { |
| "epoch": 9.21, |
| "learning_rate": 4.007352941176471e-07, |
| "loss": 3.7087, |
| "step": 2552 |
| }, |
| { |
| "epoch": 9.22, |
| "learning_rate": 3.988970588235294e-07, |
| "loss": 3.747, |
| "step": 2553 |
| }, |
| { |
| "epoch": 9.22, |
| "learning_rate": 3.970588235294118e-07, |
| "loss": 3.8015, |
| "step": 2554 |
| }, |
| { |
| "epoch": 9.22, |
| "learning_rate": 3.9522058823529414e-07, |
| "loss": 3.6433, |
| "step": 2555 |
| }, |
| { |
| "epoch": 9.23, |
| "learning_rate": 3.933823529411765e-07, |
| "loss": 3.8178, |
| "step": 2556 |
| }, |
| { |
| "epoch": 9.23, |
| "learning_rate": 3.915441176470589e-07, |
| "loss": 3.8249, |
| "step": 2557 |
| }, |
| { |
| "epoch": 9.23, |
| "learning_rate": 3.897058823529412e-07, |
| "loss": 3.8446, |
| "step": 2558 |
| }, |
| { |
| "epoch": 9.24, |
| "learning_rate": 3.8786764705882356e-07, |
| "loss": 3.7414, |
| "step": 2559 |
| }, |
| { |
| "epoch": 9.24, |
| "learning_rate": 3.8602941176470595e-07, |
| "loss": 3.8151, |
| "step": 2560 |
| }, |
| { |
| "epoch": 9.25, |
| "learning_rate": 3.841911764705883e-07, |
| "loss": 3.688, |
| "step": 2561 |
| }, |
| { |
| "epoch": 9.25, |
| "learning_rate": 3.8235294117647064e-07, |
| "loss": 3.7028, |
| "step": 2562 |
| }, |
| { |
| "epoch": 9.25, |
| "learning_rate": 3.8051470588235293e-07, |
| "loss": 3.636, |
| "step": 2563 |
| }, |
| { |
| "epoch": 9.26, |
| "learning_rate": 3.786764705882353e-07, |
| "loss": 3.652, |
| "step": 2564 |
| }, |
| { |
| "epoch": 9.26, |
| "learning_rate": 3.7683823529411767e-07, |
| "loss": 3.7657, |
| "step": 2565 |
| }, |
| { |
| "epoch": 9.26, |
| "learning_rate": 3.75e-07, |
| "loss": 3.7984, |
| "step": 2566 |
| }, |
| { |
| "epoch": 9.27, |
| "learning_rate": 3.7316176470588235e-07, |
| "loss": 3.7719, |
| "step": 2567 |
| }, |
| { |
| "epoch": 9.27, |
| "learning_rate": 3.7132352941176475e-07, |
| "loss": 3.7794, |
| "step": 2568 |
| }, |
| { |
| "epoch": 9.27, |
| "learning_rate": 3.694852941176471e-07, |
| "loss": 3.8205, |
| "step": 2569 |
| }, |
| { |
| "epoch": 9.28, |
| "learning_rate": 3.6764705882352943e-07, |
| "loss": 3.7515, |
| "step": 2570 |
| }, |
| { |
| "epoch": 9.28, |
| "learning_rate": 3.6580882352941183e-07, |
| "loss": 3.7615, |
| "step": 2571 |
| }, |
| { |
| "epoch": 9.29, |
| "learning_rate": 3.6397058823529417e-07, |
| "loss": 3.7461, |
| "step": 2572 |
| }, |
| { |
| "epoch": 9.29, |
| "learning_rate": 3.621323529411765e-07, |
| "loss": 3.7666, |
| "step": 2573 |
| }, |
| { |
| "epoch": 9.29, |
| "learning_rate": 3.602941176470588e-07, |
| "loss": 3.7117, |
| "step": 2574 |
| }, |
| { |
| "epoch": 9.3, |
| "learning_rate": 3.584558823529412e-07, |
| "loss": 3.8768, |
| "step": 2575 |
| }, |
| { |
| "epoch": 9.3, |
| "learning_rate": 3.5661764705882354e-07, |
| "loss": 3.722, |
| "step": 2576 |
| }, |
| { |
| "epoch": 9.3, |
| "learning_rate": 3.547794117647059e-07, |
| "loss": 3.7573, |
| "step": 2577 |
| }, |
| { |
| "epoch": 9.31, |
| "learning_rate": 3.529411764705883e-07, |
| "loss": 3.701, |
| "step": 2578 |
| }, |
| { |
| "epoch": 9.31, |
| "learning_rate": 3.511029411764706e-07, |
| "loss": 3.7353, |
| "step": 2579 |
| }, |
| { |
| "epoch": 9.31, |
| "learning_rate": 3.4926470588235296e-07, |
| "loss": 3.8629, |
| "step": 2580 |
| }, |
| { |
| "epoch": 9.32, |
| "learning_rate": 3.4742647058823536e-07, |
| "loss": 3.8279, |
| "step": 2581 |
| }, |
| { |
| "epoch": 9.32, |
| "learning_rate": 3.455882352941177e-07, |
| "loss": 3.8181, |
| "step": 2582 |
| }, |
| { |
| "epoch": 9.32, |
| "learning_rate": 3.4375000000000004e-07, |
| "loss": 3.8184, |
| "step": 2583 |
| }, |
| { |
| "epoch": 9.33, |
| "learning_rate": 3.4191176470588244e-07, |
| "loss": 3.7048, |
| "step": 2584 |
| }, |
| { |
| "epoch": 9.33, |
| "learning_rate": 3.4007352941176473e-07, |
| "loss": 3.7605, |
| "step": 2585 |
| }, |
| { |
| "epoch": 9.34, |
| "learning_rate": 3.3823529411764707e-07, |
| "loss": 3.7882, |
| "step": 2586 |
| }, |
| { |
| "epoch": 9.34, |
| "learning_rate": 3.363970588235294e-07, |
| "loss": 3.7615, |
| "step": 2587 |
| }, |
| { |
| "epoch": 9.34, |
| "learning_rate": 3.345588235294118e-07, |
| "loss": 3.8021, |
| "step": 2588 |
| }, |
| { |
| "epoch": 9.35, |
| "learning_rate": 3.3272058823529415e-07, |
| "loss": 3.7766, |
| "step": 2589 |
| }, |
| { |
| "epoch": 9.35, |
| "learning_rate": 3.308823529411765e-07, |
| "loss": 3.883, |
| "step": 2590 |
| }, |
| { |
| "epoch": 9.35, |
| "learning_rate": 3.2904411764705884e-07, |
| "loss": 3.7975, |
| "step": 2591 |
| }, |
| { |
| "epoch": 9.36, |
| "learning_rate": 3.2720588235294123e-07, |
| "loss": 3.7549, |
| "step": 2592 |
| }, |
| { |
| "epoch": 9.36, |
| "learning_rate": 3.253676470588236e-07, |
| "loss": 3.7203, |
| "step": 2593 |
| }, |
| { |
| "epoch": 9.36, |
| "learning_rate": 3.235294117647059e-07, |
| "loss": 3.6731, |
| "step": 2594 |
| }, |
| { |
| "epoch": 9.37, |
| "learning_rate": 3.216911764705882e-07, |
| "loss": 3.7708, |
| "step": 2595 |
| }, |
| { |
| "epoch": 9.37, |
| "learning_rate": 3.198529411764706e-07, |
| "loss": 3.8121, |
| "step": 2596 |
| }, |
| { |
| "epoch": 9.38, |
| "learning_rate": 3.1801470588235295e-07, |
| "loss": 3.6584, |
| "step": 2597 |
| }, |
| { |
| "epoch": 9.38, |
| "learning_rate": 3.161764705882353e-07, |
| "loss": 3.9055, |
| "step": 2598 |
| }, |
| { |
| "epoch": 9.38, |
| "learning_rate": 3.143382352941177e-07, |
| "loss": 3.7132, |
| "step": 2599 |
| }, |
| { |
| "epoch": 9.39, |
| "learning_rate": 3.125e-07, |
| "loss": 3.7559, |
| "step": 2600 |
| }, |
| { |
| "epoch": 9.39, |
| "learning_rate": 3.1066176470588237e-07, |
| "loss": 3.7324, |
| "step": 2601 |
| }, |
| { |
| "epoch": 9.39, |
| "learning_rate": 3.0882352941176476e-07, |
| "loss": 3.84, |
| "step": 2602 |
| }, |
| { |
| "epoch": 9.4, |
| "learning_rate": 3.0698529411764705e-07, |
| "loss": 3.6149, |
| "step": 2603 |
| }, |
| { |
| "epoch": 9.4, |
| "learning_rate": 3.0514705882352945e-07, |
| "loss": 3.8501, |
| "step": 2604 |
| }, |
| { |
| "epoch": 9.4, |
| "learning_rate": 3.033088235294118e-07, |
| "loss": 3.8329, |
| "step": 2605 |
| }, |
| { |
| "epoch": 9.41, |
| "learning_rate": 3.0147058823529413e-07, |
| "loss": 3.8633, |
| "step": 2606 |
| }, |
| { |
| "epoch": 9.41, |
| "learning_rate": 2.9963235294117653e-07, |
| "loss": 3.7794, |
| "step": 2607 |
| }, |
| { |
| "epoch": 9.42, |
| "learning_rate": 2.977941176470588e-07, |
| "loss": 3.7203, |
| "step": 2608 |
| }, |
| { |
| "epoch": 9.42, |
| "learning_rate": 2.959558823529412e-07, |
| "loss": 3.7122, |
| "step": 2609 |
| }, |
| { |
| "epoch": 9.42, |
| "learning_rate": 2.9411764705882356e-07, |
| "loss": 3.733, |
| "step": 2610 |
| }, |
| { |
| "epoch": 9.43, |
| "learning_rate": 2.922794117647059e-07, |
| "loss": 3.7117, |
| "step": 2611 |
| }, |
| { |
| "epoch": 9.43, |
| "learning_rate": 2.9044117647058824e-07, |
| "loss": 3.645, |
| "step": 2612 |
| }, |
| { |
| "epoch": 9.43, |
| "learning_rate": 2.8860294117647064e-07, |
| "loss": 3.8675, |
| "step": 2613 |
| }, |
| { |
| "epoch": 9.44, |
| "learning_rate": 2.8676470588235293e-07, |
| "loss": 3.6991, |
| "step": 2614 |
| }, |
| { |
| "epoch": 9.44, |
| "learning_rate": 2.849264705882353e-07, |
| "loss": 3.8638, |
| "step": 2615 |
| }, |
| { |
| "epoch": 9.44, |
| "learning_rate": 2.8308823529411766e-07, |
| "loss": 3.6476, |
| "step": 2616 |
| }, |
| { |
| "epoch": 9.45, |
| "learning_rate": 2.8125e-07, |
| "loss": 3.7259, |
| "step": 2617 |
| }, |
| { |
| "epoch": 9.45, |
| "learning_rate": 2.794117647058824e-07, |
| "loss": 3.8833, |
| "step": 2618 |
| }, |
| { |
| "epoch": 9.45, |
| "learning_rate": 2.775735294117647e-07, |
| "loss": 3.6329, |
| "step": 2619 |
| }, |
| { |
| "epoch": 9.46, |
| "learning_rate": 2.757352941176471e-07, |
| "loss": 3.8142, |
| "step": 2620 |
| }, |
| { |
| "epoch": 9.46, |
| "learning_rate": 2.7389705882352943e-07, |
| "loss": 3.7852, |
| "step": 2621 |
| }, |
| { |
| "epoch": 9.47, |
| "learning_rate": 2.7205882352941177e-07, |
| "loss": 3.6707, |
| "step": 2622 |
| }, |
| { |
| "epoch": 9.47, |
| "learning_rate": 2.7022058823529417e-07, |
| "loss": 3.8685, |
| "step": 2623 |
| }, |
| { |
| "epoch": 9.47, |
| "learning_rate": 2.6838235294117646e-07, |
| "loss": 3.8412, |
| "step": 2624 |
| }, |
| { |
| "epoch": 9.48, |
| "learning_rate": 2.6654411764705885e-07, |
| "loss": 3.8398, |
| "step": 2625 |
| }, |
| { |
| "epoch": 9.48, |
| "learning_rate": 2.647058823529412e-07, |
| "loss": 3.8604, |
| "step": 2626 |
| }, |
| { |
| "epoch": 9.48, |
| "learning_rate": 2.6286764705882354e-07, |
| "loss": 3.8224, |
| "step": 2627 |
| }, |
| { |
| "epoch": 9.49, |
| "learning_rate": 2.6102941176470593e-07, |
| "loss": 3.72, |
| "step": 2628 |
| }, |
| { |
| "epoch": 9.49, |
| "learning_rate": 2.591911764705883e-07, |
| "loss": 3.7473, |
| "step": 2629 |
| }, |
| { |
| "epoch": 9.49, |
| "learning_rate": 2.573529411764706e-07, |
| "loss": 3.7612, |
| "step": 2630 |
| }, |
| { |
| "epoch": 9.5, |
| "learning_rate": 2.5551470588235296e-07, |
| "loss": 3.6752, |
| "step": 2631 |
| }, |
| { |
| "epoch": 9.5, |
| "learning_rate": 2.536764705882353e-07, |
| "loss": 3.7414, |
| "step": 2632 |
| }, |
| { |
| "epoch": 9.51, |
| "learning_rate": 2.518382352941177e-07, |
| "loss": 3.6937, |
| "step": 2633 |
| }, |
| { |
| "epoch": 9.51, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 3.8864, |
| "step": 2634 |
| }, |
| { |
| "epoch": 9.51, |
| "learning_rate": 2.481617647058824e-07, |
| "loss": 3.6525, |
| "step": 2635 |
| }, |
| { |
| "epoch": 9.52, |
| "learning_rate": 2.4632352941176473e-07, |
| "loss": 3.7045, |
| "step": 2636 |
| }, |
| { |
| "epoch": 9.52, |
| "learning_rate": 2.4448529411764707e-07, |
| "loss": 3.7811, |
| "step": 2637 |
| }, |
| { |
| "epoch": 9.52, |
| "learning_rate": 2.426470588235294e-07, |
| "loss": 3.7723, |
| "step": 2638 |
| }, |
| { |
| "epoch": 9.53, |
| "learning_rate": 2.408088235294118e-07, |
| "loss": 3.9059, |
| "step": 2639 |
| }, |
| { |
| "epoch": 9.53, |
| "learning_rate": 2.389705882352941e-07, |
| "loss": 3.8559, |
| "step": 2640 |
| }, |
| { |
| "epoch": 9.53, |
| "learning_rate": 2.371323529411765e-07, |
| "loss": 3.798, |
| "step": 2641 |
| }, |
| { |
| "epoch": 9.54, |
| "learning_rate": 2.3529411764705883e-07, |
| "loss": 3.7379, |
| "step": 2642 |
| }, |
| { |
| "epoch": 9.54, |
| "learning_rate": 2.334558823529412e-07, |
| "loss": 3.7787, |
| "step": 2643 |
| }, |
| { |
| "epoch": 9.55, |
| "learning_rate": 2.3161764705882355e-07, |
| "loss": 3.8253, |
| "step": 2644 |
| }, |
| { |
| "epoch": 9.55, |
| "learning_rate": 2.2977941176470592e-07, |
| "loss": 3.7808, |
| "step": 2645 |
| }, |
| { |
| "epoch": 9.55, |
| "learning_rate": 2.2794117647058823e-07, |
| "loss": 3.6646, |
| "step": 2646 |
| }, |
| { |
| "epoch": 9.56, |
| "learning_rate": 2.261029411764706e-07, |
| "loss": 3.7457, |
| "step": 2647 |
| }, |
| { |
| "epoch": 9.56, |
| "learning_rate": 2.2426470588235297e-07, |
| "loss": 3.8225, |
| "step": 2648 |
| }, |
| { |
| "epoch": 9.56, |
| "learning_rate": 2.224264705882353e-07, |
| "loss": 3.7174, |
| "step": 2649 |
| }, |
| { |
| "epoch": 9.57, |
| "learning_rate": 2.2058823529411768e-07, |
| "loss": 3.8852, |
| "step": 2650 |
| }, |
| { |
| "epoch": 9.57, |
| "learning_rate": 2.1875e-07, |
| "loss": 3.7364, |
| "step": 2651 |
| }, |
| { |
| "epoch": 9.57, |
| "learning_rate": 2.1691176470588237e-07, |
| "loss": 3.8263, |
| "step": 2652 |
| }, |
| { |
| "epoch": 9.58, |
| "learning_rate": 2.1507352941176473e-07, |
| "loss": 3.8587, |
| "step": 2653 |
| }, |
| { |
| "epoch": 9.58, |
| "learning_rate": 2.1323529411764708e-07, |
| "loss": 3.6841, |
| "step": 2654 |
| }, |
| { |
| "epoch": 9.58, |
| "learning_rate": 2.1139705882352945e-07, |
| "loss": 3.7906, |
| "step": 2655 |
| }, |
| { |
| "epoch": 9.59, |
| "learning_rate": 2.0955882352941176e-07, |
| "loss": 3.6977, |
| "step": 2656 |
| }, |
| { |
| "epoch": 9.59, |
| "learning_rate": 2.0772058823529413e-07, |
| "loss": 3.6885, |
| "step": 2657 |
| }, |
| { |
| "epoch": 9.6, |
| "learning_rate": 2.0588235294117647e-07, |
| "loss": 3.7529, |
| "step": 2658 |
| }, |
| { |
| "epoch": 9.6, |
| "learning_rate": 2.0404411764705884e-07, |
| "loss": 3.725, |
| "step": 2659 |
| }, |
| { |
| "epoch": 9.6, |
| "learning_rate": 2.022058823529412e-07, |
| "loss": 3.7744, |
| "step": 2660 |
| }, |
| { |
| "epoch": 9.61, |
| "learning_rate": 2.0036764705882355e-07, |
| "loss": 3.767, |
| "step": 2661 |
| }, |
| { |
| "epoch": 9.61, |
| "learning_rate": 1.985294117647059e-07, |
| "loss": 3.9009, |
| "step": 2662 |
| }, |
| { |
| "epoch": 9.61, |
| "learning_rate": 1.9669117647058824e-07, |
| "loss": 3.7754, |
| "step": 2663 |
| }, |
| { |
| "epoch": 9.62, |
| "learning_rate": 1.948529411764706e-07, |
| "loss": 3.7756, |
| "step": 2664 |
| }, |
| { |
| "epoch": 9.62, |
| "learning_rate": 1.9301470588235298e-07, |
| "loss": 3.6718, |
| "step": 2665 |
| }, |
| { |
| "epoch": 9.62, |
| "learning_rate": 1.9117647058823532e-07, |
| "loss": 3.8339, |
| "step": 2666 |
| }, |
| { |
| "epoch": 9.63, |
| "learning_rate": 1.8933823529411766e-07, |
| "loss": 3.8213, |
| "step": 2667 |
| }, |
| { |
| "epoch": 9.63, |
| "learning_rate": 1.875e-07, |
| "loss": 3.653, |
| "step": 2668 |
| }, |
| { |
| "epoch": 9.64, |
| "learning_rate": 1.8566176470588237e-07, |
| "loss": 3.8195, |
| "step": 2669 |
| }, |
| { |
| "epoch": 9.64, |
| "learning_rate": 1.8382352941176472e-07, |
| "loss": 3.8466, |
| "step": 2670 |
| }, |
| { |
| "epoch": 9.64, |
| "learning_rate": 1.8198529411764709e-07, |
| "loss": 3.833, |
| "step": 2671 |
| }, |
| { |
| "epoch": 9.65, |
| "learning_rate": 1.801470588235294e-07, |
| "loss": 3.7026, |
| "step": 2672 |
| }, |
| { |
| "epoch": 9.65, |
| "learning_rate": 1.7830882352941177e-07, |
| "loss": 3.7743, |
| "step": 2673 |
| }, |
| { |
| "epoch": 9.65, |
| "learning_rate": 1.7647058823529414e-07, |
| "loss": 3.7778, |
| "step": 2674 |
| }, |
| { |
| "epoch": 9.66, |
| "learning_rate": 1.7463235294117648e-07, |
| "loss": 3.8244, |
| "step": 2675 |
| }, |
| { |
| "epoch": 9.66, |
| "learning_rate": 1.7279411764705885e-07, |
| "loss": 3.747, |
| "step": 2676 |
| }, |
| { |
| "epoch": 9.66, |
| "learning_rate": 1.7095588235294122e-07, |
| "loss": 3.7837, |
| "step": 2677 |
| }, |
| { |
| "epoch": 9.67, |
| "learning_rate": 1.6911764705882354e-07, |
| "loss": 3.8244, |
| "step": 2678 |
| }, |
| { |
| "epoch": 9.67, |
| "learning_rate": 1.672794117647059e-07, |
| "loss": 3.7091, |
| "step": 2679 |
| }, |
| { |
| "epoch": 9.68, |
| "learning_rate": 1.6544117647058825e-07, |
| "loss": 3.7692, |
| "step": 2680 |
| }, |
| { |
| "epoch": 9.68, |
| "learning_rate": 1.6360294117647062e-07, |
| "loss": 3.6898, |
| "step": 2681 |
| }, |
| { |
| "epoch": 9.68, |
| "learning_rate": 1.6176470588235296e-07, |
| "loss": 3.7676, |
| "step": 2682 |
| }, |
| { |
| "epoch": 9.69, |
| "learning_rate": 1.599264705882353e-07, |
| "loss": 3.7901, |
| "step": 2683 |
| }, |
| { |
| "epoch": 9.69, |
| "learning_rate": 1.5808823529411764e-07, |
| "loss": 3.6358, |
| "step": 2684 |
| }, |
| { |
| "epoch": 9.69, |
| "learning_rate": 1.5625e-07, |
| "loss": 3.7731, |
| "step": 2685 |
| }, |
| { |
| "epoch": 9.7, |
| "learning_rate": 1.5441176470588238e-07, |
| "loss": 3.7159, |
| "step": 2686 |
| }, |
| { |
| "epoch": 9.7, |
| "learning_rate": 1.5257352941176472e-07, |
| "loss": 3.73, |
| "step": 2687 |
| }, |
| { |
| "epoch": 9.7, |
| "learning_rate": 1.5073529411764707e-07, |
| "loss": 3.9258, |
| "step": 2688 |
| }, |
| { |
| "epoch": 9.71, |
| "learning_rate": 1.488970588235294e-07, |
| "loss": 3.7849, |
| "step": 2689 |
| }, |
| { |
| "epoch": 9.71, |
| "learning_rate": 1.4705882352941178e-07, |
| "loss": 3.7702, |
| "step": 2690 |
| }, |
| { |
| "epoch": 9.71, |
| "learning_rate": 1.4522058823529412e-07, |
| "loss": 3.5905, |
| "step": 2691 |
| }, |
| { |
| "epoch": 9.72, |
| "learning_rate": 1.4338235294117646e-07, |
| "loss": 3.8034, |
| "step": 2692 |
| }, |
| { |
| "epoch": 9.72, |
| "learning_rate": 1.4154411764705883e-07, |
| "loss": 3.8553, |
| "step": 2693 |
| }, |
| { |
| "epoch": 9.73, |
| "learning_rate": 1.397058823529412e-07, |
| "loss": 3.7798, |
| "step": 2694 |
| }, |
| { |
| "epoch": 9.73, |
| "learning_rate": 1.3786764705882354e-07, |
| "loss": 3.806, |
| "step": 2695 |
| }, |
| { |
| "epoch": 9.73, |
| "learning_rate": 1.3602941176470589e-07, |
| "loss": 3.7418, |
| "step": 2696 |
| }, |
| { |
| "epoch": 9.74, |
| "learning_rate": 1.3419117647058823e-07, |
| "loss": 3.7032, |
| "step": 2697 |
| }, |
| { |
| "epoch": 9.74, |
| "learning_rate": 1.323529411764706e-07, |
| "loss": 3.7004, |
| "step": 2698 |
| }, |
| { |
| "epoch": 9.74, |
| "learning_rate": 1.3051470588235297e-07, |
| "loss": 3.6563, |
| "step": 2699 |
| }, |
| { |
| "epoch": 9.75, |
| "learning_rate": 1.286764705882353e-07, |
| "loss": 3.7654, |
| "step": 2700 |
| }, |
| { |
| "epoch": 9.75, |
| "learning_rate": 1.2683823529411765e-07, |
| "loss": 3.8808, |
| "step": 2701 |
| }, |
| { |
| "epoch": 9.75, |
| "learning_rate": 1.2500000000000002e-07, |
| "loss": 3.8361, |
| "step": 2702 |
| }, |
| { |
| "epoch": 9.76, |
| "learning_rate": 1.2316176470588236e-07, |
| "loss": 3.7541, |
| "step": 2703 |
| }, |
| { |
| "epoch": 9.76, |
| "learning_rate": 1.213235294117647e-07, |
| "loss": 3.8437, |
| "step": 2704 |
| }, |
| { |
| "epoch": 9.77, |
| "learning_rate": 1.1948529411764705e-07, |
| "loss": 3.7702, |
| "step": 2705 |
| }, |
| { |
| "epoch": 9.77, |
| "learning_rate": 1.1764705882352942e-07, |
| "loss": 3.8234, |
| "step": 2706 |
| }, |
| { |
| "epoch": 9.77, |
| "learning_rate": 1.1580882352941177e-07, |
| "loss": 3.7749, |
| "step": 2707 |
| }, |
| { |
| "epoch": 9.78, |
| "learning_rate": 1.1397058823529412e-07, |
| "loss": 3.7498, |
| "step": 2708 |
| }, |
| { |
| "epoch": 9.78, |
| "learning_rate": 1.1213235294117648e-07, |
| "loss": 3.719, |
| "step": 2709 |
| }, |
| { |
| "epoch": 9.78, |
| "learning_rate": 1.1029411764705884e-07, |
| "loss": 3.7922, |
| "step": 2710 |
| }, |
| { |
| "epoch": 9.79, |
| "learning_rate": 1.0845588235294118e-07, |
| "loss": 3.7657, |
| "step": 2711 |
| }, |
| { |
| "epoch": 9.79, |
| "learning_rate": 1.0661764705882354e-07, |
| "loss": 3.8531, |
| "step": 2712 |
| }, |
| { |
| "epoch": 9.79, |
| "learning_rate": 1.0477941176470588e-07, |
| "loss": 3.8902, |
| "step": 2713 |
| }, |
| { |
| "epoch": 9.8, |
| "learning_rate": 1.0294117647058824e-07, |
| "loss": 3.6529, |
| "step": 2714 |
| }, |
| { |
| "epoch": 9.8, |
| "learning_rate": 1.011029411764706e-07, |
| "loss": 3.7144, |
| "step": 2715 |
| }, |
| { |
| "epoch": 9.81, |
| "learning_rate": 9.926470588235295e-08, |
| "loss": 3.7093, |
| "step": 2716 |
| }, |
| { |
| "epoch": 9.81, |
| "learning_rate": 9.74264705882353e-08, |
| "loss": 3.7385, |
| "step": 2717 |
| }, |
| { |
| "epoch": 9.81, |
| "learning_rate": 9.558823529411766e-08, |
| "loss": 3.8658, |
| "step": 2718 |
| }, |
| { |
| "epoch": 9.82, |
| "learning_rate": 9.375e-08, |
| "loss": 3.7143, |
| "step": 2719 |
| }, |
| { |
| "epoch": 9.82, |
| "learning_rate": 9.191176470588236e-08, |
| "loss": 3.6873, |
| "step": 2720 |
| }, |
| { |
| "epoch": 9.82, |
| "learning_rate": 9.00735294117647e-08, |
| "loss": 3.7374, |
| "step": 2721 |
| }, |
| { |
| "epoch": 9.83, |
| "learning_rate": 8.823529411764707e-08, |
| "loss": 3.7236, |
| "step": 2722 |
| }, |
| { |
| "epoch": 9.83, |
| "learning_rate": 8.639705882352943e-08, |
| "loss": 3.6656, |
| "step": 2723 |
| }, |
| { |
| "epoch": 9.83, |
| "learning_rate": 8.455882352941177e-08, |
| "loss": 3.7009, |
| "step": 2724 |
| }, |
| { |
| "epoch": 9.84, |
| "learning_rate": 8.272058823529412e-08, |
| "loss": 3.7995, |
| "step": 2725 |
| }, |
| { |
| "epoch": 9.84, |
| "learning_rate": 8.088235294117648e-08, |
| "loss": 3.804, |
| "step": 2726 |
| }, |
| { |
| "epoch": 9.84, |
| "learning_rate": 7.904411764705882e-08, |
| "loss": 3.7859, |
| "step": 2727 |
| }, |
| { |
| "epoch": 9.85, |
| "learning_rate": 7.720588235294119e-08, |
| "loss": 3.8329, |
| "step": 2728 |
| }, |
| { |
| "epoch": 9.85, |
| "learning_rate": 7.536764705882353e-08, |
| "loss": 3.8527, |
| "step": 2729 |
| }, |
| { |
| "epoch": 9.86, |
| "learning_rate": 7.352941176470589e-08, |
| "loss": 3.8687, |
| "step": 2730 |
| }, |
| { |
| "epoch": 9.86, |
| "learning_rate": 7.169117647058823e-08, |
| "loss": 3.8088, |
| "step": 2731 |
| }, |
| { |
| "epoch": 9.86, |
| "learning_rate": 6.98529411764706e-08, |
| "loss": 3.7211, |
| "step": 2732 |
| }, |
| { |
| "epoch": 9.87, |
| "learning_rate": 6.801470588235294e-08, |
| "loss": 3.826, |
| "step": 2733 |
| }, |
| { |
| "epoch": 9.87, |
| "learning_rate": 6.61764705882353e-08, |
| "loss": 3.8344, |
| "step": 2734 |
| }, |
| { |
| "epoch": 9.87, |
| "learning_rate": 6.433823529411765e-08, |
| "loss": 3.7468, |
| "step": 2735 |
| }, |
| { |
| "epoch": 9.88, |
| "learning_rate": 6.250000000000001e-08, |
| "loss": 3.8694, |
| "step": 2736 |
| }, |
| { |
| "epoch": 9.88, |
| "learning_rate": 6.066176470588235e-08, |
| "loss": 3.8458, |
| "step": 2737 |
| }, |
| { |
| "epoch": 9.88, |
| "learning_rate": 5.882352941176471e-08, |
| "loss": 3.865, |
| "step": 2738 |
| }, |
| { |
| "epoch": 9.89, |
| "learning_rate": 5.698529411764706e-08, |
| "loss": 3.7796, |
| "step": 2739 |
| }, |
| { |
| "epoch": 9.89, |
| "learning_rate": 5.514705882352942e-08, |
| "loss": 3.8002, |
| "step": 2740 |
| }, |
| { |
| "epoch": 9.9, |
| "learning_rate": 5.330882352941177e-08, |
| "loss": 3.6658, |
| "step": 2741 |
| }, |
| { |
| "epoch": 9.9, |
| "learning_rate": 5.147058823529412e-08, |
| "loss": 3.777, |
| "step": 2742 |
| }, |
| { |
| "epoch": 9.9, |
| "learning_rate": 4.9632352941176474e-08, |
| "loss": 3.673, |
| "step": 2743 |
| }, |
| { |
| "epoch": 9.91, |
| "learning_rate": 4.779411764705883e-08, |
| "loss": 3.7164, |
| "step": 2744 |
| }, |
| { |
| "epoch": 9.91, |
| "learning_rate": 4.595588235294118e-08, |
| "loss": 3.7202, |
| "step": 2745 |
| }, |
| { |
| "epoch": 9.91, |
| "learning_rate": 4.4117647058823535e-08, |
| "loss": 3.7995, |
| "step": 2746 |
| }, |
| { |
| "epoch": 9.92, |
| "learning_rate": 4.2279411764705884e-08, |
| "loss": 3.7512, |
| "step": 2747 |
| }, |
| { |
| "epoch": 9.92, |
| "learning_rate": 4.044117647058824e-08, |
| "loss": 3.8668, |
| "step": 2748 |
| }, |
| { |
| "epoch": 9.92, |
| "learning_rate": 3.8602941176470595e-08, |
| "loss": 3.875, |
| "step": 2749 |
| }, |
| { |
| "epoch": 9.93, |
| "learning_rate": 3.6764705882352945e-08, |
| "loss": 3.837, |
| "step": 2750 |
| }, |
| { |
| "epoch": 9.93, |
| "learning_rate": 3.49264705882353e-08, |
| "loss": 3.8107, |
| "step": 2751 |
| }, |
| { |
| "epoch": 9.94, |
| "learning_rate": 3.308823529411765e-08, |
| "loss": 3.9115, |
| "step": 2752 |
| }, |
| { |
| "epoch": 9.94, |
| "learning_rate": 3.1250000000000005e-08, |
| "loss": 3.869, |
| "step": 2753 |
| }, |
| { |
| "epoch": 9.94, |
| "learning_rate": 2.9411764705882354e-08, |
| "loss": 3.8325, |
| "step": 2754 |
| }, |
| { |
| "epoch": 9.95, |
| "learning_rate": 2.757352941176471e-08, |
| "loss": 3.824, |
| "step": 2755 |
| }, |
| { |
| "epoch": 9.95, |
| "learning_rate": 2.573529411764706e-08, |
| "loss": 3.6754, |
| "step": 2756 |
| }, |
| { |
| "epoch": 9.95, |
| "learning_rate": 2.3897058823529415e-08, |
| "loss": 3.8178, |
| "step": 2757 |
| }, |
| { |
| "epoch": 9.96, |
| "learning_rate": 2.2058823529411767e-08, |
| "loss": 3.6439, |
| "step": 2758 |
| }, |
| { |
| "epoch": 9.96, |
| "learning_rate": 2.022058823529412e-08, |
| "loss": 3.8224, |
| "step": 2759 |
| }, |
| { |
| "epoch": 9.96, |
| "learning_rate": 1.8382352941176472e-08, |
| "loss": 3.6205, |
| "step": 2760 |
| }, |
| { |
| "epoch": 9.97, |
| "learning_rate": 1.6544117647058825e-08, |
| "loss": 3.8128, |
| "step": 2761 |
| }, |
| { |
| "epoch": 9.97, |
| "learning_rate": 1.4705882352941177e-08, |
| "loss": 3.7489, |
| "step": 2762 |
| }, |
| { |
| "epoch": 9.97, |
| "learning_rate": 1.286764705882353e-08, |
| "loss": 3.7814, |
| "step": 2763 |
| }, |
| { |
| "epoch": 9.98, |
| "learning_rate": 1.1029411764705884e-08, |
| "loss": 3.7514, |
| "step": 2764 |
| }, |
| { |
| "epoch": 9.98, |
| "learning_rate": 9.191176470588236e-09, |
| "loss": 3.6821, |
| "step": 2765 |
| }, |
| { |
| "epoch": 9.99, |
| "learning_rate": 7.3529411764705886e-09, |
| "loss": 3.8408, |
| "step": 2766 |
| }, |
| { |
| "epoch": 9.99, |
| "learning_rate": 5.514705882352942e-09, |
| "loss": 3.7974, |
| "step": 2767 |
| }, |
| { |
| "epoch": 9.99, |
| "learning_rate": 3.6764705882352943e-09, |
| "loss": 3.9221, |
| "step": 2768 |
| }, |
| { |
| "epoch": 10.0, |
| "learning_rate": 1.8382352941176471e-09, |
| "loss": 3.8255, |
| "step": 2769 |
| }, |
| { |
| "epoch": 10.0, |
| "learning_rate": 0.0, |
| "loss": 3.7908, |
| "step": 2770 |
| }, |
| { |
| "epoch": 10.0, |
| "eval_accuracy": 0.15945945945945947, |
| "eval_loss": 3.7452244758605957, |
| "eval_runtime": 143.3771, |
| "eval_samples_per_second": 2.581, |
| "eval_steps_per_second": 0.649, |
| "step": 2770 |
| } |
| ], |
| "max_steps": 2770, |
| "num_train_epochs": 10, |
| "total_flos": 3.006770814363167e+18, |
| "trial_name": null, |
| "trial_params": null |
| } |
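
The record above matches the `trainer_state.json` layout written by the Hugging Face `transformers` Trainer (this provenance is inferred from the field names, not stated in the log itself): `log_history` interleaves per-step training records carrying `loss` with per-epoch evaluation records carrying `eval_loss`, `eval_accuracy`, and runtime fields. A minimal sketch of loading and summarizing it, assuming Python with pandas installed and the file saved as `trainer_state.json`:

```python
import json

import pandas as pd

# Path is an assumption; adjust to wherever this state file lives.
with open("trainer_state.json") as f:
    state = json.load(f)

# log_history entries have heterogeneous keys, so the DataFrame takes
# the union of keys and leaves NaN where a record lacks a field.
log = pd.DataFrame(state["log_history"])

# Per-step training records carry "loss"; per-epoch evaluation
# records carry "eval_loss" / "eval_accuracy" instead.
train_log = log.dropna(subset=["loss"])
eval_log = log.dropna(subset=["eval_loss"])

print(eval_log[["epoch", "step", "eval_loss", "eval_accuracy"]])
print("final train loss:", train_log["loss"].iloc[-1])
print("best eval loss:", eval_log["eval_loss"].min())
```

On the tail of the log shown here, the two evaluation records (steps 2493 and 2770) would surface an eval loss moving from 3.7474 to 3.7452 while eval accuracy stays near 0.16, i.e. the final epoch barely changes the held-out metrics.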