{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 155,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 0.00019870967741935483,
      "loss": 2.6923,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 0.00019741935483870969,
      "loss": 2.474,
      "step": 2
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0001961290322580645,
      "loss": 2.4604,
      "step": 3
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019483870967741936,
      "loss": 2.5712,
      "step": 4
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019354838709677422,
      "loss": 2.722,
      "step": 5
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019225806451612904,
      "loss": 3.5362,
      "step": 6
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0001909677419354839,
      "loss": 2.6359,
      "step": 7
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00018967741935483872,
      "loss": 3.0204,
      "step": 8
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00018838709677419354,
      "loss": 2.5709,
      "step": 9
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0001870967741935484,
      "loss": 2.7084,
      "step": 10
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00018580645161290325,
      "loss": 2.7173,
      "step": 11
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00018451612903225807,
      "loss": 2.8462,
      "step": 12
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00018322580645161292,
      "loss": 2.5665,
      "step": 13
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00018193548387096775,
      "loss": 2.8803,
      "step": 14
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00018064516129032257,
      "loss": 2.5982,
      "step": 15
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00017935483870967742,
      "loss": 2.8527,
      "step": 16
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00017806451612903228,
      "loss": 2.3655,
      "step": 17
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001767741935483871,
      "loss": 2.3536,
      "step": 18
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00017548387096774195,
      "loss": 3.2141,
      "step": 19
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00017419354838709678,
      "loss": 2.8027,
      "step": 20
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00017290322580645163,
      "loss": 2.6194,
      "step": 21
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00017161290322580645,
      "loss": 2.9702,
      "step": 22
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00017032258064516128,
      "loss": 2.544,
      "step": 23
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00016903225806451616,
      "loss": 2.6569,
      "step": 24
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00016774193548387098,
      "loss": 3.0532,
      "step": 25
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001664516129032258,
      "loss": 2.5908,
      "step": 26
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00016516129032258066,
      "loss": 2.7713,
      "step": 27
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00016387096774193548,
      "loss": 2.4241,
      "step": 28
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00016258064516129034,
      "loss": 2.6717,
      "step": 29
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00016129032258064516,
      "loss": 2.8446,
      "step": 30
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00016,
      "loss": 2.6616,
      "step": 31
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00015870967741935487,
      "loss": 2.3076,
      "step": 32
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0001574193548387097,
      "loss": 2.3784,
      "step": 33
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00015612903225806451,
      "loss": 2.8661,
      "step": 34
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00015483870967741937,
      "loss": 2.7698,
      "step": 35
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001535483870967742,
      "loss": 3.0509,
      "step": 36
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00015225806451612902,
      "loss": 2.9071,
      "step": 37
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0001509677419354839,
      "loss": 2.5806,
      "step": 38
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00014967741935483872,
      "loss": 2.7682,
      "step": 39
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00014838709677419355,
      "loss": 2.4589,
      "step": 40
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0001470967741935484,
      "loss": 2.7426,
      "step": 41
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00014580645161290322,
      "loss": 2.353,
      "step": 42
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00014451612903225807,
      "loss": 2.5173,
      "step": 43
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00014322580645161293,
      "loss": 2.3351,
      "step": 44
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00014193548387096775,
      "loss": 2.5319,
      "step": 45
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0001406451612903226,
      "loss": 2.3194,
      "step": 46
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00013935483870967743,
      "loss": 2.4565,
      "step": 47
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00013806451612903225,
      "loss": 1.7739,
      "step": 48
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0001367741935483871,
      "loss": 2.3309,
      "step": 49
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00013548387096774193,
      "loss": 2.5577,
      "step": 50
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00013419354838709678,
      "loss": 2.0317,
      "step": 51
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00013290322580645163,
      "loss": 2.9939,
      "step": 52
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00013161290322580646,
      "loss": 3.1213,
      "step": 53
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0001303225806451613,
      "loss": 2.0057,
      "step": 54
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00012903225806451613,
      "loss": 1.4559,
      "step": 55
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00012774193548387096,
      "loss": 1.9017,
      "step": 56
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.0001264516129032258,
      "loss": 2.0386,
      "step": 57
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00012516129032258066,
      "loss": 2.0177,
      "step": 58
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.0001238709677419355,
      "loss": 2.6006,
      "step": 59
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00012258064516129034,
      "loss": 2.543,
      "step": 60
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00012129032258064516,
      "loss": 2.401,
      "step": 61
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00012,
      "loss": 2.2931,
      "step": 62
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00011870967741935484,
      "loss": 2.7531,
      "step": 63
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00011741935483870967,
      "loss": 2.3478,
      "step": 64
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00011612903225806453,
      "loss": 2.7799,
      "step": 65
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00011483870967741937,
      "loss": 2.0962,
      "step": 66
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.0001135483870967742,
      "loss": 2.5596,
      "step": 67
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00011225806451612903,
      "loss": 2.4893,
      "step": 68
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00011096774193548387,
      "loss": 2.5819,
      "step": 69
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00010967741935483871,
      "loss": 2.5943,
      "step": 70
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00010838709677419356,
      "loss": 2.5922,
      "step": 71
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0001070967741935484,
      "loss": 2.7867,
      "step": 72
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00010580645161290324,
      "loss": 2.9934,
      "step": 73
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00010451612903225806,
      "loss": 2.0741,
      "step": 74
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0001032258064516129,
      "loss": 2.371,
      "step": 75
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00010193548387096774,
      "loss": 2.2222,
      "step": 76
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00010064516129032258,
      "loss": 1.8981,
      "step": 77
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.935483870967742e-05,
      "loss": 2.2085,
      "step": 78
    },
    {
      "epoch": 0.51,
      "learning_rate": 9.806451612903226e-05,
      "loss": 2.43,
      "step": 79
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.677419354838711e-05,
      "loss": 2.0956,
      "step": 80
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.548387096774195e-05,
      "loss": 2.1909,
      "step": 81
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.419354838709677e-05,
      "loss": 3.139,
      "step": 82
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.290322580645162e-05,
      "loss": 1.915,
      "step": 83
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.161290322580646e-05,
      "loss": 3.089,
      "step": 84
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.032258064516129e-05,
      "loss": 2.226,
      "step": 85
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.903225806451614e-05,
      "loss": 2.8796,
      "step": 86
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.774193548387098e-05,
      "loss": 2.579,
      "step": 87
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.645161290322581e-05,
      "loss": 2.0254,
      "step": 88
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.516129032258064e-05,
      "loss": 1.689,
      "step": 89
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.387096774193549e-05,
      "loss": 3.0117,
      "step": 90
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.258064516129033e-05,
      "loss": 2.5169,
      "step": 91
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.129032258064517e-05,
      "loss": 2.2028,
      "step": 92
    },
    {
      "epoch": 0.6,
      "learning_rate": 8e-05,
      "loss": 2.7676,
      "step": 93
    },
    {
      "epoch": 0.61,
      "learning_rate": 7.870967741935484e-05,
      "loss": 2.4326,
      "step": 94
    },
    {
      "epoch": 0.61,
      "learning_rate": 7.741935483870968e-05,
      "loss": 2.4871,
      "step": 95
    },
    {
      "epoch": 0.62,
      "learning_rate": 7.612903225806451e-05,
      "loss": 3.8584,
      "step": 96
    },
    {
      "epoch": 0.63,
      "learning_rate": 7.483870967741936e-05,
      "loss": 2.2319,
      "step": 97
    },
    {
      "epoch": 0.63,
      "learning_rate": 7.35483870967742e-05,
      "loss": 2.9421,
      "step": 98
    },
    {
      "epoch": 0.64,
      "learning_rate": 7.225806451612904e-05,
      "loss": 2.3276,
      "step": 99
    },
    {
      "epoch": 0.65,
      "learning_rate": 7.096774193548388e-05,
      "loss": 2.662,
      "step": 100
    },
    {
      "epoch": 0.65,
      "learning_rate": 6.967741935483871e-05,
      "loss": 2.4066,
      "step": 101
    },
    {
      "epoch": 0.66,
      "learning_rate": 6.838709677419355e-05,
      "loss": 2.3137,
      "step": 102
    },
    {
      "epoch": 0.66,
      "learning_rate": 6.709677419354839e-05,
      "loss": 2.379,
      "step": 103
    },
    {
      "epoch": 0.67,
      "learning_rate": 6.580645161290323e-05,
      "loss": 2.8036,
      "step": 104
    },
    {
      "epoch": 0.68,
      "learning_rate": 6.451612903225807e-05,
      "loss": 1.8447,
      "step": 105
    },
    {
      "epoch": 0.68,
      "learning_rate": 6.32258064516129e-05,
      "loss": 3.0718,
      "step": 106
    },
    {
      "epoch": 0.69,
      "learning_rate": 6.193548387096774e-05,
      "loss": 2.7225,
      "step": 107
    },
    {
      "epoch": 0.7,
      "learning_rate": 6.064516129032258e-05,
      "loss": 3.1034,
      "step": 108
    },
    {
      "epoch": 0.7,
      "learning_rate": 5.935483870967742e-05,
      "loss": 2.8354,
      "step": 109
    },
    {
      "epoch": 0.71,
      "learning_rate": 5.8064516129032266e-05,
      "loss": 2.5716,
      "step": 110
    },
    {
      "epoch": 0.72,
      "learning_rate": 5.67741935483871e-05,
      "loss": 2.6014,
      "step": 111
    },
    {
      "epoch": 0.72,
      "learning_rate": 5.5483870967741936e-05,
      "loss": 2.1639,
      "step": 112
    },
    {
      "epoch": 0.73,
      "learning_rate": 5.419354838709678e-05,
      "loss": 2.3661,
      "step": 113
    },
    {
      "epoch": 0.74,
      "learning_rate": 5.290322580645162e-05,
      "loss": 2.2034,
      "step": 114
    },
    {
      "epoch": 0.74,
      "learning_rate": 5.161290322580645e-05,
      "loss": 2.3003,
      "step": 115
    },
    {
      "epoch": 0.75,
      "learning_rate": 5.032258064516129e-05,
      "loss": 2.0926,
      "step": 116
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.903225806451613e-05,
      "loss": 2.1339,
      "step": 117
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.774193548387097e-05,
      "loss": 2.4229,
      "step": 118
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.645161290322581e-05,
      "loss": 2.5723,
      "step": 119
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.516129032258064e-05,
      "loss": 2.471,
      "step": 120
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.387096774193549e-05,
      "loss": 2.1827,
      "step": 121
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.258064516129032e-05,
      "loss": 2.256,
      "step": 122
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.1290322580645165e-05,
      "loss": 1.9319,
      "step": 123
    },
    {
      "epoch": 0.8,
      "learning_rate": 4e-05,
      "loss": 2.3735,
      "step": 124
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.870967741935484e-05,
      "loss": 3.3083,
      "step": 125
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.741935483870968e-05,
      "loss": 2.6105,
      "step": 126
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.612903225806452e-05,
      "loss": 2.8468,
      "step": 127
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.483870967741936e-05,
      "loss": 2.6511,
      "step": 128
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.3548387096774195e-05,
      "loss": 2.424,
      "step": 129
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 2.5496,
      "step": 130
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.096774193548387e-05,
      "loss": 2.1921,
      "step": 131
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.967741935483871e-05,
      "loss": 2.2175,
      "step": 132
    },
    {
      "epoch": 0.86,
      "learning_rate": 2.838709677419355e-05,
      "loss": 1.6908,
      "step": 133
    },
    {
      "epoch": 0.86,
      "learning_rate": 2.709677419354839e-05,
      "loss": 2.6133,
      "step": 134
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.5806451612903226e-05,
      "loss": 2.2093,
      "step": 135
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.4516129032258064e-05,
      "loss": 2.6935,
      "step": 136
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.3225806451612906e-05,
      "loss": 2.4553,
      "step": 137
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.1935483870967744e-05,
      "loss": 2.4681,
      "step": 138
    },
    {
      "epoch": 0.9,
      "learning_rate": 2.0645161290322582e-05,
      "loss": 1.8384,
      "step": 139
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.935483870967742e-05,
      "loss": 2.1883,
      "step": 140
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.806451612903226e-05,
      "loss": 2.4338,
      "step": 141
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6774193548387098e-05,
      "loss": 3.0665,
      "step": 142
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.5483870967741936e-05,
      "loss": 2.2678,
      "step": 143
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.4193548387096774e-05,
      "loss": 2.7912,
      "step": 144
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.2903225806451613e-05,
      "loss": 2.5368,
      "step": 145
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.1612903225806453e-05,
      "loss": 2.0611,
      "step": 146
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.0322580645161291e-05,
      "loss": 2.3654,
      "step": 147
    },
    {
      "epoch": 0.95,
      "learning_rate": 9.03225806451613e-06,
      "loss": 5.5901,
      "step": 148
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.741935483870968e-06,
      "loss": 2.7886,
      "step": 149
    },
    {
      "epoch": 0.97,
      "learning_rate": 6.451612903225806e-06,
      "loss": 2.855,
      "step": 150
    },
    {
      "epoch": 0.97,
      "learning_rate": 5.161290322580646e-06,
      "loss": 2.5303,
      "step": 151
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.870967741935484e-06,
      "loss": 2.453,
      "step": 152
    },
    {
      "epoch": 0.99,
      "learning_rate": 2.580645161290323e-06,
      "loss": 2.4346,
      "step": 153
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.2903225806451614e-06,
      "loss": 2.3262,
      "step": 154
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0,
      "loss": 2.5423,
      "step": 155
    }
  ],
  "logging_steps": 1,
  "max_steps": 155,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 7324808442032640.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}