{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.967032967032967,
  "global_step": 135,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.5488,
      "step": 1
    },
    {
      "epoch": 0.04,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.5869,
      "step": 2
    },
    {
      "epoch": 0.07,
      "learning_rate": 1e-05,
      "loss": 1.5615,
      "step": 3
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.4697,
      "step": 4
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.3818,
      "step": 5
    },
    {
      "epoch": 0.13,
      "learning_rate": 2e-05,
      "loss": 1.3203,
      "step": 6
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9997034698451396e-05,
      "loss": 1.459,
      "step": 7
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.998814055240823e-05,
      "loss": 1.2891,
      "step": 8
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9973322836635517e-05,
      "loss": 1.2598,
      "step": 9
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.995259033893236e-05,
      "loss": 1.2588,
      "step": 10
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9925955354920265e-05,
      "loss": 1.2471,
      "step": 11
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9893433680751105e-05,
      "loss": 1.2197,
      "step": 12
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.985504460373903e-05,
      "loss": 1.1875,
      "step": 13
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9810810890921943e-05,
      "loss": 1.1768,
      "step": 14
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9760758775559275e-05,
      "loss": 1.1562,
      "step": 15
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9704917941574053e-05,
      "loss": 1.1523,
      "step": 16
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9643321505948588e-05,
      "loss": 1.1299,
      "step": 17
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.957600599908406e-05,
      "loss": 1.1455,
      "step": 18
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9503011343135828e-05,
      "loss": 1.1074,
      "step": 19
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9424380828337146e-05,
      "loss": 1.1113,
      "step": 20
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9340161087325483e-05,
      "loss": 1.1143,
      "step": 21
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9250402067486523e-05,
      "loss": 1.085,
      "step": 22
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9155157001332374e-05,
      "loss": 1.0791,
      "step": 23
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.905448237493147e-05,
      "loss": 1.0908,
      "step": 24
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.894843789440892e-05,
      "loss": 1.1045,
      "step": 25
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8837086450537195e-05,
      "loss": 1.1025,
      "step": 26
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.872049408143808e-05,
      "loss": 1.0693,
      "step": 27
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8598729933418102e-05,
      "loss": 1.0664,
      "step": 28
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8471866219960604e-05,
      "loss": 1.0527,
      "step": 29
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.833997817889878e-05,
      "loss": 1.0522,
      "step": 30
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.820314402779511e-05,
      "loss": 1.0566,
      "step": 31
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.806144491755363e-05,
      "loss": 1.043,
      "step": 32
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7914964884292543e-05,
      "loss": 1.0444,
      "step": 33
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7763790799505746e-05,
      "loss": 1.0562,
      "step": 34
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.760801231854278e-05,
      "loss": 1.0454,
      "step": 35
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.744772182743782e-05,
      "loss": 1.0518,
      "step": 36
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.728301438811916e-05,
      "loss": 1.022,
      "step": 37
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.711398768203178e-05,
      "loss": 1.0293,
      "step": 38
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6940741952206342e-05,
      "loss": 1.0093,
      "step": 39
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.676337994380903e-05,
      "loss": 1.0103,
      "step": 40
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.658200684320748e-05,
      "loss": 0.999,
      "step": 41
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6396730215588913e-05,
      "loss": 1.0024,
      "step": 42
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6207659941167485e-05,
      "loss": 0.9873,
      "step": 43
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.6014908150018703e-05,
      "loss": 1.02,
      "step": 44
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.581858915557953e-05,
      "loss": 1.0039,
      "step": 45
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5618819386853607e-05,
      "loss": 0.9478,
      "step": 46
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.541571731936185e-05,
      "loss": 0.9795,
      "step": 47
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5209403404879305e-05,
      "loss": 0.9844,
      "step": 48
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.9697,
      "step": 49
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4787631293572094e-05,
      "loss": 0.9941,
      "step": 50
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4572423233046386e-05,
      "loss": 0.957,
      "step": 51
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4354503449781914e-05,
      "loss": 0.9727,
      "step": 52
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4134001183352833e-05,
      "loss": 0.9624,
      "step": 53
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.391104720490156e-05,
      "loss": 0.9434,
      "step": 54
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.368577373958362e-05,
      "loss": 0.9629,
      "step": 55
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.3458314388150115e-05,
      "loss": 0.9395,
      "step": 56
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3228804047714462e-05,
      "loss": 0.9355,
      "step": 57
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.2997378831750242e-05,
      "loss": 0.9312,
      "step": 58
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.2764175989367717e-05,
      "loss": 0.9419,
      "step": 59
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.2529333823916807e-05,
      "loss": 0.9468,
      "step": 60
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.2292991610964902e-05,
      "loss": 0.9307,
      "step": 61
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.2055289515698008e-05,
      "loss": 0.9004,
      "step": 62
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1816368509794365e-05,
      "loss": 0.9155,
      "step": 63
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.1576370287819737e-05,
      "loss": 0.9868,
      "step": 64
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.133543718319398e-05,
      "loss": 0.9326,
      "step": 65
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1093712083778748e-05,
      "loss": 0.9199,
      "step": 66
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.0851338347136358e-05,
      "loss": 0.9282,
      "step": 67
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.060845971551014e-05,
      "loss": 0.9297,
      "step": 68
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0365220230576592e-05,
      "loss": 0.9312,
      "step": 69
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.0121764148019977e-05,
      "loss": 0.9062,
      "step": 70
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.878235851980027e-06,
      "loss": 0.8867,
      "step": 71
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.634779769423412e-06,
      "loss": 0.9473,
      "step": 72
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.391540284489862e-06,
      "loss": 0.9023,
      "step": 73
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.148661652863644e-06,
      "loss": 0.8955,
      "step": 74
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.906287916221259e-06,
      "loss": 0.8984,
      "step": 75
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.664562816806022e-06,
      "loss": 0.9106,
      "step": 76
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.423629712180265e-06,
      "loss": 0.9321,
      "step": 77
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.183631490205636e-06,
      "loss": 0.9253,
      "step": 78
    },
    {
      "epoch": 1.74,
      "learning_rate": 7.944710484301995e-06,
      "loss": 0.874,
      "step": 79
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.707008389035102e-06,
      "loss": 0.9229,
      "step": 80
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.470666176083193e-06,
      "loss": 0.9106,
      "step": 81
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.235824010632284e-06,
      "loss": 0.8901,
      "step": 82
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.002621168249759e-06,
      "loss": 0.9033,
      "step": 83
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.771195952285541e-06,
      "loss": 0.9287,
      "step": 84
    },
    {
      "epoch": 1.87,
      "learning_rate": 6.5416856118498874e-06,
      "loss": 0.9043,
      "step": 85
    },
    {
      "epoch": 1.89,
      "learning_rate": 6.314226260416383e-06,
      "loss": 0.9302,
      "step": 86
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.088952795098442e-06,
      "loss": 0.9048,
      "step": 87
    },
    {
      "epoch": 1.93,
      "learning_rate": 5.8659988166471715e-06,
      "loss": 0.897,
      "step": 88
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.645496550218089e-06,
      "loss": 0.9419,
      "step": 89
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.427576766953615e-06,
      "loss": 0.9126,
      "step": 90
    },
    {
      "epoch": 2.0,
      "learning_rate": 5.212368706427913e-06,
      "loss": 0.9087,
      "step": 91
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.853,
      "step": 92
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.790596595120699e-06,
      "loss": 0.8467,
      "step": 93
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.584282680638155e-06,
      "loss": 0.8491,
      "step": 94
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.381180613146396e-06,
      "loss": 0.8477,
      "step": 95
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.181410844420473e-06,
      "loss": 0.8638,
      "step": 96
    },
    {
      "epoch": 2.13,
      "learning_rate": 3.9850918499812976e-06,
      "loss": 0.8633,
      "step": 97
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.7923400588325156e-06,
      "loss": 0.8594,
      "step": 98
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.6032697844110896e-06,
      "loss": 0.8315,
      "step": 99
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.4179931567925216e-06,
      "loss": 0.8306,
      "step": 100
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.236620056190972e-06,
      "loss": 0.8281,
      "step": 101
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.0592580477936606e-06,
      "loss": 0.8359,
      "step": 102
    },
    {
      "epoch": 2.26,
      "learning_rate": 2.8860123179682244e-06,
      "loss": 0.8374,
      "step": 103
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.7169856118808414e-06,
      "loss": 0.8291,
      "step": 104
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.5522781725621814e-06,
      "loss": 0.8481,
      "step": 105
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.3919876814572197e-06,
      "loss": 0.8418,
      "step": 106
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.2362092004942583e-06,
      "loss": 0.8589,
      "step": 107
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.08503511570746e-06,
      "loss": 0.8291,
      "step": 108
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.9385550824463727e-06,
      "loss": 0.8574,
      "step": 109
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.7968559722048906e-06,
      "loss": 0.8564,
      "step": 110
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.660021821101222e-06,
      "loss": 0.8755,
      "step": 111
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.528133780039397e-06,
      "loss": 0.874,
      "step": 112
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.401270066581899e-06,
      "loss": 0.833,
      "step": 113
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.279505918561923e-06,
      "loss": 0.8276,
      "step": 114
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.1629135494628097e-06,
      "loss": 0.8545,
      "step": 115
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.051562105591082e-06,
      "loss": 0.8589,
      "step": 116
    },
    {
      "epoch": 2.57,
      "learning_rate": 9.455176250685338e-07,
      "loss": 0.8418,
      "step": 117
    },
    {
      "epoch": 2.59,
      "learning_rate": 8.448429986676298e-07,
      "loss": 0.8169,
      "step": 118
    },
    {
      "epoch": 2.62,
      "learning_rate": 7.495979325134806e-07,
      "loss": 0.8511,
      "step": 119
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.598389126745209e-07,
      "loss": 0.8452,
      "step": 120
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.756191716628556e-07,
      "loss": 0.8394,
      "step": 121
    },
    {
      "epoch": 2.68,
      "learning_rate": 4.969886568641757e-07,
      "loss": 0.8354,
      "step": 122
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.2399400091594154e-07,
      "loss": 0.8779,
      "step": 123
    },
    {
      "epoch": 2.73,
      "learning_rate": 3.566784940514145e-07,
      "loss": 0.8408,
      "step": 124
    },
    {
      "epoch": 2.75,
      "learning_rate": 2.9508205842594727e-07,
      "loss": 0.8574,
      "step": 125
    },
    {
      "epoch": 2.77,
      "learning_rate": 2.392412244407294e-07,
      "loss": 0.8354,
      "step": 126
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.8918910907805733e-07,
      "loss": 0.8384,
      "step": 127
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.4495539626097289e-07,
      "loss": 0.8398,
      "step": 128
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.0656631924889749e-07,
      "loss": 0.8579,
      "step": 129
    },
    {
      "epoch": 2.86,
      "learning_rate": 7.404464507973608e-08,
      "loss": 0.8389,
      "step": 130
    },
    {
      "epoch": 2.88,
      "learning_rate": 4.740966106764222e-08,
      "loss": 0.8647,
      "step": 131
    },
    {
      "epoch": 2.9,
      "learning_rate": 2.667716336448356e-08,
      "loss": 0.8354,
      "step": 132
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.1859447591769934e-08,
      "loss": 0.8579,
      "step": 133
    },
    {
      "epoch": 2.95,
      "learning_rate": 2.9653015486064143e-09,
      "loss": 0.8633,
      "step": 134
    },
    {
      "epoch": 2.97,
      "learning_rate": 0.0,
      "loss": 0.8589,
      "step": 135
    }
  ],
  "max_steps": 135,
  "num_train_epochs": 3,
  "total_flos": 56061402808320.0,
  "trial_name": null,
  "trial_params": null
}