{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.984,
  "eval_steps": 500,
  "global_step": 124,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 5e-06,
      "loss": 2.9684,
      "step": 1
    },
    {
      "epoch": 0.03,
      "learning_rate": 1e-05,
      "loss": 3.0316,
      "step": 2
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 2.8996,
      "step": 3
    },
    {
      "epoch": 0.06,
      "learning_rate": 2e-05,
      "loss": 2.7286,
      "step": 4
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9996573249755573e-05,
      "loss": 2.8527,
      "step": 5
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9986295347545738e-05,
      "loss": 2.5942,
      "step": 6
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9969173337331283e-05,
      "loss": 2.4836,
      "step": 7
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9945218953682736e-05,
      "loss": 2.666,
      "step": 8
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9914448613738107e-05,
      "loss": 2.3577,
      "step": 9
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 2.4005,
      "step": 10
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.983254907563955e-05,
      "loss": 2.3426,
      "step": 11
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9781476007338058e-05,
      "loss": 2.1247,
      "step": 12
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9723699203976768e-05,
      "loss": 2.207,
      "step": 13
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9659258262890683e-05,
      "loss": 2.0202,
      "step": 14
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.958819734868193e-05,
      "loss": 2.1052,
      "step": 15
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 2.1671,
      "step": 16
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9426414910921785e-05,
      "loss": 2.1553,
      "step": 17
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9335804264972018e-05,
      "loss": 2.0372,
      "step": 18
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9238795325112867e-05,
      "loss": 1.9472,
      "step": 19
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.913545457642601e-05,
      "loss": 1.8312,
      "step": 20
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.902585284349861e-05,
      "loss": 1.9444,
      "step": 21
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.891006524188368e-05,
      "loss": 1.9848,
      "step": 22
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8788171126619653e-05,
      "loss": 1.9345,
      "step": 23
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.866025403784439e-05,
      "loss": 2.0124,
      "step": 24
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8526401643540924e-05,
      "loss": 1.7981,
      "step": 25
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.8386705679454243e-05,
      "loss": 1.8936,
      "step": 26
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8241261886220155e-05,
      "loss": 1.7496,
      "step": 27
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 1.8839,
      "step": 28
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.7933533402912354e-05,
      "loss": 1.8223,
      "step": 29
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.777145961456971e-05,
      "loss": 1.7236,
      "step": 30
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7604059656000313e-05,
      "loss": 1.7847,
      "step": 31
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7431448254773943e-05,
      "loss": 1.7522,
      "step": 32
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7253743710122877e-05,
      "loss": 1.793,
      "step": 33
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 1.7028,
      "step": 34
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.688354575693754e-05,
      "loss": 1.741,
      "step": 35
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.6691306063588583e-05,
      "loss": 1.6532,
      "step": 36
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.6494480483301836e-05,
      "loss": 1.7473,
      "step": 37
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.6293203910498375e-05,
      "loss": 1.6594,
      "step": 38
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.608761429008721e-05,
      "loss": 1.7145,
      "step": 39
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.5877852522924733e-05,
      "loss": 1.711,
      "step": 40
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.566406236924833e-05,
      "loss": 1.6677,
      "step": 41
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.5446390350150272e-05,
      "loss": 1.6504,
      "step": 42
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5224985647159489e-05,
      "loss": 1.6619,
      "step": 43
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 1.6082,
      "step": 44
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4771587602596085e-05,
      "loss": 1.6774,
      "step": 45
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.4539904997395468e-05,
      "loss": 1.6716,
      "step": 46
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.4305110968082953e-05,
      "loss": 1.5922,
      "step": 47
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.4067366430758004e-05,
      "loss": 1.5776,
      "step": 48
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.3826834323650899e-05,
      "loss": 1.5135,
      "step": 49
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.3583679495453e-05,
      "loss": 1.6156,
      "step": 50
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.333806859233771e-05,
      "loss": 1.4918,
      "step": 51
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 1.6014,
      "step": 52
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.284015344703923e-05,
      "loss": 1.6738,
      "step": 53
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.2588190451025209e-05,
      "loss": 1.5877,
      "step": 54
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.2334453638559057e-05,
      "loss": 1.5073,
      "step": 55
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.2079116908177592e-05,
      "loss": 1.5979,
      "step": 56
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.1822355254921478e-05,
      "loss": 1.5981,
      "step": 57
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.156434465040231e-05,
      "loss": 1.5809,
      "step": 58
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.130526192220052e-05,
      "loss": 1.6249,
      "step": 59
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.1045284632676535e-05,
      "loss": 1.5176,
      "step": 60
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.0784590957278452e-05,
      "loss": 1.5136,
      "step": 61
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.0523359562429441e-05,
      "loss": 1.5118,
      "step": 62
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.0261769483078734e-05,
      "loss": 1.5413,
      "step": 63
    },
    {
      "epoch": 1.02,
      "learning_rate": 1e-05,
      "loss": 1.5132,
      "step": 64
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.738230516921272e-06,
      "loss": 1.4406,
      "step": 65
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.476640437570562e-06,
      "loss": 1.4744,
      "step": 66
    },
    {
      "epoch": 1.07,
      "learning_rate": 9.215409042721553e-06,
      "loss": 1.4483,
      "step": 67
    },
    {
      "epoch": 1.09,
      "learning_rate": 8.954715367323468e-06,
      "loss": 1.4445,
      "step": 68
    },
    {
      "epoch": 1.1,
      "learning_rate": 8.694738077799487e-06,
      "loss": 1.4893,
      "step": 69
    },
    {
      "epoch": 1.12,
      "learning_rate": 8.43565534959769e-06,
      "loss": 1.4632,
      "step": 70
    },
    {
      "epoch": 1.14,
      "learning_rate": 8.177644745078525e-06,
      "loss": 1.4622,
      "step": 71
    },
    {
      "epoch": 1.15,
      "learning_rate": 7.92088309182241e-06,
      "loss": 1.4093,
      "step": 72
    },
    {
      "epoch": 1.17,
      "learning_rate": 7.66554636144095e-06,
      "loss": 1.3794,
      "step": 73
    },
    {
      "epoch": 1.18,
      "learning_rate": 7.411809548974792e-06,
      "loss": 1.467,
      "step": 74
    },
    {
      "epoch": 1.2,
      "learning_rate": 7.159846552960774e-06,
      "loss": 1.4391,
      "step": 75
    },
    {
      "epoch": 1.22,
      "learning_rate": 6.909830056250527e-06,
      "loss": 1.3792,
      "step": 76
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.661931407662292e-06,
      "loss": 1.3963,
      "step": 77
    },
    {
      "epoch": 1.25,
      "learning_rate": 6.4163205045469975e-06,
      "loss": 1.3913,
      "step": 78
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.173165676349103e-06,
      "loss": 1.4604,
      "step": 79
    },
    {
      "epoch": 1.28,
      "learning_rate": 5.932633569242e-06,
      "loss": 1.3834,
      "step": 80
    },
    {
      "epoch": 1.3,
      "learning_rate": 5.694889031917047e-06,
      "loss": 1.3693,
      "step": 81
    },
    {
      "epoch": 1.31,
      "learning_rate": 5.460095002604533e-06,
      "loss": 1.3386,
      "step": 82
    },
    {
      "epoch": 1.33,
      "learning_rate": 5.228412397403916e-06,
      "loss": 1.3589,
      "step": 83
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.000000000000003e-06,
      "loss": 1.3702,
      "step": 84
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.775014352840512e-06,
      "loss": 1.3905,
      "step": 85
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.5536096498497295e-06,
      "loss": 1.4237,
      "step": 86
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.335937630751675e-06,
      "loss": 1.4223,
      "step": 87
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.12214747707527e-06,
      "loss": 1.4153,
      "step": 88
    },
    {
      "epoch": 1.42,
      "learning_rate": 3.912385709912794e-06,
      "loss": 1.4039,
      "step": 89
    },
    {
      "epoch": 1.44,
      "learning_rate": 3.7067960895016277e-06,
      "loss": 1.3521,
      "step": 90
    },
    {
      "epoch": 1.46,
      "learning_rate": 3.505519516698165e-06,
      "loss": 1.445,
      "step": 91
    },
    {
      "epoch": 1.47,
      "learning_rate": 3.308693936411421e-06,
      "loss": 1.3813,
      "step": 92
    },
    {
      "epoch": 1.49,
      "learning_rate": 3.116454243062459e-06,
      "loss": 1.3369,
      "step": 93
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 1.367,
      "step": 94
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.746256289877126e-06,
      "loss": 1.3921,
      "step": 95
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.5685517452260566e-06,
      "loss": 1.3977,
      "step": 96
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.395940343999691e-06,
      "loss": 1.5059,
      "step": 97
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.2285403854302912e-06,
      "loss": 1.3491,
      "step": 98
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.0664665970876496e-06,
      "loss": 1.3816,
      "step": 99
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 1.3663,
      "step": 100
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.7587381137798432e-06,
      "loss": 1.4237,
      "step": 101
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.6132943205457607e-06,
      "loss": 1.4992,
      "step": 102
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.4735983564590784e-06,
      "loss": 1.4245,
      "step": 103
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.339745962155613e-06,
      "loss": 1.4137,
      "step": 104
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.2118288733803474e-06,
      "loss": 1.3232,
      "step": 105
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.0899347581163222e-06,
      "loss": 1.3351,
      "step": 106
    },
    {
      "epoch": 1.71,
      "learning_rate": 9.74147156501396e-07,
      "loss": 1.443,
      "step": 107
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.645454235739903e-07,
      "loss": 1.3864,
      "step": 108
    },
    {
      "epoch": 1.74,
      "learning_rate": 7.612046748871327e-07,
      "loss": 1.4728,
      "step": 109
    },
    {
      "epoch": 1.76,
      "learning_rate": 6.641957350279838e-07,
      "loss": 1.3847,
      "step": 110
    },
    {
      "epoch": 1.78,
      "learning_rate": 5.735850890782158e-07,
      "loss": 1.4876,
      "step": 111
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.894348370484648e-07,
      "loss": 1.4311,
      "step": 112
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.118026513180695e-07,
      "loss": 1.459,
      "step": 113
    },
    {
      "epoch": 1.82,
      "learning_rate": 3.4074173710931804e-07,
      "loss": 1.4378,
      "step": 114
    },
    {
      "epoch": 1.84,
      "learning_rate": 2.7630079602323447e-07,
      "loss": 1.3836,
      "step": 115
    },
    {
      "epoch": 1.86,
      "learning_rate": 2.1852399266194312e-07,
      "loss": 1.346,
      "step": 116
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.6745092436045495e-07,
      "loss": 1.4084,
      "step": 117
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.231165940486234e-07,
      "loss": 1.3656,
      "step": 118
    },
    {
      "epoch": 1.9,
      "learning_rate": 8.555138626189619e-08,
      "loss": 1.473,
      "step": 119
    },
    {
      "epoch": 1.92,
      "learning_rate": 5.4781046317267103e-08,
      "loss": 1.4061,
      "step": 120
    },
    {
      "epoch": 1.94,
      "learning_rate": 3.082666266872036e-08,
      "loss": 1.3331,
      "step": 121
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.370465245426167e-08,
      "loss": 1.4088,
      "step": 122
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.4267502444274013e-09,
      "loss": 1.3246,
      "step": 123
    },
    {
      "epoch": 1.98,
      "learning_rate": 0.0,
      "loss": 1.4014,
      "step": 124
    },
    {
      "epoch": 1.98,
      "step": 124,
      "total_flos": 2.688391775571149e+16,
      "train_loss": 1.6652600323000262,
      "train_runtime": 1279.2707,
      "train_samples_per_second": 12.507,
      "train_steps_per_second": 0.097
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 124,
  "num_train_epochs": 2,
  "save_steps": 1000,
  "total_flos": 2.688391775571149e+16,
  "trial_name": null,
  "trial_params": null
}