{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9984,
  "global_step": 234,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 0.0,
      "loss": 1.3784,
      "step": 1
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.666666666666668e-06,
      "loss": 1.145,
      "step": 2
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.0566416671474378e-05,
      "loss": 1.3138,
      "step": 3
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.3333333333333337e-05,
      "loss": 1.2414,
      "step": 4
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.5479520632582417e-05,
      "loss": 1.1408,
      "step": 5
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.7233083338141044e-05,
      "loss": 1.1296,
      "step": 6
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.8715699480384028e-05,
      "loss": 1.2055,
      "step": 7
    },
    {
      "epoch": 0.1,
      "learning_rate": 2e-05,
      "loss": 1.1321,
      "step": 8
    },
    {
      "epoch": 0.12,
      "learning_rate": 2e-05,
      "loss": 1.1072,
      "step": 9
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.991150442477876e-05,
      "loss": 1.0918,
      "step": 10
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9823008849557524e-05,
      "loss": 1.1221,
      "step": 11
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9734513274336283e-05,
      "loss": 1.1122,
      "step": 12
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9646017699115046e-05,
      "loss": 1.1318,
      "step": 13
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9557522123893806e-05,
      "loss": 1.1461,
      "step": 14
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.946902654867257e-05,
      "loss": 1.1204,
      "step": 15
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9380530973451328e-05,
      "loss": 1.0991,
      "step": 16
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.929203539823009e-05,
      "loss": 1.1348,
      "step": 17
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9203539823008853e-05,
      "loss": 1.0723,
      "step": 18
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9115044247787613e-05,
      "loss": 1.07,
      "step": 19
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9026548672566376e-05,
      "loss": 1.1376,
      "step": 20
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8938053097345135e-05,
      "loss": 1.1015,
      "step": 21
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.8849557522123894e-05,
      "loss": 1.1164,
      "step": 22
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8761061946902657e-05,
      "loss": 1.0867,
      "step": 23
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8672566371681417e-05,
      "loss": 1.0987,
      "step": 24
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.858407079646018e-05,
      "loss": 1.0672,
      "step": 25
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.849557522123894e-05,
      "loss": 1.0731,
      "step": 26
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8407079646017702e-05,
      "loss": 1.0883,
      "step": 27
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.831858407079646e-05,
      "loss": 1.0672,
      "step": 28
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.823008849557522e-05,
      "loss": 1.0841,
      "step": 29
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8141592920353983e-05,
      "loss": 1.1422,
      "step": 30
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8053097345132743e-05,
      "loss": 1.064,
      "step": 31
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.7964601769911506e-05,
      "loss": 1.1643,
      "step": 32
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.7876106194690265e-05,
      "loss": 1.1199,
      "step": 33
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7787610619469028e-05,
      "loss": 1.0637,
      "step": 34
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.769911504424779e-05,
      "loss": 1.0668,
      "step": 35
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.761061946902655e-05,
      "loss": 1.099,
      "step": 36
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7522123893805313e-05,
      "loss": 1.0692,
      "step": 37
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.7433628318584072e-05,
      "loss": 1.1731,
      "step": 38
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7345132743362835e-05,
      "loss": 1.1319,
      "step": 39
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7256637168141594e-05,
      "loss": 1.0535,
      "step": 40
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7168141592920354e-05,
      "loss": 1.0725,
      "step": 41
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7079646017699117e-05,
      "loss": 1.1307,
      "step": 42
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.6991150442477876e-05,
      "loss": 1.0974,
      "step": 43
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.690265486725664e-05,
      "loss": 1.0936,
      "step": 44
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.68141592920354e-05,
      "loss": 1.0644,
      "step": 45
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.672566371681416e-05,
      "loss": 1.0609,
      "step": 46
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.663716814159292e-05,
      "loss": 1.0874,
      "step": 47
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.6548672566371683e-05,
      "loss": 1.0168,
      "step": 48
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.6460176991150443e-05,
      "loss": 1.1001,
      "step": 49
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.6371681415929206e-05,
      "loss": 1.0717,
      "step": 50
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.628318584070797e-05,
      "loss": 1.1052,
      "step": 51
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.6194690265486728e-05,
      "loss": 1.1026,
      "step": 52
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.6106194690265487e-05,
      "loss": 1.1029,
      "step": 53
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.601769911504425e-05,
      "loss": 1.0573,
      "step": 54
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.592920353982301e-05,
      "loss": 1.1157,
      "step": 55
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.5840707964601772e-05,
      "loss": 1.0587,
      "step": 56
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.5752212389380532e-05,
      "loss": 1.0659,
      "step": 57
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.5663716814159295e-05,
      "loss": 1.0786,
      "step": 58
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.5575221238938054e-05,
      "loss": 1.0734,
      "step": 59
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.5486725663716813e-05,
      "loss": 1.0739,
      "step": 60
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.5398230088495576e-05,
      "loss": 1.0432,
      "step": 61
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.5309734513274336e-05,
      "loss": 1.0287,
      "step": 62
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.5221238938053098e-05,
      "loss": 1.0657,
      "step": 63
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.513274336283186e-05,
      "loss": 1.0694,
      "step": 64
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.5044247787610619e-05,
      "loss": 1.0142,
      "step": 65
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.4955752212389383e-05,
      "loss": 1.0694,
      "step": 66
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.4867256637168143e-05,
      "loss": 1.0555,
      "step": 67
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.4778761061946904e-05,
      "loss": 1.0773,
      "step": 68
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.4690265486725665e-05,
      "loss": 1.0589,
      "step": 69
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.4601769911504426e-05,
      "loss": 1.0068,
      "step": 70
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.4513274336283187e-05,
      "loss": 1.1031,
      "step": 71
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.4424778761061948e-05,
      "loss": 1.0696,
      "step": 72
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.433628318584071e-05,
      "loss": 1.057,
      "step": 73
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.424778761061947e-05,
      "loss": 1.043,
      "step": 74
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.4159292035398232e-05,
      "loss": 1.08,
      "step": 75
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.4070796460176991e-05,
      "loss": 1.0396,
      "step": 76
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.3982300884955752e-05,
      "loss": 1.0961,
      "step": 77
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.3893805309734513e-05,
      "loss": 1.1429,
      "step": 78
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.3805309734513275e-05,
      "loss": 1.0117,
      "step": 79
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.3716814159292036e-05,
      "loss": 0.8053,
      "step": 80
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.3628318584070797e-05,
      "loss": 0.8575,
      "step": 81
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.353982300884956e-05,
      "loss": 0.8436,
      "step": 82
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.345132743362832e-05,
      "loss": 0.7732,
      "step": 83
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.3362831858407082e-05,
      "loss": 0.8007,
      "step": 84
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.3274336283185843e-05,
      "loss": 0.896,
      "step": 85
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.3185840707964604e-05,
      "loss": 0.8223,
      "step": 86
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.3097345132743363e-05,
      "loss": 0.8036,
      "step": 87
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.3008849557522125e-05,
      "loss": 0.8582,
      "step": 88
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.2920353982300886e-05,
      "loss": 0.8142,
      "step": 89
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.2831858407079647e-05,
      "loss": 0.8121,
      "step": 90
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.2743362831858408e-05,
      "loss": 0.8693,
      "step": 91
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.2654867256637169e-05,
      "loss": 0.8138,
      "step": 92
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.256637168141593e-05,
      "loss": 0.8082,
      "step": 93
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.2477876106194691e-05,
      "loss": 0.7909,
      "step": 94
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.2389380530973452e-05,
      "loss": 0.7682,
      "step": 95
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.2300884955752212e-05,
      "loss": 0.8172,
      "step": 96
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.2212389380530973e-05,
      "loss": 0.8123,
      "step": 97
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.2123893805309736e-05,
      "loss": 0.8582,
      "step": 98
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.2035398230088497e-05,
      "loss": 0.8424,
      "step": 99
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.1946902654867258e-05,
      "loss": 0.8275,
      "step": 100
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.1858407079646019e-05,
      "loss": 0.8387,
      "step": 101
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.176991150442478e-05,
      "loss": 0.8281,
      "step": 102
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.1681415929203541e-05,
      "loss": 0.7838,
      "step": 103
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.1592920353982302e-05,
      "loss": 0.8267,
      "step": 104
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.1504424778761064e-05,
      "loss": 0.8709,
      "step": 105
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.1415929203539825e-05,
      "loss": 0.7496,
      "step": 106
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.1327433628318584e-05,
      "loss": 0.8942,
      "step": 107
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1238938053097345e-05,
      "loss": 0.7894,
      "step": 108
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.1150442477876106e-05,
      "loss": 0.8268,
      "step": 109
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.1061946902654867e-05,
      "loss": 0.8861,
      "step": 110
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.0973451327433629e-05,
      "loss": 0.8377,
      "step": 111
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.088495575221239e-05,
      "loss": 0.8178,
      "step": 112
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.079646017699115e-05,
      "loss": 0.8198,
      "step": 113
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.0707964601769914e-05,
      "loss": 0.8186,
      "step": 114
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.0619469026548675e-05,
      "loss": 0.8446,
      "step": 115
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.0530973451327436e-05,
      "loss": 0.8713,
      "step": 116
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0442477876106197e-05,
      "loss": 0.7833,
      "step": 117
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.0353982300884956e-05,
      "loss": 0.8686,
      "step": 118
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0265486725663717e-05,
      "loss": 0.7812,
      "step": 119
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.0176991150442479e-05,
      "loss": 0.8517,
      "step": 120
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.008849557522124e-05,
      "loss": 0.7986,
      "step": 121
    },
    {
      "epoch": 1.56,
      "learning_rate": 1e-05,
      "loss": 0.8045,
      "step": 122
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.911504424778762e-06,
      "loss": 0.805,
      "step": 123
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.823008849557523e-06,
      "loss": 0.8309,
      "step": 124
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.734513274336284e-06,
      "loss": 0.7949,
      "step": 125
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.646017699115045e-06,
      "loss": 0.8484,
      "step": 126
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.557522123893806e-06,
      "loss": 0.8306,
      "step": 127
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.469026548672568e-06,
      "loss": 0.8458,
      "step": 128
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.380530973451329e-06,
      "loss": 0.8468,
      "step": 129
    },
    {
      "epoch": 1.67,
      "learning_rate": 9.29203539823009e-06,
      "loss": 0.8093,
      "step": 130
    },
    {
      "epoch": 1.68,
      "learning_rate": 9.203539823008851e-06,
      "loss": 0.8116,
      "step": 131
    },
    {
      "epoch": 1.69,
      "learning_rate": 9.11504424778761e-06,
      "loss": 0.8449,
      "step": 132
    },
    {
      "epoch": 1.7,
      "learning_rate": 9.026548672566371e-06,
      "loss": 0.7937,
      "step": 133
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.938053097345133e-06,
      "loss": 0.8003,
      "step": 134
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.849557522123895e-06,
      "loss": 0.7981,
      "step": 135
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.761061946902656e-06,
      "loss": 0.7976,
      "step": 136
    },
    {
      "epoch": 1.76,
      "learning_rate": 8.672566371681418e-06,
      "loss": 0.7909,
      "step": 137
    },
    {
      "epoch": 1.77,
      "learning_rate": 8.584070796460177e-06,
      "loss": 0.8746,
      "step": 138
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.495575221238938e-06,
      "loss": 0.8757,
      "step": 139
    },
    {
      "epoch": 1.79,
      "learning_rate": 8.4070796460177e-06,
      "loss": 0.8416,
      "step": 140
    },
    {
      "epoch": 1.81,
      "learning_rate": 8.31858407079646e-06,
      "loss": 0.8874,
      "step": 141
    },
    {
      "epoch": 1.82,
      "learning_rate": 8.230088495575221e-06,
      "loss": 0.8264,
      "step": 142
    },
    {
      "epoch": 1.83,
      "learning_rate": 8.141592920353984e-06,
      "loss": 0.8191,
      "step": 143
    },
    {
      "epoch": 1.84,
      "learning_rate": 8.053097345132744e-06,
      "loss": 0.8478,
      "step": 144
    },
    {
      "epoch": 1.86,
      "learning_rate": 7.964601769911505e-06,
      "loss": 0.8388,
      "step": 145
    },
    {
      "epoch": 1.87,
      "learning_rate": 7.876106194690266e-06,
      "loss": 0.8154,
      "step": 146
    },
    {
      "epoch": 1.88,
      "learning_rate": 7.787610619469027e-06,
      "loss": 0.7766,
      "step": 147
    },
    {
      "epoch": 1.9,
      "learning_rate": 7.699115044247788e-06,
      "loss": 0.771,
      "step": 148
    },
    {
      "epoch": 1.91,
      "learning_rate": 7.610619469026549e-06,
      "loss": 0.8095,
      "step": 149
    },
    {
      "epoch": 1.92,
      "learning_rate": 7.5221238938053095e-06,
      "loss": 0.8346,
      "step": 150
    },
    {
      "epoch": 1.93,
      "learning_rate": 7.4336283185840714e-06,
      "loss": 0.8493,
      "step": 151
    },
    {
      "epoch": 1.95,
      "learning_rate": 7.3451327433628326e-06,
      "loss": 0.7776,
      "step": 152
    },
    {
      "epoch": 1.96,
      "learning_rate": 7.256637168141594e-06,
      "loss": 0.8135,
      "step": 153
    },
    {
      "epoch": 1.97,
      "learning_rate": 7.168141592920355e-06,
      "loss": 0.8389,
      "step": 154
    },
    {
      "epoch": 1.99,
      "learning_rate": 7.079646017699116e-06,
      "loss": 0.8623,
      "step": 155
    },
    {
      "epoch": 2.0,
      "learning_rate": 6.991150442477876e-06,
      "loss": 0.8054,
      "step": 156
    },
    {
      "epoch": 2.01,
      "learning_rate": 6.902654867256637e-06,
      "loss": 0.738,
      "step": 157
    },
    {
      "epoch": 2.03,
      "learning_rate": 6.814159292035398e-06,
      "loss": 0.6351,
      "step": 158
    },
    {
      "epoch": 2.04,
      "learning_rate": 6.72566371681416e-06,
      "loss": 0.708,
      "step": 159
    },
    {
      "epoch": 2.05,
      "learning_rate": 6.6371681415929215e-06,
      "loss": 0.7145,
      "step": 160
    },
    {
      "epoch": 2.06,
      "learning_rate": 6.548672566371682e-06,
      "loss": 0.6824,
      "step": 161
    },
    {
      "epoch": 2.08,
      "learning_rate": 6.460176991150443e-06,
      "loss": 0.567,
      "step": 162
    },
    {
      "epoch": 2.09,
      "learning_rate": 6.371681415929204e-06,
      "loss": 0.6298,
      "step": 163
    },
    {
      "epoch": 2.1,
      "learning_rate": 6.283185840707965e-06,
      "loss": 0.6047,
      "step": 164
    },
    {
      "epoch": 2.12,
      "learning_rate": 6.194690265486726e-06,
      "loss": 0.6152,
      "step": 165
    },
    {
      "epoch": 2.13,
      "learning_rate": 6.1061946902654865e-06,
      "loss": 0.6148,
      "step": 166
    },
    {
      "epoch": 2.14,
      "learning_rate": 6.0176991150442484e-06,
      "loss": 0.6011,
      "step": 167
    },
    {
      "epoch": 2.15,
      "learning_rate": 5.9292035398230096e-06,
      "loss": 0.6032,
      "step": 168
    },
    {
      "epoch": 2.17,
      "learning_rate": 5.840707964601771e-06,
      "loss": 0.6438,
      "step": 169
    },
    {
      "epoch": 2.18,
      "learning_rate": 5.752212389380532e-06,
      "loss": 0.7348,
      "step": 170
    },
    {
      "epoch": 2.19,
      "learning_rate": 5.663716814159292e-06,
      "loss": 0.6328,
      "step": 171
    },
    {
      "epoch": 2.2,
      "learning_rate": 5.575221238938053e-06,
      "loss": 0.5862,
      "step": 172
    },
    {
      "epoch": 2.22,
      "learning_rate": 5.486725663716814e-06,
      "loss": 0.5969,
      "step": 173
    },
    {
      "epoch": 2.23,
      "learning_rate": 5.398230088495575e-06,
      "loss": 0.6267,
      "step": 174
    },
    {
      "epoch": 2.24,
      "learning_rate": 5.309734513274337e-06,
      "loss": 0.6917,
      "step": 175
    },
    {
      "epoch": 2.26,
      "learning_rate": 5.2212389380530985e-06,
      "loss": 0.6034,
      "step": 176
    },
    {
      "epoch": 2.27,
      "learning_rate": 5.132743362831859e-06,
      "loss": 0.7142,
      "step": 177
    },
    {
      "epoch": 2.28,
      "learning_rate": 5.04424778761062e-06,
      "loss": 0.5951,
      "step": 178
    },
    {
      "epoch": 2.29,
      "learning_rate": 4.955752212389381e-06,
      "loss": 0.6725,
      "step": 179
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.867256637168142e-06,
      "loss": 0.5757,
      "step": 180
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.778761061946903e-06,
      "loss": 0.6143,
      "step": 181
    },
    {
      "epoch": 2.33,
      "learning_rate": 4.690265486725664e-06,
      "loss": 0.63,
      "step": 182
    },
    {
      "epoch": 2.35,
      "learning_rate": 4.6017699115044254e-06,
      "loss": 0.6141,
      "step": 183
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.513274336283186e-06,
      "loss": 0.6005,
      "step": 184
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.424778761061948e-06,
      "loss": 0.6383,
      "step": 185
    },
    {
      "epoch": 2.38,
      "learning_rate": 4.336283185840709e-06,
      "loss": 0.6542,
      "step": 186
    },
    {
      "epoch": 2.4,
      "learning_rate": 4.247787610619469e-06,
      "loss": 0.6045,
      "step": 187
    },
    {
      "epoch": 2.41,
      "learning_rate": 4.15929203539823e-06,
      "loss": 0.5642,
      "step": 188
    },
    {
      "epoch": 2.42,
      "learning_rate": 4.070796460176992e-06,
      "loss": 0.6391,
      "step": 189
    },
    {
      "epoch": 2.44,
      "learning_rate": 3.982300884955752e-06,
      "loss": 0.5867,
      "step": 190
    },
    {
      "epoch": 2.45,
      "learning_rate": 3.8938053097345135e-06,
      "loss": 0.6587,
      "step": 191
    },
    {
      "epoch": 2.46,
      "learning_rate": 3.8053097345132746e-06,
      "loss": 0.6431,
      "step": 192
    },
    {
      "epoch": 2.47,
      "learning_rate": 3.7168141592920357e-06,
      "loss": 0.5936,
      "step": 193
    },
    {
      "epoch": 2.49,
      "learning_rate": 3.628318584070797e-06,
      "loss": 0.5934,
      "step": 194
    },
    {
      "epoch": 2.5,
      "learning_rate": 3.539823008849558e-06,
      "loss": 0.6501,
      "step": 195
    },
    {
      "epoch": 2.51,
      "learning_rate": 3.4513274336283186e-06,
      "loss": 0.5865,
      "step": 196
    },
    {
      "epoch": 2.52,
      "learning_rate": 3.36283185840708e-06,
      "loss": 0.6379,
      "step": 197
    },
    {
      "epoch": 2.54,
      "learning_rate": 3.274336283185841e-06,
      "loss": 0.6253,
      "step": 198
    },
    {
      "epoch": 2.55,
      "learning_rate": 3.185840707964602e-06,
      "loss": 0.6018,
      "step": 199
    },
    {
      "epoch": 2.56,
      "learning_rate": 3.097345132743363e-06,
      "loss": 0.6467,
      "step": 200
    },
    {
      "epoch": 2.58,
      "learning_rate": 3.0088495575221242e-06,
      "loss": 0.615,
      "step": 201
    },
    {
      "epoch": 2.59,
      "learning_rate": 2.9203539823008853e-06,
      "loss": 0.6497,
      "step": 202
    },
    {
      "epoch": 2.6,
      "learning_rate": 2.831858407079646e-06,
      "loss": 0.6206,
      "step": 203
    },
    {
      "epoch": 2.61,
      "learning_rate": 2.743362831858407e-06,
      "loss": 0.674,
      "step": 204
    },
    {
      "epoch": 2.63,
      "learning_rate": 2.6548672566371687e-06,
      "loss": 0.6109,
      "step": 205
    },
    {
      "epoch": 2.64,
      "learning_rate": 2.5663716814159294e-06,
      "loss": 0.6608,
      "step": 206
    },
    {
      "epoch": 2.65,
      "learning_rate": 2.4778761061946905e-06,
      "loss": 0.6142,
      "step": 207
    },
    {
      "epoch": 2.67,
      "learning_rate": 2.3893805309734516e-06,
      "loss": 0.5465,
      "step": 208
    },
    {
      "epoch": 2.68,
      "learning_rate": 2.3008849557522127e-06,
      "loss": 0.5763,
      "step": 209
    },
    {
      "epoch": 2.69,
      "learning_rate": 2.212389380530974e-06,
      "loss": 0.6508,
      "step": 210
    },
    {
      "epoch": 2.7,
      "learning_rate": 2.1238938053097345e-06,
      "loss": 0.6026,
      "step": 211
    },
    {
      "epoch": 2.72,
      "learning_rate": 2.035398230088496e-06,
      "loss": 0.6462,
      "step": 212
    },
    {
      "epoch": 2.73,
      "learning_rate": 1.9469026548672567e-06,
      "loss": 0.6304,
      "step": 213
    },
    {
      "epoch": 2.74,
      "learning_rate": 1.8584070796460179e-06,
      "loss": 0.6191,
      "step": 214
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.769911504424779e-06,
      "loss": 0.6277,
      "step": 215
    },
    {
      "epoch": 2.77,
      "learning_rate": 1.68141592920354e-06,
      "loss": 0.5747,
      "step": 216
    },
    {
      "epoch": 2.78,
      "learning_rate": 1.592920353982301e-06,
      "loss": 0.6247,
      "step": 217
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.5044247787610621e-06,
      "loss": 0.6374,
      "step": 218
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.415929203539823e-06,
      "loss": 0.6191,
      "step": 219
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.3274336283185843e-06,
      "loss": 0.6169,
      "step": 220
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.2389380530973452e-06,
      "loss": 0.6172,
      "step": 221
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.1504424778761064e-06,
      "loss": 0.5632,
      "step": 222
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.0619469026548673e-06,
      "loss": 0.6987,
      "step": 223
    },
    {
      "epoch": 2.87,
      "learning_rate": 9.734513274336284e-07,
      "loss": 0.6426,
      "step": 224
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.849557522123895e-07,
      "loss": 0.6,
      "step": 225
    },
    {
      "epoch": 2.9,
      "learning_rate": 7.964601769911505e-07,
      "loss": 0.6177,
      "step": 226
    },
    {
      "epoch": 2.91,
      "learning_rate": 7.079646017699115e-07,
      "loss": 0.5396,
      "step": 227
    },
    {
      "epoch": 2.92,
      "learning_rate": 6.194690265486726e-07,
      "loss": 0.6485,
      "step": 228
    },
    {
      "epoch": 2.93,
      "learning_rate": 5.309734513274336e-07,
      "loss": 0.6281,
      "step": 229
    },
    {
      "epoch": 2.95,
      "learning_rate": 4.4247787610619474e-07,
      "loss": 0.6415,
      "step": 230
    },
    {
      "epoch": 2.96,
      "learning_rate": 3.5398230088495575e-07,
      "loss": 0.6062,
      "step": 231
    },
    {
      "epoch": 2.97,
      "learning_rate": 2.654867256637168e-07,
      "loss": 0.5872,
      "step": 232
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.7699115044247788e-07,
      "loss": 0.554,
      "step": 233
    },
    {
      "epoch": 3.0,
      "learning_rate": 8.849557522123894e-08,
      "loss": 0.6467,
      "step": 234
    },
    {
      "epoch": 3.0,
      "step": 234,
      "total_flos": 18337365196800.0,
      "train_loss": 0.8504489425920013,
      "train_runtime": 4061.2514,
      "train_samples_per_second": 7.387,
      "train_steps_per_second": 0.058
    }
  ],
  "max_steps": 234,
  "num_train_epochs": 3,
  "total_flos": 18337365196800.0,
  "trial_name": null,
  "trial_params": null
}