{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.986175115207373,
  "eval_steps": 500,
  "global_step": 162,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "learning_rate": 4e-05, "loss": 2.258, "step": 1 },
    { "epoch": 0.04, "learning_rate": 8e-05, "loss": 2.154, "step": 2 },
    { "epoch": 0.06, "learning_rate": 0.00012, "loss": 2.0787, "step": 3 },
    { "epoch": 0.07, "learning_rate": 0.00016, "loss": 2.1022, "step": 4 },
    { "epoch": 0.09, "learning_rate": 0.0002, "loss": 2.5467, "step": 5 },
    { "epoch": 0.11, "learning_rate": 0.00019872611464968155, "loss": 2.222, "step": 6 },
    { "epoch": 0.13, "learning_rate": 0.00019745222929936306, "loss": 2.1879, "step": 7 },
    { "epoch": 0.15, "learning_rate": 0.0001961783439490446, "loss": 1.8396, "step": 8 },
    { "epoch": 0.17, "learning_rate": 0.00019490445859872614, "loss": 1.8801, "step": 9 },
    { "epoch": 0.18, "learning_rate": 0.00019363057324840765, "loss": 1.9303, "step": 10 },
    { "epoch": 0.2, "learning_rate": 0.00019235668789808918, "loss": 1.7077, "step": 11 },
    { "epoch": 0.22, "learning_rate": 0.00019108280254777072, "loss": 1.8204, "step": 12 },
    { "epoch": 0.24, "learning_rate": 0.00018980891719745223, "loss": 1.7697, "step": 13 },
    { "epoch": 0.26, "learning_rate": 0.00018853503184713377, "loss": 1.7407, "step": 14 },
    { "epoch": 0.28, "learning_rate": 0.0001872611464968153, "loss": 1.7197, "step": 15 },
    { "epoch": 0.29, "learning_rate": 0.00018598726114649682, "loss": 1.6274, "step": 16 },
    { "epoch": 0.31, "learning_rate": 0.00018471337579617836, "loss": 1.7828, "step": 17 },
    { "epoch": 0.33, "learning_rate": 0.00018343949044585987, "loss": 1.6724, "step": 18 },
    { "epoch": 0.35, "learning_rate": 0.0001821656050955414, "loss": 1.6608, "step": 19 },
    { "epoch": 0.37, "learning_rate": 0.00018089171974522295, "loss": 1.7449, "step": 20 },
    { "epoch": 0.39, "learning_rate": 0.00017961783439490446, "loss": 1.724, "step": 21 },
    { "epoch": 0.41, "learning_rate": 0.000178343949044586, "loss": 1.67, "step": 22 },
    { "epoch": 0.42, "learning_rate": 0.00017707006369426754, "loss": 1.5856, "step": 23 },
    { "epoch": 0.44, "learning_rate": 0.00017579617834394905, "loss": 1.7545, "step": 24 },
    { "epoch": 0.46, "learning_rate": 0.00017452229299363059, "loss": 1.7722, "step": 25 },
    { "epoch": 0.48, "learning_rate": 0.00017324840764331212, "loss": 1.5361, "step": 26 },
    { "epoch": 0.5, "learning_rate": 0.00017197452229299363, "loss": 1.6462, "step": 27 },
    { "epoch": 0.52, "learning_rate": 0.00017070063694267517, "loss": 1.5768, "step": 28 },
    { "epoch": 0.53, "learning_rate": 0.0001694267515923567, "loss": 1.7124, "step": 29 },
    { "epoch": 0.55, "learning_rate": 0.00016815286624203822, "loss": 1.5629, "step": 30 },
    { "epoch": 0.57, "learning_rate": 0.00016687898089171976, "loss": 1.6924, "step": 31 },
    { "epoch": 0.59, "learning_rate": 0.0001656050955414013, "loss": 1.5555, "step": 32 },
    { "epoch": 0.61, "learning_rate": 0.0001643312101910828, "loss": 1.6034, "step": 33 },
    { "epoch": 0.63, "learning_rate": 0.00016305732484076435, "loss": 1.6526, "step": 34 },
    { "epoch": 0.65, "learning_rate": 0.0001617834394904459, "loss": 1.4711, "step": 35 },
    { "epoch": 0.66, "learning_rate": 0.0001605095541401274, "loss": 1.592, "step": 36 },
    { "epoch": 0.68, "learning_rate": 0.00015923566878980894, "loss": 1.5779, "step": 37 },
    { "epoch": 0.7, "learning_rate": 0.00015796178343949047, "loss": 1.7425, "step": 38 },
    { "epoch": 0.72, "learning_rate": 0.00015668789808917199, "loss": 1.5372, "step": 39 },
    { "epoch": 0.74, "learning_rate": 0.00015541401273885352, "loss": 1.6526, "step": 40 },
    { "epoch": 0.76, "learning_rate": 0.00015414012738853506, "loss": 1.54, "step": 41 },
    { "epoch": 0.77, "learning_rate": 0.00015286624203821657, "loss": 1.5983, "step": 42 },
    { "epoch": 0.79, "learning_rate": 0.0001515923566878981, "loss": 1.5737, "step": 43 },
    { "epoch": 0.81, "learning_rate": 0.00015031847133757962, "loss": 1.5084, "step": 44 },
    { "epoch": 0.83, "learning_rate": 0.00014904458598726113, "loss": 1.4372, "step": 45 },
    { "epoch": 0.85, "learning_rate": 0.00014777070063694267, "loss": 1.6501, "step": 46 },
    { "epoch": 0.87, "learning_rate": 0.0001464968152866242, "loss": 1.4461, "step": 47 },
    { "epoch": 0.88, "learning_rate": 0.00014522292993630572, "loss": 1.5514, "step": 48 },
    { "epoch": 0.9, "learning_rate": 0.00014394904458598726, "loss": 1.5865, "step": 49 },
    { "epoch": 0.92, "learning_rate": 0.0001426751592356688, "loss": 1.5795, "step": 50 },
    { "epoch": 0.94, "learning_rate": 0.0001414012738853503, "loss": 1.4996, "step": 51 },
    { "epoch": 0.96, "learning_rate": 0.00014012738853503185, "loss": 1.6749, "step": 52 },
    { "epoch": 0.98, "learning_rate": 0.00013885350318471339, "loss": 1.4825, "step": 53 },
    { "epoch": 1.0, "learning_rate": 0.0001375796178343949, "loss": 1.5719, "step": 54 },
    { "epoch": 1.01, "learning_rate": 0.00013630573248407644, "loss": 1.4932, "step": 55 },
    { "epoch": 1.03, "learning_rate": 0.00013503184713375797, "loss": 1.4342, "step": 56 },
    { "epoch": 1.05, "learning_rate": 0.00013375796178343948, "loss": 1.4454, "step": 57 },
    { "epoch": 1.07, "learning_rate": 0.00013248407643312102, "loss": 1.5273, "step": 58 },
    { "epoch": 1.09, "learning_rate": 0.00013121019108280253, "loss": 1.384, "step": 59 },
    { "epoch": 1.11, "learning_rate": 0.00012993630573248407, "loss": 1.4851, "step": 60 },
    { "epoch": 1.12, "learning_rate": 0.0001286624203821656, "loss": 1.3932, "step": 61 },
    { "epoch": 1.14, "learning_rate": 0.00012738853503184712, "loss": 1.479, "step": 62 },
    { "epoch": 1.16, "learning_rate": 0.00012611464968152866, "loss": 1.4611, "step": 63 },
    { "epoch": 1.18, "learning_rate": 0.0001248407643312102, "loss": 1.4786, "step": 64 },
    { "epoch": 1.2, "learning_rate": 0.0001235668789808917, "loss": 1.2568, "step": 65 },
    { "epoch": 1.22, "learning_rate": 0.00012229299363057325, "loss": 1.3784, "step": 66 },
    { "epoch": 1.24, "learning_rate": 0.00012101910828025477, "loss": 1.4212, "step": 67 },
    { "epoch": 1.25, "learning_rate": 0.00011974522292993631, "loss": 1.3157, "step": 68 },
    { "epoch": 1.27, "learning_rate": 0.00011847133757961784, "loss": 1.4883, "step": 69 },
    { "epoch": 1.29, "learning_rate": 0.00011719745222929936, "loss": 1.4889, "step": 70 },
    { "epoch": 1.31, "learning_rate": 0.0001159235668789809, "loss": 1.4213, "step": 71 },
    { "epoch": 1.33, "learning_rate": 0.00011464968152866242, "loss": 1.3831, "step": 72 },
    { "epoch": 1.35, "learning_rate": 0.00011337579617834395, "loss": 1.4133, "step": 73 },
    { "epoch": 1.36, "learning_rate": 0.00011210191082802549, "loss": 1.3613, "step": 74 },
    { "epoch": 1.38, "learning_rate": 0.00011082802547770701, "loss": 1.3554, "step": 75 },
    { "epoch": 1.4, "learning_rate": 0.00010955414012738854, "loss": 1.4899, "step": 76 },
    { "epoch": 1.42, "learning_rate": 0.00010828025477707007, "loss": 1.3935, "step": 77 },
    { "epoch": 1.44, "learning_rate": 0.0001070063694267516, "loss": 1.3503, "step": 78 },
    { "epoch": 1.46, "learning_rate": 0.00010573248407643312, "loss": 1.3964, "step": 79 },
    { "epoch": 1.47, "learning_rate": 0.00010445859872611465, "loss": 1.5109, "step": 80 },
    { "epoch": 1.49, "learning_rate": 0.00010318471337579619, "loss": 1.4097, "step": 81 },
    { "epoch": 1.51, "learning_rate": 0.00010191082802547771, "loss": 1.313, "step": 82 },
    { "epoch": 1.53, "learning_rate": 0.00010063694267515924, "loss": 1.2494, "step": 83 },
    { "epoch": 1.55, "learning_rate": 9.936305732484077e-05, "loss": 1.4779, "step": 84 },
    { "epoch": 1.57, "learning_rate": 9.80891719745223e-05, "loss": 1.2957, "step": 85 },
    { "epoch": 1.59, "learning_rate": 9.681528662420382e-05, "loss": 1.3638, "step": 86 },
    { "epoch": 1.6, "learning_rate": 9.554140127388536e-05, "loss": 1.3777, "step": 87 },
    { "epoch": 1.62, "learning_rate": 9.426751592356689e-05, "loss": 1.4158, "step": 88 },
    { "epoch": 1.64, "learning_rate": 9.299363057324841e-05, "loss": 1.4125, "step": 89 },
    { "epoch": 1.66, "learning_rate": 9.171974522292994e-05, "loss": 1.6222, "step": 90 },
    { "epoch": 1.68, "learning_rate": 9.044585987261147e-05, "loss": 1.335, "step": 91 },
    { "epoch": 1.7, "learning_rate": 8.9171974522293e-05, "loss": 1.3283, "step": 92 },
    { "epoch": 1.71, "learning_rate": 8.789808917197452e-05, "loss": 1.3687, "step": 93 },
    { "epoch": 1.73, "learning_rate": 8.662420382165606e-05, "loss": 1.345, "step": 94 },
    { "epoch": 1.75, "learning_rate": 8.535031847133759e-05, "loss": 1.4534, "step": 95 },
    { "epoch": 1.77, "learning_rate": 8.407643312101911e-05, "loss": 1.2561, "step": 96 },
    { "epoch": 1.79, "learning_rate": 8.280254777070065e-05, "loss": 1.3999, "step": 97 },
    { "epoch": 1.81, "learning_rate": 8.152866242038217e-05, "loss": 1.4159, "step": 98 },
    { "epoch": 1.82, "learning_rate": 8.02547770700637e-05, "loss": 1.3375, "step": 99 },
    { "epoch": 1.84, "learning_rate": 7.898089171974524e-05, "loss": 1.3709, "step": 100 },
    { "epoch": 1.86, "learning_rate": 7.770700636942676e-05, "loss": 1.2414, "step": 101 },
    { "epoch": 1.88, "learning_rate": 7.643312101910829e-05, "loss": 1.4125, "step": 102 },
    { "epoch": 1.9, "learning_rate": 7.515923566878981e-05, "loss": 1.4631, "step": 103 },
    { "epoch": 1.92, "learning_rate": 7.388535031847134e-05, "loss": 1.2543, "step": 104 },
    { "epoch": 1.94, "learning_rate": 7.261146496815286e-05, "loss": 1.3168, "step": 105 },
    { "epoch": 1.95, "learning_rate": 7.13375796178344e-05, "loss": 1.4753, "step": 106 },
    { "epoch": 1.97, "learning_rate": 7.006369426751592e-05, "loss": 1.4155, "step": 107 },
    { "epoch": 1.99, "learning_rate": 6.878980891719745e-05, "loss": 1.3476, "step": 108 },
    { "epoch": 2.01, "learning_rate": 6.751592356687899e-05, "loss": 1.2634, "step": 109 },
    { "epoch": 2.03, "learning_rate": 6.624203821656051e-05, "loss": 1.236, "step": 110 },
    { "epoch": 2.05, "learning_rate": 6.496815286624204e-05, "loss": 1.1112, "step": 111 },
    { "epoch": 2.06, "learning_rate": 6.369426751592356e-05, "loss": 1.2689, "step": 112 },
    { "epoch": 2.08, "learning_rate": 6.24203821656051e-05, "loss": 1.2972, "step": 113 },
    { "epoch": 2.1, "learning_rate": 6.114649681528662e-05, "loss": 1.2451, "step": 114 },
    { "epoch": 2.12, "learning_rate": 5.9872611464968155e-05, "loss": 1.3026, "step": 115 },
    { "epoch": 2.14, "learning_rate": 5.859872611464968e-05, "loss": 1.1526, "step": 116 },
    { "epoch": 2.16, "learning_rate": 5.732484076433121e-05, "loss": 1.1425, "step": 117 },
    { "epoch": 2.18, "learning_rate": 5.605095541401274e-05, "loss": 1.2705, "step": 118 },
    { "epoch": 2.19, "learning_rate": 5.477707006369427e-05, "loss": 1.1392, "step": 119 },
    { "epoch": 2.21, "learning_rate": 5.35031847133758e-05, "loss": 1.1739, "step": 120 },
    { "epoch": 2.23, "learning_rate": 5.2229299363057324e-05, "loss": 1.2004, "step": 121 },
    { "epoch": 2.25, "learning_rate": 5.0955414012738855e-05, "loss": 1.2028, "step": 122 },
    { "epoch": 2.27, "learning_rate": 4.968152866242039e-05, "loss": 1.1971, "step": 123 },
    { "epoch": 2.29, "learning_rate": 4.840764331210191e-05, "loss": 1.1393, "step": 124 },
    { "epoch": 2.3, "learning_rate": 4.713375796178344e-05, "loss": 1.1752, "step": 125 },
    { "epoch": 2.32, "learning_rate": 4.585987261146497e-05, "loss": 1.2266, "step": 126 },
    { "epoch": 2.34, "learning_rate": 4.45859872611465e-05, "loss": 1.2033, "step": 127 },
    { "epoch": 2.36, "learning_rate": 4.331210191082803e-05, "loss": 1.1491, "step": 128 },
    { "epoch": 2.38, "learning_rate": 4.2038216560509556e-05, "loss": 1.1002, "step": 129 },
    { "epoch": 2.4, "learning_rate": 4.076433121019109e-05, "loss": 1.1551, "step": 130 },
    { "epoch": 2.41, "learning_rate": 3.949044585987262e-05, "loss": 1.2215, "step": 131 },
    { "epoch": 2.43, "learning_rate": 3.821656050955414e-05, "loss": 1.1303, "step": 132 },
    { "epoch": 2.45, "learning_rate": 3.694267515923567e-05, "loss": 1.3085, "step": 133 },
    { "epoch": 2.47, "learning_rate": 3.56687898089172e-05, "loss": 1.2328, "step": 134 },
    { "epoch": 2.49, "learning_rate": 3.4394904458598724e-05, "loss": 1.2315, "step": 135 },
    { "epoch": 2.51, "learning_rate": 3.3121019108280256e-05, "loss": 1.2797, "step": 136 },
    { "epoch": 2.53, "learning_rate": 3.184713375796178e-05, "loss": 1.2824, "step": 137 },
    { "epoch": 2.54, "learning_rate": 3.057324840764331e-05, "loss": 1.3541, "step": 138 },
    { "epoch": 2.56, "learning_rate": 2.929936305732484e-05, "loss": 1.2053, "step": 139 },
    { "epoch": 2.58, "learning_rate": 2.802547770700637e-05, "loss": 1.0262, "step": 140 },
    { "epoch": 2.6, "learning_rate": 2.67515923566879e-05, "loss": 1.1497, "step": 141 },
    { "epoch": 2.62, "learning_rate": 2.5477707006369428e-05, "loss": 1.1258, "step": 142 },
    { "epoch": 2.64, "learning_rate": 2.4203821656050956e-05, "loss": 1.2115, "step": 143 },
    { "epoch": 2.65, "learning_rate": 2.2929936305732484e-05, "loss": 1.2129, "step": 144 },
    { "epoch": 2.67, "learning_rate": 2.1656050955414015e-05, "loss": 1.1673, "step": 145 },
    { "epoch": 2.69, "learning_rate": 2.0382165605095544e-05, "loss": 1.1702, "step": 146 },
    { "epoch": 2.71, "learning_rate": 1.910828025477707e-05, "loss": 1.2379, "step": 147 },
    { "epoch": 2.73, "learning_rate": 1.78343949044586e-05, "loss": 1.2548, "step": 148 },
    { "epoch": 2.75, "learning_rate": 1.6560509554140128e-05, "loss": 1.1624, "step": 149 },
    { "epoch": 2.76, "learning_rate": 1.5286624203821656e-05, "loss": 1.0972, "step": 150 },
    { "epoch": 2.78, "learning_rate": 1.4012738853503186e-05, "loss": 1.1634, "step": 151 },
    { "epoch": 2.8, "learning_rate": 1.2738853503184714e-05, "loss": 1.1169, "step": 152 },
    { "epoch": 2.82, "learning_rate": 1.1464968152866242e-05, "loss": 0.9829, "step": 153 },
    { "epoch": 2.84, "learning_rate": 1.0191082802547772e-05, "loss": 1.1863, "step": 154 },
    { "epoch": 2.86, "learning_rate": 8.9171974522293e-06, "loss": 1.1772, "step": 155 },
    { "epoch": 2.88, "learning_rate": 7.643312101910828e-06, "loss": 1.2474, "step": 156 },
    { "epoch": 2.89, "learning_rate": 6.369426751592357e-06, "loss": 1.2136, "step": 157 },
    { "epoch": 2.91, "learning_rate": 5.095541401273886e-06, "loss": 1.0309, "step": 158 },
    { "epoch": 2.93, "learning_rate": 3.821656050955414e-06, "loss": 1.0609, "step": 159 },
    { "epoch": 2.95, "learning_rate": 2.547770700636943e-06, "loss": 1.2989, "step": 160 },
    { "epoch": 2.97, "learning_rate": 1.2738853503184715e-06, "loss": 1.1991, "step": 161 },
    { "epoch": 2.99, "learning_rate": 0.0, "loss": 1.2027, "step": 162 }
  ],
  "logging_steps": 1,
  "max_steps": 162,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1.4301896344240128e+16,
  "trial_name": null,
  "trial_params": null
}