{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.934383202099737,
  "eval_steps": 500,
  "global_step": 235,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 2.5e-06,
      "loss": 3.0376,
      "step": 1
    },
    {
      "epoch": 0.04,
      "learning_rate": 5e-06,
      "loss": 3.0172,
      "step": 2
    },
    {
      "epoch": 0.06,
      "learning_rate": 7.500000000000001e-06,
      "loss": 2.9578,
      "step": 3
    },
    {
      "epoch": 0.08,
      "learning_rate": 1e-05,
      "loss": 2.6557,
      "step": 4
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.25e-05,
      "loss": 2.1303,
      "step": 5
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 1.4131,
      "step": 6
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 1.0997,
      "step": 7
    },
    {
      "epoch": 0.17,
      "learning_rate": 2e-05,
      "loss": 0.9714,
      "step": 8
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.999904234053922e-05,
      "loss": 0.7915,
      "step": 9
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9996169545579205e-05,
      "loss": 0.7282,
      "step": 10
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9991382165351816e-05,
      "loss": 0.7029,
      "step": 11
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.998468111679304e-05,
      "loss": 0.675,
      "step": 12
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9976067683367388e-05,
      "loss": 0.5841,
      "step": 13
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9965543514822063e-05,
      "loss": 0.5786,
      "step": 14
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.995311062687098e-05,
      "loss": 0.6586,
      "step": 15
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9938771400808693e-05,
      "loss": 0.5645,
      "step": 16
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.99225285830543e-05,
      "loss": 0.5873,
      "step": 17
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9904385284625426e-05,
      "loss": 0.6025,
      "step": 18
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9884344980542337e-05,
      "loss": 0.5488,
      "step": 19
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9862411509162406e-05,
      "loss": 0.4908,
      "step": 20
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9838589071444905e-05,
      "loss": 0.5674,
      "step": 21
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.98128822301464e-05,
      "loss": 0.5536,
      "step": 22
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.978529590894685e-05,
      "loss": 0.4978,
      "step": 23
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.975583539150655e-05,
      "loss": 0.527,
      "step": 24
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.9724506320454153e-05,
      "loss": 0.499,
      "step": 25
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.9691314696305915e-05,
      "loss": 0.4806,
      "step": 26
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.965626687631641e-05,
      "loss": 0.4387,
      "step": 27
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.9619369573260924e-05,
      "loss": 0.5244,
      "step": 28
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.958062985414972e-05,
      "loss": 0.4788,
      "step": 29
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9540055138874504e-05,
      "loss": 0.4308,
      "step": 30
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.9497653198787265e-05,
      "loss": 0.4926,
      "step": 31
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.945343215521182e-05,
      "loss": 0.4415,
      "step": 32
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.9407400477888315e-05,
      "loss": 0.4515,
      "step": 33
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.9359566983351015e-05,
      "loss": 0.4261,
      "step": 34
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.9309940833239628e-05,
      "loss": 0.4571,
      "step": 35
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.9258531532544586e-05,
      "loss": 0.425,
      "step": 36
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.9205348927786533e-05,
      "loss": 0.4178,
      "step": 37
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.9150403205130384e-05,
      "loss": 0.4582,
      "step": 38
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.909370488843436e-05,
      "loss": 0.5455,
      "step": 39
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.9035264837234347e-05,
      "loss": 0.4623,
      "step": 40
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.897509424466393e-05,
      "loss": 0.4188,
      "step": 41
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.8913204635310548e-05,
      "loss": 0.4544,
      "step": 42
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.884960786300819e-05,
      "loss": 0.4744,
      "step": 43
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.8784316108566994e-05,
      "loss": 0.454,
      "step": 44
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.8717341877440227e-05,
      "loss": 0.4799,
      "step": 45
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.86486979973291e-05,
      "loss": 0.449,
      "step": 46
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.8578397615725857e-05,
      "loss": 0.4662,
      "step": 47
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.8506454197395608e-05,
      "loss": 0.4239,
      "step": 48
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.843288152179739e-05,
      "loss": 0.3638,
      "step": 49
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.8357693680444978e-05,
      "loss": 0.3934,
      "step": 50
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.8280905074207886e-05,
      "loss": 0.4464,
      "step": 51
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.8202530410553162e-05,
      "loss": 0.3987,
      "step": 52
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.8122584700728444e-05,
      "loss": 0.3894,
      "step": 53
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.804108325688679e-05,
      "loss": 0.41,
      "step": 54
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.7958041689153963e-05,
      "loss": 0.3437,
      "step": 55
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.7873475902638552e-05,
      "loss": 0.3778,
      "step": 56
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.7787402094385665e-05,
      "loss": 0.3254,
      "step": 57
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.769983675027466e-05,
      "loss": 0.3431,
      "step": 58
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.7610796641861584e-05,
      "loss": 0.3929,
      "step": 59
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.7520298823166873e-05,
      "loss": 0.3413,
      "step": 60
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.742836062740898e-05,
      "loss": 0.3821,
      "step": 61
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.7334999663684504e-05,
      "loss": 0.3619,
      "step": 62
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.724023381359548e-05,
      "loss": 0.3541,
      "step": 63
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.7144081227824482e-05,
      "loss": 0.3748,
      "step": 64
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.70465603226582e-05,
      "loss": 0.3647,
      "step": 65
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.694768977646013e-05,
      "loss": 0.3419,
      "step": 66
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.684748852609306e-05,
      "loss": 0.3787,
      "step": 67
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.6745975763292072e-05,
      "loss": 0.3718,
      "step": 68
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.66431709309887e-05,
      "loss": 0.3634,
      "step": 69
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.6539093719586998e-05,
      "loss": 0.3455,
      "step": 70
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.6433764063192195e-05,
      "loss": 0.3324,
      "step": 71
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.6327202135792687e-05,
      "loss": 0.382,
      "step": 72
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.6219428347396055e-05,
      "loss": 0.377,
      "step": 73
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.6110463340119917e-05,
      "loss": 0.3445,
      "step": 74
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.6000327984238292e-05,
      "loss": 0.3279,
      "step": 75
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.5889043374184286e-05,
      "loss": 0.3612,
      "step": 76
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.5776630824509843e-05,
      "loss": 0.3718,
      "step": 77
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.5663111865803285e-05,
      "loss": 0.3525,
      "step": 78
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.5548508240565584e-05,
      "loss": 0.3488,
      "step": 79
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.543284189904592e-05,
      "loss": 0.3815,
      "step": 80
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.5316134995037545e-05,
      "loss": 0.4256,
      "step": 81
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.5198409881634617e-05,
      "loss": 0.3417,
      "step": 82
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.5079689106950855e-05,
      "loss": 0.3822,
      "step": 83
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.4959995409800874e-05,
      "loss": 0.3653,
      "step": 84
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.4839351715344967e-05,
      "loss": 0.35,
      "step": 85
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.4717781130698212e-05,
      "loss": 0.3176,
      "step": 86
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.4595306940504717e-05,
      "loss": 0.3546,
      "step": 87
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.4471952602477866e-05,
      "loss": 0.3533,
      "step": 88
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.4347741742907433e-05,
      "loss": 0.37,
      "step": 89
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.4222698152134373e-05,
      "loss": 0.3758,
      "step": 90
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.409684577999423e-05,
      "loss": 0.3275,
      "step": 91
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.3970208731229975e-05,
      "loss": 0.3528,
      "step": 92
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.384281126087517e-05,
      "loss": 0.3381,
      "step": 93
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.371467776960837e-05,
      "loss": 0.3246,
      "step": 94
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.358583279907961e-05,
      "loss": 0.3673,
      "step": 95
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.3456301027209884e-05,
      "loss": 0.2992,
      "step": 96
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.3326107263464559e-05,
      "loss": 0.3205,
      "step": 97
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.3195276444101546e-05,
      "loss": 0.2778,
      "step": 98
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.3063833627395231e-05,
      "loss": 0.2954,
      "step": 99
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.293180398883701e-05,
      "loss": 0.2842,
      "step": 100
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.2799212816313375e-05,
      "loss": 0.2526,
      "step": 101
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.2666085505262486e-05,
      "loss": 0.2806,
      "step": 102
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.2532447553810125e-05,
      "loss": 0.2843,
      "step": 103
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.2398324557885994e-05,
      "loss": 0.2569,
      "step": 104
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.2263742206321287e-05,
      "loss": 0.2819,
      "step": 105
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.212872627592845e-05,
      "loss": 0.3071,
      "step": 106
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.1993302626564103e-05,
      "loss": 0.2451,
      "step": 107
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.1857497196176049e-05,
      "loss": 0.2559,
      "step": 108
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1721335995835336e-05,
      "loss": 0.2452,
      "step": 109
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1584845104754305e-05,
      "loss": 0.2623,
      "step": 110
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1448050665291587e-05,
      "loss": 0.2553,
      "step": 111
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.1310978877945007e-05,
      "loss": 0.2896,
      "step": 112
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.1173655996333356e-05,
      "loss": 0.2638,
      "step": 113
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.1036108322167988e-05,
      "loss": 0.2885,
      "step": 114
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.0898362200215199e-05,
      "loss": 0.2609,
      "step": 115
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.076044401325036e-05,
      "loss": 0.2799,
      "step": 116
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.062238017700478e-05,
      "loss": 0.2755,
      "step": 117
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.0484197135106265e-05,
      "loss": 0.2649,
      "step": 118
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.0345921354014279e-05,
      "loss": 0.233,
      "step": 119
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.0207579317950826e-05,
      "loss": 0.294,
      "step": 120
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.0069197523827835e-05,
      "loss": 0.2588,
      "step": 121
    },
    {
      "epoch": 2.56,
      "learning_rate": 9.930802476172169e-06,
      "loss": 0.2775,
      "step": 122
    },
    {
      "epoch": 2.58,
      "learning_rate": 9.792420682049174e-06,
      "loss": 0.2845,
      "step": 123
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.654078645985723e-06,
      "loss": 0.259,
      "step": 124
    },
    {
      "epoch": 2.62,
      "learning_rate": 9.51580286489374e-06,
      "loss": 0.2687,
      "step": 125
    },
    {
      "epoch": 2.65,
      "learning_rate": 9.37761982299522e-06,
      "loss": 0.2491,
      "step": 126
    },
    {
      "epoch": 2.67,
      "learning_rate": 9.239555986749645e-06,
      "loss": 0.253,
      "step": 127
    },
    {
      "epoch": 2.69,
      "learning_rate": 9.101637799784805e-06,
      "loss": 0.2819,
      "step": 128
    },
    {
      "epoch": 2.71,
      "learning_rate": 8.963891677832012e-06,
      "loss": 0.2559,
      "step": 129
    },
    {
      "epoch": 2.73,
      "learning_rate": 8.826344003666647e-06,
      "loss": 0.2415,
      "step": 130
    },
    {
      "epoch": 2.75,
      "learning_rate": 8.689021122054996e-06,
      "loss": 0.2756,
      "step": 131
    },
    {
      "epoch": 2.77,
      "learning_rate": 8.551949334708416e-06,
      "loss": 0.2541,
      "step": 132
    },
    {
      "epoch": 2.79,
      "learning_rate": 8.415154895245698e-06,
      "loss": 0.2732,
      "step": 133
    },
    {
      "epoch": 2.81,
      "learning_rate": 8.278664004164665e-06,
      "loss": 0.2656,
      "step": 134
    },
    {
      "epoch": 2.83,
      "learning_rate": 8.142502803823954e-06,
      "loss": 0.2617,
      "step": 135
    },
    {
      "epoch": 2.86,
      "learning_rate": 8.0066973734359e-06,
      "loss": 0.299,
      "step": 136
    },
    {
      "epoch": 2.88,
      "learning_rate": 7.871273724071553e-06,
      "loss": 0.2356,
      "step": 137
    },
    {
      "epoch": 2.9,
      "learning_rate": 7.736257793678714e-06,
      "loss": 0.2768,
      "step": 138
    },
    {
      "epoch": 2.92,
      "learning_rate": 7.601675442114009e-06,
      "loss": 0.2561,
      "step": 139
    },
    {
      "epoch": 2.94,
      "learning_rate": 7.467552446189879e-06,
      "loss": 0.2787,
      "step": 140
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.3339144947375155e-06,
      "loss": 0.2895,
      "step": 141
    },
    {
      "epoch": 2.98,
      "learning_rate": 7.200787183686625e-06,
      "loss": 0.2463,
      "step": 142
    },
    {
      "epoch": 3.0,
      "learning_rate": 7.068196011162994e-06,
      "loss": 0.2414,
      "step": 143
    },
    {
      "epoch": 3.02,
      "learning_rate": 6.936166372604773e-06,
      "loss": 0.1854,
      "step": 144
    },
    {
      "epoch": 3.04,
      "learning_rate": 6.804723555898458e-06,
      "loss": 0.1974,
      "step": 145
    },
    {
      "epoch": 3.07,
      "learning_rate": 6.673892736535448e-06,
      "loss": 0.1891,
      "step": 146
    },
    {
      "epoch": 3.09,
      "learning_rate": 6.543698972790118e-06,
      "loss": 0.194,
      "step": 147
    },
    {
      "epoch": 3.11,
      "learning_rate": 6.414167200920392e-06,
      "loss": 0.2005,
      "step": 148
    },
    {
      "epoch": 3.13,
      "learning_rate": 6.285322230391629e-06,
      "loss": 0.1758,
      "step": 149
    },
    {
      "epoch": 3.15,
      "learning_rate": 6.157188739124834e-06,
      "loss": 0.1991,
      "step": 150
    },
    {
      "epoch": 3.17,
      "learning_rate": 6.029791268770029e-06,
      "loss": 0.1566,
      "step": 151
    },
    {
      "epoch": 3.19,
      "learning_rate": 5.903154220005771e-06,
      "loss": 0.1892,
      "step": 152
    },
    {
      "epoch": 3.21,
      "learning_rate": 5.77730184786563e-06,
      "loss": 0.161,
      "step": 153
    },
    {
      "epoch": 3.23,
      "learning_rate": 5.652258257092569e-06,
      "loss": 0.1776,
      "step": 154
    },
    {
      "epoch": 3.25,
      "learning_rate": 5.5280473975221324e-06,
      "loss": 0.1848,
      "step": 155
    },
    {
      "epoch": 3.28,
      "learning_rate": 5.404693059495285e-06,
      "loss": 0.187,
      "step": 156
    },
    {
      "epoch": 3.3,
      "learning_rate": 5.282218869301788e-06,
      "loss": 0.1698,
      "step": 157
    },
    {
      "epoch": 3.32,
      "learning_rate": 5.160648284655032e-06,
      "loss": 0.1687,
      "step": 158
    },
    {
      "epoch": 3.34,
      "learning_rate": 5.040004590199128e-06,
      "loss": 0.1722,
      "step": 159
    },
    {
      "epoch": 3.36,
      "learning_rate": 4.920310893049146e-06,
      "loss": 0.1645,
      "step": 160
    },
    {
      "epoch": 3.38,
      "learning_rate": 4.801590118365384e-06,
      "loss": 0.1748,
      "step": 161
    },
    {
      "epoch": 3.4,
      "learning_rate": 4.683865004962452e-06,
      "loss": 0.165,
      "step": 162
    },
    {
      "epoch": 3.42,
      "learning_rate": 4.567158100954084e-06,
      "loss": 0.1593,
      "step": 163
    },
    {
      "epoch": 3.44,
      "learning_rate": 4.4514917594344186e-06,
      "loss": 0.1903,
      "step": 164
    },
    {
      "epoch": 3.46,
      "learning_rate": 4.3368881341967135e-06,
      "loss": 0.1705,
      "step": 165
    },
    {
      "epoch": 3.49,
      "learning_rate": 4.223369175490162e-06,
      "loss": 0.19,
      "step": 166
    },
    {
      "epoch": 3.51,
      "learning_rate": 4.110956625815713e-06,
      "loss": 0.1937,
      "step": 167
    },
    {
      "epoch": 3.53,
      "learning_rate": 3.9996720157617094e-06,
      "loss": 0.188,
      "step": 168
    },
    {
      "epoch": 3.55,
      "learning_rate": 3.88953665988009e-06,
      "loss": 0.1632,
      "step": 169
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.7805716526039492e-06,
      "loss": 0.1842,
      "step": 170
    },
    {
      "epoch": 3.59,
      "learning_rate": 3.672797864207316e-06,
      "loss": 0.1746,
      "step": 171
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.5662359368078083e-06,
      "loss": 0.1854,
      "step": 172
    },
    {
      "epoch": 3.63,
      "learning_rate": 3.4609062804130066e-06,
      "loss": 0.1562,
      "step": 173
    },
    {
      "epoch": 3.65,
      "learning_rate": 3.3568290690113037e-06,
      "loss": 0.182,
      "step": 174
    },
    {
      "epoch": 3.67,
      "learning_rate": 3.25402423670793e-06,
      "loss": 0.177,
      "step": 175
    },
    {
      "epoch": 3.7,
      "learning_rate": 3.1525114739069418e-06,
      "loss": 0.1623,
      "step": 176
    },
    {
      "epoch": 3.72,
      "learning_rate": 3.0523102235398716e-06,
      "loss": 0.172,
      "step": 177
    },
    {
      "epoch": 3.74,
      "learning_rate": 2.9534396773417996e-06,
      "loss": 0.1737,
      "step": 178
    },
    {
      "epoch": 3.76,
      "learning_rate": 2.855918772175522e-06,
      "loss": 0.2083,
      "step": 179
    },
    {
      "epoch": 3.78,
      "learning_rate": 2.7597661864045232e-06,
      "loss": 0.1792,
      "step": 180
    },
    {
      "epoch": 3.8,
      "learning_rate": 2.6650003363154963e-06,
      "loss": 0.1818,
      "step": 181
    },
    {
      "epoch": 3.82,
      "learning_rate": 2.5716393725910216e-06,
      "loss": 0.142,
      "step": 182
    },
    {
      "epoch": 3.84,
      "learning_rate": 2.4797011768331304e-06,
      "loss": 0.1603,
      "step": 183
    },
    {
      "epoch": 3.86,
      "learning_rate": 2.3892033581384188e-06,
      "loss": 0.1762,
      "step": 184
    },
    {
      "epoch": 3.88,
      "learning_rate": 2.3001632497253423e-06,
      "loss": 0.1742,
      "step": 185
    },
    {
      "epoch": 3.91,
      "learning_rate": 2.2125979056143366e-06,
      "loss": 0.1652,
      "step": 186
    },
    {
      "epoch": 3.93,
      "learning_rate": 2.126524097361449e-06,
      "loss": 0.1839,
      "step": 187
    },
    {
      "epoch": 3.95,
      "learning_rate": 2.0419583108460418e-06,
      "loss": 0.1767,
      "step": 188
    },
    {
      "epoch": 3.97,
      "learning_rate": 1.958916743113214e-06,
      "loss": 0.1771,
      "step": 189
    },
    {
      "epoch": 3.99,
      "learning_rate": 1.877415299271561e-06,
      "loss": 0.1692,
      "step": 190
    },
    {
      "epoch": 4.01,
      "learning_rate": 1.7974695894468385e-06,
      "loss": 0.1347,
      "step": 191
    },
    {
      "epoch": 4.03,
      "learning_rate": 1.7190949257921197e-06,
      "loss": 0.1298,
      "step": 192
    },
    {
      "epoch": 4.05,
      "learning_rate": 1.6423063195550271e-06,
      "loss": 0.0981,
      "step": 193
    },
    {
      "epoch": 4.07,
      "learning_rate": 1.5671184782026106e-06,
      "loss": 0.1222,
      "step": 194
    },
    {
      "epoch": 4.09,
      "learning_rate": 1.4935458026043958e-06,
      "loss": 0.0951,
      "step": 195
    },
    {
      "epoch": 4.12,
      "learning_rate": 1.4216023842741456e-06,
      "loss": 0.1068,
      "step": 196
    },
    {
      "epoch": 4.14,
      "learning_rate": 1.3513020026709023e-06,
      "loss": 0.1227,
      "step": 197
    },
    {
      "epoch": 4.16,
      "learning_rate": 1.2826581225597767e-06,
      "loss": 0.1111,
      "step": 198
    },
    {
      "epoch": 4.18,
      "learning_rate": 1.2156838914330072e-06,
      "loss": 0.105,
      "step": 199
    },
    {
      "epoch": 4.2,
      "learning_rate": 1.1503921369918092e-06,
      "loss": 0.1029,
      "step": 200
    },
    {
      "epoch": 4.22,
      "learning_rate": 1.0867953646894525e-06,
      "loss": 0.1153,
      "step": 201
    },
    {
      "epoch": 4.24,
      "learning_rate": 1.0249057553360742e-06,
      "loss": 0.0991,
      "step": 202
    },
    {
      "epoch": 4.26,
      "learning_rate": 9.647351627656542e-07,
      "loss": 0.1313,
      "step": 203
    },
    {
      "epoch": 4.28,
      "learning_rate": 9.062951115656404e-07,
      "loss": 0.1143,
      "step": 204
    },
    {
      "epoch": 4.3,
      "learning_rate": 8.495967948696193e-07,
      "loss": 0.0965,
      "step": 205
    },
    {
      "epoch": 4.33,
      "learning_rate": 7.946510722134693e-07,
      "loss": 0.1004,
      "step": 206
    },
    {
      "epoch": 4.35,
      "learning_rate": 7.414684674554151e-07,
      "loss": 0.1091,
      "step": 207
    },
    {
      "epoch": 4.37,
      "learning_rate": 6.900591667603751e-07,
      "loss": 0.122,
      "step": 208
    },
    {
      "epoch": 4.39,
      "learning_rate": 6.40433016648988e-07,
      "loss": 0.0971,
      "step": 209
    },
    {
      "epoch": 4.41,
      "learning_rate": 5.925995221116853e-07,
      "loss": 0.1233,
      "step": 210
    },
    {
      "epoch": 4.43,
      "learning_rate": 5.465678447881828e-07,
      "loss": 0.1054,
      "step": 211
    },
    {
      "epoch": 4.45,
      "learning_rate": 5.023468012127363e-07,
      "loss": 0.1039,
      "step": 212
    },
    {
      "epoch": 4.47,
      "learning_rate": 4.5994486112549643e-07,
      "loss": 0.1175,
      "step": 213
    },
    {
      "epoch": 4.49,
      "learning_rate": 4.193701458502808e-07,
      "loss": 0.0986,
      "step": 214
    },
    {
      "epoch": 4.51,
      "learning_rate": 3.80630426739077e-07,
      "loss": 0.1173,
      "step": 215
    },
    {
      "epoch": 4.54,
      "learning_rate": 3.437331236835895e-07,
      "loss": 0.1038,
      "step": 216
    },
    {
      "epoch": 4.56,
      "learning_rate": 3.086853036940862e-07,
      "loss": 0.1106,
      "step": 217
    },
    {
      "epoch": 4.58,
      "learning_rate": 2.754936795458485e-07,
      "loss": 0.1074,
      "step": 218
    },
    {
      "epoch": 4.6,
      "learning_rate": 2.4416460849345124e-07,
      "loss": 0.112,
      "step": 219
    },
    {
      "epoch": 4.62,
      "learning_rate": 2.1470409105315283e-07,
      "loss": 0.0948,
      "step": 220
    },
    {
      "epoch": 4.64,
      "learning_rate": 1.871177698536031e-07,
      "loss": 0.0975,
      "step": 221
    },
    {
      "epoch": 4.66,
      "learning_rate": 1.6141092855509798e-07,
      "loss": 0.1156,
      "step": 222
    },
    {
      "epoch": 4.68,
      "learning_rate": 1.375884908375935e-07,
      "loss": 0.0943,
      "step": 223
    },
    {
      "epoch": 4.7,
      "learning_rate": 1.1565501945766223e-07,
      "loss": 0.1215,
      "step": 224
    },
    {
      "epoch": 4.72,
      "learning_rate": 9.56147153745779e-08,
      "loss": 0.1161,
      "step": 225
    },
    {
      "epoch": 4.75,
      "learning_rate": 7.747141694570026e-08,
      "loss": 0.1077,
      "step": 226
    },
    {
      "epoch": 4.77,
      "learning_rate": 6.122859919130975e-08,
      "loss": 0.0957,
      "step": 227
    },
    {
      "epoch": 4.79,
      "learning_rate": 4.6889373129022084e-08,
      "loss": 0.1141,
      "step": 228
    },
    {
      "epoch": 4.81,
      "learning_rate": 3.445648517793943e-08,
      "loss": 0.097,
      "step": 229
    },
    {
      "epoch": 4.83,
      "learning_rate": 2.3932316632614415e-08,
      "loss": 0.1061,
      "step": 230
    },
    {
      "epoch": 4.85,
      "learning_rate": 1.531888320696284e-08,
      "loss": 0.0974,
      "step": 231
    },
    {
      "epoch": 4.87,
      "learning_rate": 8.617834648185774e-09,
      "loss": 0.0928,
      "step": 232
    },
    {
      "epoch": 4.89,
      "learning_rate": 3.830454420794549e-09,
      "loss": 0.1276,
      "step": 233
    },
    {
      "epoch": 4.91,
      "learning_rate": 9.576594607807465e-10,
      "loss": 0.1155,
      "step": 234
    },
    {
      "epoch": 4.93,
      "learning_rate": 0.0,
      "loss": 0.1097,
      "step": 235
    },
    {
      "epoch": 4.93,
      "step": 235,
      "total_flos": 48570126942208.0,
      "train_loss": 0.3458154943078122,
      "train_runtime": 7403.2578,
      "train_samples_per_second": 1.543,
      "train_steps_per_second": 0.032
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 235,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 50,
  "total_flos": 48570126942208.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}