{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9954545454545456,
  "eval_steps": 500,
  "global_step": 438,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006818181818181818,
      "grad_norm": 1.434646725654602,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 0.8008,
      "step": 1
    },
    {
      "epoch": 0.013636363636363636,
      "grad_norm": 1.3611352443695068,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 0.7827,
      "step": 2
    },
    {
      "epoch": 0.020454545454545454,
      "grad_norm": 1.2866194248199463,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 0.7693,
      "step": 3
    },
    {
      "epoch": 0.02727272727272727,
      "grad_norm": 1.4324151277542114,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.7688,
      "step": 4
    },
    {
      "epoch": 0.03409090909090909,
      "grad_norm": 1.4685285091400146,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 0.8055,
      "step": 5
    },
    {
      "epoch": 0.04090909090909091,
      "grad_norm": 1.2799904346466064,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 0.7496,
      "step": 6
    },
    {
      "epoch": 0.04772727272727273,
      "grad_norm": 1.2374376058578491,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 0.7834,
      "step": 7
    },
    {
      "epoch": 0.05454545454545454,
      "grad_norm": 1.3800737857818604,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.7747,
      "step": 8
    },
    {
      "epoch": 0.06136363636363636,
      "grad_norm": 1.3297126293182373,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 0.7967,
      "step": 9
    },
    {
      "epoch": 0.06818181818181818,
      "grad_norm": 1.2450318336486816,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.7852,
      "step": 10
    },
    {
      "epoch": 0.075,
      "grad_norm": 1.400126338005066,
      "learning_rate": 5.5e-07,
      "loss": 0.7987,
      "step": 11
    },
    {
      "epoch": 0.08181818181818182,
      "grad_norm": 1.3626885414123535,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.7985,
      "step": 12
    },
    {
      "epoch": 0.08863636363636364,
      "grad_norm": 1.3277614116668701,
      "learning_rate": 6.5e-07,
      "loss": 0.7974,
      "step": 13
    },
    {
      "epoch": 0.09545454545454546,
      "grad_norm": 1.3976675271987915,
      "learning_rate": 7.000000000000001e-07,
      "loss": 0.8001,
      "step": 14
    },
    {
      "epoch": 0.10227272727272728,
      "grad_norm": 1.387974500656128,
      "learning_rate": 7.5e-07,
      "loss": 0.7876,
      "step": 15
    },
    {
      "epoch": 0.10909090909090909,
      "grad_norm": 1.309926152229309,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.7541,
      "step": 16
    },
    {
      "epoch": 0.1159090909090909,
      "grad_norm": 1.254166841506958,
      "learning_rate": 8.500000000000001e-07,
      "loss": 0.7551,
      "step": 17
    },
    {
      "epoch": 0.12272727272727273,
      "grad_norm": 1.3773471117019653,
      "learning_rate": 9.000000000000001e-07,
      "loss": 0.7951,
      "step": 18
    },
    {
      "epoch": 0.12954545454545455,
      "grad_norm": 1.2885236740112305,
      "learning_rate": 9.500000000000001e-07,
      "loss": 0.7518,
      "step": 19
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 1.298673152923584,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.741,
      "step": 20
    },
    {
      "epoch": 0.1431818181818182,
      "grad_norm": 1.3007020950317383,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 0.7632,
      "step": 21
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.323603630065918,
      "learning_rate": 1.1e-06,
      "loss": 0.7812,
      "step": 22
    },
    {
      "epoch": 0.15681818181818183,
      "grad_norm": 1.1601744890213013,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 0.7623,
      "step": 23
    },
    {
      "epoch": 0.16363636363636364,
      "grad_norm": 1.2862169742584229,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.7791,
      "step": 24
    },
    {
      "epoch": 0.17045454545454544,
      "grad_norm": 1.3574154376983643,
      "learning_rate": 1.25e-06,
      "loss": 0.7791,
      "step": 25
    },
    {
      "epoch": 0.17727272727272728,
      "grad_norm": 1.0806293487548828,
      "learning_rate": 1.3e-06,
      "loss": 0.7583,
      "step": 26
    },
    {
      "epoch": 0.18409090909090908,
      "grad_norm": 1.2130740880966187,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.7439,
      "step": 27
    },
    {
      "epoch": 0.19090909090909092,
      "grad_norm": 1.1230735778808594,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.7657,
      "step": 28
    },
    {
      "epoch": 0.19772727272727272,
      "grad_norm": 0.964364767074585,
      "learning_rate": 1.45e-06,
      "loss": 0.7574,
      "step": 29
    },
    {
      "epoch": 0.20454545454545456,
      "grad_norm": 1.0301142930984497,
      "learning_rate": 1.5e-06,
      "loss": 0.7528,
      "step": 30
    },
    {
      "epoch": 0.21136363636363636,
      "grad_norm": 1.0557504892349243,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.7155,
      "step": 31
    },
    {
      "epoch": 0.21818181818181817,
      "grad_norm": 0.9934628009796143,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.7,
      "step": 32
    },
    {
      "epoch": 0.225,
      "grad_norm": 1.002088189125061,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.7403,
      "step": 33
    },
    {
      "epoch": 0.2318181818181818,
      "grad_norm": 0.8168452382087708,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.7202,
      "step": 34
    },
    {
      "epoch": 0.23863636363636365,
      "grad_norm": 0.8815924525260925,
      "learning_rate": 1.75e-06,
      "loss": 0.7593,
      "step": 35
    },
    {
      "epoch": 0.24545454545454545,
      "grad_norm": 0.7914003729820251,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.7337,
      "step": 36
    },
    {
      "epoch": 0.25227272727272726,
      "grad_norm": 0.7730265855789185,
      "learning_rate": 1.85e-06,
      "loss": 0.6821,
      "step": 37
    },
    {
      "epoch": 0.2590909090909091,
      "grad_norm": 0.6619182229042053,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.7168,
      "step": 38
    },
    {
      "epoch": 0.26590909090909093,
      "grad_norm": 0.7120696902275085,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.7322,
      "step": 39
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 0.6825034022331238,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.7101,
      "step": 40
    },
    {
      "epoch": 0.27954545454545454,
      "grad_norm": 0.7088951468467712,
      "learning_rate": 2.05e-06,
      "loss": 0.6809,
      "step": 41
    },
    {
      "epoch": 0.2863636363636364,
      "grad_norm": 0.6460469365119934,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.6447,
      "step": 42
    },
    {
      "epoch": 0.29318181818181815,
      "grad_norm": 0.6034916043281555,
      "learning_rate": 2.15e-06,
      "loss": 0.6843,
      "step": 43
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.5842302441596985,
      "learning_rate": 2.2e-06,
      "loss": 0.6937,
      "step": 44
    },
    {
      "epoch": 0.3068181818181818,
      "grad_norm": 0.598784863948822,
      "learning_rate": 2.25e-06,
      "loss": 0.6676,
      "step": 45
    },
    {
      "epoch": 0.31363636363636366,
      "grad_norm": 0.6409489512443542,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.6554,
      "step": 46
    },
    {
      "epoch": 0.32045454545454544,
      "grad_norm": 0.615486741065979,
      "learning_rate": 2.35e-06,
      "loss": 0.7171,
      "step": 47
    },
    {
      "epoch": 0.32727272727272727,
      "grad_norm": 0.5648443698883057,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.6383,
      "step": 48
    },
    {
      "epoch": 0.3340909090909091,
      "grad_norm": 0.6393448114395142,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.6778,
      "step": 49
    },
    {
      "epoch": 0.3409090909090909,
      "grad_norm": 0.6069231629371643,
      "learning_rate": 2.5e-06,
      "loss": 0.6717,
      "step": 50
    },
    {
      "epoch": 0.3477272727272727,
      "grad_norm": 0.7063543200492859,
      "learning_rate": 2.55e-06,
      "loss": 0.7006,
      "step": 51
    },
    {
      "epoch": 0.35454545454545455,
      "grad_norm": 0.600916862487793,
      "learning_rate": 2.6e-06,
      "loss": 0.649,
      "step": 52
    },
    {
      "epoch": 0.3613636363636364,
      "grad_norm": 0.6185877323150635,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.6702,
      "step": 53
    },
    {
      "epoch": 0.36818181818181817,
      "grad_norm": 0.6063259243965149,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.6461,
      "step": 54
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.5525023937225342,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.6613,
      "step": 55
    },
    {
      "epoch": 0.38181818181818183,
      "grad_norm": 0.49498245120048523,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.651,
      "step": 56
    },
    {
      "epoch": 0.3886363636363636,
      "grad_norm": 0.5330650210380554,
      "learning_rate": 2.85e-06,
      "loss": 0.6416,
      "step": 57
    },
    {
      "epoch": 0.39545454545454545,
      "grad_norm": 0.5402539372444153,
      "learning_rate": 2.9e-06,
      "loss": 0.6257,
      "step": 58
    },
    {
      "epoch": 0.4022727272727273,
      "grad_norm": 0.49151986837387085,
      "learning_rate": 2.95e-06,
      "loss": 0.6292,
      "step": 59
    },
    {
      "epoch": 0.4090909090909091,
      "grad_norm": 0.5251104235649109,
      "learning_rate": 3e-06,
      "loss": 0.666,
      "step": 60
    },
    {
      "epoch": 0.4159090909090909,
      "grad_norm": 0.57887202501297,
      "learning_rate": 3.05e-06,
      "loss": 0.642,
      "step": 61
    },
    {
      "epoch": 0.42272727272727273,
      "grad_norm": 0.4779260456562042,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.5885,
      "step": 62
    },
    {
      "epoch": 0.42954545454545456,
      "grad_norm": 0.6516481637954712,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.6399,
      "step": 63
    },
    {
      "epoch": 0.43636363636363634,
      "grad_norm": 0.49200090765953064,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.6412,
      "step": 64
    },
    {
      "epoch": 0.4431818181818182,
      "grad_norm": 0.520231306552887,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.6276,
      "step": 65
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.4746171832084656,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.6066,
      "step": 66
    },
    {
      "epoch": 0.45681818181818185,
      "grad_norm": 0.5491726994514465,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.5975,
      "step": 67
    },
    {
      "epoch": 0.4636363636363636,
      "grad_norm": 0.5318092107772827,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.6276,
      "step": 68
    },
    {
      "epoch": 0.47045454545454546,
      "grad_norm": 0.7567431330680847,
      "learning_rate": 3.45e-06,
      "loss": 0.6252,
      "step": 69
    },
    {
      "epoch": 0.4772727272727273,
      "grad_norm": 0.5506855845451355,
      "learning_rate": 3.5e-06,
      "loss": 0.6384,
      "step": 70
    },
    {
      "epoch": 0.48409090909090907,
      "grad_norm": 0.5070034265518188,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.5928,
      "step": 71
    },
    {
      "epoch": 0.4909090909090909,
      "grad_norm": 0.4772234559059143,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.5974,
      "step": 72
    },
    {
      "epoch": 0.49772727272727274,
      "grad_norm": 0.5434151887893677,
      "learning_rate": 3.65e-06,
      "loss": 0.6112,
      "step": 73
    },
    {
      "epoch": 0.5045454545454545,
      "grad_norm": 0.45374104380607605,
      "learning_rate": 3.7e-06,
      "loss": 0.6187,
      "step": 74
    },
    {
      "epoch": 0.5113636363636364,
      "grad_norm": 0.4568059742450714,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.6256,
      "step": 75
    },
    {
      "epoch": 0.5181818181818182,
      "grad_norm": 0.5239875912666321,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.6251,
      "step": 76
    },
    {
      "epoch": 0.525,
      "grad_norm": 0.48844802379608154,
      "learning_rate": 3.85e-06,
      "loss": 0.5859,
      "step": 77
    },
    {
      "epoch": 0.5318181818181819,
      "grad_norm": 0.43856486678123474,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.6124,
      "step": 78
    },
    {
      "epoch": 0.5386363636363637,
      "grad_norm": 0.45667290687561035,
      "learning_rate": 3.95e-06,
      "loss": 0.5992,
      "step": 79
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.46727287769317627,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.6088,
      "step": 80
    },
    {
      "epoch": 0.5522727272727272,
      "grad_norm": 0.45850586891174316,
      "learning_rate": 4.05e-06,
      "loss": 0.6154,
      "step": 81
    },
    {
      "epoch": 0.5590909090909091,
      "grad_norm": 0.5913473963737488,
      "learning_rate": 4.1e-06,
      "loss": 0.6224,
      "step": 82
    },
    {
      "epoch": 0.5659090909090909,
      "grad_norm": 0.4341784715652466,
      "learning_rate": 4.15e-06,
      "loss": 0.618,
      "step": 83
    },
    {
      "epoch": 0.5727272727272728,
      "grad_norm": 0.46108976006507874,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.5813,
      "step": 84
    },
    {
      "epoch": 0.5795454545454546,
      "grad_norm": 0.4961765706539154,
      "learning_rate": 4.25e-06,
      "loss": 0.5969,
      "step": 85
    },
    {
      "epoch": 0.5863636363636363,
      "grad_norm": 0.43796494603157043,
      "learning_rate": 4.3e-06,
      "loss": 0.6036,
      "step": 86
    },
    {
      "epoch": 0.5931818181818181,
      "grad_norm": 0.43516144156455994,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.5799,
      "step": 87
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.4538296163082123,
      "learning_rate": 4.4e-06,
      "loss": 0.6125,
      "step": 88
    },
    {
      "epoch": 0.6068181818181818,
      "grad_norm": 0.4108039140701294,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.6107,
      "step": 89
    },
    {
      "epoch": 0.6136363636363636,
      "grad_norm": 0.428292453289032,
      "learning_rate": 4.5e-06,
      "loss": 0.599,
      "step": 90
    },
    {
      "epoch": 0.6204545454545455,
      "grad_norm": 0.5008049607276917,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.6025,
      "step": 91
    },
    {
      "epoch": 0.6272727272727273,
      "grad_norm": 0.4718039631843567,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.5483,
      "step": 92
    },
    {
      "epoch": 0.634090909090909,
      "grad_norm": 0.47552117705345154,
      "learning_rate": 4.65e-06,
      "loss": 0.6048,
      "step": 93
    },
    {
      "epoch": 0.6409090909090909,
      "grad_norm": 0.40332990884780884,
      "learning_rate": 4.7e-06,
      "loss": 0.5916,
      "step": 94
    },
    {
      "epoch": 0.6477272727272727,
      "grad_norm": 0.47048062086105347,
      "learning_rate": 4.75e-06,
      "loss": 0.611,
      "step": 95
    },
    {
      "epoch": 0.6545454545454545,
      "grad_norm": 0.6233271956443787,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.5821,
      "step": 96
    },
    {
      "epoch": 0.6613636363636364,
      "grad_norm": 0.4700981676578522,
      "learning_rate": 4.85e-06,
      "loss": 0.5937,
      "step": 97
    },
    {
      "epoch": 0.6681818181818182,
      "grad_norm": 0.4266382157802582,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.6084,
      "step": 98
    },
    {
      "epoch": 0.675,
      "grad_norm": 0.48668500781059265,
      "learning_rate": 4.95e-06,
      "loss": 0.5846,
      "step": 99
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.588253915309906,
      "learning_rate": 5e-06,
      "loss": 0.6163,
      "step": 100
    },
    {
      "epoch": 0.6886363636363636,
      "grad_norm": 0.4984593093395233,
      "learning_rate": 4.9999795126530275e-06,
      "loss": 0.6173,
      "step": 101
    },
    {
      "epoch": 0.6954545454545454,
      "grad_norm": 0.5198028087615967,
      "learning_rate": 4.999918050947891e-06,
      "loss": 0.5782,
      "step": 102
    },
    {
      "epoch": 0.7022727272727273,
      "grad_norm": 0.45491915941238403,
      "learning_rate": 4.999815615891943e-06,
      "loss": 0.5935,
      "step": 103
    },
    {
      "epoch": 0.7090909090909091,
      "grad_norm": 0.5883018970489502,
      "learning_rate": 4.9996722091640805e-06,
      "loss": 0.5771,
      "step": 104
    },
    {
      "epoch": 0.7159090909090909,
      "grad_norm": 0.8883517384529114,
      "learning_rate": 4.9994878331147225e-06,
      "loss": 0.5862,
      "step": 105
    },
    {
      "epoch": 0.7227272727272728,
      "grad_norm": 0.4979236423969269,
      "learning_rate": 4.99926249076577e-06,
      "loss": 0.5418,
      "step": 106
    },
    {
      "epoch": 0.7295454545454545,
      "grad_norm": 0.44228366017341614,
      "learning_rate": 4.998996185810557e-06,
      "loss": 0.5904,
      "step": 107
    },
    {
      "epoch": 0.7363636363636363,
      "grad_norm": 0.4653695523738861,
      "learning_rate": 4.998688922613788e-06,
      "loss": 0.5751,
      "step": 108
    },
    {
      "epoch": 0.7431818181818182,
      "grad_norm": 0.5233970880508423,
      "learning_rate": 4.9983407062114695e-06,
      "loss": 0.5769,
      "step": 109
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.45305246114730835,
      "learning_rate": 4.9979515423108255e-06,
      "loss": 0.5987,
      "step": 110
    },
    {
      "epoch": 0.7568181818181818,
      "grad_norm": 0.48286813497543335,
      "learning_rate": 4.997521437290205e-06,
      "loss": 0.584,
      "step": 111
    },
    {
      "epoch": 0.7636363636363637,
      "grad_norm": 0.619292140007019,
      "learning_rate": 4.997050398198977e-06,
      "loss": 0.5984,
      "step": 112
    },
    {
      "epoch": 0.7704545454545455,
      "grad_norm": 0.519248902797699,
      "learning_rate": 4.996538432757414e-06,
      "loss": 0.6063,
      "step": 113
    },
    {
      "epoch": 0.7772727272727272,
      "grad_norm": 0.4723324477672577,
      "learning_rate": 4.995985549356568e-06,
      "loss": 0.5729,
      "step": 114
    },
    {
      "epoch": 0.7840909090909091,
      "grad_norm": 1.6759965419769287,
      "learning_rate": 4.995391757058129e-06,
      "loss": 0.5715,
      "step": 115
    },
    {
      "epoch": 0.7909090909090909,
      "grad_norm": 0.4929729998111725,
      "learning_rate": 4.99475706559428e-06,
      "loss": 0.5851,
      "step": 116
    },
    {
      "epoch": 0.7977272727272727,
      "grad_norm": 0.5491468906402588,
      "learning_rate": 4.994081485367537e-06,
      "loss": 0.5611,
      "step": 117
    },
    {
      "epoch": 0.8045454545454546,
      "grad_norm": 0.4642564654350281,
      "learning_rate": 4.993365027450576e-06,
      "loss": 0.5826,
      "step": 118
    },
    {
      "epoch": 0.8113636363636364,
      "grad_norm": 0.7455803751945496,
      "learning_rate": 4.992607703586058e-06,
      "loss": 0.5611,
      "step": 119
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.5237230658531189,
      "learning_rate": 4.991809526186424e-06,
      "loss": 0.5948,
      "step": 120
    },
    {
      "epoch": 0.825,
      "grad_norm": 0.44459426403045654,
      "learning_rate": 4.990970508333707e-06,
      "loss": 0.5733,
      "step": 121
    },
    {
      "epoch": 0.8318181818181818,
      "grad_norm": 0.5403085350990295,
      "learning_rate": 4.990090663779305e-06,
      "loss": 0.5492,
      "step": 122
    },
    {
      "epoch": 0.8386363636363636,
      "grad_norm": 0.43158480525016785,
      "learning_rate": 4.9891700069437635e-06,
      "loss": 0.5807,
      "step": 123
    },
    {
      "epoch": 0.8454545454545455,
      "grad_norm": 0.4388655126094818,
      "learning_rate": 4.988208552916535e-06,
      "loss": 0.5953,
      "step": 124
    },
    {
      "epoch": 0.8522727272727273,
      "grad_norm": 0.47692254185676575,
      "learning_rate": 4.987206317455734e-06,
      "loss": 0.5644,
      "step": 125
    },
    {
      "epoch": 0.8590909090909091,
      "grad_norm": 0.6617960929870605,
      "learning_rate": 4.986163316987877e-06,
      "loss": 0.524,
      "step": 126
    },
    {
      "epoch": 0.865909090909091,
      "grad_norm": 0.4495430588722229,
      "learning_rate": 4.985079568607613e-06,
      "loss": 0.5883,
      "step": 127
    },
    {
      "epoch": 0.8727272727272727,
      "grad_norm": 0.461193323135376,
      "learning_rate": 4.983955090077445e-06,
      "loss": 0.5618,
      "step": 128
    },
    {
      "epoch": 0.8795454545454545,
      "grad_norm": 0.4458140432834625,
      "learning_rate": 4.982789899827439e-06,
      "loss": 0.5675,
      "step": 129
    },
    {
      "epoch": 0.8863636363636364,
      "grad_norm": 0.48425722122192383,
      "learning_rate": 4.9815840169549216e-06,
      "loss": 0.5685,
      "step": 130
    },
    {
      "epoch": 0.8931818181818182,
      "grad_norm": 0.9677756428718567,
      "learning_rate": 4.980337461224164e-06,
      "loss": 0.574,
      "step": 131
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.6840249300003052,
      "learning_rate": 4.979050253066064e-06,
      "loss": 0.5413,
      "step": 132
    },
    {
      "epoch": 0.9068181818181819,
      "grad_norm": 0.45734715461730957,
      "learning_rate": 4.977722413577802e-06,
      "loss": 0.5728,
      "step": 133
    },
    {
      "epoch": 0.9136363636363637,
      "grad_norm": 0.4468136131763458,
      "learning_rate": 4.976353964522509e-06,
      "loss": 0.5712,
      "step": 134
    },
    {
      "epoch": 0.9204545454545454,
      "grad_norm": 0.4223119914531708,
      "learning_rate": 4.974944928328894e-06,
      "loss": 0.5409,
      "step": 135
    },
    {
      "epoch": 0.9272727272727272,
      "grad_norm": 0.626460611820221,
      "learning_rate": 4.973495328090891e-06,
      "loss": 0.5768,
      "step": 136
    },
    {
      "epoch": 0.9340909090909091,
      "grad_norm": 0.4360118806362152,
      "learning_rate": 4.972005187567267e-06,
      "loss": 0.5657,
      "step": 137
    },
    {
      "epoch": 0.9409090909090909,
      "grad_norm": 0.4545884430408478,
      "learning_rate": 4.970474531181245e-06,
      "loss": 0.5548,
      "step": 138
    },
    {
      "epoch": 0.9477272727272728,
      "grad_norm": 0.49536797404289246,
      "learning_rate": 4.968903384020095e-06,
      "loss": 0.5522,
      "step": 139
    },
    {
      "epoch": 0.9545454545454546,
      "grad_norm": 0.5587348937988281,
      "learning_rate": 4.967291771834727e-06,
      "loss": 0.5768,
      "step": 140
    },
    {
      "epoch": 0.9613636363636363,
      "grad_norm": 0.46643808484077454,
      "learning_rate": 4.965639721039267e-06,
      "loss": 0.5556,
      "step": 141
    },
    {
      "epoch": 0.9681818181818181,
      "grad_norm": 0.4519246220588684,
      "learning_rate": 4.963947258710626e-06,
      "loss": 0.5549,
      "step": 142
    },
    {
      "epoch": 0.975,
      "grad_norm": 0.4627818465232849,
      "learning_rate": 4.962214412588053e-06,
      "loss": 0.5764,
      "step": 143
    },
    {
      "epoch": 0.9818181818181818,
      "grad_norm": 0.46668827533721924,
      "learning_rate": 4.960441211072686e-06,
      "loss": 0.5287,
      "step": 144
    },
    {
      "epoch": 0.9886363636363636,
      "grad_norm": 0.44447973370552063,
      "learning_rate": 4.9586276832270785e-06,
      "loss": 0.556,
      "step": 145
    },
    {
      "epoch": 0.9954545454545455,
      "grad_norm": 0.47608426213264465,
      "learning_rate": 4.9567738587747314e-06,
      "loss": 0.5544,
      "step": 146
    },
    {
      "epoch": 1.0068181818181818,
      "grad_norm": 0.8354535102844238,
      "learning_rate": 4.954879768099599e-06,
      "loss": 1.1057,
      "step": 147
    },
    {
      "epoch": 1.0136363636363637,
      "grad_norm": 0.4405969977378845,
      "learning_rate": 4.952945442245598e-06,
      "loss": 0.5694,
      "step": 148
    },
    {
      "epoch": 1.0204545454545455,
      "grad_norm": 0.4724039137363434,
      "learning_rate": 4.95097091291609e-06,
      "loss": 0.5385,
      "step": 149
    },
    {
      "epoch": 1.0272727272727273,
      "grad_norm": 0.9106016755104065,
      "learning_rate": 4.948956212473371e-06,
      "loss": 0.545,
      "step": 150
    },
    {
      "epoch": 1.0340909090909092,
      "grad_norm": 0.4744306206703186,
      "learning_rate": 4.946901373938132e-06,
      "loss": 0.534,
      "step": 151
    },
    {
      "epoch": 1.040909090909091,
      "grad_norm": 0.5118283629417419,
      "learning_rate": 4.944806430988927e-06,
      "loss": 0.5382,
      "step": 152
    },
    {
      "epoch": 1.0477272727272728,
      "grad_norm": 0.4226383566856384,
      "learning_rate": 4.942671417961615e-06,
      "loss": 0.5483,
      "step": 153
    },
    {
      "epoch": 1.0545454545454545,
      "grad_norm": 0.4502519965171814,
      "learning_rate": 4.940496369848795e-06,
      "loss": 0.553,
      "step": 154
    },
    {
      "epoch": 1.0613636363636363,
      "grad_norm": 0.41305863857269287,
      "learning_rate": 4.938281322299243e-06,
      "loss": 0.5383,
      "step": 155
    },
    {
      "epoch": 1.0681818181818181,
      "grad_norm": 0.44364672899246216,
      "learning_rate": 4.936026311617316e-06,
      "loss": 0.5528,
      "step": 156
    },
    {
      "epoch": 1.075,
      "grad_norm": 0.9394287467002869,
      "learning_rate": 4.933731374762361e-06,
      "loss": 0.541,
      "step": 157
    },
    {
      "epoch": 1.0818181818181818,
      "grad_norm": 0.47417205572128296,
      "learning_rate": 4.931396549348115e-06,
      "loss": 0.5368,
      "step": 158
    },
    {
      "epoch": 1.0886363636363636,
      "grad_norm": 0.45299509167671204,
      "learning_rate": 4.9290218736420795e-06,
      "loss": 0.5199,
      "step": 159
    },
    {
      "epoch": 1.0954545454545455,
      "grad_norm": 0.551228940486908,
      "learning_rate": 4.926607386564898e-06,
      "loss": 0.5402,
      "step": 160
    },
    {
      "epoch": 1.1022727272727273,
      "grad_norm": 0.5464844703674316,
      "learning_rate": 4.9241531276897196e-06,
      "loss": 0.5465,
      "step": 161
    },
    {
      "epoch": 1.1090909090909091,
      "grad_norm": 0.45831242203712463,
      "learning_rate": 4.921659137241544e-06,
      "loss": 0.5231,
      "step": 162
    },
    {
      "epoch": 1.115909090909091,
      "grad_norm": 1.723821997642517,
      "learning_rate": 4.919125456096574e-06,
      "loss": 0.5164,
      "step": 163
    },
    {
      "epoch": 1.1227272727272728,
      "grad_norm": 0.5151612758636475,
      "learning_rate": 4.916552125781529e-06,
      "loss": 0.5345,
      "step": 164
    },
    {
      "epoch": 1.1295454545454546,
      "grad_norm": 0.4512110650539398,
      "learning_rate": 4.913939188472979e-06,
      "loss": 0.5511,
      "step": 165
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 0.48583805561065674,
      "learning_rate": 4.911286686996648e-06,
      "loss": 0.5292,
      "step": 166
    },
    {
      "epoch": 1.143181818181818,
      "grad_norm": 0.4196493923664093,
      "learning_rate": 4.908594664826708e-06,
      "loss": 0.5299,
      "step": 167
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.4370763599872589,
      "learning_rate": 4.905863166085076e-06,
      "loss": 0.5777,
      "step": 168
    },
    {
      "epoch": 1.1568181818181817,
      "grad_norm": 0.4871472418308258,
      "learning_rate": 4.903092235540679e-06,
      "loss": 0.5109,
      "step": 169
    },
    {
      "epoch": 1.1636363636363636,
      "grad_norm": 0.60159832239151,
      "learning_rate": 4.900281918608732e-06,
      "loss": 0.5394,
      "step": 170
    },
    {
      "epoch": 1.1704545454545454,
      "grad_norm": 0.42233574390411377,
      "learning_rate": 4.897432261349984e-06,
      "loss": 0.5306,
      "step": 171
    },
    {
      "epoch": 1.1772727272727272,
      "grad_norm": 0.44365131855010986,
      "learning_rate": 4.894543310469968e-06,
      "loss": 0.5402,
      "step": 172
    },
    {
      "epoch": 1.184090909090909,
      "grad_norm": 0.42755845189094543,
      "learning_rate": 4.891615113318236e-06,
      "loss": 0.5444,
      "step": 173
    },
    {
      "epoch": 1.190909090909091,
      "grad_norm": 0.47664687037467957,
      "learning_rate": 4.888647717887582e-06,
      "loss": 0.5268,
      "step": 174
    },
    {
      "epoch": 1.1977272727272728,
      "grad_norm": 0.43990078568458557,
      "learning_rate": 4.8856411728132526e-06,
      "loss": 0.5338,
      "step": 175
    },
    {
      "epoch": 1.2045454545454546,
      "grad_norm": 0.5462448596954346,
      "learning_rate": 4.8825955273721524e-06,
      "loss": 0.5537,
      "step": 176
    },
    {
      "epoch": 1.2113636363636364,
      "grad_norm": 0.4274113178253174,
      "learning_rate": 4.879510831482039e-06,
      "loss": 0.5408,
      "step": 177
    },
    {
      "epoch": 1.2181818181818183,
      "grad_norm": 0.423444926738739,
      "learning_rate": 4.876387135700701e-06,
      "loss": 0.528,
      "step": 178
    },
    {
      "epoch": 1.225,
      "grad_norm": 0.5363211035728455,
      "learning_rate": 4.873224491225128e-06,
      "loss": 0.5606,
      "step": 179
    },
    {
      "epoch": 1.231818181818182,
      "grad_norm": 0.44390320777893066,
      "learning_rate": 4.870022949890676e-06,
      "loss": 0.5641,
      "step": 180
    },
    {
      "epoch": 1.2386363636363638,
      "grad_norm": 0.44681212306022644,
      "learning_rate": 4.866782564170217e-06,
      "loss": 0.5065,
      "step": 181
    },
    {
      "epoch": 1.2454545454545454,
      "grad_norm": 1.5608415603637695,
      "learning_rate": 4.863503387173276e-06,
      "loss": 0.5393,
      "step": 182
    },
    {
      "epoch": 1.2522727272727272,
      "grad_norm": 0.4239177703857422,
      "learning_rate": 4.860185472645161e-06,
      "loss": 0.5394,
      "step": 183
    },
    {
      "epoch": 1.259090909090909,
      "grad_norm": 0.453068345785141,
      "learning_rate": 4.856828874966086e-06,
      "loss": 0.5262,
      "step": 184
    },
    {
      "epoch": 1.2659090909090909,
      "grad_norm": 0.4480113387107849,
      "learning_rate": 4.853433649150276e-06,
      "loss": 0.558,
      "step": 185
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 0.49457019567489624,
      "learning_rate": 4.849999850845066e-06,
      "loss": 0.5328,
      "step": 186
    },
    {
      "epoch": 1.2795454545454545,
      "grad_norm": 0.4163910746574402,
      "learning_rate": 4.8465275363299905e-06,
      "loss": 0.5272,
      "step": 187
    },
    {
      "epoch": 1.2863636363636364,
      "grad_norm": 0.47634580731391907,
      "learning_rate": 4.84301676251586e-06,
      "loss": 0.5511,
      "step": 188
    },
    {
      "epoch": 1.2931818181818182,
      "grad_norm": 0.4690471291542053,
      "learning_rate": 4.839467586943825e-06,
      "loss": 0.5569,
      "step": 189
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.4555318057537079,
      "learning_rate": 4.835880067784441e-06,
      "loss": 0.5087,
      "step": 190
    },
    {
      "epoch": 1.3068181818181819,
      "grad_norm": 0.8536185026168823,
      "learning_rate": 4.832254263836708e-06,
      "loss": 0.5321,
      "step": 191
    },
    {
      "epoch": 1.3136363636363637,
      "grad_norm": 0.452757865190506,
      "learning_rate": 4.828590234527107e-06,
      "loss": 0.5558,
      "step": 192
    },
    {
      "epoch": 1.3204545454545453,
      "grad_norm": 0.4368644654750824,
      "learning_rate": 4.82488803990863e-06,
      "loss": 0.5462,
      "step": 193
    },
    {
      "epoch": 1.3272727272727272,
      "grad_norm": 0.4731462597846985,
      "learning_rate": 4.821147740659795e-06,
      "loss": 0.497,
      "step": 194
    },
    {
      "epoch": 1.334090909090909,
      "grad_norm": 0.4699056148529053,
      "learning_rate": 4.817369398083648e-06,
      "loss": 0.5357,
      "step": 195
    },
    {
      "epoch": 1.3409090909090908,
      "grad_norm": 0.45635876059532166,
      "learning_rate": 4.813553074106761e-06,
      "loss": 0.5325,
      "step": 196
    },
    {
      "epoch": 1.3477272727272727,
      "grad_norm": 0.4345819354057312,
      "learning_rate": 4.809698831278217e-06,
      "loss": 0.517,
      "step": 197
    },
    {
      "epoch": 1.3545454545454545,
      "grad_norm": 0.41615837812423706,
      "learning_rate": 4.805806732768585e-06,
      "loss": 0.5221,
      "step": 198
    },
    {
      "epoch": 1.3613636363636363,
      "grad_norm": 0.44078198075294495,
      "learning_rate": 4.801876842368882e-06,
      "loss": 0.5214,
      "step": 199
    },
    {
      "epoch": 1.3681818181818182,
      "grad_norm": 0.4360499978065491,
      "learning_rate": 4.797909224489531e-06,
      "loss": 0.5402,
      "step": 200
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.46945518255233765,
      "learning_rate": 4.793903944159303e-06,
      "loss": 0.5354,
      "step": 201
    },
    {
      "epoch": 1.3818181818181818,
      "grad_norm": 0.4473336637020111,
      "learning_rate": 4.789861067024253e-06,
      "loss": 0.5006,
      "step": 202
    },
    {
      "epoch": 1.3886363636363637,
      "grad_norm": 0.47507786750793457,
      "learning_rate": 4.785780659346642e-06,
      "loss": 0.5163,
      "step": 203
    },
    {
      "epoch": 1.3954545454545455,
      "grad_norm": 0.5880581140518188,
      "learning_rate": 4.781662788003851e-06,
      "loss": 0.5505,
      "step": 204
    },
    {
      "epoch": 1.4022727272727273,
      "grad_norm": 0.5124858617782593,
      "learning_rate": 4.777507520487289e-06,
      "loss": 0.5294,
      "step": 205
    },
    {
      "epoch": 1.4090909090909092,
      "grad_norm": 0.43004822731018066,
      "learning_rate": 4.773314924901281e-06,
      "loss": 0.5255,
      "step": 206
    },
    {
      "epoch": 1.415909090909091,
      "grad_norm": 0.5221498012542725,
      "learning_rate": 4.769085069961955e-06,
      "loss": 0.5575,
      "step": 207
    },
    {
      "epoch": 1.4227272727272728,
      "grad_norm": 0.45500698685646057,
      "learning_rate": 4.764818024996117e-06,
      "loss": 0.5222,
      "step": 208
    },
    {
      "epoch": 1.4295454545454547,
      "grad_norm": 0.443926066160202,
      "learning_rate": 4.760513859940112e-06,
      "loss": 0.5121,
      "step": 209
    },
    {
      "epoch": 1.4363636363636363,
      "grad_norm": 0.42411959171295166,
      "learning_rate": 4.756172645338675e-06,
      "loss": 0.5328,
      "step": 210
    },
    {
      "epoch": 1.4431818181818181,
      "grad_norm": 0.477217435836792,
      "learning_rate": 4.751794452343785e-06,
      "loss": 0.5204,
      "step": 211
    },
    {
      "epoch": 1.45,
      "grad_norm": 0.4560566246509552,
      "learning_rate": 4.747379352713489e-06,
      "loss": 0.5224,
      "step": 212
    },
    {
      "epoch": 1.4568181818181818,
      "grad_norm": 0.44136327505111694,
      "learning_rate": 4.7429274188107275e-06,
      "loss": 0.5276,
      "step": 213
    },
    {
      "epoch": 1.4636363636363636,
      "grad_norm": 0.4897741675376892,
      "learning_rate": 4.738438723602154e-06,
      "loss": 0.5286,
      "step": 214
    },
    {
      "epoch": 1.4704545454545455,
      "grad_norm": 0.7040328979492188,
      "learning_rate": 4.733913340656933e-06,
      "loss": 0.5416,
      "step": 215
    },
    {
      "epoch": 1.4772727272727273,
      "grad_norm": 0.45212069153785706,
      "learning_rate": 4.729351344145536e-06,
      "loss": 0.5283,
      "step": 216
    },
    {
      "epoch": 1.4840909090909091,
      "grad_norm": 0.42538344860076904,
      "learning_rate": 4.7247528088385296e-06,
      "loss": 0.5109,
      "step": 217
    },
    {
      "epoch": 1.490909090909091,
      "grad_norm": 0.4419437050819397,
      "learning_rate": 4.720117810105341e-06,
      "loss": 0.5187,
      "step": 218
    },
    {
      "epoch": 1.4977272727272728,
      "grad_norm": 0.5299937725067139,
      "learning_rate": 4.715446423913036e-06,
      "loss": 0.541,
      "step": 219
    },
    {
      "epoch": 1.5045454545454544,
      "grad_norm": 0.4459671080112457,
      "learning_rate": 4.710738726825059e-06,
      "loss": 0.5498,
      "step": 220
    },
    {
      "epoch": 1.5113636363636362,
      "grad_norm": 0.47094839811325073,
      "learning_rate": 4.705994795999991e-06,
      "loss": 0.5206,
      "step": 221
    },
    {
      "epoch": 1.518181818181818,
      "grad_norm": 0.4358065128326416,
      "learning_rate": 4.701214709190277e-06,
      "loss": 0.5238,
      "step": 222
    },
    {
      "epoch": 1.525,
      "grad_norm": 0.46380552649497986,
      "learning_rate": 4.696398544740955e-06,
      "loss": 0.55,
      "step": 223
    },
    {
      "epoch": 1.5318181818181817,
      "grad_norm": 0.4672795236110687,
      "learning_rate": 4.69154638158837e-06,
      "loss": 0.5097,
      "step": 224
    },
    {
      "epoch": 1.5386363636363636,
      "grad_norm": 0.43432551622390747,
      "learning_rate": 4.686658299258881e-06,
      "loss": 0.5368,
      "step": 225
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.625635027885437,
      "learning_rate": 4.681734377867562e-06,
      "loss": 0.5395,
      "step": 226
    },
    {
      "epoch": 1.5522727272727272,
      "grad_norm": 1.058407187461853,
      "learning_rate": 4.67677469811688e-06,
      "loss": 0.5214,
      "step": 227
    },
    {
      "epoch": 1.559090909090909,
      "grad_norm": 0.6245561838150024,
      "learning_rate": 4.671779341295378e-06,
      "loss": 0.5187,
      "step": 228
    },
    {
      "epoch": 1.565909090909091,
      "grad_norm": 2.216414451599121,
      "learning_rate": 4.666748389276344e-06,
      "loss": 0.5311,
      "step": 229
    },
    {
      "epoch": 1.5727272727272728,
      "grad_norm": 0.4490523338317871,
      "learning_rate": 4.661681924516466e-06,
      "loss": 0.5179,
      "step": 230
    },
    {
      "epoch": 1.5795454545454546,
      "grad_norm": 0.49442484974861145,
      "learning_rate": 4.6565800300544805e-06,
      "loss": 0.5145,
      "step": 231
    },
    {
      "epoch": 1.5863636363636364,
      "grad_norm": 0.4880093038082123,
      "learning_rate": 4.651442789509813e-06,
      "loss": 0.5285,
      "step": 232
    },
    {
      "epoch": 1.5931818181818183,
      "grad_norm": 0.5839570164680481,
      "learning_rate": 4.646270287081208e-06,
      "loss": 0.5323,
      "step": 233
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.45720481872558594,
      "learning_rate": 4.641062607545347e-06,
      "loss": 0.5271,
      "step": 234
    },
    {
      "epoch": 1.606818181818182,
      "grad_norm": 0.4406528174877167,
      "learning_rate": 4.6358198362554585e-06,
      "loss": 0.5526,
      "step": 235
    },
    {
      "epoch": 1.6136363636363638,
      "grad_norm": 0.4360388517379761,
      "learning_rate": 4.630542059139923e-06,
      "loss": 0.5224,
      "step": 236
    },
    {
      "epoch": 1.6204545454545456,
      "grad_norm": 0.48028790950775146,
      "learning_rate": 4.625229362700863e-06,
      "loss": 0.531,
      "step": 237
    },
    {
      "epoch": 1.6272727272727274,
      "grad_norm": 0.45038384199142456,
      "learning_rate": 4.61988183401272e-06,
      "loss": 0.5221,
      "step": 238
    },
    {
      "epoch": 1.634090909090909,
      "grad_norm": 0.5929381847381592,
      "learning_rate": 4.614499560720837e-06,
      "loss": 0.5088,
      "step": 239
    },
    {
      "epoch": 1.6409090909090909,
      "grad_norm": 0.4953240752220154,
      "learning_rate": 4.609082631040012e-06,
      "loss": 0.5073,
      "step": 240
    },
    {
      "epoch": 1.6477272727272727,
      "grad_norm": 0.46439602971076965,
      "learning_rate": 4.603631133753061e-06,
      "loss": 0.549,
      "step": 241
    },
    {
      "epoch": 1.6545454545454545,
      "grad_norm": 0.4494118392467499,
      "learning_rate": 4.598145158209356e-06,
      "loss": 0.5019,
      "step": 242
    },
    {
      "epoch": 1.6613636363636364,
      "grad_norm": 0.6675086617469788,
      "learning_rate": 4.592624794323366e-06,
      "loss": 0.5305,
      "step": 243
    },
    {
      "epoch": 1.6681818181818182,
      "grad_norm": 0.6174231171607971,
      "learning_rate": 4.587070132573178e-06,
      "loss": 0.5098,
      "step": 244
    },
    {
      "epoch": 1.675,
      "grad_norm": 0.4987574517726898,
      "learning_rate": 4.581481263999019e-06,
      "loss": 0.5105,
      "step": 245
    },
    {
      "epoch": 1.6818181818181817,
      "grad_norm": 0.4219074845314026,
      "learning_rate": 4.575858280201761e-06,
      "loss": 0.5341,
      "step": 246
    },
    {
      "epoch": 1.6886363636363635,
      "grad_norm": 0.44271811842918396,
      "learning_rate": 4.570201273341418e-06,
      "loss": 0.517,
      "step": 247
    },
    {
      "epoch": 1.6954545454545453,
      "grad_norm": 0.49237915873527527,
      "learning_rate": 4.564510336135642e-06,
      "loss": 0.5554,
      "step": 248
    },
    {
      "epoch": 1.7022727272727272,
      "grad_norm": 0.4520842134952545,
      "learning_rate": 4.558785561858196e-06,
      "loss": 0.5133,
      "step": 249
    },
    {
      "epoch": 1.709090909090909,
      "grad_norm": 0.452443927526474,
      "learning_rate": 4.5530270443374305e-06,
      "loss": 0.5315,
      "step": 250
    },
    {
      "epoch": 1.7159090909090908,
      "grad_norm": 0.5061994791030884,
      "learning_rate": 4.547234877954741e-06,
      "loss": 0.5371,
      "step": 251
    },
    {
      "epoch": 1.7227272727272727,
      "grad_norm": 0.47319310903549194,
      "learning_rate": 4.541409157643027e-06,
      "loss": 0.5278,
      "step": 252
    },
    {
      "epoch": 1.7295454545454545,
      "grad_norm": 0.583519458770752,
      "learning_rate": 4.535549978885132e-06,
      "loss": 0.5291,
      "step": 253
    },
    {
      "epoch": 1.7363636363636363,
      "grad_norm": 0.5193645358085632,
      "learning_rate": 4.5296574377122765e-06,
      "loss": 0.522,
      "step": 254
    },
    {
      "epoch": 1.7431818181818182,
      "grad_norm": 0.4840165674686432,
      "learning_rate": 4.5237316307024895e-06,
      "loss": 0.5112,
      "step": 255
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.4321780502796173,
      "learning_rate": 4.517772654979024e-06,
      "loss": 0.5119,
      "step": 256
    },
    {
      "epoch": 1.7568181818181818,
      "grad_norm": 0.41374215483665466,
      "learning_rate": 4.51178060820876e-06,
      "loss": 0.5135,
      "step": 257
    },
    {
      "epoch": 1.7636363636363637,
      "grad_norm": 0.44822537899017334,
      "learning_rate": 4.505755588600613e-06,
      "loss": 0.5075,
      "step": 258
    },
    {
      "epoch": 1.7704545454545455,
      "grad_norm": 0.4314457178115845,
      "learning_rate": 4.499697694903915e-06,
      "loss": 0.5352,
      "step": 259
    },
    {
      "epoch": 1.7772727272727273,
      "grad_norm": 0.46970486640930176,
      "learning_rate": 4.493607026406802e-06,
      "loss": 0.5232,
      "step": 260
    },
    {
      "epoch": 1.7840909090909092,
      "grad_norm": 0.4221877455711365,
      "learning_rate": 4.487483682934587e-06,
      "loss": 0.5275,
      "step": 261
    },
    {
      "epoch": 1.790909090909091,
      "grad_norm": 0.4505936801433563,
      "learning_rate": 4.481327764848118e-06,
      "loss": 0.4861,
      "step": 262
    },
    {
      "epoch": 1.7977272727272728,
      "grad_norm": 0.5762044191360474,
      "learning_rate": 4.47513937304214e-06,
      "loss": 0.5179,
      "step": 263
    },
    {
      "epoch": 1.8045454545454547,
      "grad_norm": 0.4355130195617676,
      "learning_rate": 4.4689186089436365e-06,
      "loss": 0.5074,
      "step": 264
    },
    {
      "epoch": 1.8113636363636365,
      "grad_norm": 0.40980857610702515,
      "learning_rate": 4.462665574510169e-06,
      "loss": 0.5078,
      "step": 265
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.7456303238868713,
      "learning_rate": 4.456380372228208e-06,
      "loss": 0.5276,
      "step": 266
    },
    {
      "epoch": 1.825,
      "grad_norm": 0.4397786855697632,
      "learning_rate": 4.450063105111447e-06,
      "loss": 0.5396,
      "step": 267
    },
    {
      "epoch": 1.8318181818181818,
      "grad_norm": 0.4176543951034546,
      "learning_rate": 4.443713876699124e-06,
      "loss": 0.5021,
      "step": 268
    },
    {
      "epoch": 1.8386363636363636,
      "grad_norm": 0.4404450058937073,
      "learning_rate": 4.4373327910543125e-06,
      "loss": 0.5053,
      "step": 269
    },
    {
      "epoch": 1.8454545454545455,
      "grad_norm": 0.4341736137866974,
      "learning_rate": 4.430919952762226e-06,
      "loss": 0.5143,
      "step": 270
    },
    {
      "epoch": 1.8522727272727273,
      "grad_norm": 0.4676216244697571,
      "learning_rate": 4.424475466928499e-06,
      "loss": 0.5331,
      "step": 271
    },
    {
      "epoch": 1.8590909090909091,
      "grad_norm": 0.45895665884017944,
      "learning_rate": 4.417999439177465e-06,
      "loss": 0.4957,
      "step": 272
    },
    {
      "epoch": 1.865909090909091,
      "grad_norm": 0.5019403100013733,
      "learning_rate": 4.4114919756504275e-06,
      "loss": 0.5403,
      "step": 273
    },
    {
      "epoch": 1.8727272727272726,
      "grad_norm": 0.45978227257728577,
      "learning_rate": 4.404953183003916e-06,
      "loss": 0.5108,
      "step": 274
    },
    {
      "epoch": 1.8795454545454544,
      "grad_norm": 0.4369364380836487,
      "learning_rate": 4.398383168407941e-06,
      "loss": 0.539,
      "step": 275
    },
    {
      "epoch": 1.8863636363636362,
      "grad_norm": 0.4336954951286316,
      "learning_rate": 4.391782039544239e-06,
      "loss": 0.4979,
      "step": 276
    },
    {
      "epoch": 1.893181818181818,
      "grad_norm": 0.4878862500190735,
      "learning_rate": 4.385149904604502e-06,
      "loss": 0.5105,
      "step": 277
    },
    {
      "epoch": 1.9,
      "grad_norm": 0.6377302408218384,
      "learning_rate": 4.378486872288611e-06,
      "loss": 0.5078,
      "step": 278
    },
    {
      "epoch": 1.9068181818181817,
      "grad_norm": 0.44863370060920715,
      "learning_rate": 4.371793051802849e-06,
      "loss": 0.5186,
      "step": 279
    },
    {
      "epoch": 1.9136363636363636,
      "grad_norm": 0.60493004322052,
      "learning_rate": 4.365068552858116e-06,
      "loss": 0.5224,
      "step": 280
    },
    {
      "epoch": 1.9204545454545454,
      "grad_norm": 0.4682520627975464,
      "learning_rate": 4.358313485668124e-06,
      "loss": 0.5164,
      "step": 281
    },
    {
      "epoch": 1.9272727272727272,
      "grad_norm": 0.4865384101867676,
      "learning_rate": 4.3515279609476e-06,
      "loss": 0.4984,
      "step": 282
    },
    {
      "epoch": 1.934090909090909,
      "grad_norm": 0.5358805656433105,
      "learning_rate": 4.3447120899104615e-06,
      "loss": 0.5182,
      "step": 283
    },
    {
      "epoch": 1.940909090909091,
      "grad_norm": 0.5168288946151733,
      "learning_rate": 4.337865984268002e-06,
      "loss": 0.5168,
      "step": 284
    },
    {
      "epoch": 1.9477272727272728,
      "grad_norm": 0.4602853059768677,
      "learning_rate": 4.3309897562270525e-06,
      "loss": 0.5348,
      "step": 285
    },
    {
      "epoch": 1.9545454545454546,
      "grad_norm": 0.4886777997016907,
      "learning_rate": 4.324083518488151e-06,
      "loss": 0.5173,
      "step": 286
    },
    {
      "epoch": 1.9613636363636364,
      "grad_norm": 0.5380868911743164,
      "learning_rate": 4.317147384243688e-06,
      "loss": 0.5412,
      "step": 287
    },
    {
      "epoch": 1.9681818181818183,
      "grad_norm": 0.5659900307655334,
      "learning_rate": 4.3101814671760546e-06,
      "loss": 0.5283,
      "step": 288
    },
    {
      "epoch": 1.975,
      "grad_norm": 0.44209596514701843,
      "learning_rate": 4.303185881455778e-06,
      "loss": 0.5308,
      "step": 289
    },
    {
      "epoch": 1.981818181818182,
      "grad_norm": 0.44045692682266235,
      "learning_rate": 4.296160741739652e-06,
      "loss": 0.5254,
      "step": 290
    },
    {
      "epoch": 1.9886363636363638,
      "grad_norm": 0.9851962327957153,
      "learning_rate": 4.289106163168858e-06,
      "loss": 0.5107,
      "step": 291
    },
    {
      "epoch": 1.9954545454545456,
      "grad_norm": 0.4783098101615906,
      "learning_rate": 4.282022261367074e-06,
      "loss": 0.5133,
      "step": 292
    },
    {
      "epoch": 2.006818181818182,
      "grad_norm": 0.863693356513977,
      "learning_rate": 4.274909152438582e-06,
      "loss": 0.9983,
      "step": 293
    },
    {
      "epoch": 2.0136363636363637,
      "grad_norm": 0.4465242624282837,
      "learning_rate": 4.267766952966369e-06,
      "loss": 0.493,
      "step": 294
    },
    {
      "epoch": 2.0204545454545455,
      "grad_norm": 0.41670849919319153,
      "learning_rate": 4.260595780010209e-06,
      "loss": 0.4861,
      "step": 295
    },
    {
      "epoch": 2.0272727272727273,
      "grad_norm": 0.4531606435775757,
      "learning_rate": 4.2533957511047485e-06,
      "loss": 0.4791,
      "step": 296
    },
    {
      "epoch": 2.034090909090909,
      "grad_norm": 0.45564866065979004,
      "learning_rate": 4.24616698425758e-06,
      "loss": 0.456,
      "step": 297
    },
    {
      "epoch": 2.040909090909091,
      "grad_norm": 0.4542117714881897,
      "learning_rate": 4.238909597947307e-06,
      "loss": 0.4781,
      "step": 298
    },
    {
      "epoch": 2.047727272727273,
      "grad_norm": 0.46030667424201965,
      "learning_rate": 4.231623711121603e-06,
      "loss": 0.4937,
      "step": 299
    },
    {
      "epoch": 2.0545454545454547,
      "grad_norm": 0.5076425671577454,
      "learning_rate": 4.224309443195261e-06,
      "loss": 0.4925,
      "step": 300
    },
    {
      "epoch": 2.0613636363636365,
      "grad_norm": 0.4337129592895508,
      "learning_rate": 4.2169669140482365e-06,
      "loss": 0.4797,
      "step": 301
    },
    {
      "epoch": 2.0681818181818183,
      "grad_norm": 0.505288302898407,
      "learning_rate": 4.2095962440236846e-06,
      "loss": 0.5033,
      "step": 302
    },
    {
      "epoch": 2.075,
      "grad_norm": 0.48468518257141113,
      "learning_rate": 4.202197553925983e-06,
      "loss": 0.4826,
      "step": 303
    },
    {
      "epoch": 2.081818181818182,
      "grad_norm": 0.39565783739089966,
      "learning_rate": 4.194770965018758e-06,
      "loss": 0.5042,
      "step": 304
    },
    {
      "epoch": 2.088636363636364,
      "grad_norm": 0.44905662536621094,
      "learning_rate": 4.187316599022892e-06,
      "loss": 0.5044,
      "step": 305
    },
    {
      "epoch": 2.0954545454545457,
      "grad_norm": 0.49123474955558777,
      "learning_rate": 4.179834578114531e-06,
      "loss": 0.4716,
      "step": 306
    },
    {
      "epoch": 2.102272727272727,
      "grad_norm": 0.4576941132545471,
      "learning_rate": 4.172325024923083e-06,
      "loss": 0.5,
      "step": 307
    },
    {
      "epoch": 2.109090909090909,
      "grad_norm": 0.47511449456214905,
      "learning_rate": 4.164788062529203e-06,
      "loss": 0.4956,
      "step": 308
    },
    {
      "epoch": 2.1159090909090907,
      "grad_norm": 1.0899369716644287,
      "learning_rate": 4.157223814462784e-06,
      "loss": 0.4894,
      "step": 309
    },
    {
      "epoch": 2.1227272727272726,
      "grad_norm": 0.44158637523651123,
      "learning_rate": 4.149632404700925e-06,
      "loss": 0.4882,
      "step": 310
    },
    {
      "epoch": 2.1295454545454544,
      "grad_norm": 0.43381020426750183,
      "learning_rate": 4.142013957665903e-06,
      "loss": 0.4717,
      "step": 311
    },
    {
      "epoch": 2.1363636363636362,
      "grad_norm": 0.41239485144615173,
      "learning_rate": 4.134368598223132e-06,
      "loss": 0.4796,
      "step": 312
    },
    {
      "epoch": 2.143181818181818,
      "grad_norm": 0.40882745385169983,
      "learning_rate": 4.126696451679116e-06,
      "loss": 0.4749,
      "step": 313
    },
    {
      "epoch": 2.15,
      "grad_norm": 0.4558829665184021,
      "learning_rate": 4.118997643779401e-06,
      "loss": 0.4998,
      "step": 314
    },
    {
      "epoch": 2.1568181818181817,
      "grad_norm": 0.512464702129364,
      "learning_rate": 4.111272300706502e-06,
      "loss": 0.4902,
      "step": 315
    },
    {
      "epoch": 2.1636363636363636,
      "grad_norm": 0.4703938066959381,
      "learning_rate": 4.1035205490778505e-06,
      "loss": 0.5058,
      "step": 316
    },
    {
      "epoch": 2.1704545454545454,
      "grad_norm": 0.8585686087608337,
      "learning_rate": 4.095742515943703e-06,
      "loss": 0.4992,
      "step": 317
    },
    {
      "epoch": 2.1772727272727272,
      "grad_norm": 0.4601600170135498,
      "learning_rate": 4.087938328785071e-06,
      "loss": 0.5036,
      "step": 318
    },
    {
      "epoch": 2.184090909090909,
      "grad_norm": 0.4537200927734375,
      "learning_rate": 4.080108115511629e-06,
      "loss": 0.5039,
      "step": 319
    },
    {
      "epoch": 2.190909090909091,
      "grad_norm": 2.6141035556793213,
      "learning_rate": 4.072252004459612e-06,
      "loss": 0.4808,
      "step": 320
    },
    {
      "epoch": 2.1977272727272728,
      "grad_norm": 0.4352598488330841,
      "learning_rate": 4.064370124389718e-06,
      "loss": 0.5134,
      "step": 321
    },
    {
      "epoch": 2.2045454545454546,
      "grad_norm": 0.4278712868690491,
      "learning_rate": 4.056462604484998e-06,
      "loss": 0.4906,
      "step": 322
    },
    {
      "epoch": 2.2113636363636364,
      "grad_norm": 0.4466942846775055,
      "learning_rate": 4.048529574348734e-06,
      "loss": 0.4753,
      "step": 323
    },
    {
      "epoch": 2.2181818181818183,
      "grad_norm": 0.4446498453617096,
      "learning_rate": 4.040571164002319e-06,
      "loss": 0.4822,
      "step": 324
    },
    {
      "epoch": 2.225,
      "grad_norm": 0.46646741032600403,
      "learning_rate": 4.032587503883124e-06,
      "loss": 0.5061,
      "step": 325
    },
    {
      "epoch": 2.231818181818182,
      "grad_norm": 0.41562438011169434,
      "learning_rate": 4.0245787248423614e-06,
      "loss": 0.481,
      "step": 326
    },
    {
      "epoch": 2.2386363636363638,
      "grad_norm": 0.46327149868011475,
      "learning_rate": 4.0165449581429404e-06,
      "loss": 0.452,
      "step": 327
    },
    {
      "epoch": 2.2454545454545456,
      "grad_norm": 0.4267130494117737,
      "learning_rate": 4.008486335457312e-06,
      "loss": 0.4871,
      "step": 328
    },
    {
      "epoch": 2.2522727272727274,
      "grad_norm": 0.45584383606910706,
      "learning_rate": 4.000402988865316e-06,
      "loss": 0.4961,
      "step": 329
    },
    {
      "epoch": 2.2590909090909093,
      "grad_norm": 0.4481853246688843,
      "learning_rate": 3.992295050852013e-06,
      "loss": 0.5003,
      "step": 330
    },
    {
      "epoch": 2.265909090909091,
      "grad_norm": 0.4425372779369354,
      "learning_rate": 3.984162654305516e-06,
      "loss": 0.4791,
      "step": 331
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 0.44646650552749634,
      "learning_rate": 3.976005932514807e-06,
      "loss": 0.4915,
      "step": 332
    },
    {
      "epoch": 2.2795454545454543,
      "grad_norm": 0.49541783332824707,
      "learning_rate": 3.967825019167559e-06,
      "loss": 0.5002,
      "step": 333
    },
    {
      "epoch": 2.286363636363636,
      "grad_norm": 0.45653295516967773,
      "learning_rate": 3.959620048347938e-06,
      "loss": 0.4458,
      "step": 334
    },
    {
      "epoch": 2.293181818181818,
      "grad_norm": 0.4129759669303894,
      "learning_rate": 3.951391154534415e-06,
      "loss": 0.4659,
      "step": 335
    },
    {
      "epoch": 2.3,
      "grad_norm": 0.6091723442077637,
      "learning_rate": 3.943138472597549e-06,
      "loss": 0.4954,
      "step": 336
    },
    {
      "epoch": 2.3068181818181817,
      "grad_norm": 0.545915961265564,
      "learning_rate": 3.934862137797788e-06,
      "loss": 0.473,
      "step": 337
    },
    {
      "epoch": 2.3136363636363635,
      "grad_norm": 0.410340279340744,
      "learning_rate": 3.9265622857832455e-06,
      "loss": 0.4957,
      "step": 338
    },
    {
      "epoch": 2.3204545454545453,
      "grad_norm": 0.4205377399921417,
      "learning_rate": 3.918239052587481e-06,
      "loss": 0.4495,
      "step": 339
    },
    {
      "epoch": 2.327272727272727,
      "grad_norm": 0.4741414189338684,
      "learning_rate": 3.909892574627267e-06,
      "loss": 0.4921,
      "step": 340
    },
    {
      "epoch": 2.334090909090909,
      "grad_norm": 0.5609298348426819,
      "learning_rate": 3.901522988700355e-06,
      "loss": 0.4541,
      "step": 341
    },
    {
      "epoch": 2.340909090909091,
      "grad_norm": 0.5533426403999329,
      "learning_rate": 3.893130431983234e-06,
      "loss": 0.4869,
      "step": 342
    },
    {
      "epoch": 2.3477272727272727,
      "grad_norm": 0.4314653277397156,
      "learning_rate": 3.884715042028882e-06,
      "loss": 0.4912,
      "step": 343
    },
    {
      "epoch": 2.3545454545454545,
      "grad_norm": 0.6120728850364685,
      "learning_rate": 3.876276956764509e-06,
      "loss": 0.4825,
      "step": 344
    },
    {
      "epoch": 2.3613636363636363,
      "grad_norm": 0.49544838070869446,
      "learning_rate": 3.867816314489301e-06,
      "loss": 0.5012,
      "step": 345
    },
    {
      "epoch": 2.368181818181818,
      "grad_norm": 0.45203930139541626,
      "learning_rate": 3.8593332538721465e-06,
| "loss": 0.4647, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 2.375, | |
| "grad_norm": 0.4588426351547241, | |
| "learning_rate": 3.8508279139493736e-06, | |
| "loss": 0.4643, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 2.381818181818182, | |
| "grad_norm": 0.438743531703949, | |
| "learning_rate": 3.84230043412246e-06, | |
| "loss": 0.4613, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 2.3886363636363637, | |
| "grad_norm": 0.42773687839508057, | |
| "learning_rate": 3.833750954155757e-06, | |
| "loss": 0.477, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 2.3954545454545455, | |
| "grad_norm": 0.4527823328971863, | |
| "learning_rate": 3.825179614174195e-06, | |
| "loss": 0.4578, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.4022727272727273, | |
| "grad_norm": 0.5061528086662292, | |
| "learning_rate": 3.816586554660987e-06, | |
| "loss": 0.478, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 2.409090909090909, | |
| "grad_norm": 0.44073787331581116, | |
| "learning_rate": 3.807971916455325e-06, | |
| "loss": 0.4767, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 2.415909090909091, | |
| "grad_norm": 0.5312027931213379, | |
| "learning_rate": 3.799335840750077e-06, | |
| "loss": 0.4988, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 2.422727272727273, | |
| "grad_norm": 0.5463031530380249, | |
| "learning_rate": 3.790678469089465e-06, | |
| "loss": 0.4754, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 2.4295454545454547, | |
| "grad_norm": 0.4529358148574829, | |
| "learning_rate": 3.7819999433667503e-06, | |
| "loss": 0.4796, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 2.4363636363636365, | |
| "grad_norm": 0.5421426296234131, | |
| "learning_rate": 3.773300405821908e-06, | |
| "loss": 0.4795, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 2.4431818181818183, | |
| "grad_norm": 0.4954266846179962, | |
| "learning_rate": 3.764579999039293e-06, | |
| "loss": 0.4606, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "grad_norm": 0.5748584866523743, | |
| "learning_rate": 3.7558388659453052e-06, | |
| "loss": 0.4951, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 2.456818181818182, | |
| "grad_norm": 0.45582106709480286, | |
| "learning_rate": 3.7470771498060455e-06, | |
| "loss": 0.4972, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 2.463636363636364, | |
| "grad_norm": 0.5302329659461975, | |
| "learning_rate": 3.7382949942249695e-06, | |
| "loss": 0.4957, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 2.4704545454545457, | |
| "grad_norm": 0.5779275298118591, | |
| "learning_rate": 3.7294925431405306e-06, | |
| "loss": 0.4581, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 2.4772727272727275, | |
| "grad_norm": 0.43174678087234497, | |
| "learning_rate": 3.720669940823827e-06, | |
| "loss": 0.4774, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 2.484090909090909, | |
| "grad_norm": 0.48383086919784546, | |
| "learning_rate": 3.7118273318762275e-06, | |
| "loss": 0.495, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 2.4909090909090907, | |
| "grad_norm": 0.6840851306915283, | |
| "learning_rate": 3.702964861227013e-06, | |
| "loss": 0.4799, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 2.4977272727272726, | |
| "grad_norm": 0.930915892124176, | |
| "learning_rate": 3.694082674130991e-06, | |
| "loss": 0.4845, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 2.5045454545454544, | |
| "grad_norm": 0.9096072912216187, | |
| "learning_rate": 3.6851809161661206e-06, | |
| "loss": 0.5085, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.5113636363636362, | |
| "grad_norm": 0.4533482789993286, | |
| "learning_rate": 3.6762597332311254e-06, | |
| "loss": 0.5155, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 2.518181818181818, | |
| "grad_norm": 0.4738960564136505, | |
| "learning_rate": 3.6673192715431016e-06, | |
| "loss": 0.4669, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 2.525, | |
| "grad_norm": 0.5107500553131104, | |
| "learning_rate": 3.658359677635122e-06, | |
| "loss": 0.4467, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 2.5318181818181817, | |
| "grad_norm": 0.4653143286705017, | |
| "learning_rate": 3.649381098353834e-06, | |
| "loss": 0.5097, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.5386363636363636, | |
| "grad_norm": 0.4466484487056732, | |
| "learning_rate": 3.6403836808570512e-06, | |
| "loss": 0.502, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.5454545454545454, | |
| "grad_norm": 0.5074096918106079, | |
| "learning_rate": 3.631367572611348e-06, | |
| "loss": 0.4897, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.5522727272727272, | |
| "grad_norm": 0.430399090051651, | |
| "learning_rate": 3.6223329213896313e-06, | |
| "loss": 0.4834, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.559090909090909, | |
| "grad_norm": 0.5170993208885193, | |
| "learning_rate": 3.613279875268731e-06, | |
| "loss": 0.4781, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.565909090909091, | |
| "grad_norm": 0.44036629796028137, | |
| "learning_rate": 3.604208582626964e-06, | |
| "loss": 0.4767, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.5727272727272728, | |
| "grad_norm": 0.5156371593475342, | |
| "learning_rate": 3.5951191921417063e-06, | |
| "loss": 0.4431, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.5795454545454546, | |
| "grad_norm": 0.4620317220687866, | |
| "learning_rate": 3.586011852786955e-06, | |
| "loss": 0.4876, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.5863636363636364, | |
| "grad_norm": 0.47322800755500793, | |
| "learning_rate": 3.5768867138308872e-06, | |
| "loss": 0.4674, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.5931818181818183, | |
| "grad_norm": 0.46675825119018555, | |
| "learning_rate": 3.5677439248334133e-06, | |
| "loss": 0.4742, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "grad_norm": 0.4184316098690033, | |
| "learning_rate": 3.5585836356437266e-06, | |
| "loss": 0.4852, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 2.606818181818182, | |
| "grad_norm": 0.5091507434844971, | |
| "learning_rate": 3.5494059963978433e-06, | |
| "loss": 0.4601, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 2.6136363636363638, | |
| "grad_norm": 0.4307488203048706, | |
| "learning_rate": 3.540211157516149e-06, | |
| "loss": 0.4956, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 2.6204545454545456, | |
| "grad_norm": 0.47802144289016724, | |
| "learning_rate": 3.530999269700927e-06, | |
| "loss": 0.475, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 2.6272727272727274, | |
| "grad_norm": 0.48785486817359924, | |
| "learning_rate": 3.521770483933891e-06, | |
| "loss": 0.4995, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 2.634090909090909, | |
| "grad_norm": 0.539107620716095, | |
| "learning_rate": 3.5125249514737093e-06, | |
| "loss": 0.4777, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 2.6409090909090907, | |
| "grad_norm": 2.07820725440979, | |
| "learning_rate": 3.503262823853527e-06, | |
| "loss": 0.4815, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 2.6477272727272725, | |
| "grad_norm": 0.4554578363895416, | |
| "learning_rate": 3.493984252878483e-06, | |
| "loss": 0.4857, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 2.6545454545454543, | |
| "grad_norm": 0.4895271062850952, | |
| "learning_rate": 3.484689390623218e-06, | |
| "loss": 0.4817, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 2.661363636363636, | |
| "grad_norm": 0.4659958779811859, | |
| "learning_rate": 3.4753783894293886e-06, | |
| "loss": 0.4728, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 2.668181818181818, | |
| "grad_norm": 0.6492037177085876, | |
| "learning_rate": 3.466051401903162e-06, | |
| "loss": 0.4759, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 2.675, | |
| "grad_norm": 0.4655483365058899, | |
| "learning_rate": 3.4567085809127247e-06, | |
| "loss": 0.4939, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 2.6818181818181817, | |
| "grad_norm": 0.4413434565067291, | |
| "learning_rate": 3.4473500795857674e-06, | |
| "loss": 0.468, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 2.6886363636363635, | |
| "grad_norm": 0.6349183917045593, | |
| "learning_rate": 3.4379760513069804e-06, | |
| "loss": 0.4871, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 2.6954545454545453, | |
| "grad_norm": 0.45469024777412415, | |
| "learning_rate": 3.428586649715542e-06, | |
| "loss": 0.491, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 2.702272727272727, | |
| "grad_norm": 0.47255265712738037, | |
| "learning_rate": 3.4191820287025916e-06, | |
| "loss": 0.4724, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 2.709090909090909, | |
| "grad_norm": 0.615580141544342, | |
| "learning_rate": 3.4097623424087196e-06, | |
| "loss": 0.4895, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 2.715909090909091, | |
| "grad_norm": 0.5199218988418579, | |
| "learning_rate": 3.4003277452214284e-06, | |
| "loss": 0.4748, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 2.7227272727272727, | |
| "grad_norm": 0.5153501629829407, | |
| "learning_rate": 3.3908783917726123e-06, | |
| "loss": 0.4709, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 2.7295454545454545, | |
| "grad_norm": 0.5243995785713196, | |
| "learning_rate": 3.381414436936018e-06, | |
| "loss": 0.4937, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 2.7363636363636363, | |
| "grad_norm": 0.447218656539917, | |
| "learning_rate": 3.3719360358247054e-06, | |
| "loss": 0.4787, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 2.743181818181818, | |
| "grad_norm": 0.5046358108520508, | |
| "learning_rate": 3.36244334378851e-06, | |
| "loss": 0.4786, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.42097264528274536, | |
| "learning_rate": 3.3529365164114903e-06, | |
| "loss": 0.4904, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 2.756818181818182, | |
| "grad_norm": 0.43367311358451843, | |
| "learning_rate": 3.3434157095093846e-06, | |
| "loss": 0.5101, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 2.7636363636363637, | |
| "grad_norm": 0.4808436632156372, | |
| "learning_rate": 3.333881079127052e-06, | |
| "loss": 0.4897, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 2.7704545454545455, | |
| "grad_norm": 0.4499680697917938, | |
| "learning_rate": 3.3243327815359168e-06, | |
| "loss": 0.4875, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 2.7772727272727273, | |
| "grad_norm": 5.686249256134033, | |
| "learning_rate": 3.314770973231408e-06, | |
| "loss": 0.4749, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 2.784090909090909, | |
| "grad_norm": 0.46353423595428467, | |
| "learning_rate": 3.305195810930393e-06, | |
| "loss": 0.4762, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 2.790909090909091, | |
| "grad_norm": 0.5125230550765991, | |
| "learning_rate": 3.2956074515686105e-06, | |
| "loss": 0.4561, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 2.797727272727273, | |
| "grad_norm": 0.4616805911064148, | |
| "learning_rate": 3.2860060522980945e-06, | |
| "loss": 0.4802, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 2.8045454545454547, | |
| "grad_norm": 0.46016058325767517, | |
| "learning_rate": 3.276391770484606e-06, | |
| "loss": 0.4791, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 2.8113636363636365, | |
| "grad_norm": 0.4200516641139984, | |
| "learning_rate": 3.266764763705046e-06, | |
| "loss": 0.513, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 2.8181818181818183, | |
| "grad_norm": 0.4614863395690918, | |
| "learning_rate": 3.257125189744877e-06, | |
| "loss": 0.4848, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 2.825, | |
| "grad_norm": 0.48173412680625916, | |
| "learning_rate": 3.247473206595536e-06, | |
| "loss": 0.4775, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 2.831818181818182, | |
| "grad_norm": 0.5301795601844788, | |
| "learning_rate": 3.2378089724518464e-06, | |
| "loss": 0.5021, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 2.838636363636364, | |
| "grad_norm": 0.4633345305919647, | |
| "learning_rate": 3.228132645709421e-06, | |
| "loss": 0.4786, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.8454545454545457, | |
| "grad_norm": 0.5002239346504211, | |
| "learning_rate": 3.218444384962071e-06, | |
| "loss": 0.4855, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.8522727272727275, | |
| "grad_norm": 0.5135217308998108, | |
| "learning_rate": 3.2087443489992043e-06, | |
| "loss": 0.4753, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.8590909090909093, | |
| "grad_norm": 0.5117442011833191, | |
| "learning_rate": 3.1990326968032225e-06, | |
| "loss": 0.5172, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.865909090909091, | |
| "grad_norm": 0.4510478079319, | |
| "learning_rate": 3.189309587546917e-06, | |
| "loss": 0.469, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.8727272727272726, | |
| "grad_norm": 0.43793871998786926, | |
| "learning_rate": 3.1795751805908578e-06, | |
| "loss": 0.4718, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.8795454545454544, | |
| "grad_norm": 0.4784693419933319, | |
| "learning_rate": 3.169829635480783e-06, | |
| "loss": 0.489, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.8863636363636362, | |
| "grad_norm": 0.44207558035850525, | |
| "learning_rate": 3.160073111944983e-06, | |
| "loss": 0.4898, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.893181818181818, | |
| "grad_norm": 0.5189319849014282, | |
| "learning_rate": 3.150305769891686e-06, | |
| "loss": 0.4661, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 0.45607301592826843, | |
| "learning_rate": 3.1405277694064306e-06, | |
| "loss": 0.451, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.9068181818181817, | |
| "grad_norm": 0.4246100187301636, | |
| "learning_rate": 3.13073927074945e-06, | |
| "loss": 0.4798, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.9136363636363636, | |
| "grad_norm": 0.6055796146392822, | |
| "learning_rate": 3.1209404343530374e-06, | |
| "loss": 0.4817, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.9204545454545454, | |
| "grad_norm": 0.47676950693130493, | |
| "learning_rate": 3.111131420818922e-06, | |
| "loss": 0.4586, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.9272727272727272, | |
| "grad_norm": 0.45245999097824097, | |
| "learning_rate": 3.1013123909156347e-06, | |
| "loss": 0.4793, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.934090909090909, | |
| "grad_norm": 0.46687406301498413, | |
| "learning_rate": 3.091483505575873e-06, | |
| "loss": 0.4706, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.940909090909091, | |
| "grad_norm": 0.43894922733306885, | |
| "learning_rate": 3.081644925893866e-06, | |
| "loss": 0.4909, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.9477272727272728, | |
| "grad_norm": 0.4741908609867096, | |
| "learning_rate": 3.0717968131227285e-06, | |
| "loss": 0.4775, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.9545454545454546, | |
| "grad_norm": 0.805005669593811, | |
| "learning_rate": 3.061939328671824e-06, | |
| "loss": 0.5056, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.9613636363636364, | |
| "grad_norm": 0.4265108108520508, | |
| "learning_rate": 3.0520726341041165e-06, | |
| "loss": 0.4586, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.9681818181818183, | |
| "grad_norm": 0.4461130201816559, | |
| "learning_rate": 3.0421968911335196e-06, | |
| "loss": 0.4737, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.975, | |
| "grad_norm": 0.46044448018074036, | |
| "learning_rate": 3.032312261622255e-06, | |
| "loss": 0.4836, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.981818181818182, | |
| "grad_norm": 0.4915880858898163, | |
| "learning_rate": 3.0224189075781886e-06, | |
| "loss": 0.5056, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.9886363636363638, | |
| "grad_norm": 1.6025773286819458, | |
| "learning_rate": 3.012516991152181e-06, | |
| "loss": 0.454, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.9954545454545456, | |
| "grad_norm": 0.44822078943252563, | |
| "learning_rate": 3.002606674635432e-06, | |
| "loss": 0.4914, | |
| "step": 438 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 876, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 6, | |
| "save_steps": 146, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.822023060576115e+19, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
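
The structure above matches the `trainer_state.json` that the Hugging Face `Trainer` writes into each checkpoint directory; this one was saved at step 438 of 876 (end of the third of six epochs, consistent with `"save_steps": 146`). Below is a minimal sketch of how one might inspect the logged curves from such a file. The filename follows the Trainer's convention, but the path and the plotting choices are assumptions for illustration, not anything recorded in the state itself.

```python
# Minimal sketch: load a Trainer checkpoint state and plot its training curves.
# Assumes the JSON above is saved as "trainer_state.json" in the working
# directory (a hypothetical path) and that matplotlib is installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry carries step, epoch, loss, grad_norm and learning_rate,
# recorded every `logging_steps` (= 1 here) optimizer steps.
history = state["log_history"]
steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning_rate")
ax_lr.set_xlabel("step")
fig.suptitle(f"step {state['global_step']} / {state['max_steps']}")
plt.show()
```

Read this way, the log shows the warmup phase at the start of training (learning rate climbing linearly from 5e-08) giving way to a decaying schedule by step 306 onward, with the loss settling around 0.45 to 0.51 in the third epoch.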