{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 174,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017241379310344827,
      "grad_norm": 1.34375,
      "learning_rate": 0.0,
      "loss": 2.5553,
      "step": 1
    },
    {
      "epoch": 0.034482758620689655,
      "grad_norm": 1.359375,
      "learning_rate": 1.25e-06,
      "loss": 2.5516,
      "step": 2
    },
    {
      "epoch": 0.05172413793103448,
      "grad_norm": 1.25,
      "learning_rate": 2.5e-06,
      "loss": 2.5156,
      "step": 3
    },
    {
      "epoch": 0.06896551724137931,
      "grad_norm": 1.3125,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 2.5199,
      "step": 4
    },
    {
      "epoch": 0.08620689655172414,
      "grad_norm": 1.1640625,
      "learning_rate": 5e-06,
      "loss": 2.5059,
      "step": 5
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 1.2109375,
      "learning_rate": 6.25e-06,
      "loss": 2.4909,
      "step": 6
    },
    {
      "epoch": 0.1206896551724138,
      "grad_norm": 1.03125,
      "learning_rate": 7.500000000000001e-06,
      "loss": 2.5399,
      "step": 7
    },
    {
      "epoch": 0.13793103448275862,
      "grad_norm": 0.91796875,
      "learning_rate": 8.750000000000001e-06,
      "loss": 2.5355,
      "step": 8
    },
    {
      "epoch": 0.15517241379310345,
      "grad_norm": 0.828125,
      "learning_rate": 1e-05,
      "loss": 2.5359,
      "step": 9
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 0.70703125,
      "learning_rate": 9.99910461334869e-06,
      "loss": 2.5257,
      "step": 10
    },
    {
      "epoch": 0.1896551724137931,
      "grad_norm": 0.6875,
      "learning_rate": 9.996418774081658e-06,
      "loss": 2.5138,
      "step": 11
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 0.6953125,
      "learning_rate": 9.991943444144758e-06,
      "loss": 2.5139,
      "step": 12
    },
    {
      "epoch": 0.22413793103448276,
      "grad_norm": 0.87109375,
      "learning_rate": 9.985680226398261e-06,
      "loss": 2.4983,
      "step": 13
    },
    {
      "epoch": 0.2413793103448276,
      "grad_norm": 0.69921875,
      "learning_rate": 9.977631364042796e-06,
      "loss": 2.5036,
      "step": 14
    },
    {
      "epoch": 0.25862068965517243,
      "grad_norm": 0.703125,
      "learning_rate": 9.967799739815925e-06,
      "loss": 2.5121,
      "step": 15
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 0.703125,
      "learning_rate": 9.956188874959686e-06,
      "loss": 2.539,
      "step": 16
    },
    {
      "epoch": 0.29310344827586204,
      "grad_norm": 0.70703125,
      "learning_rate": 9.942802927959444e-06,
      "loss": 2.5107,
      "step": 17
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 0.67578125,
      "learning_rate": 9.927646693054498e-06,
      "loss": 2.4907,
      "step": 18
    },
    {
      "epoch": 0.3275862068965517,
      "grad_norm": 0.66015625,
      "learning_rate": 9.910725598521014e-06,
      "loss": 2.4836,
      "step": 19
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 0.66796875,
      "learning_rate": 9.892045704727864e-06,
      "loss": 2.494,
      "step": 20
    },
    {
      "epoch": 0.3620689655172414,
      "grad_norm": 0.61328125,
      "learning_rate": 9.871613701966067e-06,
      "loss": 2.4733,
      "step": 21
    },
    {
      "epoch": 0.3793103448275862,
      "grad_norm": 0.66796875,
      "learning_rate": 9.849436908052636e-06,
      "loss": 2.4141,
      "step": 22
    },
    {
      "epoch": 0.39655172413793105,
      "grad_norm": 0.61328125,
      "learning_rate": 9.825523265709667e-06,
      "loss": 2.521,
      "step": 23
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 0.640625,
      "learning_rate": 9.799881339719615e-06,
      "loss": 2.4647,
      "step": 24
    },
    {
      "epoch": 0.43103448275862066,
      "grad_norm": 0.62109375,
      "learning_rate": 9.772520313857777e-06,
      "loss": 2.4892,
      "step": 25
    },
    {
      "epoch": 0.4482758620689655,
      "grad_norm": 0.6015625,
      "learning_rate": 9.743449987603082e-06,
      "loss": 2.4734,
      "step": 26
    },
    {
      "epoch": 0.46551724137931033,
      "grad_norm": 0.609375,
      "learning_rate": 9.712680772628365e-06,
      "loss": 2.5229,
      "step": 27
    },
    {
      "epoch": 0.4827586206896552,
      "grad_norm": 0.6171875,
      "learning_rate": 9.680223689071364e-06,
      "loss": 2.5502,
      "step": 28
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.6171875,
      "learning_rate": 9.646090361587828e-06,
      "loss": 2.5008,
      "step": 29
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 0.63671875,
      "learning_rate": 9.610293015188067e-06,
      "loss": 2.5319,
      "step": 30
    },
    {
      "epoch": 0.5344827586206896,
      "grad_norm": 0.61328125,
      "learning_rate": 9.572844470858537e-06,
      "loss": 2.5004,
      "step": 31
    },
    {
      "epoch": 0.5517241379310345,
      "grad_norm": 0.6171875,
      "learning_rate": 9.533758140969913e-06,
      "loss": 2.4611,
      "step": 32
    },
    {
      "epoch": 0.5689655172413793,
      "grad_norm": 0.625,
      "learning_rate": 9.493048024473413e-06,
      "loss": 2.4883,
      "step": 33
    },
    {
      "epoch": 0.5862068965517241,
      "grad_norm": 0.59765625,
      "learning_rate": 9.450728701886985e-06,
      "loss": 2.5014,
      "step": 34
    },
    {
      "epoch": 0.603448275862069,
      "grad_norm": 0.59765625,
      "learning_rate": 9.406815330073244e-06,
      "loss": 2.4959,
      "step": 35
    },
    {
      "epoch": 0.6206896551724138,
      "grad_norm": 0.60546875,
      "learning_rate": 9.36132363681097e-06,
      "loss": 2.4901,
      "step": 36
    },
    {
      "epoch": 0.6379310344827587,
      "grad_norm": 0.59765625,
      "learning_rate": 9.314269915162115e-06,
      "loss": 2.54,
      "step": 37
    },
    {
      "epoch": 0.6551724137931034,
      "grad_norm": 0.609375,
      "learning_rate": 9.265671017636384e-06,
      "loss": 2.5077,
      "step": 38
    },
    {
      "epoch": 0.6724137931034483,
      "grad_norm": 0.60546875,
      "learning_rate": 9.215544350155423e-06,
      "loss": 2.5093,
      "step": 39
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 0.6015625,
      "learning_rate": 9.163907865818806e-06,
      "loss": 2.5134,
      "step": 40
    },
    {
      "epoch": 0.7068965517241379,
      "grad_norm": 0.609375,
      "learning_rate": 9.110780058474052e-06,
      "loss": 2.5651,
      "step": 41
    },
    {
      "epoch": 0.7241379310344828,
      "grad_norm": 0.59375,
      "learning_rate": 9.056179956092961e-06,
      "loss": 2.4615,
      "step": 42
    },
    {
      "epoch": 0.7413793103448276,
      "grad_norm": 0.59375,
      "learning_rate": 9.000127113956673e-06,
      "loss": 2.4763,
      "step": 43
    },
    {
      "epoch": 0.7586206896551724,
      "grad_norm": 0.58203125,
      "learning_rate": 8.94264160765183e-06,
      "loss": 2.4732,
      "step": 44
    },
    {
      "epoch": 0.7758620689655172,
      "grad_norm": 0.58203125,
      "learning_rate": 8.883744025880429e-06,
      "loss": 2.49,
      "step": 45
    },
    {
      "epoch": 0.7931034482758621,
      "grad_norm": 0.5859375,
      "learning_rate": 8.823455463085873e-06,
      "loss": 2.4944,
      "step": 46
    },
    {
      "epoch": 0.8103448275862069,
      "grad_norm": 0.58203125,
      "learning_rate": 8.761797511897907e-06,
      "loss": 2.5177,
      "step": 47
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 0.6015625,
      "learning_rate": 8.698792255399104e-06,
      "loss": 2.5083,
      "step": 48
    },
    {
      "epoch": 0.8448275862068966,
      "grad_norm": 0.58984375,
      "learning_rate": 8.634462259215719e-06,
      "loss": 2.5096,
      "step": 49
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 0.60546875,
      "learning_rate": 8.568830563435695e-06,
      "loss": 2.5267,
      "step": 50
    },
    {
      "epoch": 0.8793103448275862,
      "grad_norm": 0.6015625,
      "learning_rate": 8.501920674356755e-06,
      "loss": 2.5041,
      "step": 51
    },
    {
      "epoch": 0.896551724137931,
      "grad_norm": 0.59765625,
      "learning_rate": 8.433756556067506e-06,
      "loss": 2.5081,
      "step": 52
    },
    {
      "epoch": 0.9137931034482759,
      "grad_norm": 0.578125,
      "learning_rate": 8.364362621864595e-06,
      "loss": 2.4329,
      "step": 53
    },
    {
      "epoch": 0.9310344827586207,
      "grad_norm": 0.58984375,
      "learning_rate": 8.29376372550897e-06,
      "loss": 2.4784,
      "step": 54
    },
    {
      "epoch": 0.9482758620689655,
      "grad_norm": 0.60546875,
      "learning_rate": 8.221985152324385e-06,
      "loss": 2.5528,
      "step": 55
    },
    {
      "epoch": 0.9655172413793104,
      "grad_norm": 0.59375,
      "learning_rate": 8.149052610141357e-06,
      "loss": 2.4803,
      "step": 56
    },
    {
      "epoch": 0.9827586206896551,
      "grad_norm": 0.578125,
      "learning_rate": 8.07499222008977e-06,
      "loss": 2.5519,
      "step": 57
    },
    {
      "epoch": 1.0,
      "grad_norm": 7.625,
      "learning_rate": 7.999830507243478e-06,
      "loss": 3.0347,
      "step": 58
    },
    {
      "epoch": 1.0172413793103448,
      "grad_norm": 0.5703125,
      "learning_rate": 7.923594391120237e-06,
      "loss": 2.504,
      "step": 59
    },
    {
      "epoch": 1.0344827586206897,
      "grad_norm": 0.5859375,
      "learning_rate": 7.846311176040331e-06,
      "loss": 2.4902,
      "step": 60
    },
    {
      "epoch": 1.0517241379310345,
      "grad_norm": 0.578125,
      "learning_rate": 7.768008541347423e-06,
      "loss": 2.4702,
      "step": 61
    },
    {
      "epoch": 1.0689655172413792,
      "grad_norm": 0.578125,
      "learning_rate": 7.688714531495061e-06,
      "loss": 2.467,
      "step": 62
    },
    {
      "epoch": 1.0862068965517242,
      "grad_norm": 0.6015625,
      "learning_rate": 7.608457546002423e-06,
      "loss": 2.4747,
      "step": 63
    },
    {
      "epoch": 1.103448275862069,
      "grad_norm": 0.58203125,
      "learning_rate": 7.527266329282905e-06,
      "loss": 2.4916,
      "step": 64
    },
    {
      "epoch": 1.1206896551724137,
      "grad_norm": 0.5859375,
      "learning_rate": 7.445169960349167e-06,
      "loss": 2.4334,
      "step": 65
    },
    {
      "epoch": 1.1379310344827587,
      "grad_norm": 0.58984375,
      "learning_rate": 7.362197842398355e-06,
      "loss": 2.4852,
      "step": 66
    },
    {
      "epoch": 1.1551724137931034,
      "grad_norm": 0.58984375,
      "learning_rate": 7.278379692281209e-06,
      "loss": 2.5411,
      "step": 67
    },
    {
      "epoch": 1.1724137931034484,
      "grad_norm": 0.57421875,
      "learning_rate": 7.193745529858827e-06,
      "loss": 2.4498,
      "step": 68
    },
    {
      "epoch": 1.1896551724137931,
      "grad_norm": 0.6015625,
      "learning_rate": 7.10832566725092e-06,
      "loss": 2.505,
      "step": 69
    },
    {
      "epoch": 1.206896551724138,
      "grad_norm": 0.59765625,
      "learning_rate": 7.022150697979385e-06,
      "loss": 2.5124,
      "step": 70
    },
    {
      "epoch": 1.2241379310344827,
      "grad_norm": 0.56640625,
      "learning_rate": 6.9352514860110876e-06,
      "loss": 2.4896,
      "step": 71
    },
    {
      "epoch": 1.2413793103448276,
      "grad_norm": 0.58203125,
      "learning_rate": 6.847659154703785e-06,
      "loss": 2.4544,
      "step": 72
    },
    {
      "epoch": 1.2586206896551724,
      "grad_norm": 0.58984375,
      "learning_rate": 6.759405075659165e-06,
      "loss": 2.4906,
      "step": 73
    },
    {
      "epoch": 1.2758620689655173,
      "grad_norm": 0.5703125,
      "learning_rate": 6.6705208574869504e-06,
      "loss": 2.5192,
      "step": 74
    },
    {
      "epoch": 1.293103448275862,
      "grad_norm": 0.57421875,
      "learning_rate": 6.58103833448412e-06,
      "loss": 2.4903,
      "step": 75
    },
    {
      "epoch": 1.3103448275862069,
      "grad_norm": 0.59765625,
      "learning_rate": 6.490989555233328e-06,
      "loss": 2.4906,
      "step": 76
    },
    {
      "epoch": 1.3275862068965516,
      "grad_norm": 0.578125,
      "learning_rate": 6.4004067711245366e-06,
      "loss": 2.53,
      "step": 77
    },
    {
      "epoch": 1.3448275862068966,
      "grad_norm": 0.56640625,
      "learning_rate": 6.309322424804034e-06,
      "loss": 2.5156,
      "step": 78
    },
    {
      "epoch": 1.3620689655172413,
      "grad_norm": 0.5859375,
      "learning_rate": 6.2177691385549595e-06,
      "loss": 2.4715,
      "step": 79
    },
    {
      "epoch": 1.3793103448275863,
      "grad_norm": 0.59375,
      "learning_rate": 6.125779702613471e-06,
      "loss": 2.5168,
      "step": 80
    },
    {
      "epoch": 1.396551724137931,
      "grad_norm": 0.58984375,
      "learning_rate": 6.033387063424765e-06,
      "loss": 2.4368,
      "step": 81
    },
    {
      "epoch": 1.4137931034482758,
      "grad_norm": 0.578125,
      "learning_rate": 5.94062431184317e-06,
      "loss": 2.5114,
      "step": 82
    },
    {
      "epoch": 1.4310344827586206,
      "grad_norm": 0.59375,
      "learning_rate": 5.8475246712804845e-06,
      "loss": 2.5196,
      "step": 83
    },
    {
      "epoch": 1.4482758620689655,
      "grad_norm": 0.59375,
      "learning_rate": 5.7541214858068705e-06,
      "loss": 2.5102,
      "step": 84
    },
    {
      "epoch": 1.4655172413793103,
      "grad_norm": 0.578125,
      "learning_rate": 5.660448208208513e-06,
      "loss": 2.5055,
      "step": 85
    },
    {
      "epoch": 1.4827586206896552,
      "grad_norm": 0.58984375,
      "learning_rate": 5.566538388006351e-06,
      "loss": 2.4588,
      "step": 86
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.5859375,
      "learning_rate": 5.472425659440157e-06,
      "loss": 2.5023,
      "step": 87
    },
    {
      "epoch": 1.5172413793103448,
      "grad_norm": 0.58203125,
      "learning_rate": 5.378143729422285e-06,
      "loss": 2.5304,
      "step": 88
    },
    {
      "epoch": 1.5344827586206895,
      "grad_norm": 0.58203125,
      "learning_rate": 5.2837263654653715e-06,
      "loss": 2.4919,
      "step": 89
    },
    {
      "epoch": 1.5517241379310345,
      "grad_norm": 0.57421875,
      "learning_rate": 5.189207383588353e-06,
      "loss": 2.4797,
      "step": 90
    },
    {
      "epoch": 1.5689655172413794,
      "grad_norm": 0.58203125,
      "learning_rate": 5.094620636205096e-06,
      "loss": 2.4814,
      "step": 91
    },
    {
      "epoch": 1.5862068965517242,
      "grad_norm": 0.58984375,
      "learning_rate": 5e-06,
      "loss": 2.5611,
      "step": 92
    },
    {
      "epoch": 1.603448275862069,
      "grad_norm": 0.59765625,
      "learning_rate": 4.905379363794907e-06,
      "loss": 2.5033,
      "step": 93
    },
    {
      "epoch": 1.6206896551724137,
      "grad_norm": 0.59765625,
      "learning_rate": 4.81079261641165e-06,
      "loss": 2.4482,
      "step": 94
    },
    {
      "epoch": 1.6379310344827587,
      "grad_norm": 0.5859375,
      "learning_rate": 4.71627363453463e-06,
      "loss": 2.4975,
      "step": 95
    },
    {
      "epoch": 1.6551724137931034,
      "grad_norm": 0.58984375,
      "learning_rate": 4.6218562705777185e-06,
      "loss": 2.5358,
      "step": 96
    },
    {
      "epoch": 1.6724137931034484,
      "grad_norm": 0.58984375,
      "learning_rate": 4.527574340559844e-06,
      "loss": 2.4869,
      "step": 97
    },
    {
      "epoch": 1.6896551724137931,
      "grad_norm": 0.56640625,
      "learning_rate": 4.4334616119936516e-06,
      "loss": 2.4609,
      "step": 98
    },
    {
      "epoch": 1.706896551724138,
      "grad_norm": 0.5703125,
      "learning_rate": 4.33955179179149e-06,
      "loss": 2.5028,
      "step": 99
    },
    {
      "epoch": 1.7241379310344827,
      "grad_norm": 0.5703125,
      "learning_rate": 4.245878514193131e-06,
      "loss": 2.4754,
      "step": 100
    },
    {
      "epoch": 1.7413793103448276,
      "grad_norm": 0.59375,
      "learning_rate": 4.152475328719517e-06,
      "loss": 2.4999,
      "step": 101
    },
    {
      "epoch": 1.7586206896551724,
      "grad_norm": 0.5625,
      "learning_rate": 4.059375688156833e-06,
      "loss": 2.4888,
      "step": 102
    },
    {
      "epoch": 1.7758620689655173,
      "grad_norm": 0.5859375,
      "learning_rate": 3.966612936575235e-06,
      "loss": 2.4616,
      "step": 103
    },
    {
      "epoch": 1.793103448275862,
      "grad_norm": 0.578125,
      "learning_rate": 3.87422029738653e-06,
      "loss": 2.5035,
      "step": 104
    },
    {
      "epoch": 1.8103448275862069,
      "grad_norm": 0.578125,
      "learning_rate": 3.782230861445041e-06,
      "loss": 2.4758,
      "step": 105
    },
    {
      "epoch": 1.8275862068965516,
      "grad_norm": 0.5859375,
      "learning_rate": 3.6906775751959667e-06,
      "loss": 2.4702,
      "step": 106
    },
    {
      "epoch": 1.8448275862068966,
      "grad_norm": 0.58203125,
      "learning_rate": 3.5995932288754655e-06,
      "loss": 2.4774,
      "step": 107
    },
    {
      "epoch": 1.8620689655172413,
      "grad_norm": 0.5703125,
      "learning_rate": 3.509010444766674e-06,
      "loss": 2.4498,
      "step": 108
    },
    {
      "epoch": 1.8793103448275863,
      "grad_norm": 0.57421875,
      "learning_rate": 3.4189616655158803e-06,
      "loss": 2.4637,
      "step": 109
    },
    {
      "epoch": 1.896551724137931,
      "grad_norm": 0.59375,
      "learning_rate": 3.3294791425130512e-06,
      "loss": 2.4424,
      "step": 110
    },
    {
      "epoch": 1.9137931034482758,
      "grad_norm": 0.578125,
      "learning_rate": 3.240594924340835e-06,
      "loss": 2.5001,
      "step": 111
    },
    {
      "epoch": 1.9310344827586206,
      "grad_norm": 0.5703125,
      "learning_rate": 3.1523408452962156e-06,
      "loss": 2.4744,
      "step": 112
    },
    {
      "epoch": 1.9482758620689655,
      "grad_norm": 0.578125,
      "learning_rate": 3.0647485139889145e-06,
      "loss": 2.4345,
      "step": 113
    },
    {
      "epoch": 1.9655172413793105,
      "grad_norm": 0.58203125,
      "learning_rate": 2.9778493020206155e-06,
      "loss": 2.4341,
      "step": 114
    },
    {
      "epoch": 1.9827586206896552,
      "grad_norm": 0.578125,
      "learning_rate": 2.89167433274908e-06,
      "loss": 2.4849,
      "step": 115
    },
    {
      "epoch": 2.0,
      "grad_norm": 8.5,
      "learning_rate": 2.806254470141174e-06,
      "loss": 2.672,
      "step": 116
    },
    {
      "epoch": 2.0172413793103448,
      "grad_norm": 0.5703125,
      "learning_rate": 2.721620307718793e-06,
      "loss": 2.4653,
      "step": 117
    },
    {
      "epoch": 2.0344827586206895,
      "grad_norm": 0.58984375,
      "learning_rate": 2.6378021576016467e-06,
      "loss": 2.4909,
      "step": 118
    },
    {
      "epoch": 2.0517241379310347,
      "grad_norm": 0.57421875,
      "learning_rate": 2.554830039650834e-06,
      "loss": 2.4597,
      "step": 119
    },
    {
      "epoch": 2.0689655172413794,
      "grad_norm": 0.56640625,
      "learning_rate": 2.4727336707170973e-06,
      "loss": 2.4821,
      "step": 120
    },
    {
      "epoch": 2.086206896551724,
      "grad_norm": 0.57421875,
      "learning_rate": 2.391542453997578e-06,
      "loss": 2.5163,
      "step": 121
    },
    {
      "epoch": 2.103448275862069,
      "grad_norm": 0.5859375,
      "learning_rate": 2.3112854685049397e-06,
      "loss": 2.4828,
      "step": 122
    },
    {
      "epoch": 2.1206896551724137,
      "grad_norm": 0.59375,
      "learning_rate": 2.2319914586525776e-06,
      "loss": 2.5516,
      "step": 123
    },
    {
      "epoch": 2.1379310344827585,
      "grad_norm": 0.5859375,
      "learning_rate": 2.1536888239596714e-06,
      "loss": 2.5319,
      "step": 124
    },
    {
      "epoch": 2.1551724137931036,
      "grad_norm": 0.58984375,
      "learning_rate": 2.0764056088797646e-06,
      "loss": 2.4779,
      "step": 125
    },
    {
      "epoch": 2.1724137931034484,
      "grad_norm": 0.5859375,
      "learning_rate": 2.000169492756523e-06,
      "loss": 2.4576,
      "step": 126
    },
    {
      "epoch": 2.189655172413793,
      "grad_norm": 0.578125,
      "learning_rate": 1.9250077799102323e-06,
      "loss": 2.517,
      "step": 127
    },
    {
      "epoch": 2.206896551724138,
      "grad_norm": 0.578125,
      "learning_rate": 1.8509473898586432e-06,
      "loss": 2.4432,
      "step": 128
    },
    {
      "epoch": 2.2241379310344827,
      "grad_norm": 0.58203125,
      "learning_rate": 1.7780148476756148e-06,
      "loss": 2.5175,
      "step": 129
    },
    {
      "epoch": 2.2413793103448274,
      "grad_norm": 0.58203125,
      "learning_rate": 1.7062362744910321e-06,
      "loss": 2.5205,
      "step": 130
    },
    {
      "epoch": 2.2586206896551726,
      "grad_norm": 0.578125,
      "learning_rate": 1.6356373781354058e-06,
      "loss": 2.4785,
      "step": 131
    },
    {
      "epoch": 2.2758620689655173,
      "grad_norm": 0.59375,
      "learning_rate": 1.566243443932496e-06,
      "loss": 2.4949,
      "step": 132
    },
    {
      "epoch": 2.293103448275862,
      "grad_norm": 0.58984375,
      "learning_rate": 1.4980793256432474e-06,
      "loss": 2.4718,
      "step": 133
    },
    {
      "epoch": 2.310344827586207,
      "grad_norm": 0.5703125,
      "learning_rate": 1.4311694365643048e-06,
      "loss": 2.4628,
      "step": 134
    },
    {
      "epoch": 2.3275862068965516,
      "grad_norm": 0.58203125,
      "learning_rate": 1.3655377407842813e-06,
      "loss": 2.4419,
      "step": 135
    },
    {
      "epoch": 2.344827586206897,
      "grad_norm": 0.58203125,
      "learning_rate": 1.3012077446008969e-06,
      "loss": 2.5397,
      "step": 136
    },
    {
      "epoch": 2.3620689655172415,
      "grad_norm": 0.56640625,
      "learning_rate": 1.2382024881020937e-06,
      "loss": 2.4173,
      "step": 137
    },
    {
      "epoch": 2.3793103448275863,
      "grad_norm": 0.58203125,
      "learning_rate": 1.1765445369141276e-06,
      "loss": 2.4625,
      "step": 138
    },
    {
      "epoch": 2.396551724137931,
      "grad_norm": 0.625,
      "learning_rate": 1.1162559741195733e-06,
      "loss": 2.4921,
      "step": 139
    },
    {
      "epoch": 2.413793103448276,
      "grad_norm": 0.57421875,
      "learning_rate": 1.057358392348171e-06,
      "loss": 2.4595,
      "step": 140
    },
    {
      "epoch": 2.4310344827586206,
      "grad_norm": 0.56640625,
      "learning_rate": 9.998728860433277e-07,
      "loss": 2.518,
      "step": 141
    },
    {
      "epoch": 2.4482758620689653,
      "grad_norm": 0.56640625,
      "learning_rate": 9.438200439070388e-07,
      "loss": 2.4773,
      "step": 142
    },
    {
      "epoch": 2.4655172413793105,
      "grad_norm": 0.578125,
      "learning_rate": 8.892199415259501e-07,
      "loss": 2.4426,
      "step": 143
    },
    {
      "epoch": 2.4827586206896552,
      "grad_norm": 0.59375,
      "learning_rate": 8.360921341811956e-07,
      "loss": 2.5139,
      "step": 144
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.58203125,
      "learning_rate": 7.844556498445788e-07,
      "loss": 2.4961,
      "step": 145
    },
    {
      "epoch": 2.5172413793103448,
      "grad_norm": 0.5859375,
      "learning_rate": 7.343289823636168e-07,
      "loss": 2.4857,
      "step": 146
    },
    {
      "epoch": 2.5344827586206895,
      "grad_norm": 0.5859375,
      "learning_rate": 6.857300848378857e-07,
      "loss": 2.4787,
      "step": 147
    },
    {
      "epoch": 2.5517241379310347,
      "grad_norm": 0.56640625,
      "learning_rate": 6.386763631890313e-07,
      "loss": 2.4518,
      "step": 148
    },
    {
      "epoch": 2.5689655172413794,
      "grad_norm": 0.58984375,
      "learning_rate": 5.931846699267558e-07,
      "loss": 2.4404,
      "step": 149
    },
    {
      "epoch": 2.586206896551724,
      "grad_norm": 0.56640625,
      "learning_rate": 5.492712981130171e-07,
      "loss": 2.52,
      "step": 150
    },
    {
      "epoch": 2.603448275862069,
      "grad_norm": 0.58984375,
      "learning_rate": 5.0695197552659e-07,
      "loss": 2.4569,
      "step": 151
    },
    {
      "epoch": 2.6206896551724137,
      "grad_norm": 0.5703125,
      "learning_rate": 4.6624185903008713e-07,
      "loss": 2.4528,
      "step": 152
    },
    {
      "epoch": 2.637931034482759,
      "grad_norm": 0.5859375,
      "learning_rate": 4.271555291414636e-07,
      "loss": 2.4914,
      "step": 153
    },
    {
      "epoch": 2.655172413793103,
      "grad_norm": 0.56640625,
      "learning_rate": 3.8970698481193225e-07,
      "loss": 2.4934,
      "step": 154
    },
    {
      "epoch": 2.6724137931034484,
      "grad_norm": 0.59375,
      "learning_rate": 3.539096384121743e-07,
      "loss": 2.5423,
      "step": 155
    },
    {
      "epoch": 2.689655172413793,
      "grad_norm": 0.58984375,
      "learning_rate": 3.1977631092863613e-07,
      "loss": 2.4767,
      "step": 156
    },
    {
      "epoch": 2.706896551724138,
      "grad_norm": 0.56640625,
      "learning_rate": 2.873192273716369e-07,
      "loss": 2.4789,
      "step": 157
    },
    {
      "epoch": 2.7241379310344827,
      "grad_norm": 0.578125,
      "learning_rate": 2.5655001239691836e-07,
      "loss": 2.5066,
      "step": 158
    },
    {
      "epoch": 2.7413793103448274,
      "grad_norm": 0.5859375,
      "learning_rate": 2.274796861422246e-07,
      "loss": 2.4971,
      "step": 159
    },
    {
      "epoch": 2.7586206896551726,
      "grad_norm": 0.578125,
      "learning_rate": 2.0011866028038617e-07,
      "loss": 2.4953,
      "step": 160
    },
    {
      "epoch": 2.7758620689655173,
      "grad_norm": 0.578125,
      "learning_rate": 1.7447673429033361e-07,
      "loss": 2.433,
      "step": 161
    },
    {
      "epoch": 2.793103448275862,
      "grad_norm": 0.55859375,
      "learning_rate": 1.5056309194736385e-07,
      "loss": 2.4957,
      "step": 162
    },
    {
      "epoch": 2.810344827586207,
      "grad_norm": 0.57421875,
      "learning_rate": 1.2838629803393343e-07,
      "loss": 2.5096,
      "step": 163
    },
    {
      "epoch": 2.8275862068965516,
      "grad_norm": 0.578125,
      "learning_rate": 1.0795429527213685e-07,
      "loss": 2.4909,
      "step": 164
    },
    {
      "epoch": 2.844827586206897,
      "grad_norm": 0.58984375,
      "learning_rate": 8.927440147898703e-08,
      "loss": 2.4872,
      "step": 165
    },
    {
      "epoch": 2.862068965517241,
      "grad_norm": 0.5703125,
      "learning_rate": 7.235330694550402e-08,
      "loss": 2.5162,
      "step": 166
    },
    {
      "epoch": 2.8793103448275863,
      "grad_norm": 0.5859375,
      "learning_rate": 5.7197072040557356e-08,
      "loss": 2.5127,
      "step": 167
    },
    {
      "epoch": 2.896551724137931,
      "grad_norm": 0.578125,
      "learning_rate": 4.381112504031337e-08,
      "loss": 2.4661,
      "step": 168
    },
    {
      "epoch": 2.913793103448276,
      "grad_norm": 0.5859375,
      "learning_rate": 3.220026018407541e-08,
      "loss": 2.4475,
      "step": 169
    },
    {
      "epoch": 2.9310344827586206,
      "grad_norm": 0.578125,
      "learning_rate": 2.236863595720562e-08,
      "loss": 2.4706,
      "step": 170
    },
    {
      "epoch": 2.9482758620689653,
      "grad_norm": 0.58984375,
      "learning_rate": 1.431977360173975e-08,
      "loss": 2.4365,
      "step": 171
    },
    {
      "epoch": 2.9655172413793105,
      "grad_norm": 0.58203125,
      "learning_rate": 8.056555855243675e-09,
      "loss": 2.4945,
      "step": 172
    },
    {
      "epoch": 2.9827586206896552,
      "grad_norm": 0.5703125,
      "learning_rate": 3.5812259183426457e-09,
      "loss": 2.4642,
      "step": 173
    },
    {
      "epoch": 3.0,
      "grad_norm": 5.28125,
      "learning_rate": 8.953866513111698e-10,
      "loss": 2.3197,
      "step": 174
    }
  ],
  "logging_steps": 1,
  "max_steps": 174,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 58,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0763682676368998e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}