{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.99609375,
  "eval_steps": 500,
  "global_step": 171,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01171875,
      "grad_norm": 0.9319692850112915,
      "learning_rate": 1.1764705882352942e-07,
      "loss": 0.7028,
      "step": 1
    },
    {
      "epoch": 0.0234375,
      "grad_norm": 0.9757155179977417,
      "learning_rate": 2.3529411764705883e-07,
      "loss": 0.7416,
      "step": 2
    },
    {
      "epoch": 0.03515625,
      "grad_norm": 1.0889487266540527,
      "learning_rate": 3.529411764705883e-07,
      "loss": 0.8392,
      "step": 3
    },
    {
      "epoch": 0.046875,
      "grad_norm": 1.0020272731781006,
      "learning_rate": 4.7058823529411767e-07,
      "loss": 0.7549,
      "step": 4
    },
    {
      "epoch": 0.05859375,
      "grad_norm": 1.0064201354980469,
      "learning_rate": 5.882352941176471e-07,
      "loss": 0.802,
      "step": 5
    },
    {
      "epoch": 0.0703125,
      "grad_norm": 0.9806166291236877,
      "learning_rate": 7.058823529411766e-07,
      "loss": 0.7754,
      "step": 6
    },
    {
      "epoch": 0.08203125,
      "grad_norm": 0.9506519436836243,
      "learning_rate": 8.235294117647059e-07,
      "loss": 0.7591,
      "step": 7
    },
    {
      "epoch": 0.09375,
      "grad_norm": 0.9138185977935791,
      "learning_rate": 9.411764705882353e-07,
      "loss": 0.7681,
      "step": 8
    },
    {
      "epoch": 0.10546875,
      "grad_norm": 0.872790515422821,
      "learning_rate": 1.0588235294117648e-06,
      "loss": 0.7332,
      "step": 9
    },
    {
      "epoch": 0.1171875,
      "grad_norm": 0.8308555483818054,
      "learning_rate": 1.1764705882352942e-06,
      "loss": 0.7991,
      "step": 10
    },
    {
      "epoch": 0.12890625,
      "grad_norm": 0.7250374555587769,
      "learning_rate": 1.2941176470588237e-06,
      "loss": 0.6812,
      "step": 11
    },
    {
      "epoch": 0.140625,
      "grad_norm": 0.6780915856361389,
      "learning_rate": 1.4117647058823531e-06,
      "loss": 0.7178,
      "step": 12
    },
    {
      "epoch": 0.15234375,
      "grad_norm": 0.6684752702713013,
      "learning_rate": 1.5294117647058826e-06,
      "loss": 0.7335,
      "step": 13
    },
    {
      "epoch": 0.1640625,
      "grad_norm": 0.48017001152038574,
      "learning_rate": 1.6470588235294118e-06,
      "loss": 0.7293,
      "step": 14
    },
    {
      "epoch": 0.17578125,
      "grad_norm": 0.4176006317138672,
      "learning_rate": 1.7647058823529414e-06,
      "loss": 0.6453,
      "step": 15
    },
    {
      "epoch": 0.1875,
      "grad_norm": 0.4581579566001892,
      "learning_rate": 1.8823529411764707e-06,
      "loss": 0.709,
      "step": 16
    },
    {
      "epoch": 0.19921875,
      "grad_norm": 0.41040804982185364,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.6965,
      "step": 17
    },
    {
      "epoch": 0.2109375,
      "grad_norm": 0.38801896572113037,
      "learning_rate": 2.1176470588235296e-06,
      "loss": 0.7142,
      "step": 18
    },
    {
      "epoch": 0.22265625,
      "grad_norm": 0.5587085485458374,
      "learning_rate": 2.2352941176470592e-06,
      "loss": 0.6624,
      "step": 19
    },
    {
      "epoch": 0.234375,
      "grad_norm": 0.6821652054786682,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 0.6708,
      "step": 20
    },
    {
      "epoch": 0.24609375,
      "grad_norm": 0.7130780816078186,
      "learning_rate": 2.470588235294118e-06,
      "loss": 0.68,
      "step": 21
    },
    {
      "epoch": 0.2578125,
      "grad_norm": 0.6799156069755554,
      "learning_rate": 2.5882352941176473e-06,
      "loss": 0.6069,
      "step": 22
    },
    {
      "epoch": 0.26953125,
      "grad_norm": 0.6155521869659424,
      "learning_rate": 2.7058823529411766e-06,
      "loss": 0.6541,
      "step": 23
    },
    {
      "epoch": 0.28125,
      "grad_norm": 0.5290796160697937,
      "learning_rate": 2.8235294117647062e-06,
      "loss": 0.6276,
      "step": 24
    },
    {
      "epoch": 0.29296875,
      "grad_norm": 0.460702121257782,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 0.6172,
      "step": 25
    },
    {
      "epoch": 0.3046875,
      "grad_norm": 0.4080013334751129,
      "learning_rate": 3.058823529411765e-06,
      "loss": 0.609,
      "step": 26
    },
    {
      "epoch": 0.31640625,
      "grad_norm": 0.32690924406051636,
      "learning_rate": 3.1764705882352943e-06,
      "loss": 0.585,
      "step": 27
    },
    {
      "epoch": 0.328125,
      "grad_norm": 0.3010313808917999,
      "learning_rate": 3.2941176470588236e-06,
      "loss": 0.5684,
      "step": 28
    },
    {
      "epoch": 0.33984375,
      "grad_norm": 0.30681392550468445,
      "learning_rate": 3.4117647058823532e-06,
      "loss": 0.6241,
      "step": 29
    },
    {
      "epoch": 0.3515625,
      "grad_norm": 0.2821861505508423,
      "learning_rate": 3.529411764705883e-06,
      "loss": 0.5612,
      "step": 30
    },
    {
      "epoch": 0.36328125,
      "grad_norm": 0.3114885985851288,
      "learning_rate": 3.6470588235294117e-06,
      "loss": 0.5777,
      "step": 31
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.2961554527282715,
      "learning_rate": 3.7647058823529414e-06,
      "loss": 0.6118,
      "step": 32
    },
    {
      "epoch": 0.38671875,
      "grad_norm": 0.2794322371482849,
      "learning_rate": 3.882352941176471e-06,
      "loss": 0.5221,
      "step": 33
    },
    {
      "epoch": 0.3984375,
      "grad_norm": 0.33694833517074585,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.5857,
      "step": 34
    },
    {
      "epoch": 0.41015625,
      "grad_norm": 0.2691885530948639,
      "learning_rate": 4.11764705882353e-06,
      "loss": 0.6122,
      "step": 35
    },
    {
      "epoch": 0.421875,
      "grad_norm": 0.2636789381504059,
      "learning_rate": 4.235294117647059e-06,
      "loss": 0.5355,
      "step": 36
    },
    {
      "epoch": 0.43359375,
      "grad_norm": 0.25277385115623474,
      "learning_rate": 4.352941176470588e-06,
      "loss": 0.5373,
      "step": 37
    },
    {
      "epoch": 0.4453125,
      "grad_norm": 0.2117689847946167,
      "learning_rate": 4.4705882352941184e-06,
      "loss": 0.5308,
      "step": 38
    },
    {
      "epoch": 0.45703125,
      "grad_norm": 0.19506287574768066,
      "learning_rate": 4.588235294117647e-06,
      "loss": 0.5372,
      "step": 39
    },
    {
      "epoch": 0.46875,
      "grad_norm": 0.21475745737552643,
      "learning_rate": 4.705882352941177e-06,
      "loss": 0.565,
      "step": 40
    },
    {
      "epoch": 0.48046875,
      "grad_norm": 0.20359668135643005,
      "learning_rate": 4.823529411764706e-06,
      "loss": 0.5349,
      "step": 41
    },
    {
      "epoch": 0.4921875,
      "grad_norm": 0.2028248906135559,
      "learning_rate": 4.941176470588236e-06,
      "loss": 0.5587,
      "step": 42
    },
    {
      "epoch": 0.50390625,
      "grad_norm": 0.21315933763980865,
      "learning_rate": 5.058823529411765e-06,
      "loss": 0.542,
      "step": 43
    },
    {
      "epoch": 0.515625,
      "grad_norm": 0.21315297484397888,
      "learning_rate": 5.176470588235295e-06,
      "loss": 0.5878,
      "step": 44
    },
    {
      "epoch": 0.52734375,
      "grad_norm": 0.19848909974098206,
      "learning_rate": 5.294117647058824e-06,
      "loss": 0.5238,
      "step": 45
    },
    {
      "epoch": 0.5390625,
      "grad_norm": 0.20311777293682098,
      "learning_rate": 5.411764705882353e-06,
      "loss": 0.548,
      "step": 46
    },
    {
      "epoch": 0.55078125,
      "grad_norm": 0.18243664503097534,
      "learning_rate": 5.529411764705883e-06,
      "loss": 0.5282,
      "step": 47
    },
    {
      "epoch": 0.5625,
      "grad_norm": 0.16974785923957825,
      "learning_rate": 5.6470588235294125e-06,
      "loss": 0.4836,
      "step": 48
    },
    {
      "epoch": 0.57421875,
      "grad_norm": 0.1762179285287857,
      "learning_rate": 5.764705882352941e-06,
      "loss": 0.5304,
      "step": 49
    },
    {
      "epoch": 0.5859375,
      "grad_norm": 0.20087581872940063,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.5317,
      "step": 50
    },
    {
      "epoch": 0.59765625,
      "grad_norm": 0.1862439662218094,
      "learning_rate": 6e-06,
      "loss": 0.5536,
      "step": 51
    },
    {
      "epoch": 0.609375,
      "grad_norm": 0.1928662210702896,
      "learning_rate": 6.11764705882353e-06,
      "loss": 0.4844,
      "step": 52
    },
    {
      "epoch": 0.62109375,
      "grad_norm": 0.20256248116493225,
      "learning_rate": 6.2352941176470595e-06,
      "loss": 0.5428,
      "step": 53
    },
    {
      "epoch": 0.6328125,
      "grad_norm": 0.2007371336221695,
      "learning_rate": 6.352941176470589e-06,
      "loss": 0.5225,
      "step": 54
    },
    {
      "epoch": 0.64453125,
      "grad_norm": 0.16175827383995056,
      "learning_rate": 6.470588235294119e-06,
      "loss": 0.5137,
      "step": 55
    },
    {
      "epoch": 0.65625,
      "grad_norm": 0.17586955428123474,
      "learning_rate": 6.588235294117647e-06,
      "loss": 0.5034,
      "step": 56
    },
    {
      "epoch": 0.66796875,
      "grad_norm": 0.17159290611743927,
      "learning_rate": 6.705882352941176e-06,
      "loss": 0.5267,
      "step": 57
    },
    {
      "epoch": 0.6796875,
      "grad_norm": 0.17129066586494446,
      "learning_rate": 6.8235294117647065e-06,
      "loss": 0.4634,
      "step": 58
    },
    {
      "epoch": 0.69140625,
      "grad_norm": 0.14943340420722961,
      "learning_rate": 6.941176470588236e-06,
      "loss": 0.4749,
      "step": 59
    },
    {
      "epoch": 0.703125,
      "grad_norm": 0.17984403669834137,
      "learning_rate": 7.058823529411766e-06,
      "loss": 0.5254,
      "step": 60
    },
    {
      "epoch": 0.71484375,
      "grad_norm": 0.15619614720344543,
      "learning_rate": 7.176470588235295e-06,
      "loss": 0.4941,
      "step": 61
    },
    {
      "epoch": 0.7265625,
      "grad_norm": 0.14417926967144012,
      "learning_rate": 7.294117647058823e-06,
      "loss": 0.4234,
      "step": 62
    },
    {
      "epoch": 0.73828125,
      "grad_norm": 0.16936203837394714,
      "learning_rate": 7.4117647058823535e-06,
      "loss": 0.5016,
      "step": 63
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.17044682800769806,
      "learning_rate": 7.529411764705883e-06,
      "loss": 0.4817,
      "step": 64
    },
    {
      "epoch": 0.76171875,
      "grad_norm": 0.1539342850446701,
      "learning_rate": 7.647058823529411e-06,
      "loss": 0.4804,
      "step": 65
    },
    {
      "epoch": 0.7734375,
      "grad_norm": 0.15344035625457764,
      "learning_rate": 7.764705882352941e-06,
      "loss": 0.4875,
      "step": 66
    },
    {
      "epoch": 0.78515625,
      "grad_norm": 0.15734320878982544,
      "learning_rate": 7.882352941176471e-06,
      "loss": 0.436,
      "step": 67
    },
    {
      "epoch": 0.796875,
      "grad_norm": 0.17874149978160858,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.5315,
      "step": 68
    },
    {
      "epoch": 0.80859375,
      "grad_norm": 0.18438568711280823,
      "learning_rate": 8.11764705882353e-06,
      "loss": 0.5206,
      "step": 69
    },
    {
      "epoch": 0.8203125,
      "grad_norm": 0.16773667931556702,
      "learning_rate": 8.23529411764706e-06,
      "loss": 0.4604,
      "step": 70
    },
    {
      "epoch": 0.83203125,
      "grad_norm": 0.146653413772583,
      "learning_rate": 8.35294117647059e-06,
      "loss": 0.4445,
      "step": 71
    },
    {
      "epoch": 0.84375,
      "grad_norm": 0.1633101850748062,
      "learning_rate": 8.470588235294118e-06,
      "loss": 0.4741,
      "step": 72
    },
    {
      "epoch": 0.85546875,
      "grad_norm": 0.1426013708114624,
      "learning_rate": 8.588235294117647e-06,
      "loss": 0.4447,
      "step": 73
    },
    {
      "epoch": 0.8671875,
      "grad_norm": 0.19708958268165588,
      "learning_rate": 8.705882352941177e-06,
      "loss": 0.4915,
      "step": 74
    },
    {
      "epoch": 0.87890625,
      "grad_norm": 0.13479125499725342,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.4302,
      "step": 75
    },
    {
      "epoch": 0.890625,
      "grad_norm": 0.16112269461154938,
      "learning_rate": 8.941176470588237e-06,
      "loss": 0.4457,
      "step": 76
    },
    {
      "epoch": 0.90234375,
      "grad_norm": 0.1683078110218048,
      "learning_rate": 9.058823529411765e-06,
      "loss": 0.4617,
      "step": 77
    },
    {
      "epoch": 0.9140625,
      "grad_norm": 0.17289473116397858,
      "learning_rate": 9.176470588235294e-06,
      "loss": 0.4885,
      "step": 78
    },
    {
      "epoch": 0.92578125,
      "grad_norm": 0.14186030626296997,
      "learning_rate": 9.294117647058824e-06,
      "loss": 0.4063,
      "step": 79
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.17006702721118927,
      "learning_rate": 9.411764705882354e-06,
      "loss": 0.4684,
      "step": 80
    },
    {
      "epoch": 0.94921875,
      "grad_norm": 0.16714246571063995,
      "learning_rate": 9.529411764705882e-06,
      "loss": 0.4416,
      "step": 81
    },
    {
      "epoch": 0.9609375,
      "grad_norm": 0.1373886913061142,
      "learning_rate": 9.647058823529412e-06,
      "loss": 0.4677,
      "step": 82
    },
    {
      "epoch": 0.97265625,
      "grad_norm": 0.15918415784835815,
      "learning_rate": 9.764705882352942e-06,
      "loss": 0.4479,
      "step": 83
    },
    {
      "epoch": 0.984375,
      "grad_norm": 0.12358763813972473,
      "learning_rate": 9.882352941176472e-06,
      "loss": 0.4033,
      "step": 84
    },
    {
      "epoch": 0.99609375,
      "grad_norm": 0.19624371826648712,
      "learning_rate": 1e-05,
      "loss": 0.4579,
      "step": 85
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.19624371826648712,
      "learning_rate": 9.99995783847866e-06,
      "loss": 0.4138,
      "step": 86
    },
    {
      "epoch": 1.01171875,
      "grad_norm": 0.2939195930957794,
      "learning_rate": 9.999831354625678e-06,
      "loss": 0.3971,
      "step": 87
    },
    {
      "epoch": 1.0234375,
      "grad_norm": 0.14151132106781006,
      "learning_rate": 9.999620550574155e-06,
      "loss": 0.4168,
      "step": 88
    },
    {
      "epoch": 1.03515625,
      "grad_norm": 0.174483984708786,
      "learning_rate": 9.999325429879215e-06,
      "loss": 0.3952,
      "step": 89
    },
    {
      "epoch": 1.046875,
      "grad_norm": 0.16392698884010315,
      "learning_rate": 9.998945997517957e-06,
      "loss": 0.4031,
      "step": 90
    },
    {
      "epoch": 1.05859375,
      "grad_norm": 0.13462132215499878,
      "learning_rate": 9.99848225988936e-06,
      "loss": 0.3974,
      "step": 91
    },
    {
      "epoch": 1.0703125,
      "grad_norm": 0.20017389953136444,
      "learning_rate": 9.997934224814173e-06,
      "loss": 0.4054,
      "step": 92
    },
    {
      "epoch": 1.08203125,
      "grad_norm": 0.15260270237922668,
      "learning_rate": 9.997301901534797e-06,
      "loss": 0.43,
      "step": 93
    },
    {
      "epoch": 1.09375,
      "grad_norm": 0.15193504095077515,
      "learning_rate": 9.996585300715117e-06,
      "loss": 0.3885,
      "step": 94
    },
    {
      "epoch": 1.10546875,
      "grad_norm": 0.16993655264377594,
      "learning_rate": 9.99578443444032e-06,
      "loss": 0.4191,
      "step": 95
    },
    {
      "epoch": 1.1171875,
      "grad_norm": 0.15632706880569458,
      "learning_rate": 9.994899316216709e-06,
      "loss": 0.3439,
      "step": 96
    },
    {
      "epoch": 1.12890625,
      "grad_norm": 0.1436368227005005,
      "learning_rate": 9.99392996097145e-06,
      "loss": 0.3609,
      "step": 97
    },
    {
      "epoch": 1.140625,
      "grad_norm": 0.14202651381492615,
      "learning_rate": 9.992876385052346e-06,
      "loss": 0.4133,
      "step": 98
    },
    {
      "epoch": 1.15234375,
      "grad_norm": 0.17068006098270416,
      "learning_rate": 9.991738606227537e-06,
      "loss": 0.3775,
      "step": 99
    },
    {
      "epoch": 1.1640625,
      "grad_norm": 0.15222977101802826,
      "learning_rate": 9.990516643685222e-06,
      "loss": 0.4191,
      "step": 100
    },
    {
      "epoch": 1.17578125,
      "grad_norm": 0.1581757664680481,
      "learning_rate": 9.989210518033316e-06,
      "loss": 0.4098,
      "step": 101
    },
    {
      "epoch": 1.1875,
      "grad_norm": 0.14198894798755646,
      "learning_rate": 9.987820251299121e-06,
      "loss": 0.4031,
      "step": 102
    },
    {
      "epoch": 1.19921875,
      "grad_norm": 0.1587005853652954,
      "learning_rate": 9.98634586692894e-06,
      "loss": 0.3632,
      "step": 103
    },
    {
      "epoch": 1.2109375,
      "grad_norm": 0.1380324512720108,
      "learning_rate": 9.984787389787689e-06,
      "loss": 0.4229,
      "step": 104
    },
    {
      "epoch": 1.22265625,
      "grad_norm": 0.1664944589138031,
      "learning_rate": 9.983144846158472e-06,
      "loss": 0.3952,
      "step": 105
    },
    {
      "epoch": 1.234375,
      "grad_norm": 0.15593458712100983,
      "learning_rate": 9.981418263742148e-06,
      "loss": 0.3657,
      "step": 106
    },
    {
      "epoch": 1.24609375,
      "grad_norm": 0.14692318439483643,
      "learning_rate": 9.979607671656852e-06,
      "loss": 0.3777,
      "step": 107
    },
    {
      "epoch": 1.2578125,
      "grad_norm": 0.19152411818504333,
      "learning_rate": 9.97771310043751e-06,
      "loss": 0.378,
      "step": 108
    },
    {
      "epoch": 1.26953125,
      "grad_norm": 0.1495964378118515,
      "learning_rate": 9.975734582035323e-06,
      "loss": 0.3662,
      "step": 109
    },
    {
      "epoch": 1.28125,
      "grad_norm": 0.14938651025295258,
      "learning_rate": 9.973672149817232e-06,
      "loss": 0.4197,
      "step": 110
    },
    {
      "epoch": 1.29296875,
      "grad_norm": 0.15581446886062622,
      "learning_rate": 9.971525838565348e-06,
      "loss": 0.3804,
      "step": 111
    },
    {
      "epoch": 1.3046875,
      "grad_norm": 0.14621272683143616,
      "learning_rate": 9.96929568447637e-06,
      "loss": 0.3372,
      "step": 112
    },
    {
      "epoch": 1.31640625,
      "grad_norm": 0.14326448738574982,
      "learning_rate": 9.966981725160972e-06,
      "loss": 0.4092,
      "step": 113
    },
    {
      "epoch": 1.328125,
      "grad_norm": 0.1629864126443863,
      "learning_rate": 9.964583999643174e-06,
      "loss": 0.3829,
      "step": 114
    },
    {
      "epoch": 1.33984375,
      "grad_norm": 0.16448885202407837,
      "learning_rate": 9.96210254835968e-06,
      "loss": 0.3952,
      "step": 115
    },
    {
      "epoch": 1.3515625,
      "grad_norm": 0.13906875252723694,
      "learning_rate": 9.95953741315919e-06,
      "loss": 0.3501,
      "step": 116
    },
    {
      "epoch": 1.36328125,
      "grad_norm": 0.1344955563545227,
      "learning_rate": 9.95688863730171e-06,
      "loss": 0.4212,
      "step": 117
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.2054166942834854,
      "learning_rate": 9.954156265457801e-06,
      "loss": 0.4155,
      "step": 118
    },
    {
      "epoch": 1.38671875,
      "grad_norm": 0.14791074395179749,
      "learning_rate": 9.951340343707852e-06,
      "loss": 0.4104,
      "step": 119
    },
    {
      "epoch": 1.3984375,
      "grad_norm": 0.17642416059970856,
      "learning_rate": 9.948440919541277e-06,
      "loss": 0.3502,
      "step": 120
    },
    {
      "epoch": 1.41015625,
      "grad_norm": 0.14525847136974335,
      "learning_rate": 9.945458041855732e-06,
      "loss": 0.383,
      "step": 121
    },
    {
      "epoch": 1.421875,
      "grad_norm": 0.15956953167915344,
      "learning_rate": 9.942391760956277e-06,
      "loss": 0.3864,
      "step": 122
    },
    {
      "epoch": 1.43359375,
      "grad_norm": 0.16518352925777435,
      "learning_rate": 9.939242128554542e-06,
      "loss": 0.4374,
      "step": 123
    },
    {
      "epoch": 1.4453125,
      "grad_norm": 0.15952655673027039,
      "learning_rate": 9.936009197767847e-06,
      "loss": 0.3719,
      "step": 124
    },
    {
      "epoch": 1.45703125,
      "grad_norm": 0.13431760668754578,
      "learning_rate": 9.932693023118299e-06,
      "loss": 0.3693,
      "step": 125
    },
    {
      "epoch": 1.46875,
      "grad_norm": 0.15348534286022186,
      "learning_rate": 9.929293660531889e-06,
      "loss": 0.3708,
      "step": 126
    },
    {
      "epoch": 1.48046875,
      "grad_norm": 0.1592601090669632,
      "learning_rate": 9.925811167337533e-06,
      "loss": 0.3598,
      "step": 127
    },
    {
      "epoch": 1.4921875,
      "grad_norm": 0.16625793278217316,
      "learning_rate": 9.922245602266119e-06,
      "loss": 0.3548,
      "step": 128
    },
    {
      "epoch": 1.50390625,
      "grad_norm": 0.16050715744495392,
      "learning_rate": 9.918597025449505e-06,
      "loss": 0.3732,
      "step": 129
    },
    {
      "epoch": 1.515625,
      "grad_norm": 0.1567108929157257,
      "learning_rate": 9.91486549841951e-06,
      "loss": 0.3875,
      "step": 130
    },
    {
      "epoch": 1.52734375,
      "grad_norm": 0.13900204002857208,
      "learning_rate": 9.911051084106877e-06,
      "loss": 0.3134,
      "step": 131
    },
    {
      "epoch": 1.5390625,
      "grad_norm": 0.17397968471050262,
      "learning_rate": 9.90715384684021e-06,
      "loss": 0.3283,
      "step": 132
    },
    {
      "epoch": 1.55078125,
      "grad_norm": 0.13944688439369202,
      "learning_rate": 9.903173852344889e-06,
      "loss": 0.3801,
      "step": 133
    },
    {
      "epoch": 1.5625,
      "grad_norm": 0.1763840615749359,
      "learning_rate": 9.899111167741966e-06,
      "loss": 0.3426,
      "step": 134
    },
    {
      "epoch": 1.57421875,
      "grad_norm": 0.15428921580314636,
      "learning_rate": 9.894965861547023e-06,
      "loss": 0.4046,
      "step": 135
    },
    {
      "epoch": 1.5859375,
      "grad_norm": 0.18792587518692017,
      "learning_rate": 9.890738003669029e-06,
      "loss": 0.3948,
      "step": 136
    },
    {
      "epoch": 1.59765625,
      "grad_norm": 0.14246441423892975,
      "learning_rate": 9.88642766540915e-06,
      "loss": 0.343,
      "step": 137
    },
    {
      "epoch": 1.609375,
      "grad_norm": 0.14254647493362427,
      "learning_rate": 9.882034919459556e-06,
      "loss": 0.3908,
      "step": 138
    },
    {
      "epoch": 1.62109375,
      "grad_norm": 0.15610483288764954,
      "learning_rate": 9.877559839902185e-06,
      "loss": 0.4127,
      "step": 139
    },
    {
      "epoch": 1.6328125,
      "grad_norm": 0.14824527502059937,
      "learning_rate": 9.873002502207502e-06,
      "loss": 0.3469,
      "step": 140
    },
    {
      "epoch": 1.64453125,
      "grad_norm": 0.13025063276290894,
      "learning_rate": 9.868362983233226e-06,
      "loss": 0.3467,
      "step": 141
    },
    {
      "epoch": 1.65625,
      "grad_norm": 0.12863893806934357,
      "learning_rate": 9.863641361223025e-06,
      "loss": 0.3441,
      "step": 142
    },
    {
      "epoch": 1.66796875,
      "grad_norm": 0.1754492223262787,
      "learning_rate": 9.858837715805207e-06,
      "loss": 0.3893,
      "step": 143
    },
    {
      "epoch": 1.6796875,
      "grad_norm": 0.14538809657096863,
      "learning_rate": 9.853952127991374e-06,
      "loss": 0.361,
      "step": 144
    },
    {
      "epoch": 1.69140625,
      "grad_norm": 0.13839296996593475,
      "learning_rate": 9.848984680175049e-06,
      "loss": 0.4422,
      "step": 145
    },
    {
      "epoch": 1.703125,
      "grad_norm": 0.16836000978946686,
      "learning_rate": 9.843935456130295e-06,
      "loss": 0.3741,
      "step": 146
    },
    {
      "epoch": 1.71484375,
      "grad_norm": 0.1621960699558258,
      "learning_rate": 9.8388045410103e-06,
      "loss": 0.4299,
      "step": 147
    },
    {
      "epoch": 1.7265625,
      "grad_norm": 0.15016399323940277,
      "learning_rate": 9.833592021345938e-06,
      "loss": 0.3613,
      "step": 148
    },
    {
      "epoch": 1.73828125,
      "grad_norm": 0.1778838336467743,
      "learning_rate": 9.828297985044314e-06,
      "loss": 0.4127,
      "step": 149
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.20492997765541077,
      "learning_rate": 9.822922521387277e-06,
      "loss": 0.4566,
      "step": 150
    },
    {
      "epoch": 1.76171875,
      "grad_norm": 0.142822265625,
      "learning_rate": 9.817465721029916e-06,
      "loss": 0.3561,
      "step": 151
    },
    {
      "epoch": 1.7734375,
      "grad_norm": 0.15397801995277405,
      "learning_rate": 9.811927675999035e-06,
      "loss": 0.3854,
      "step": 152
    },
    {
      "epoch": 1.78515625,
      "grad_norm": 0.13618412613868713,
      "learning_rate": 9.806308479691595e-06,
      "loss": 0.3933,
      "step": 153
    },
    {
      "epoch": 1.796875,
      "grad_norm": 0.19063451886177063,
      "learning_rate": 9.800608226873143e-06,
      "loss": 0.382,
      "step": 154
    },
    {
      "epoch": 1.80859375,
      "grad_norm": 0.16610917448997498,
      "learning_rate": 9.794827013676206e-06,
      "loss": 0.402,
      "step": 155
    },
    {
      "epoch": 1.8203125,
      "grad_norm": 0.14735649526119232,
      "learning_rate": 9.788964937598688e-06,
      "loss": 0.3646,
      "step": 156
    },
    {
      "epoch": 1.83203125,
      "grad_norm": 0.1583123356103897,
      "learning_rate": 9.783022097502204e-06,
      "loss": 0.3519,
      "step": 157
    },
    {
      "epoch": 1.84375,
      "grad_norm": 0.14677587151527405,
      "learning_rate": 9.776998593610428e-06,
      "loss": 0.3739,
      "step": 158
    },
    {
      "epoch": 1.85546875,
      "grad_norm": 0.15498070418834686,
      "learning_rate": 9.770894527507393e-06,
      "loss": 0.344,
      "step": 159
    },
    {
      "epoch": 1.8671875,
      "grad_norm": 0.164178267121315,
      "learning_rate": 9.764710002135784e-06,
      "loss": 0.3701,
      "step": 160
    },
    {
      "epoch": 1.87890625,
      "grad_norm": 0.12638989090919495,
      "learning_rate": 9.7584451217952e-06,
      "loss": 0.3985,
      "step": 161
    },
    {
      "epoch": 1.890625,
      "grad_norm": 0.17551939189434052,
      "learning_rate": 9.752099992140401e-06,
      "loss": 0.367,
      "step": 162
    },
    {
      "epoch": 1.90234375,
      "grad_norm": 0.13494940102100372,
      "learning_rate": 9.745674720179507e-06,
      "loss": 0.345,
      "step": 163
    },
    {
      "epoch": 1.9140625,
      "grad_norm": 0.15139806270599365,
      "learning_rate": 9.739169414272219e-06,
      "loss": 0.3767,
      "step": 164
    },
    {
      "epoch": 1.92578125,
      "grad_norm": 0.16584157943725586,
      "learning_rate": 9.732584184127973e-06,
      "loss": 0.4002,
      "step": 165
    },
    {
      "epoch": 1.9375,
      "grad_norm": 0.14180031418800354,
      "learning_rate": 9.7259191408041e-06,
      "loss": 0.3494,
      "step": 166
    },
    {
      "epoch": 1.94921875,
      "grad_norm": 0.15869130194187164,
      "learning_rate": 9.719174396703941e-06,
      "loss": 0.3527,
      "step": 167
    },
    {
      "epoch": 1.9609375,
      "grad_norm": 0.15733729302883148,
      "learning_rate": 9.71235006557497e-06,
      "loss": 0.4277,
      "step": 168
    },
    {
      "epoch": 1.97265625,
      "grad_norm": 0.16619561612606049,
      "learning_rate": 9.705446262506858e-06,
      "loss": 0.398,
      "step": 169
    },
    {
      "epoch": 1.984375,
      "grad_norm": 0.156788632273674,
      "learning_rate": 9.698463103929542e-06,
      "loss": 0.4272,
      "step": 170
    },
    {
      "epoch": 1.99609375,
      "grad_norm": 0.17090734839439392,
      "learning_rate": 9.691400707611258e-06,
      "loss": 0.3683,
      "step": 171
    }
  ],
  "logging_steps": 1,
  "max_steps": 850,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 171,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 36652934111232.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}