| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 5.835051546391752, |
| "eval_steps": 500, |
| "global_step": 192, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.030927835051546393, |
| "grad_norm": 31.330896377563477, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 4.8446, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.061855670103092786, |
| "grad_norm": 31.82550811767578, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 4.7928, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.09278350515463918, |
| "grad_norm": 30.859010696411133, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 4.6291, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.12371134020618557, |
| "grad_norm": 31.643869400024414, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 4.713, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.15463917525773196, |
| "grad_norm": 33.41943359375, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 4.9265, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.18556701030927836, |
| "grad_norm": 32.8159065246582, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 4.832, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.21649484536082475, |
| "grad_norm": 31.771827697753906, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 4.6801, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.24742268041237114, |
| "grad_norm": 33.52857971191406, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 4.9088, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.27835051546391754, |
| "grad_norm": 31.47663688659668, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 4.5767, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.30927835051546393, |
| "grad_norm": 34.30753707885742, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 4.961, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.3402061855670103, |
| "grad_norm": 31.3031005859375, |
| "learning_rate": 5.5e-07, |
| "loss": 4.7181, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.3711340206185567, |
| "grad_norm": 32.63737106323242, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 4.7796, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.4020618556701031, |
| "grad_norm": 31.14106559753418, |
| "learning_rate": 6.5e-07, |
| "loss": 4.5596, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.4329896907216495, |
| "grad_norm": 30.995929718017578, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 4.5709, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.4639175257731959, |
| "grad_norm": 29.113582611083984, |
| "learning_rate": 7.5e-07, |
| "loss": 4.4329, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.4948453608247423, |
| "grad_norm": 27.933208465576172, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 4.337, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.5257731958762887, |
| "grad_norm": 27.216915130615234, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 4.3253, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.5567010309278351, |
| "grad_norm": 25.551725387573242, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 4.3318, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.5876288659793815, |
| "grad_norm": 22.587017059326172, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 4.0206, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.6185567010309279, |
| "grad_norm": 21.570398330688477, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 3.8186, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.6494845360824743, |
| "grad_norm": 20.41568946838379, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 3.771, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.6804123711340206, |
| "grad_norm": 19.1239070892334, |
| "learning_rate": 1.1e-06, |
| "loss": 3.5997, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.711340206185567, |
| "grad_norm": 19.375389099121094, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 3.4865, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.7422680412371134, |
| "grad_norm": 19.06153106689453, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 3.3836, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.7731958762886598, |
| "grad_norm": 17.918527603149414, |
| "learning_rate": 1.25e-06, |
| "loss": 3.141, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.8041237113402062, |
| "grad_norm": 18.42892837524414, |
| "learning_rate": 1.3e-06, |
| "loss": 3.0393, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.8350515463917526, |
| "grad_norm": 18.298789978027344, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 3.0098, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.865979381443299, |
| "grad_norm": 16.423397064208984, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 2.786, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.8969072164948454, |
| "grad_norm": 14.053132057189941, |
| "learning_rate": 1.45e-06, |
| "loss": 2.5791, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.9278350515463918, |
| "grad_norm": 14.001585006713867, |
| "learning_rate": 1.5e-06, |
| "loss": 2.5818, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.9587628865979382, |
| "grad_norm": 13.626611709594727, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 2.3909, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.9896907216494846, |
| "grad_norm": 13.853208541870117, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 2.2437, |
| "step": 32 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 13.853208541870117, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 2.2363, |
| "step": 33 |
| }, |
| { |
| "epoch": 1.0309278350515463, |
| "grad_norm": 24.948280334472656, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 2.1462, |
| "step": 34 |
| }, |
| { |
| "epoch": 1.0618556701030928, |
| "grad_norm": 15.244880676269531, |
| "learning_rate": 1.75e-06, |
| "loss": 1.9498, |
| "step": 35 |
| }, |
| { |
| "epoch": 1.0927835051546393, |
| "grad_norm": 15.106721878051758, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 1.7473, |
| "step": 36 |
| }, |
| { |
| "epoch": 1.1237113402061856, |
| "grad_norm": 14.083307266235352, |
| "learning_rate": 1.85e-06, |
| "loss": 1.6159, |
| "step": 37 |
| }, |
| { |
| "epoch": 1.1546391752577319, |
| "grad_norm": 15.086730003356934, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 1.4901, |
| "step": 38 |
| }, |
| { |
| "epoch": 1.1855670103092784, |
| "grad_norm": 13.394895553588867, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 1.2992, |
| "step": 39 |
| }, |
| { |
| "epoch": 1.2164948453608249, |
| "grad_norm": 13.134459495544434, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 1.1725, |
| "step": 40 |
| }, |
| { |
| "epoch": 1.2474226804123711, |
| "grad_norm": 13.013129234313965, |
| "learning_rate": 2.05e-06, |
| "loss": 1.056, |
| "step": 41 |
| }, |
| { |
| "epoch": 1.2783505154639174, |
| "grad_norm": 12.309725761413574, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.8779, |
| "step": 42 |
| }, |
| { |
| "epoch": 1.309278350515464, |
| "grad_norm": 11.717962265014648, |
| "learning_rate": 2.15e-06, |
| "loss": 0.7724, |
| "step": 43 |
| }, |
| { |
| "epoch": 1.3402061855670104, |
| "grad_norm": 10.531135559082031, |
| "learning_rate": 2.2e-06, |
| "loss": 0.6223, |
| "step": 44 |
| }, |
| { |
| "epoch": 1.3711340206185567, |
| "grad_norm": 9.451440811157227, |
| "learning_rate": 2.25e-06, |
| "loss": 0.5095, |
| "step": 45 |
| }, |
| { |
| "epoch": 1.402061855670103, |
| "grad_norm": 7.987078666687012, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.3841, |
| "step": 46 |
| }, |
| { |
| "epoch": 1.4329896907216495, |
| "grad_norm": 6.726036071777344, |
| "learning_rate": 2.35e-06, |
| "loss": 0.3333, |
| "step": 47 |
| }, |
| { |
| "epoch": 1.463917525773196, |
| "grad_norm": 4.108280658721924, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.2602, |
| "step": 48 |
| }, |
| { |
| "epoch": 1.4948453608247423, |
| "grad_norm": 2.6678011417388916, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.2251, |
| "step": 49 |
| }, |
| { |
| "epoch": 1.5257731958762886, |
| "grad_norm": 2.8046882152557373, |
| "learning_rate": 2.5e-06, |
| "loss": 0.1984, |
| "step": 50 |
| }, |
| { |
| "epoch": 1.556701030927835, |
| "grad_norm": 2.6669921875, |
| "learning_rate": 2.55e-06, |
| "loss": 0.2084, |
| "step": 51 |
| }, |
| { |
| "epoch": 1.5876288659793816, |
| "grad_norm": 1.8236641883850098, |
| "learning_rate": 2.6e-06, |
| "loss": 0.169, |
| "step": 52 |
| }, |
| { |
| "epoch": 1.6185567010309279, |
| "grad_norm": 1.405070185661316, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.1635, |
| "step": 53 |
| }, |
| { |
| "epoch": 1.6494845360824741, |
| "grad_norm": 1.869597315788269, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.1572, |
| "step": 54 |
| }, |
| { |
| "epoch": 1.6804123711340206, |
| "grad_norm": 1.2718607187271118, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.1411, |
| "step": 55 |
| }, |
| { |
| "epoch": 1.7113402061855671, |
| "grad_norm": 0.9617771506309509, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.1427, |
| "step": 56 |
| }, |
| { |
| "epoch": 1.7422680412371134, |
| "grad_norm": 0.6258248090744019, |
| "learning_rate": 2.85e-06, |
| "loss": 0.1241, |
| "step": 57 |
| }, |
| { |
| "epoch": 1.7731958762886597, |
| "grad_norm": 0.5329176187515259, |
| "learning_rate": 2.9e-06, |
| "loss": 0.1214, |
| "step": 58 |
| }, |
| { |
| "epoch": 1.8041237113402062, |
| "grad_norm": 0.8642020225524902, |
| "learning_rate": 2.95e-06, |
| "loss": 0.1336, |
| "step": 59 |
| }, |
| { |
| "epoch": 1.8350515463917527, |
| "grad_norm": 0.627882182598114, |
| "learning_rate": 3e-06, |
| "loss": 0.121, |
| "step": 60 |
| }, |
| { |
| "epoch": 1.865979381443299, |
| "grad_norm": 0.7841255068778992, |
| "learning_rate": 3.05e-06, |
| "loss": 0.1163, |
| "step": 61 |
| }, |
| { |
| "epoch": 1.8969072164948453, |
| "grad_norm": 0.5962179899215698, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.1117, |
| "step": 62 |
| }, |
| { |
| "epoch": 1.9278350515463918, |
| "grad_norm": 0.5344879627227783, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.1162, |
| "step": 63 |
| }, |
| { |
| "epoch": 1.9587628865979383, |
| "grad_norm": 0.4333738386631012, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.1152, |
| "step": 64 |
| }, |
| { |
| "epoch": 1.9896907216494846, |
| "grad_norm": 0.5866488814353943, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.1171, |
| "step": 65 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.5866488814353943, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.0989, |
| "step": 66 |
| }, |
| { |
| "epoch": 2.0309278350515463, |
| "grad_norm": 0.8926169872283936, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.1101, |
| "step": 67 |
| }, |
| { |
| "epoch": 2.0618556701030926, |
| "grad_norm": 0.4878557622432709, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.1097, |
| "step": 68 |
| }, |
| { |
| "epoch": 2.0927835051546393, |
| "grad_norm": 0.4245884418487549, |
| "learning_rate": 3.45e-06, |
| "loss": 0.1089, |
| "step": 69 |
| }, |
| { |
| "epoch": 2.1237113402061856, |
| "grad_norm": 0.46030956506729126, |
| "learning_rate": 3.5e-06, |
| "loss": 0.1145, |
| "step": 70 |
| }, |
| { |
| "epoch": 2.154639175257732, |
| "grad_norm": 0.2561495900154114, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.1118, |
| "step": 71 |
| }, |
| { |
| "epoch": 2.1855670103092786, |
| "grad_norm": 0.4041549563407898, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.12, |
| "step": 72 |
| }, |
| { |
| "epoch": 2.216494845360825, |
| "grad_norm": 0.5008761882781982, |
| "learning_rate": 3.65e-06, |
| "loss": 0.1205, |
| "step": 73 |
| }, |
| { |
| "epoch": 2.247422680412371, |
| "grad_norm": 0.4233935475349426, |
| "learning_rate": 3.7e-06, |
| "loss": 0.1114, |
| "step": 74 |
| }, |
| { |
| "epoch": 2.2783505154639174, |
| "grad_norm": 0.3335970640182495, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.1124, |
| "step": 75 |
| }, |
| { |
| "epoch": 2.3092783505154637, |
| "grad_norm": 0.3707692325115204, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.1128, |
| "step": 76 |
| }, |
| { |
| "epoch": 2.3402061855670104, |
| "grad_norm": 0.3256140649318695, |
| "learning_rate": 3.85e-06, |
| "loss": 0.1107, |
| "step": 77 |
| }, |
| { |
| "epoch": 2.3711340206185567, |
| "grad_norm": 0.37325188517570496, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.1105, |
| "step": 78 |
| }, |
| { |
| "epoch": 2.402061855670103, |
| "grad_norm": 0.3175105154514313, |
| "learning_rate": 3.95e-06, |
| "loss": 0.1012, |
| "step": 79 |
| }, |
| { |
| "epoch": 2.4329896907216497, |
| "grad_norm": 0.2766858637332916, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.1107, |
| "step": 80 |
| }, |
| { |
| "epoch": 2.463917525773196, |
| "grad_norm": 0.3836129307746887, |
| "learning_rate": 4.05e-06, |
| "loss": 0.1122, |
| "step": 81 |
| }, |
| { |
| "epoch": 2.4948453608247423, |
| "grad_norm": 0.3037668764591217, |
| "learning_rate": 4.1e-06, |
| "loss": 0.1086, |
| "step": 82 |
| }, |
| { |
| "epoch": 2.5257731958762886, |
| "grad_norm": 0.2676079571247101, |
| "learning_rate": 4.15e-06, |
| "loss": 0.1078, |
| "step": 83 |
| }, |
| { |
| "epoch": 2.556701030927835, |
| "grad_norm": 0.2874029874801636, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.1105, |
| "step": 84 |
| }, |
| { |
| "epoch": 2.5876288659793816, |
| "grad_norm": 0.26905298233032227, |
| "learning_rate": 4.25e-06, |
| "loss": 0.1085, |
| "step": 85 |
| }, |
| { |
| "epoch": 2.618556701030928, |
| "grad_norm": 0.29047831892967224, |
| "learning_rate": 4.3e-06, |
| "loss": 0.1054, |
| "step": 86 |
| }, |
| { |
| "epoch": 2.649484536082474, |
| "grad_norm": 0.28010866045951843, |
| "learning_rate": 4.350000000000001e-06, |
| "loss": 0.1056, |
| "step": 87 |
| }, |
| { |
| "epoch": 2.680412371134021, |
| "grad_norm": 0.2774551510810852, |
| "learning_rate": 4.4e-06, |
| "loss": 0.1062, |
| "step": 88 |
| }, |
| { |
| "epoch": 2.711340206185567, |
| "grad_norm": 0.36281612515449524, |
| "learning_rate": 4.450000000000001e-06, |
| "loss": 0.1027, |
| "step": 89 |
| }, |
| { |
| "epoch": 2.7422680412371134, |
| "grad_norm": 0.47866588830947876, |
| "learning_rate": 4.5e-06, |
| "loss": 0.1095, |
| "step": 90 |
| }, |
| { |
| "epoch": 2.7731958762886597, |
| "grad_norm": 0.35023054480552673, |
| "learning_rate": 4.5500000000000005e-06, |
| "loss": 0.1089, |
| "step": 91 |
| }, |
| { |
| "epoch": 2.804123711340206, |
| "grad_norm": 0.24360068142414093, |
| "learning_rate": 4.600000000000001e-06, |
| "loss": 0.1046, |
| "step": 92 |
| }, |
| { |
| "epoch": 2.8350515463917527, |
| "grad_norm": 0.6635286211967468, |
| "learning_rate": 4.65e-06, |
| "loss": 0.1083, |
| "step": 93 |
| }, |
| { |
| "epoch": 2.865979381443299, |
| "grad_norm": 0.44127148389816284, |
| "learning_rate": 4.7e-06, |
| "loss": 0.1082, |
| "step": 94 |
| }, |
| { |
| "epoch": 2.8969072164948453, |
| "grad_norm": 0.37611761689186096, |
| "learning_rate": 4.75e-06, |
| "loss": 0.107, |
| "step": 95 |
| }, |
| { |
| "epoch": 2.927835051546392, |
| "grad_norm": 0.29258766770362854, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 0.1079, |
| "step": 96 |
| }, |
| { |
| "epoch": 2.9587628865979383, |
| "grad_norm": 0.19428832828998566, |
| "learning_rate": 4.85e-06, |
| "loss": 0.1078, |
| "step": 97 |
| }, |
| { |
| "epoch": 2.9896907216494846, |
| "grad_norm": 0.30616602301597595, |
| "learning_rate": 4.9000000000000005e-06, |
| "loss": 0.1065, |
| "step": 98 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.7477559447288513, |
| "learning_rate": 4.95e-06, |
| "loss": 0.1055, |
| "step": 99 |
| }, |
| { |
| "epoch": 3.0309278350515463, |
| "grad_norm": 0.250438392162323, |
| "learning_rate": 5e-06, |
| "loss": 0.1101, |
| "step": 100 |
| }, |
| { |
| "epoch": 3.0618556701030926, |
| "grad_norm": 0.22634077072143555, |
| "learning_rate": 4.998542555915435e-06, |
| "loss": 0.1027, |
| "step": 101 |
| }, |
| { |
| "epoch": 3.0927835051546393, |
| "grad_norm": 0.4120931327342987, |
| "learning_rate": 4.994171922976349e-06, |
| "loss": 0.1041, |
| "step": 102 |
| }, |
| { |
| "epoch": 3.1237113402061856, |
| "grad_norm": 0.4830654561519623, |
| "learning_rate": 4.986893197145238e-06, |
| "loss": 0.1103, |
| "step": 103 |
| }, |
| { |
| "epoch": 3.154639175257732, |
| "grad_norm": 0.39406684041023254, |
| "learning_rate": 4.976714865090827e-06, |
| "loss": 0.1041, |
| "step": 104 |
| }, |
| { |
| "epoch": 3.1855670103092786, |
| "grad_norm": 0.2510586380958557, |
| "learning_rate": 4.963648794292992e-06, |
| "loss": 0.1035, |
| "step": 105 |
| }, |
| { |
| "epoch": 3.216494845360825, |
| "grad_norm": 0.3943077027797699, |
| "learning_rate": 4.947710219205808e-06, |
| "loss": 0.1013, |
| "step": 106 |
| }, |
| { |
| "epoch": 3.247422680412371, |
| "grad_norm": 0.521105945110321, |
| "learning_rate": 4.928917723494854e-06, |
| "loss": 0.1066, |
| "step": 107 |
| }, |
| { |
| "epoch": 3.2783505154639174, |
| "grad_norm": 0.39136141538619995, |
| "learning_rate": 4.907293218369499e-06, |
| "loss": 0.1027, |
| "step": 108 |
| }, |
| { |
| "epoch": 3.3092783505154637, |
| "grad_norm": 0.7300665974617004, |
| "learning_rate": 4.882861917035403e-06, |
| "loss": 0.1052, |
| "step": 109 |
| }, |
| { |
| "epoch": 3.3402061855670104, |
| "grad_norm": 0.6353471279144287, |
| "learning_rate": 4.855652305297052e-06, |
| "loss": 0.1029, |
| "step": 110 |
| }, |
| { |
| "epoch": 3.3711340206185567, |
| "grad_norm": 0.7813745737075806, |
| "learning_rate": 4.825696108344583e-06, |
| "loss": 0.1061, |
| "step": 111 |
| }, |
| { |
| "epoch": 3.402061855670103, |
| "grad_norm": 0.35173729062080383, |
| "learning_rate": 4.793028253763633e-06, |
| "loss": 0.1087, |
| "step": 112 |
| }, |
| { |
| "epoch": 3.4329896907216497, |
| "grad_norm": 0.40841713547706604, |
| "learning_rate": 4.757686830811332e-06, |
| "loss": 0.1024, |
| "step": 113 |
| }, |
| { |
| "epoch": 3.463917525773196, |
| "grad_norm": 0.24630524218082428, |
| "learning_rate": 4.7197130460059385e-06, |
| "loss": 0.1071, |
| "step": 114 |
| }, |
| { |
| "epoch": 3.4948453608247423, |
| "grad_norm": 0.2730019986629486, |
| "learning_rate": 4.679151175081879e-06, |
| "loss": 0.1059, |
| "step": 115 |
| }, |
| { |
| "epoch": 3.5257731958762886, |
| "grad_norm": 0.4475044906139374, |
| "learning_rate": 4.636048511366222e-06, |
| "loss": 0.1055, |
| "step": 116 |
| }, |
| { |
| "epoch": 3.556701030927835, |
| "grad_norm": 0.4198738634586334, |
| "learning_rate": 4.590455310636778e-06, |
| "loss": 0.1031, |
| "step": 117 |
| }, |
| { |
| "epoch": 3.5876288659793816, |
| "grad_norm": 0.3596094846725464, |
| "learning_rate": 4.542424732526105e-06, |
| "loss": 0.1008, |
| "step": 118 |
| }, |
| { |
| "epoch": 3.618556701030928, |
| "grad_norm": 0.33327534794807434, |
| "learning_rate": 4.4920127785397615e-06, |
| "loss": 0.1046, |
| "step": 119 |
| }, |
| { |
| "epoch": 3.649484536082474, |
| "grad_norm": 0.26345059275627136, |
| "learning_rate": 4.43927822676105e-06, |
| "loss": 0.1036, |
| "step": 120 |
| }, |
| { |
| "epoch": 3.680412371134021, |
| "grad_norm": 0.4857600927352905, |
| "learning_rate": 4.384282563318403e-06, |
| "loss": 0.1059, |
| "step": 121 |
| }, |
| { |
| "epoch": 3.711340206185567, |
| "grad_norm": 0.5701199173927307, |
| "learning_rate": 4.32708991069531e-06, |
| "loss": 0.1048, |
| "step": 122 |
| }, |
| { |
| "epoch": 3.7422680412371134, |
| "grad_norm": 0.6222259998321533, |
| "learning_rate": 4.267766952966369e-06, |
| "loss": 0.1031, |
| "step": 123 |
| }, |
| { |
| "epoch": 3.7731958762886597, |
| "grad_norm": 0.3183295428752899, |
| "learning_rate": 4.206382858046636e-06, |
| "loss": 0.0993, |
| "step": 124 |
| }, |
| { |
| "epoch": 3.804123711340206, |
| "grad_norm": 0.4618447721004486, |
| "learning_rate": 4.143009197044932e-06, |
| "loss": 0.1049, |
| "step": 125 |
| }, |
| { |
| "epoch": 3.8350515463917527, |
| "grad_norm": 0.48689910769462585, |
| "learning_rate": 4.077719860815132e-06, |
| "loss": 0.1004, |
| "step": 126 |
| }, |
| { |
| "epoch": 3.865979381443299, |
| "grad_norm": 0.5205806493759155, |
| "learning_rate": 4.010590973802737e-06, |
| "loss": 0.1044, |
| "step": 127 |
| }, |
| { |
| "epoch": 3.8969072164948453, |
| "grad_norm": 0.29967570304870605, |
| "learning_rate": 3.941700805287169e-06, |
| "loss": 0.101, |
| "step": 128 |
| }, |
| { |
| "epoch": 3.927835051546392, |
| "grad_norm": 0.4049663543701172, |
| "learning_rate": 3.871129678123297e-06, |
| "loss": 0.1023, |
| "step": 129 |
| }, |
| { |
| "epoch": 3.9587628865979383, |
| "grad_norm": 0.3243454098701477, |
| "learning_rate": 3.798959875088584e-06, |
| "loss": 0.0996, |
| "step": 130 |
| }, |
| { |
| "epoch": 3.9896907216494846, |
| "grad_norm": 0.2899306118488312, |
| "learning_rate": 3.7252755429450437e-06, |
| "loss": 0.098, |
| "step": 131 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.2899306118488312, |
| "learning_rate": 3.650162594327881e-06, |
| "loss": 0.0992, |
| "step": 132 |
| }, |
| { |
| "epoch": 4.030927835051546, |
| "grad_norm": 1.1642309427261353, |
| "learning_rate": 3.5737086075752054e-06, |
| "loss": 0.0959, |
| "step": 133 |
| }, |
| { |
| "epoch": 4.061855670103093, |
| "grad_norm": 0.7160468697547913, |
| "learning_rate": 3.4960027246156043e-06, |
| "loss": 0.1032, |
| "step": 134 |
| }, |
| { |
| "epoch": 4.092783505154639, |
| "grad_norm": 0.5639675855636597, |
| "learning_rate": 3.417135547032642e-06, |
| "loss": 0.0991, |
| "step": 135 |
| }, |
| { |
| "epoch": 4.123711340206185, |
| "grad_norm": 0.4824839234352112, |
| "learning_rate": 3.3371990304274654e-06, |
| "loss": 0.0924, |
| "step": 136 |
| }, |
| { |
| "epoch": 4.154639175257732, |
| "grad_norm": 0.47185495495796204, |
| "learning_rate": 3.25628637720269e-06, |
| "loss": 0.0965, |
| "step": 137 |
| }, |
| { |
| "epoch": 4.185567010309279, |
| "grad_norm": 0.7727807760238647, |
| "learning_rate": 3.174491927892561e-06, |
| "loss": 0.1004, |
| "step": 138 |
| }, |
| { |
| "epoch": 4.216494845360825, |
| "grad_norm": 0.8841911554336548, |
| "learning_rate": 3.091911051166117e-06, |
| "loss": 0.1037, |
| "step": 139 |
| }, |
| { |
| "epoch": 4.247422680412371, |
| "grad_norm": 1.0637952089309692, |
| "learning_rate": 3.0086400326315853e-06, |
| "loss": 0.0988, |
| "step": 140 |
| }, |
| { |
| "epoch": 4.278350515463917, |
| "grad_norm": 0.8243911266326904, |
| "learning_rate": 2.924775962571667e-06, |
| "loss": 0.0973, |
| "step": 141 |
| }, |
| { |
| "epoch": 4.309278350515464, |
| "grad_norm": 0.7223678827285767, |
| "learning_rate": 2.840416622740617e-06, |
| "loss": 0.0967, |
| "step": 142 |
| }, |
| { |
| "epoch": 4.34020618556701, |
| "grad_norm": 0.6225246787071228, |
| "learning_rate": 2.7556603723550855e-06, |
| "loss": 0.1013, |
| "step": 143 |
| }, |
| { |
| "epoch": 4.371134020618557, |
| "grad_norm": 0.4938095211982727, |
| "learning_rate": 2.670606033411678e-06, |
| "loss": 0.0939, |
| "step": 144 |
| }, |
| { |
| "epoch": 4.402061855670103, |
| "grad_norm": 0.521511435508728, |
| "learning_rate": 2.58535277546492e-06, |
| "loss": 0.1026, |
| "step": 145 |
| }, |
| { |
| "epoch": 4.43298969072165, |
| "grad_norm": 0.6362576484680176, |
| "learning_rate": 2.5e-06, |
| "loss": 0.0977, |
| "step": 146 |
| }, |
| { |
| "epoch": 4.463917525773196, |
| "grad_norm": 0.523650586605072, |
| "learning_rate": 2.4146472245350804e-06, |
| "loss": 0.0994, |
| "step": 147 |
| }, |
| { |
| "epoch": 4.494845360824742, |
| "grad_norm": 0.6778432726860046, |
| "learning_rate": 2.3293939665883233e-06, |
| "loss": 0.0927, |
| "step": 148 |
| }, |
| { |
| "epoch": 4.525773195876289, |
| "grad_norm": 0.5455223917961121, |
| "learning_rate": 2.2443396276449145e-06, |
| "loss": 0.0957, |
| "step": 149 |
| }, |
| { |
| "epoch": 4.556701030927835, |
| "grad_norm": 0.5670258402824402, |
| "learning_rate": 2.159583377259384e-06, |
| "loss": 0.0932, |
| "step": 150 |
| }, |
| { |
| "epoch": 4.587628865979381, |
| "grad_norm": 0.6063594818115234, |
| "learning_rate": 2.0752240374283334e-06, |
| "loss": 0.097, |
| "step": 151 |
| }, |
| { |
| "epoch": 4.618556701030927, |
| "grad_norm": 0.8716773390769958, |
| "learning_rate": 1.991359967368416e-06, |
| "loss": 0.0932, |
| "step": 152 |
| }, |
| { |
| "epoch": 4.649484536082475, |
| "grad_norm": 0.5040673613548279, |
| "learning_rate": 1.9080889488338833e-06, |
| "loss": 0.0945, |
| "step": 153 |
| }, |
| { |
| "epoch": 4.680412371134021, |
| "grad_norm": 0.7626310586929321, |
| "learning_rate": 1.8255080721074391e-06, |
| "loss": 0.0907, |
| "step": 154 |
| }, |
| { |
| "epoch": 4.711340206185567, |
| "grad_norm": 0.7159733176231384, |
| "learning_rate": 1.7437136227973108e-06, |
| "loss": 0.0959, |
| "step": 155 |
| }, |
| { |
| "epoch": 4.742268041237113, |
| "grad_norm": 0.7429032325744629, |
| "learning_rate": 1.6628009695725348e-06, |
| "loss": 0.0891, |
| "step": 156 |
| }, |
| { |
| "epoch": 4.77319587628866, |
| "grad_norm": 0.7814318537712097, |
| "learning_rate": 1.5828644529673592e-06, |
| "loss": 0.0888, |
| "step": 157 |
| }, |
| { |
| "epoch": 4.804123711340206, |
| "grad_norm": 0.9120146036148071, |
| "learning_rate": 1.5039972753843966e-06, |
| "loss": 0.0902, |
| "step": 158 |
| }, |
| { |
| "epoch": 4.835051546391752, |
| "grad_norm": 0.6306695342063904, |
| "learning_rate": 1.4262913924247956e-06, |
| "loss": 0.0871, |
| "step": 159 |
| }, |
| { |
| "epoch": 4.8659793814432994, |
| "grad_norm": 0.9792791604995728, |
| "learning_rate": 1.3498374056721198e-06, |
| "loss": 0.0943, |
| "step": 160 |
| }, |
| { |
| "epoch": 4.896907216494846, |
| "grad_norm": 1.5480984449386597, |
| "learning_rate": 1.2747244570549578e-06, |
| "loss": 0.0978, |
| "step": 161 |
| }, |
| { |
| "epoch": 4.927835051546392, |
| "grad_norm": 0.7049218416213989, |
| "learning_rate": 1.2010401249114166e-06, |
| "loss": 0.0879, |
| "step": 162 |
| }, |
| { |
| "epoch": 4.958762886597938, |
| "grad_norm": 1.0587555170059204, |
| "learning_rate": 1.1288703218767027e-06, |
| "loss": 0.0868, |
| "step": 163 |
| }, |
| { |
| "epoch": 4.989690721649485, |
| "grad_norm": 0.9614342451095581, |
| "learning_rate": 1.0582991947128324e-06, |
| "loss": 0.0921, |
| "step": 164 |
| }, |
| { |
| "epoch": 5.0, |
| "grad_norm": 0.9614342451095581, |
| "learning_rate": 9.89409026197264e-07, |
| "loss": 0.0744, |
| "step": 165 |
| }, |
| { |
| "epoch": 5.030927835051546, |
| "grad_norm": 2.484208106994629, |
| "learning_rate": 9.222801391848688e-07, |
| "loss": 0.09, |
| "step": 166 |
| }, |
| { |
| "epoch": 5.061855670103093, |
| "grad_norm": 1.0592623949050903, |
| "learning_rate": 8.569908029550686e-07, |
| "loss": 0.085, |
| "step": 167 |
| }, |
| { |
| "epoch": 5.092783505154639, |
| "grad_norm": 1.5516217947006226, |
| "learning_rate": 7.936171419533653e-07, |
| "loss": 0.0871, |
| "step": 168 |
| }, |
| { |
| "epoch": 5.123711340206185, |
| "grad_norm": 1.048254132270813, |
| "learning_rate": 7.322330470336314e-07, |
| "loss": 0.0859, |
| "step": 169 |
| }, |
| { |
| "epoch": 5.154639175257732, |
| "grad_norm": 1.14556086063385, |
| "learning_rate": 6.729100893046897e-07, |
| "loss": 0.0895, |
| "step": 170 |
| }, |
| { |
| "epoch": 5.185567010309279, |
| "grad_norm": 0.8058810234069824, |
| "learning_rate": 6.157174366815979e-07, |
| "loss": 0.0866, |
| "step": 171 |
| }, |
| { |
| "epoch": 5.216494845360825, |
| "grad_norm": 0.7695685625076294, |
| "learning_rate": 5.607217732389503e-07, |
| "loss": 0.0851, |
| "step": 172 |
| }, |
| { |
| "epoch": 5.247422680412371, |
| "grad_norm": 0.9549402594566345, |
| "learning_rate": 5.079872214602388e-07, |
| "loss": 0.0866, |
| "step": 173 |
| }, |
| { |
| "epoch": 5.278350515463917, |
| "grad_norm": 1.521871566772461, |
| "learning_rate": 4.5757526747389506e-07, |
| "loss": 0.0834, |
| "step": 174 |
| }, |
| { |
| "epoch": 5.309278350515464, |
| "grad_norm": 0.7393187284469604, |
| "learning_rate": 4.095446893632235e-07, |
| "loss": 0.0845, |
| "step": 175 |
| }, |
| { |
| "epoch": 5.34020618556701, |
| "grad_norm": 0.7789802551269531, |
| "learning_rate": 3.639514886337786e-07, |
| "loss": 0.0821, |
| "step": 176 |
| }, |
| { |
| "epoch": 5.371134020618557, |
| "grad_norm": 0.7179467082023621, |
| "learning_rate": 3.208488249181216e-07, |
| "loss": 0.079, |
| "step": 177 |
| }, |
| { |
| "epoch": 5.402061855670103, |
| "grad_norm": 0.7849685549736023, |
| "learning_rate": 2.80286953994062e-07, |
| "loss": 0.0829, |
| "step": 178 |
| }, |
| { |
| "epoch": 5.43298969072165, |
| "grad_norm": 1.0993801355361938, |
| "learning_rate": 2.423131691886682e-07, |
| "loss": 0.0876, |
| "step": 179 |
| }, |
| { |
| "epoch": 5.463917525773196, |
| "grad_norm": 0.8301159143447876, |
| "learning_rate": 2.0697174623636795e-07, |
| "loss": 0.0872, |
| "step": 180 |
| }, |
| { |
| "epoch": 5.494845360824742, |
| "grad_norm": 1.1351754665374756, |
| "learning_rate": 1.743038916554171e-07, |
| "loss": 0.0883, |
| "step": 181 |
| }, |
| { |
| "epoch": 5.525773195876289, |
| "grad_norm": 1.0282760858535767, |
| "learning_rate": 1.44347694702949e-07, |
| "loss": 0.0785, |
| "step": 182 |
| }, |
| { |
| "epoch": 5.556701030927835, |
| "grad_norm": 0.7519952058792114, |
| "learning_rate": 1.1713808296459794e-07, |
| "loss": 0.0862, |
| "step": 183 |
| }, |
| { |
| "epoch": 5.587628865979381, |
| "grad_norm": 0.7985242605209351, |
| "learning_rate": 9.270678163050218e-08, |
| "loss": 0.0831, |
| "step": 184 |
| }, |
| { |
| "epoch": 5.618556701030927, |
| "grad_norm": 0.783128559589386, |
| "learning_rate": 7.108227650514637e-08, |
| "loss": 0.0766, |
| "step": 185 |
| }, |
| { |
| "epoch": 5.649484536082475, |
| "grad_norm": 0.9066674113273621, |
| "learning_rate": 5.2289780794192726e-08, |
| "loss": 0.0812, |
| "step": 186 |
| }, |
| { |
| "epoch": 5.680412371134021, |
| "grad_norm": 1.1403989791870117, |
| "learning_rate": 3.635120570700784e-08, |
| "loss": 0.0871, |
| "step": 187 |
| }, |
| { |
| "epoch": 5.711340206185567, |
| "grad_norm": 0.9781270027160645, |
| "learning_rate": 2.3285134909173113e-08, |
| "loss": 0.0816, |
| "step": 188 |
| }, |
| { |
| "epoch": 5.742268041237113, |
| "grad_norm": 0.8195537328720093, |
| "learning_rate": 1.3106802854762901e-08, |
| "loss": 0.0838, |
| "step": 189 |
| }, |
| { |
| "epoch": 5.77319587628866, |
| "grad_norm": 1.0189077854156494, |
| "learning_rate": 5.828077023651846e-09, |
| "loss": 0.0843, |
| "step": 190 |
| }, |
| { |
| "epoch": 5.804123711340206, |
| "grad_norm": 1.030791163444519, |
| "learning_rate": 1.4574440845649406e-09, |
| "loss": 0.0847, |
| "step": 191 |
| }, |
| { |
| "epoch": 5.835051546391752, |
| "grad_norm": 1.2187172174453735, |
| "learning_rate": 0.0, |
| "loss": 0.0841, |
| "step": 192 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 192, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 32, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 4.2077352764217754e+17, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |