{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.870967741935484,
  "eval_steps": 500,
  "global_step": 246,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.024193548387096774,
      "grad_norm": 33.12635803222656,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.4997,
      "step": 1
    },
    {
      "epoch": 0.04838709677419355,
      "grad_norm": 32.004058837890625,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.4277,
      "step": 2
    },
    {
      "epoch": 0.07258064516129033,
      "grad_norm": 34.234554290771484,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.6112,
      "step": 3
    },
    {
      "epoch": 0.0967741935483871,
      "grad_norm": 32.96908187866211,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.5017,
      "step": 4
    },
    {
      "epoch": 0.12096774193548387,
      "grad_norm": 35.06013870239258,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.6115,
      "step": 5
    },
    {
      "epoch": 0.14516129032258066,
      "grad_norm": 33.552955627441406,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.5234,
      "step": 6
    },
    {
      "epoch": 0.1693548387096774,
      "grad_norm": 32.13972091674805,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.4724,
      "step": 7
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 32.68510055541992,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.4925,
      "step": 8
    },
    {
      "epoch": 0.21774193548387097,
      "grad_norm": 32.32320785522461,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.4983,
      "step": 9
    },
    {
      "epoch": 0.24193548387096775,
      "grad_norm": 32.311553955078125,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.4833,
      "step": 10
    },
    {
      "epoch": 0.2661290322580645,
      "grad_norm": 31.869163513183594,
      "learning_rate": 5.5e-07,
      "loss": 2.4362,
      "step": 11
    },
    {
      "epoch": 0.2903225806451613,
      "grad_norm": 31.329313278198242,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.4228,
      "step": 12
    },
    {
      "epoch": 0.31451612903225806,
      "grad_norm": 29.42159652709961,
      "learning_rate": 6.5e-07,
      "loss": 2.2499,
      "step": 13
    },
    {
      "epoch": 0.3387096774193548,
      "grad_norm": 31.27863311767578,
      "learning_rate": 7.000000000000001e-07,
      "loss": 2.3354,
      "step": 14
    },
    {
      "epoch": 0.3629032258064516,
      "grad_norm": 31.095605850219727,
      "learning_rate": 7.5e-07,
      "loss": 2.2723,
      "step": 15
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 30.90537452697754,
      "learning_rate": 8.000000000000001e-07,
      "loss": 2.2003,
      "step": 16
    },
    {
      "epoch": 0.4112903225806452,
      "grad_norm": 30.878215789794922,
      "learning_rate": 8.500000000000001e-07,
      "loss": 2.0797,
      "step": 17
    },
    {
      "epoch": 0.43548387096774194,
      "grad_norm": 32.37583541870117,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.9855,
      "step": 18
    },
    {
      "epoch": 0.4596774193548387,
      "grad_norm": 32.957889556884766,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.8497,
      "step": 19
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 33.7425537109375,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.7037,
      "step": 20
    },
    {
      "epoch": 0.5080645161290323,
      "grad_norm": 35.177791595458984,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.645,
      "step": 21
    },
    {
      "epoch": 0.532258064516129,
      "grad_norm": 34.37784957885742,
      "learning_rate": 1.1e-06,
      "loss": 1.4705,
      "step": 22
    },
    {
      "epoch": 0.5564516129032258,
      "grad_norm": 32.561283111572266,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.3819,
      "step": 23
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 28.166706085205078,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.1496,
      "step": 24
    },
    {
      "epoch": 0.6048387096774194,
      "grad_norm": 30.428386688232422,
      "learning_rate": 1.25e-06,
      "loss": 1.0998,
      "step": 25
    },
    {
      "epoch": 0.6290322580645161,
      "grad_norm": 34.153076171875,
      "learning_rate": 1.3e-06,
      "loss": 0.9278,
      "step": 26
    },
    {
      "epoch": 0.6532258064516129,
      "grad_norm": 39.16960906982422,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.7463,
      "step": 27
    },
    {
      "epoch": 0.6774193548387096,
      "grad_norm": 39.09505081176758,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.5676,
      "step": 28
    },
    {
      "epoch": 0.7016129032258065,
      "grad_norm": 38.89931869506836,
      "learning_rate": 1.45e-06,
      "loss": 0.4015,
      "step": 29
    },
    {
      "epoch": 0.7258064516129032,
      "grad_norm": 23.554725646972656,
      "learning_rate": 1.5e-06,
      "loss": 0.2364,
      "step": 30
    },
    {
      "epoch": 0.75,
      "grad_norm": 11.884359359741211,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.1429,
      "step": 31
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 5.657749176025391,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.136,
      "step": 32
    },
    {
      "epoch": 0.7983870967741935,
      "grad_norm": 3.42618465423584,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.0964,
      "step": 33
    },
    {
      "epoch": 0.8225806451612904,
      "grad_norm": 2.808098554611206,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.0826,
      "step": 34
    },
    {
      "epoch": 0.8467741935483871,
      "grad_norm": 2.475355625152588,
      "learning_rate": 1.75e-06,
      "loss": 0.0781,
      "step": 35
    },
    {
      "epoch": 0.8709677419354839,
      "grad_norm": 1.9219006299972534,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0637,
      "step": 36
    },
    {
      "epoch": 0.8951612903225806,
      "grad_norm": 1.8285995721817017,
      "learning_rate": 1.85e-06,
      "loss": 0.0708,
      "step": 37
    },
    {
      "epoch": 0.9193548387096774,
      "grad_norm": 1.9102882146835327,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0865,
      "step": 38
    },
    {
      "epoch": 0.9435483870967742,
      "grad_norm": 2.190868854522705,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0732,
      "step": 39
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 1.923084020614624,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0767,
      "step": 40
    },
    {
      "epoch": 0.9919354838709677,
      "grad_norm": 1.8931093215942383,
      "learning_rate": 2.05e-06,
      "loss": 0.0868,
      "step": 41
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.8931093215942383,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.138,
      "step": 42
    },
    {
      "epoch": 1.0241935483870968,
      "grad_norm": 6.719915866851807,
      "learning_rate": 2.15e-06,
      "loss": 0.0736,
      "step": 43
    },
    {
      "epoch": 1.0483870967741935,
      "grad_norm": 1.6687527894973755,
      "learning_rate": 2.2e-06,
      "loss": 0.0675,
      "step": 44
    },
    {
      "epoch": 1.0725806451612903,
      "grad_norm": 1.6767126321792603,
      "learning_rate": 2.25e-06,
      "loss": 0.0629,
      "step": 45
    },
    {
      "epoch": 1.096774193548387,
      "grad_norm": 1.3529062271118164,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.0614,
      "step": 46
    },
    {
      "epoch": 1.120967741935484,
      "grad_norm": 2.1146080493927,
      "learning_rate": 2.35e-06,
      "loss": 0.0602,
      "step": 47
    },
    {
      "epoch": 1.1451612903225807,
      "grad_norm": 1.1904520988464355,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0639,
      "step": 48
    },
    {
      "epoch": 1.1693548387096775,
      "grad_norm": 1.1737815141677856,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0478,
      "step": 49
    },
    {
      "epoch": 1.1935483870967742,
      "grad_norm": 1.3181250095367432,
      "learning_rate": 2.5e-06,
      "loss": 0.0564,
      "step": 50
    },
    {
      "epoch": 1.217741935483871,
      "grad_norm": 2.057123899459839,
      "learning_rate": 2.55e-06,
      "loss": 0.0432,
      "step": 51
    },
    {
      "epoch": 1.2419354838709677,
      "grad_norm": 1.1123464107513428,
      "learning_rate": 2.6e-06,
      "loss": 0.051,
      "step": 52
    },
    {
      "epoch": 1.2661290322580645,
      "grad_norm": 1.5854344367980957,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0494,
      "step": 53
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 1.2793511152267456,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0464,
      "step": 54
    },
    {
      "epoch": 1.314516129032258,
      "grad_norm": 1.3535298109054565,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.0477,
      "step": 55
    },
    {
      "epoch": 1.3387096774193548,
      "grad_norm": 1.186978816986084,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0375,
      "step": 56
    },
    {
      "epoch": 1.3629032258064515,
      "grad_norm": 1.4667912721633911,
      "learning_rate": 2.85e-06,
      "loss": 0.0424,
      "step": 57
    },
    {
      "epoch": 1.3870967741935485,
      "grad_norm": 1.0930287837982178,
      "learning_rate": 2.9e-06,
      "loss": 0.0417,
      "step": 58
    },
    {
      "epoch": 1.4112903225806452,
      "grad_norm": 1.5085082054138184,
      "learning_rate": 2.95e-06,
      "loss": 0.0479,
      "step": 59
    },
    {
      "epoch": 1.435483870967742,
      "grad_norm": 1.4030777215957642,
      "learning_rate": 3e-06,
      "loss": 0.0413,
      "step": 60
    },
    {
      "epoch": 1.4596774193548387,
      "grad_norm": 1.6423301696777344,
      "learning_rate": 3.05e-06,
      "loss": 0.0349,
      "step": 61
    },
    {
      "epoch": 1.4838709677419355,
      "grad_norm": 1.3811825513839722,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0482,
      "step": 62
    },
    {
      "epoch": 1.5080645161290323,
      "grad_norm": 1.2499895095825195,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0308,
      "step": 63
    },
    {
      "epoch": 1.532258064516129,
      "grad_norm": 1.1597909927368164,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0293,
      "step": 64
    },
    {
      "epoch": 1.5564516129032258,
      "grad_norm": 1.1042351722717285,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.035,
      "step": 65
    },
    {
      "epoch": 1.5806451612903225,
      "grad_norm": 1.13418710231781,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0254,
      "step": 66
    },
    {
      "epoch": 1.6048387096774195,
      "grad_norm": 0.934019148349762,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0283,
      "step": 67
    },
    {
      "epoch": 1.629032258064516,
      "grad_norm": 1.468568205833435,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0325,
      "step": 68
    },
    {
      "epoch": 1.653225806451613,
      "grad_norm": 1.3268495798110962,
      "learning_rate": 3.45e-06,
      "loss": 0.027,
      "step": 69
    },
    {
      "epoch": 1.6774193548387095,
      "grad_norm": 0.8941407203674316,
      "learning_rate": 3.5e-06,
      "loss": 0.0244,
      "step": 70
    },
    {
      "epoch": 1.7016129032258065,
      "grad_norm": 1.0857181549072266,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0225,
      "step": 71
    },
    {
      "epoch": 1.7258064516129032,
      "grad_norm": 1.1653308868408203,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.029,
      "step": 72
    },
    {
      "epoch": 1.75,
      "grad_norm": 1.0501737594604492,
      "learning_rate": 3.65e-06,
      "loss": 0.0199,
      "step": 73
    },
    {
      "epoch": 1.7741935483870968,
      "grad_norm": 0.8470718264579773,
      "learning_rate": 3.7e-06,
      "loss": 0.0219,
      "step": 74
    },
    {
      "epoch": 1.7983870967741935,
      "grad_norm": 0.8664724826812744,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.0161,
      "step": 75
    },
    {
      "epoch": 1.8225806451612905,
      "grad_norm": 1.5050084590911865,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0246,
      "step": 76
    },
    {
      "epoch": 1.846774193548387,
      "grad_norm": 1.6326985359191895,
      "learning_rate": 3.85e-06,
      "loss": 0.0253,
      "step": 77
    },
    {
      "epoch": 1.870967741935484,
      "grad_norm": 1.5506129264831543,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0133,
      "step": 78
    },
    {
      "epoch": 1.8951612903225805,
      "grad_norm": 0.7956012487411499,
      "learning_rate": 3.95e-06,
      "loss": 0.0093,
      "step": 79
    },
    {
      "epoch": 1.9193548387096775,
      "grad_norm": 1.898987054824829,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0142,
      "step": 80
    },
    {
      "epoch": 1.9435483870967742,
      "grad_norm": 0.832822859287262,
      "learning_rate": 4.05e-06,
      "loss": 0.0177,
      "step": 81
    },
    {
      "epoch": 1.967741935483871,
      "grad_norm": 0.9572640657424927,
      "learning_rate": 4.1e-06,
      "loss": 0.0077,
      "step": 82
    },
    {
      "epoch": 1.9919354838709677,
      "grad_norm": 0.6162229180335999,
      "learning_rate": 4.15e-06,
      "loss": 0.0078,
      "step": 83
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.6162229180335999,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0152,
      "step": 84
    },
    {
      "epoch": 2.024193548387097,
      "grad_norm": 2.586178779602051,
      "learning_rate": 4.25e-06,
      "loss": 0.0048,
      "step": 85
    },
    {
      "epoch": 2.0483870967741935,
      "grad_norm": 0.6102266907691956,
      "learning_rate": 4.3e-06,
      "loss": 0.0079,
      "step": 86
    },
    {
      "epoch": 2.0725806451612905,
      "grad_norm": 0.5254344940185547,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0059,
      "step": 87
    },
    {
      "epoch": 2.096774193548387,
      "grad_norm": 0.47239282727241516,
      "learning_rate": 4.4e-06,
      "loss": 0.0039,
      "step": 88
    },
    {
      "epoch": 2.120967741935484,
      "grad_norm": 0.4822653830051422,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0032,
      "step": 89
    },
    {
      "epoch": 2.1451612903225805,
      "grad_norm": 0.26892364025115967,
      "learning_rate": 4.5e-06,
      "loss": 0.0022,
      "step": 90
    },
    {
      "epoch": 2.1693548387096775,
      "grad_norm": 0.4505153298377991,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.006,
      "step": 91
    },
    {
      "epoch": 2.193548387096774,
      "grad_norm": 0.4410068094730377,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0037,
      "step": 92
    },
    {
      "epoch": 2.217741935483871,
      "grad_norm": 0.5575999617576599,
      "learning_rate": 4.65e-06,
      "loss": 0.0062,
      "step": 93
    },
    {
      "epoch": 2.241935483870968,
      "grad_norm": 0.8343164324760437,
      "learning_rate": 4.7e-06,
      "loss": 0.0051,
      "step": 94
    },
    {
      "epoch": 2.2661290322580645,
      "grad_norm": 0.4667530655860901,
      "learning_rate": 4.75e-06,
      "loss": 0.0012,
      "step": 95
    },
    {
      "epoch": 2.2903225806451615,
      "grad_norm": 0.4000648558139801,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.0029,
      "step": 96
    },
    {
      "epoch": 2.314516129032258,
      "grad_norm": 0.37442290782928467,
      "learning_rate": 4.85e-06,
      "loss": 0.0009,
      "step": 97
    },
    {
      "epoch": 2.338709677419355,
      "grad_norm": 1.7328227758407593,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0022,
      "step": 98
    },
    {
      "epoch": 2.3629032258064515,
      "grad_norm": 0.3352905511856079,
      "learning_rate": 4.95e-06,
      "loss": 0.0022,
      "step": 99
    },
    {
      "epoch": 2.3870967741935485,
      "grad_norm": 0.26117730140686035,
      "learning_rate": 5e-06,
      "loss": 0.0029,
      "step": 100
    },
    {
      "epoch": 2.411290322580645,
      "grad_norm": 0.7075008153915405,
      "learning_rate": 4.999421254949728e-06,
      "loss": 0.0011,
      "step": 101
    },
    {
      "epoch": 2.435483870967742,
      "grad_norm": 0.05702031031250954,
      "learning_rate": 4.9976852877555755e-06,
      "loss": 0.0015,
      "step": 102
    },
    {
      "epoch": 2.4596774193548385,
      "grad_norm": 1.964158058166504,
      "learning_rate": 4.9947929021634815e-06,
      "loss": 0.0019,
      "step": 103
    },
    {
      "epoch": 2.4838709677419355,
      "grad_norm": 0.7456927299499512,
      "learning_rate": 4.99074543733652e-06,
      "loss": 0.002,
      "step": 104
    },
    {
      "epoch": 2.508064516129032,
      "grad_norm": 0.5102735161781311,
      "learning_rate": 4.98554476723488e-06,
      "loss": 0.0039,
      "step": 105
    },
    {
      "epoch": 2.532258064516129,
      "grad_norm": 0.7966336011886597,
      "learning_rate": 4.979193299748225e-06,
      "loss": 0.0009,
      "step": 106
    },
    {
      "epoch": 2.556451612903226,
      "grad_norm": 0.024575965479016304,
      "learning_rate": 4.971693975580851e-06,
      "loss": 0.0009,
      "step": 107
    },
    {
      "epoch": 2.5806451612903225,
      "grad_norm": 0.1903764009475708,
      "learning_rate": 4.963050266890152e-06,
      "loss": 0.001,
      "step": 108
    },
    {
      "epoch": 2.6048387096774195,
      "grad_norm": 0.45592474937438965,
      "learning_rate": 4.953266175679023e-06,
      "loss": 0.0032,
      "step": 109
    },
    {
      "epoch": 2.629032258064516,
      "grad_norm": 0.2592508792877197,
      "learning_rate": 4.942346231942955e-06,
      "loss": 0.0017,
      "step": 110
    },
    {
      "epoch": 2.653225806451613,
      "grad_norm": 1.9442164897918701,
      "learning_rate": 4.9302954915726535e-06,
      "loss": 0.0004,
      "step": 111
    },
    {
      "epoch": 2.6774193548387095,
      "grad_norm": 0.07151424884796143,
      "learning_rate": 4.917119534013194e-06,
      "loss": 0.0006,
      "step": 112
    },
    {
      "epoch": 2.7016129032258065,
      "grad_norm": 0.08166387677192688,
      "learning_rate": 4.9028244596807525e-06,
      "loss": 0.0007,
      "step": 113
    },
    {
      "epoch": 2.725806451612903,
      "grad_norm": 1.0235861539840698,
      "learning_rate": 4.887416887138139e-06,
      "loss": 0.0021,
      "step": 114
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.14721287786960602,
      "learning_rate": 4.870903950030429e-06,
      "loss": 0.0005,
      "step": 115
    },
    {
      "epoch": 2.774193548387097,
      "grad_norm": 0.03769293054938316,
      "learning_rate": 4.853293293782118e-06,
      "loss": 0.0017,
      "step": 116
    },
    {
      "epoch": 2.7983870967741935,
      "grad_norm": 1.2892156839370728,
      "learning_rate": 4.834593072057313e-06,
      "loss": 0.0005,
      "step": 117
    },
    {
      "epoch": 2.8225806451612905,
      "grad_norm": 0.1497962474822998,
      "learning_rate": 4.814811942984625e-06,
      "loss": 0.0009,
      "step": 118
    },
    {
      "epoch": 2.846774193548387,
      "grad_norm": 0.19922426342964172,
      "learning_rate": 4.793959065148484e-06,
      "loss": 0.0002,
      "step": 119
    },
    {
      "epoch": 2.870967741935484,
      "grad_norm": 0.025655284523963928,
      "learning_rate": 4.772044093348757e-06,
      "loss": 0.0006,
      "step": 120
    },
    {
      "epoch": 2.8951612903225805,
      "grad_norm": 0.6246688961982727,
      "learning_rate": 4.749077174130609e-06,
      "loss": 0.0002,
      "step": 121
    },
    {
      "epoch": 2.9193548387096775,
      "grad_norm": 0.06197616085410118,
      "learning_rate": 4.725068941086693e-06,
      "loss": 0.0002,
      "step": 122
    },
    {
      "epoch": 2.943548387096774,
      "grad_norm": 0.09112539142370224,
      "learning_rate": 4.70003050993384e-06,
      "loss": 0.0002,
      "step": 123
    },
    {
      "epoch": 2.967741935483871,
      "grad_norm": 0.01392870768904686,
      "learning_rate": 4.6739734733665275e-06,
      "loss": 0.0001,
      "step": 124
    },
    {
      "epoch": 2.991935483870968,
      "grad_norm": 0.0538908950984478,
      "learning_rate": 4.646909895689508e-06,
      "loss": 0.0002,
      "step": 125
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.011572198010981083,
      "learning_rate": 4.618852307232078e-06,
      "loss": 0.0,
      "step": 126
    },
    {
      "epoch": 3.024193548387097,
      "grad_norm": 0.014770111069083214,
      "learning_rate": 4.589813698546592e-06,
      "loss": 0.0001,
      "step": 127
    },
    {
      "epoch": 3.0483870967741935,
      "grad_norm": 0.024740448221564293,
      "learning_rate": 4.5598075143938855e-06,
      "loss": 0.0001,
      "step": 128
    },
    {
      "epoch": 3.0725806451612905,
      "grad_norm": 0.010587488301098347,
      "learning_rate": 4.528847647518403e-06,
      "loss": 0.0001,
      "step": 129
    },
    {
      "epoch": 3.096774193548387,
      "grad_norm": 0.008824610151350498,
      "learning_rate": 4.4969484322159125e-06,
      "loss": 0.0001,
      "step": 130
    },
    {
      "epoch": 3.120967741935484,
      "grad_norm": 0.006249427329748869,
      "learning_rate": 4.464124637696786e-06,
      "loss": 0.0,
      "step": 131
    },
    {
      "epoch": 3.1451612903225805,
      "grad_norm": 0.006542777642607689,
      "learning_rate": 4.430391461247911e-06,
      "loss": 0.0,
      "step": 132
    },
    {
      "epoch": 3.1693548387096775,
      "grad_norm": 0.010512913577258587,
      "learning_rate": 4.3957645211964065e-06,
      "loss": 0.0,
      "step": 133
    },
    {
      "epoch": 3.193548387096774,
      "grad_norm": 0.012207292020320892,
      "learning_rate": 4.360259849678402e-06,
      "loss": 0.0,
      "step": 134
    },
    {
      "epoch": 3.217741935483871,
      "grad_norm": 0.008094793185591698,
      "learning_rate": 4.3238938852162195e-06,
      "loss": 0.0,
      "step": 135
    },
    {
      "epoch": 3.241935483870968,
      "grad_norm": 0.004484089091420174,
      "learning_rate": 4.286683465107403e-06,
      "loss": 0.0,
      "step": 136
    },
    {
      "epoch": 3.2661290322580645,
      "grad_norm": 0.005113545805215836,
      "learning_rate": 4.2486458176291176e-06,
      "loss": 0.0,
      "step": 137
    },
    {
      "epoch": 3.2903225806451615,
      "grad_norm": 0.039181407541036606,
      "learning_rate": 4.209798554061527e-06,
      "loss": 0.0001,
      "step": 138
    },
    {
      "epoch": 3.314516129032258,
      "grad_norm": 0.008053544908761978,
      "learning_rate": 4.170159660533834e-06,
      "loss": 0.0001,
      "step": 139
    },
    {
      "epoch": 3.338709677419355,
      "grad_norm": 0.0030653164722025394,
      "learning_rate": 4.129747489696781e-06,
      "loss": 0.0,
      "step": 140
    },
    {
      "epoch": 3.3629032258064515,
      "grad_norm": 0.0038352159317582846,
      "learning_rate": 4.0885807522254435e-06,
      "loss": 0.0,
      "step": 141
    },
    {
      "epoch": 3.3870967741935485,
      "grad_norm": 0.023383919149637222,
      "learning_rate": 4.046678508156259e-06,
      "loss": 0.0,
      "step": 142
    },
    {
      "epoch": 3.411290322580645,
      "grad_norm": 0.026204578578472137,
      "learning_rate": 4.004060158062306e-06,
      "loss": 0.0001,
      "step": 143
    },
    {
      "epoch": 3.435483870967742,
      "grad_norm": 0.007208545226603746,
      "learning_rate": 3.9607454340709215e-06,
      "loss": 0.0,
      "step": 144
    },
    {
      "epoch": 3.4596774193548385,
      "grad_norm": 0.005157825071364641,
      "learning_rate": 3.916754390727795e-06,
      "loss": 0.0,
      "step": 145
    },
    {
      "epoch": 3.4838709677419355,
      "grad_norm": 0.0048579806461930275,
      "learning_rate": 3.872107395711799e-06,
      "loss": 0.0,
      "step": 146
    },
    {
      "epoch": 3.508064516129032,
      "grad_norm": 0.003963091876357794,
      "learning_rate": 3.8268251204048335e-06,
      "loss": 0.0,
      "step": 147
    },
    {
      "epoch": 3.532258064516129,
      "grad_norm": 0.003541983664035797,
      "learning_rate": 3.78092853032106e-06,
      "loss": 0.0,
      "step": 148
    },
    {
      "epoch": 3.556451612903226,
      "grad_norm": 0.00556611642241478,
      "learning_rate": 3.7344388753999434e-06,
      "loss": 0.0,
      "step": 149
    },
    {
      "epoch": 3.5806451612903225,
      "grad_norm": 0.005311247892677784,
      "learning_rate": 3.6873776801676265e-06,
      "loss": 0.0,
      "step": 150
    },
    {
      "epoch": 3.6048387096774195,
      "grad_norm": 0.005124111659824848,
      "learning_rate": 3.6397667337711475e-06,
      "loss": 0.0,
      "step": 151
    },
    {
      "epoch": 3.629032258064516,
      "grad_norm": 0.002724511083215475,
      "learning_rate": 3.5916280798901604e-06,
      "loss": 0.0,
      "step": 152
    },
    {
      "epoch": 3.653225806451613,
      "grad_norm": 0.012781741097569466,
      "learning_rate": 3.5429840065307924e-06,
      "loss": 0.0001,
      "step": 153
    },
    {
      "epoch": 3.6774193548387095,
      "grad_norm": 0.002880590036511421,
      "learning_rate": 3.4938570357063906e-06,
      "loss": 0.0,
      "step": 154
    },
    {
      "epoch": 3.7016129032258065,
      "grad_norm": 0.007057458162307739,
      "learning_rate": 3.444269913009912e-06,
      "loss": 0.0,
      "step": 155
    },
    {
      "epoch": 3.725806451612903,
      "grad_norm": 0.0030600889585912228,
      "learning_rate": 3.3942455970828146e-06,
      "loss": 0.0,
      "step": 156
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.0037060801405459642,
      "learning_rate": 3.3438072489852837e-06,
      "loss": 0.0,
      "step": 157
    },
    {
      "epoch": 3.774193548387097,
      "grad_norm": 0.0037356216926127672,
      "learning_rate": 3.2929782214727657e-06,
      "loss": 0.0,
      "step": 158
    },
    {
      "epoch": 3.7983870967741935,
      "grad_norm": 0.0021931882947683334,
      "learning_rate": 3.241782048183726e-06,
      "loss": 0.0,
      "step": 159
    },
    {
      "epoch": 3.8225806451612905,
      "grad_norm": 0.0031978520564734936,
      "learning_rate": 3.190242432743673e-06,
      "loss": 0.0,
      "step": 160
    },
    {
      "epoch": 3.846774193548387,
      "grad_norm": 0.0027142702601850033,
      "learning_rate": 3.1383832377904676e-06,
      "loss": 0.0,
      "step": 161
    },
    {
      "epoch": 3.870967741935484,
      "grad_norm": 0.005236359313130379,
      "learning_rate": 3.0862284739260247e-06,
      "loss": 0.0,
      "step": 162
    },
    {
      "epoch": 3.8951612903225805,
      "grad_norm": 0.00442493474110961,
      "learning_rate": 3.0338022885994904e-06,
      "loss": 0.0,
      "step": 163
    },
    {
      "epoch": 3.9193548387096775,
      "grad_norm": 0.003939308691769838,
      "learning_rate": 2.981128954927075e-06,
      "loss": 0.0,
      "step": 164
    },
    {
      "epoch": 3.943548387096774,
      "grad_norm": 0.0058077434077858925,
      "learning_rate": 2.928232860453694e-06,
      "loss": 0.0,
      "step": 165
    },
    {
      "epoch": 3.967741935483871,
      "grad_norm": 0.012285425327718258,
      "learning_rate": 2.8751384958616318e-06,
      "loss": 0.0,
      "step": 166
    },
    {
      "epoch": 3.991935483870968,
      "grad_norm": 0.005668473429977894,
      "learning_rate": 2.8218704436314525e-06,
      "loss": 0.0,
      "step": 167
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.005668473429977894,
      "learning_rate": 2.768453366660408e-06,
      "loss": 0.0,
      "step": 168
    },
    {
      "epoch": 4.024193548387097,
      "grad_norm": 0.0070312549360096455,
      "learning_rate": 2.714911996843617e-06,
      "loss": 0.0,
      "step": 169
    },
    {
      "epoch": 4.048387096774194,
      "grad_norm": 0.002901165746152401,
      "learning_rate": 2.6612711236232915e-06,
      "loss": 0.0,
      "step": 170
    },
    {
      "epoch": 4.07258064516129,
      "grad_norm": 0.004153820686042309,
      "learning_rate": 2.6075555825113265e-06,
      "loss": 0.0,
      "step": 171
    },
    {
      "epoch": 4.096774193548387,
      "grad_norm": 0.002925405977293849,
      "learning_rate": 2.553790243590556e-06,
      "loss": 0.0,
      "step": 172
    },
    {
      "epoch": 4.120967741935484,
      "grad_norm": 0.0030930652283132076,
      "learning_rate": 2.5e-06,
      "loss": 0.0,
      "step": 173
    },
    {
      "epoch": 4.145161290322581,
      "grad_norm": 0.002492116065695882,
      "learning_rate": 2.446209756409445e-06,
      "loss": 0.0,
      "step": 174
    },
    {
      "epoch": 4.169354838709677,
      "grad_norm": 0.012719309888780117,
      "learning_rate": 2.3924444174886735e-06,
      "loss": 0.0,
      "step": 175
    },
    {
      "epoch": 4.193548387096774,
      "grad_norm": 0.0035637742839753628,
      "learning_rate": 2.3387288763767097e-06,
      "loss": 0.0,
      "step": 176
    },
    {
      "epoch": 4.217741935483871,
      "grad_norm": 0.002116712275892496,
      "learning_rate": 2.2850880031563845e-06,
      "loss": 0.0,
      "step": 177
    },
    {
      "epoch": 4.241935483870968,
      "grad_norm": 0.002913803094998002,
      "learning_rate": 2.2315466333395927e-06,
      "loss": 0.0,
      "step": 178
    },
    {
      "epoch": 4.266129032258064,
      "grad_norm": 0.002084016101434827,
      "learning_rate": 2.178129556368548e-06,
      "loss": 0.0,
      "step": 179
    },
    {
      "epoch": 4.290322580645161,
      "grad_norm": 0.0027423023711889982,
      "learning_rate": 2.1248615041383686e-06,
      "loss": 0.0,
      "step": 180
    },
    {
      "epoch": 4.314516129032258,
      "grad_norm": 0.0027472435031086206,
      "learning_rate": 2.0717671395463063e-06,
      "loss": 0.0,
      "step": 181
    },
    {
      "epoch": 4.338709677419355,
      "grad_norm": 0.0022199389059096575,
      "learning_rate": 2.0188710450729255e-06,
      "loss": 0.0,
      "step": 182
    },
    {
      "epoch": 4.362903225806452,
      "grad_norm": 0.0020855457987636328,
      "learning_rate": 1.96619771140051e-06,
      "loss": 0.0,
      "step": 183
    },
    {
      "epoch": 4.387096774193548,
      "grad_norm": 0.001854368718340993,
      "learning_rate": 1.913771526073976e-06,
      "loss": 0.0,
      "step": 184
    },
    {
      "epoch": 4.411290322580645,
      "grad_norm": 0.0019964484963566065,
      "learning_rate": 1.8616167622095328e-06,
      "loss": 0.0,
      "step": 185
    },
    {
      "epoch": 4.435483870967742,
      "grad_norm": 0.0020302555058151484,
      "learning_rate": 1.8097575672563278e-06,
      "loss": 0.0,
      "step": 186
    },
    {
      "epoch": 4.459677419354839,
      "grad_norm": 0.0028748398181051016,
      "learning_rate": 1.7582179518162742e-06,
      "loss": 0.0,
      "step": 187
    },
    {
      "epoch": 4.483870967741936,
      "grad_norm": 0.0022295413073152304,
      "learning_rate": 1.7070217785272354e-06,
      "loss": 0.0,
      "step": 188
    },
    {
      "epoch": 4.508064516129032,
      "grad_norm": 0.0026783861685544252,
      "learning_rate": 1.6561927510147172e-06,
      "loss": 0.0,
      "step": 189
    },
    {
      "epoch": 4.532258064516129,
      "grad_norm": 0.002686347346752882,
      "learning_rate": 1.6057544029171863e-06,
      "loss": 0.0,
      "step": 190
    },
    {
      "epoch": 4.556451612903226,
      "grad_norm": 0.002102716825902462,
      "learning_rate": 1.5557300869900876e-06,
      "loss": 0.0,
      "step": 191
    },
    {
      "epoch": 4.580645161290323,
      "grad_norm": 0.002360268495976925,
      "learning_rate": 1.5061429642936107e-06,
      "loss": 0.0,
      "step": 192
    },
    {
      "epoch": 4.604838709677419,
      "grad_norm": 0.0019272768404334784,
      "learning_rate": 1.4570159934692085e-06,
      "loss": 0.0,
      "step": 193
    },
    {
      "epoch": 4.629032258064516,
      "grad_norm": 0.004223938565701246,
      "learning_rate": 1.4083719201098404e-06,
      "loss": 0.0,
      "step": 194
    },
    {
      "epoch": 4.653225806451613,
      "grad_norm": 0.0030710650607943535,
      "learning_rate": 1.3602332662288536e-06,
      "loss": 0.0,
      "step": 195
    },
    {
      "epoch": 4.67741935483871,
      "grad_norm": 0.0022593277972191572,
      "learning_rate": 1.3126223198323752e-06,
      "loss": 0.0,
      "step": 196
    },
    {
      "epoch": 4.701612903225806,
      "grad_norm": 0.003411229234188795,
      "learning_rate": 1.265561124600057e-06,
      "loss": 0.0,
      "step": 197
    },
    {
      "epoch": 4.725806451612903,
      "grad_norm": 0.0026067630387842655,
      "learning_rate": 1.219071469678941e-06,
      "loss": 0.0,
      "step": 198
    },
    {
      "epoch": 4.75,
      "grad_norm": 0.0027266363613307476,
      "learning_rate": 1.173174879595166e-06,
      "loss": 0.0,
      "step": 199
    },
    {
      "epoch": 4.774193548387097,
      "grad_norm": 0.001948651159182191,
      "learning_rate": 1.1278926042882026e-06,
      "loss": 0.0,
      "step": 200
    },
    {
      "epoch": 4.798387096774194,
      "grad_norm": 0.0035516598727554083,
      "learning_rate": 1.0832456092722063e-06,
      "loss": 0.0,
      "step": 201
    },
    {
      "epoch": 4.82258064516129,
      "grad_norm": 0.002599873812869191,
      "learning_rate": 1.0392545659290789e-06,
      "loss": 0.0,
      "step": 202
    },
    {
      "epoch": 4.846774193548387,
      "grad_norm": 0.0024046015460044146,
      "learning_rate": 9.95939841937693e-07,
      "loss": 0.0,
      "step": 203
    },
    {
      "epoch": 4.870967741935484,
      "grad_norm": 0.0016676371451467276,
      "learning_rate": 9.533214918437422e-07,
      "loss": 0.0,
      "step": 204
    },
    {
      "epoch": 4.895161290322581,
      "grad_norm": 0.005082397721707821,
      "learning_rate": 9.114192477745568e-07,
      "loss": 0.0,
      "step": 205
    },
    {
      "epoch": 4.919354838709677,
      "grad_norm": 0.0022361574228852987,
      "learning_rate": 8.702525103032186e-07,
      "loss": 0.0,
      "step": 206
    },
    {
      "epoch": 4.943548387096774,
      "grad_norm": 0.00175854389090091,
      "learning_rate": 8.298403394661658e-07,
      "loss": 0.0,
      "step": 207
    },
    {
      "epoch": 4.967741935483871,
      "grad_norm": 0.002364831743761897,
      "learning_rate": 7.902014459384744e-07,
      "loss": 0.0,
      "step": 208
    },
    {
      "epoch": 4.991935483870968,
      "grad_norm": 0.0020899497903883457,
      "learning_rate": 7.513541823708828e-07,
      "loss": 0.0,
      "step": 209
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.0020899497903883457,
      "learning_rate": 7.133165348925978e-07,
      "loss": 0.0,
      "step": 210
    },
    {
      "epoch": 5.024193548387097,
      "grad_norm": 0.0033509680069983006,
      "learning_rate": 6.761061147837808e-07,
      "loss": 0.0,
      "step": 211
    },
    {
      "epoch": 5.048387096774194,
      "grad_norm": 0.0016700942069292068,
      "learning_rate": 6.397401503215992e-07,
      "loss": 0.0,
      "step": 212
    },
    {
      "epoch": 5.07258064516129,
      "grad_norm": 0.0036097129341214895,
      "learning_rate": 6.042354788035943e-07,
      "loss": 0.0,
      "step": 213
    },
    {
      "epoch": 5.096774193548387,
      "grad_norm": 0.0019725814927369356,
      "learning_rate": 5.696085387520894e-07,
      "loss": 0.0,
      "step": 214
    },
    {
      "epoch": 5.120967741935484,
      "grad_norm": 0.002575285965576768,
      "learning_rate": 5.358753623032137e-07,
      "loss": 0.0,
      "step": 215
    },
    {
      "epoch": 5.145161290322581,
      "grad_norm": 0.0023315041325986385,
      "learning_rate": 5.030515677840883e-07,
      "loss": 0.0,
      "step": 216
    },
    {
      "epoch": 5.169354838709677,
      "grad_norm": 0.002346003893762827,
      "learning_rate": 4.711523524815978e-07,
      "loss": 0.0,
      "step": 217
    },
    {
      "epoch": 5.193548387096774,
      "grad_norm": 0.0021207425743341446,
      "learning_rate": 4.401924856061146e-07,
      "loss": 0.0,
      "step": 218
    },
    {
      "epoch": 5.217741935483871,
      "grad_norm": 0.003580324584618211,
      "learning_rate": 4.1018630145340744e-07,
      "loss": 0.0,
      "step": 219
    },
    {
      "epoch": 5.241935483870968,
      "grad_norm": 0.0020650820806622505,
      "learning_rate": 3.811476927679228e-07,
      "loss": 0.0,
      "step": 220
    },
    {
      "epoch": 5.266129032258064,
      "grad_norm": 0.0015676968032494187,
      "learning_rate": 3.5309010431049284e-07,
      "loss": 0.0,
      "step": 221
    },
    {
      "epoch": 5.290322580645161,
      "grad_norm": 0.0023263345938175917,
      "learning_rate": 3.260265266334725e-07,
      "loss": 0.0,
      "step": 222
    },
    {
      "epoch": 5.314516129032258,
      "grad_norm": 0.00231003575026989,
      "learning_rate": 2.9996949006616096e-07,
      "loss": 0.0,
      "step": 223
    },
    {
      "epoch": 5.338709677419355,
      "grad_norm": 0.0019675635267049074,
      "learning_rate": 2.7493105891330837e-07,
      "loss": 0.0,
      "step": 224
    },
    {
      "epoch": 5.362903225806452,
      "grad_norm": 0.002346566179767251,
      "learning_rate": 2.5092282586939187e-07,
      "loss": 0.0,
      "step": 225
    },
    {
      "epoch": 5.387096774193548,
      "grad_norm": 0.0021014949306845665,
      "learning_rate": 2.2795590665124267e-07,
      "loss": 0.0,
      "step": 226
    },
    {
      "epoch": 5.411290322580645,
      "grad_norm": 0.0017796737374737859,
      "learning_rate": 2.0604093485151548e-07,
      "loss": 0.0,
      "step": 227
    },
    {
      "epoch": 5.435483870967742,
      "grad_norm": 0.0015546936774626374,
      "learning_rate": 1.851880570153755e-07,
      "loss": 0.0,
      "step": 228
    },
    {
      "epoch": 5.459677419354839,
      "grad_norm": 0.0038863597437739372,
      "learning_rate": 1.654069279426873e-07,
      "loss": 0.0,
      "step": 229
    },
    {
      "epoch": 5.483870967741936,
      "grad_norm": 0.0019028914393857121,
      "learning_rate": 1.467067062178823e-07,
      "loss": 0.0,
      "step": 230
    },
    {
      "epoch": 5.508064516129032,
      "grad_norm": 0.0014524314319714904,
      "learning_rate": 1.2909604996957093e-07,
      "loss": 0.0,
      "step": 231
    },
    {
      "epoch": 5.532258064516129,
      "grad_norm": 0.0026214567478746176,
      "learning_rate": 1.1258311286186208e-07,
      "loss": 0.0,
      "step": 232
    },
    {
      "epoch": 5.556451612903226,
      "grad_norm": 0.001435131300240755,
      "learning_rate": 9.717554031924842e-08,
      "loss": 0.0,
      "step": 233
    },
    {
      "epoch": 5.580645161290323,
      "grad_norm": 0.0044931466691195965,
      "learning_rate": 8.288046598680627e-08,
      "loss": 0.0,
      "step": 234
    },
    {
      "epoch": 5.604838709677419,
      "grad_norm": 0.0016189351445063949,
      "learning_rate": 6.97045084273465e-08,
      "loss": 0.0,
      "step": 235
    },
    {
      "epoch": 5.629032258064516,
      "grad_norm": 0.0017611249350011349,
      "learning_rate": 5.7653768057045757e-08,
      "loss": 0.0,
      "step": 236
    },
    {
      "epoch": 5.653225806451613,
      "grad_norm": 0.002121084136888385,
      "learning_rate": 4.6733824320976674e-08,
      "loss": 0.0,
      "step": 237
    },
    {
      "epoch": 5.67741935483871,
      "grad_norm": 0.0014778735348954797,
      "learning_rate": 3.6949733109848395e-08,
      "loss": 0.0,
      "step": 238
    },
    {
      "epoch": 5.701612903225806,
      "grad_norm": 0.026431361213326454,
      "learning_rate": 2.8306024419148814e-08,
      "loss": 0.0,
      "step": 239
    },
    {
      "epoch": 5.725806451612903,
      "grad_norm": 0.0016552675515413284,
      "learning_rate": 2.0806700251775057e-08,
      "loss": 0.0,
      "step": 240
    },
    {
      "epoch": 5.75,
      "grad_norm": 0.006627636030316353,
      "learning_rate": 1.4455232765120397e-08,
      "loss": 0.0,
      "step": 241
    },
    {
      "epoch": 5.774193548387097,
      "grad_norm": 0.0024116451386362314,
      "learning_rate": 9.25456266348046e-09,
      "loss": 0.0,
      "step": 242
    },
    {
      "epoch": 5.798387096774194,
      "grad_norm": 0.001904924400150776,
      "learning_rate": 5.20709783651957e-09,
      "loss": 0.0,
      "step": 243
    },
    {
      "epoch": 5.82258064516129,
      "grad_norm": 0.0022408179938793182,
      "learning_rate": 2.3147122444250327e-09,
      "loss": 0.0,
      "step": 244
    },
    {
      "epoch": 5.846774193548387,
      "grad_norm": 0.0030523776076734066,
      "learning_rate": 5.787450502728331e-10,
      "loss": 0.0,
      "step": 245
    },
    {
      "epoch": 5.870967741935484,
      "grad_norm": 0.0039012248162180185,
      "learning_rate": 0.0,
      "loss": 0.0,
      "step": 246
    }
  ],
  "logging_steps": 1,
  "max_steps": 246,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 41,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.098273592095539e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}