| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.2962962962962963, |
| "eval_steps": 100, |
| "global_step": 100, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.002962962962962963, |
| "grad_norm": 4.3932013511657715, |
| "learning_rate": 0.0, |
| "loss": 1.122, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.005925925925925926, |
| "grad_norm": 4.835041522979736, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 1.262, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.008888888888888889, |
| "grad_norm": 4.558024883270264, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 1.1905, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.011851851851851851, |
| "grad_norm": 4.187239170074463, |
| "learning_rate": 1.2e-05, |
| "loss": 1.0289, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.014814814814814815, |
| "grad_norm": 3.6275532245635986, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 0.9252, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.017777777777777778, |
| "grad_norm": 4.801448345184326, |
| "learning_rate": 2e-05, |
| "loss": 1.134, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.02074074074074074, |
| "grad_norm": 4.969509601593018, |
| "learning_rate": 1.993993993993994e-05, |
| "loss": 1.1101, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.023703703703703703, |
| "grad_norm": 6.065815448760986, |
| "learning_rate": 1.987987987987988e-05, |
| "loss": 1.2835, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.02666666666666667, |
| "grad_norm": 4.675291538238525, |
| "learning_rate": 1.981981981981982e-05, |
| "loss": 0.9603, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.02962962962962963, |
| "grad_norm": 5.45444917678833, |
| "learning_rate": 1.9759759759759763e-05, |
| "loss": 0.9926, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.03259259259259259, |
| "grad_norm": 4.381354808807373, |
| "learning_rate": 1.9699699699699702e-05, |
| "loss": 0.7942, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.035555555555555556, |
| "grad_norm": 6.248989105224609, |
| "learning_rate": 1.963963963963964e-05, |
| "loss": 1.1086, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.03851851851851852, |
| "grad_norm": 5.123929023742676, |
| "learning_rate": 1.957957957957958e-05, |
| "loss": 0.778, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.04148148148148148, |
| "grad_norm": 6.84102725982666, |
| "learning_rate": 1.951951951951952e-05, |
| "loss": 1.0916, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.044444444444444446, |
| "grad_norm": 6.978565692901611, |
| "learning_rate": 1.9459459459459463e-05, |
| "loss": 0.982, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.047407407407407405, |
| "grad_norm": 8.47054386138916, |
| "learning_rate": 1.9399399399399402e-05, |
| "loss": 1.0711, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.05037037037037037, |
| "grad_norm": 7.508791923522949, |
| "learning_rate": 1.9339339339339342e-05, |
| "loss": 0.9112, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.05333333333333334, |
| "grad_norm": 9.517326354980469, |
| "learning_rate": 1.927927927927928e-05, |
| "loss": 1.0273, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.056296296296296296, |
| "grad_norm": 10.264245986938477, |
| "learning_rate": 1.921921921921922e-05, |
| "loss": 1.087, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.05925925925925926, |
| "grad_norm": 9.43092155456543, |
| "learning_rate": 1.915915915915916e-05, |
| "loss": 1.0215, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.06222222222222222, |
| "grad_norm": 8.558907508850098, |
| "learning_rate": 1.90990990990991e-05, |
| "loss": 0.82, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.06518518518518518, |
| "grad_norm": 14.64515495300293, |
| "learning_rate": 1.903903903903904e-05, |
| "loss": 1.4202, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.06814814814814815, |
| "grad_norm": 10.740450859069824, |
| "learning_rate": 1.8978978978978982e-05, |
| "loss": 1.1039, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.07111111111111111, |
| "grad_norm": 9.0601224899292, |
| "learning_rate": 1.891891891891892e-05, |
| "loss": 1.0625, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.07407407407407407, |
| "grad_norm": 11.073591232299805, |
| "learning_rate": 1.885885885885886e-05, |
| "loss": 0.9269, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.07703703703703704, |
| "grad_norm": 10.829946517944336, |
| "learning_rate": 1.87987987987988e-05, |
| "loss": 0.9481, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 14.61500358581543, |
| "learning_rate": 1.873873873873874e-05, |
| "loss": 1.2452, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.08296296296296296, |
| "grad_norm": 11.676919937133789, |
| "learning_rate": 1.8678678678678682e-05, |
| "loss": 1.0379, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.08592592592592592, |
| "grad_norm": 10.817378997802734, |
| "learning_rate": 1.861861861861862e-05, |
| "loss": 0.8553, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.08888888888888889, |
| "grad_norm": 11.56974983215332, |
| "learning_rate": 1.855855855855856e-05, |
| "loss": 0.8584, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.09185185185185185, |
| "grad_norm": 10.188511848449707, |
| "learning_rate": 1.84984984984985e-05, |
| "loss": 0.7416, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.09481481481481481, |
| "grad_norm": 10.488707542419434, |
| "learning_rate": 1.843843843843844e-05, |
| "loss": 0.5713, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.09777777777777778, |
| "grad_norm": 11.057037353515625, |
| "learning_rate": 1.8378378378378383e-05, |
| "loss": 0.6974, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.10074074074074074, |
| "grad_norm": 11.147880554199219, |
| "learning_rate": 1.831831831831832e-05, |
| "loss": 0.7529, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.1037037037037037, |
| "grad_norm": 12.731097221374512, |
| "learning_rate": 1.8258258258258258e-05, |
| "loss": 0.9118, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.10666666666666667, |
| "grad_norm": 13.77258014678955, |
| "learning_rate": 1.81981981981982e-05, |
| "loss": 0.8346, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.10962962962962963, |
| "grad_norm": 13.153702735900879, |
| "learning_rate": 1.813813813813814e-05, |
| "loss": 0.9084, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.11259259259259259, |
| "grad_norm": 12.729766845703125, |
| "learning_rate": 1.807807807807808e-05, |
| "loss": 0.7516, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.11555555555555555, |
| "grad_norm": 12.333885192871094, |
| "learning_rate": 1.801801801801802e-05, |
| "loss": 0.7124, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.11851851851851852, |
| "grad_norm": 10.395763397216797, |
| "learning_rate": 1.795795795795796e-05, |
| "loss": 0.6642, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.12148148148148148, |
| "grad_norm": 11.341495513916016, |
| "learning_rate": 1.78978978978979e-05, |
| "loss": 0.7956, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.12444444444444444, |
| "grad_norm": 12.643635749816895, |
| "learning_rate": 1.783783783783784e-05, |
| "loss": 1.012, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.1274074074074074, |
| "grad_norm": 10.671773910522461, |
| "learning_rate": 1.7777777777777777e-05, |
| "loss": 0.6679, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.13037037037037036, |
| "grad_norm": 10.571871757507324, |
| "learning_rate": 1.771771771771772e-05, |
| "loss": 0.6476, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.13333333333333333, |
| "grad_norm": 9.936208724975586, |
| "learning_rate": 1.765765765765766e-05, |
| "loss": 0.6951, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.1362962962962963, |
| "grad_norm": 13.039535522460938, |
| "learning_rate": 1.7597597597597598e-05, |
| "loss": 0.8956, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.13925925925925925, |
| "grad_norm": 15.345221519470215, |
| "learning_rate": 1.7537537537537538e-05, |
| "loss": 1.0664, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.14222222222222222, |
| "grad_norm": 12.190373420715332, |
| "learning_rate": 1.7477477477477477e-05, |
| "loss": 0.6127, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.1451851851851852, |
| "grad_norm": 12.246444702148438, |
| "learning_rate": 1.741741741741742e-05, |
| "loss": 0.7511, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.14814814814814814, |
| "grad_norm": 9.051176071166992, |
| "learning_rate": 1.735735735735736e-05, |
| "loss": 0.5273, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.1511111111111111, |
| "grad_norm": 14.076638221740723, |
| "learning_rate": 1.72972972972973e-05, |
| "loss": 0.8716, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.15407407407407409, |
| "grad_norm": 16.608592987060547, |
| "learning_rate": 1.7237237237237238e-05, |
| "loss": 0.7523, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.15703703703703703, |
| "grad_norm": 11.700766563415527, |
| "learning_rate": 1.7177177177177177e-05, |
| "loss": 0.6512, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 18.934720993041992, |
| "learning_rate": 1.711711711711712e-05, |
| "loss": 0.8111, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.16296296296296298, |
| "grad_norm": 12.533053398132324, |
| "learning_rate": 1.705705705705706e-05, |
| "loss": 0.5017, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.16592592592592592, |
| "grad_norm": 16.80641746520996, |
| "learning_rate": 1.6996996996997e-05, |
| "loss": 0.7349, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.1688888888888889, |
| "grad_norm": 17.351354598999023, |
| "learning_rate": 1.693693693693694e-05, |
| "loss": 0.5469, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.17185185185185184, |
| "grad_norm": 12.17600154876709, |
| "learning_rate": 1.6876876876876878e-05, |
| "loss": 0.5454, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.1748148148148148, |
| "grad_norm": 15.175498962402344, |
| "learning_rate": 1.6816816816816817e-05, |
| "loss": 0.7156, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.17777777777777778, |
| "grad_norm": 11.376442909240723, |
| "learning_rate": 1.6756756756756757e-05, |
| "loss": 0.6287, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.18074074074074073, |
| "grad_norm": 16.71291732788086, |
| "learning_rate": 1.6696696696696696e-05, |
| "loss": 0.5942, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.1837037037037037, |
| "grad_norm": 10.104132652282715, |
| "learning_rate": 1.663663663663664e-05, |
| "loss": 0.4791, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.18666666666666668, |
| "grad_norm": 11.949193000793457, |
| "learning_rate": 1.6576576576576578e-05, |
| "loss": 0.6793, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.18962962962962962, |
| "grad_norm": 9.24198055267334, |
| "learning_rate": 1.6516516516516518e-05, |
| "loss": 0.5549, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.1925925925925926, |
| "grad_norm": 9.324671745300293, |
| "learning_rate": 1.6456456456456457e-05, |
| "loss": 0.5719, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.19555555555555557, |
| "grad_norm": 11.018183708190918, |
| "learning_rate": 1.6396396396396396e-05, |
| "loss": 0.5578, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.1985185185185185, |
| "grad_norm": 9.80178165435791, |
| "learning_rate": 1.633633633633634e-05, |
| "loss": 0.648, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.20148148148148148, |
| "grad_norm": 13.870157241821289, |
| "learning_rate": 1.627627627627628e-05, |
| "loss": 0.755, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.20444444444444446, |
| "grad_norm": 14.881416320800781, |
| "learning_rate": 1.6216216216216218e-05, |
| "loss": 0.6568, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.2074074074074074, |
| "grad_norm": 13.221749305725098, |
| "learning_rate": 1.6156156156156157e-05, |
| "loss": 0.4347, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.21037037037037037, |
| "grad_norm": 15.9087553024292, |
| "learning_rate": 1.6096096096096097e-05, |
| "loss": 0.7353, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.21333333333333335, |
| "grad_norm": 18.63846206665039, |
| "learning_rate": 1.6036036036036036e-05, |
| "loss": 0.6407, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.2162962962962963, |
| "grad_norm": 16.938674926757812, |
| "learning_rate": 1.5975975975975976e-05, |
| "loss": 0.4618, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.21925925925925926, |
| "grad_norm": 19.83721160888672, |
| "learning_rate": 1.591591591591592e-05, |
| "loss": 0.4019, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.2222222222222222, |
| "grad_norm": 23.562578201293945, |
| "learning_rate": 1.5855855855855858e-05, |
| "loss": 0.5107, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.22518518518518518, |
| "grad_norm": 22.505775451660156, |
| "learning_rate": 1.5795795795795797e-05, |
| "loss": 0.4955, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.22814814814814816, |
| "grad_norm": 23.18890380859375, |
| "learning_rate": 1.5735735735735737e-05, |
| "loss": 0.5082, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.2311111111111111, |
| "grad_norm": 29.059083938598633, |
| "learning_rate": 1.5675675675675676e-05, |
| "loss": 0.5518, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.23407407407407407, |
| "grad_norm": 24.563520431518555, |
| "learning_rate": 1.5615615615615616e-05, |
| "loss": 0.3839, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.23703703703703705, |
| "grad_norm": 18.441802978515625, |
| "learning_rate": 1.555555555555556e-05, |
| "loss": 0.4532, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 19.27843475341797, |
| "learning_rate": 1.5495495495495498e-05, |
| "loss": 0.431, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.24296296296296296, |
| "grad_norm": 25.197582244873047, |
| "learning_rate": 1.5435435435435437e-05, |
| "loss": 0.3855, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.24592592592592594, |
| "grad_norm": 14.846511840820312, |
| "learning_rate": 1.5375375375375377e-05, |
| "loss": 0.3664, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.24888888888888888, |
| "grad_norm": 19.469579696655273, |
| "learning_rate": 1.5315315315315316e-05, |
| "loss": 0.4523, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.2518518518518518, |
| "grad_norm": 15.252752304077148, |
| "learning_rate": 1.5255255255255257e-05, |
| "loss": 0.551, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.2548148148148148, |
| "grad_norm": 11.770284652709961, |
| "learning_rate": 1.5195195195195196e-05, |
| "loss": 0.3996, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.2577777777777778, |
| "grad_norm": 12.881220817565918, |
| "learning_rate": 1.5135135135135138e-05, |
| "loss": 0.5693, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.2607407407407407, |
| "grad_norm": 11.658583641052246, |
| "learning_rate": 1.5075075075075077e-05, |
| "loss": 0.4513, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.2637037037037037, |
| "grad_norm": 10.149765968322754, |
| "learning_rate": 1.5015015015015015e-05, |
| "loss": 0.3656, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.26666666666666666, |
| "grad_norm": 7.5767822265625, |
| "learning_rate": 1.4954954954954957e-05, |
| "loss": 0.4198, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.2696296296296296, |
| "grad_norm": 18.215837478637695, |
| "learning_rate": 1.4894894894894895e-05, |
| "loss": 0.4751, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.2725925925925926, |
| "grad_norm": 15.47228717803955, |
| "learning_rate": 1.4834834834834836e-05, |
| "loss": 0.3909, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.27555555555555555, |
| "grad_norm": 8.646990776062012, |
| "learning_rate": 1.4774774774774776e-05, |
| "loss": 0.2954, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.2785185185185185, |
| "grad_norm": 10.278739929199219, |
| "learning_rate": 1.4714714714714715e-05, |
| "loss": 0.5366, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.2814814814814815, |
| "grad_norm": 14.884415626525879, |
| "learning_rate": 1.4654654654654656e-05, |
| "loss": 0.4875, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.28444444444444444, |
| "grad_norm": 7.219095230102539, |
| "learning_rate": 1.4594594594594596e-05, |
| "loss": 0.4121, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.2874074074074074, |
| "grad_norm": 6.076047897338867, |
| "learning_rate": 1.4534534534534537e-05, |
| "loss": 0.4601, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.2903703703703704, |
| "grad_norm": 5.5280680656433105, |
| "learning_rate": 1.4474474474474476e-05, |
| "loss": 0.3693, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.29333333333333333, |
| "grad_norm": 5.4252777099609375, |
| "learning_rate": 1.4414414414414416e-05, |
| "loss": 0.5719, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.2962962962962963, |
| "grad_norm": 3.8267464637756348, |
| "learning_rate": 1.4354354354354357e-05, |
| "loss": 0.3806, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.2962962962962963, |
| "eval_loss": 0.3932308554649353, |
| "eval_runtime": 12.1133, |
| "eval_samples_per_second": 49.532, |
| "eval_steps_per_second": 24.766, |
| "step": 100 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 338, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 100, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 3673239111585792.0, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |