{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.8023255813953485,
  "eval_steps": 500,
  "global_step": 168,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03488372093023256,
      "grad_norm": 34.417083740234375,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.2576,
      "step": 1
    },
    {
      "epoch": 0.06976744186046512,
      "grad_norm": 34.94503402709961,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.2928,
      "step": 2
    },
    {
      "epoch": 0.10465116279069768,
      "grad_norm": 34.11349105834961,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.2273,
      "step": 3
    },
    {
      "epoch": 0.13953488372093023,
      "grad_norm": 35.753822326660156,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.3202,
      "step": 4
    },
    {
      "epoch": 0.1744186046511628,
      "grad_norm": 34.57868957519531,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.2515,
      "step": 5
    },
    {
      "epoch": 0.20930232558139536,
      "grad_norm": 34.45582580566406,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.241,
      "step": 6
    },
    {
      "epoch": 0.2441860465116279,
      "grad_norm": 34.65391540527344,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.2543,
      "step": 7
    },
    {
      "epoch": 0.27906976744186046,
      "grad_norm": 34.55806350708008,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.2219,
      "step": 8
    },
    {
      "epoch": 0.313953488372093,
      "grad_norm": 34.35453414916992,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.2105,
      "step": 9
    },
    {
      "epoch": 0.3488372093023256,
      "grad_norm": 34.049232482910156,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.1968,
      "step": 10
    },
    {
      "epoch": 0.38372093023255816,
      "grad_norm": 33.26045608520508,
      "learning_rate": 5.5e-07,
      "loss": 2.13,
      "step": 11
    },
    {
      "epoch": 0.4186046511627907,
      "grad_norm": 33.314796447753906,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.1306,
      "step": 12
    },
    {
      "epoch": 0.45348837209302323,
      "grad_norm": 32.00406265258789,
      "learning_rate": 6.5e-07,
      "loss": 2.0275,
      "step": 13
    },
    {
      "epoch": 0.4883720930232558,
      "grad_norm": 32.68654251098633,
      "learning_rate": 7.000000000000001e-07,
      "loss": 2.0442,
      "step": 14
    },
    {
      "epoch": 0.5232558139534884,
      "grad_norm": 31.643146514892578,
      "learning_rate": 7.5e-07,
      "loss": 1.9725,
      "step": 15
    },
    {
      "epoch": 0.5581395348837209,
      "grad_norm": 30.403486251831055,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.8724,
      "step": 16
    },
    {
      "epoch": 0.5930232558139535,
      "grad_norm": 29.716495513916016,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.8118,
      "step": 17
    },
    {
      "epoch": 0.627906976744186,
      "grad_norm": 29.22306251525879,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.7551,
      "step": 18
    },
    {
      "epoch": 0.6627906976744186,
      "grad_norm": 27.425785064697266,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.6087,
      "step": 19
    },
    {
      "epoch": 0.6976744186046512,
      "grad_norm": 26.653409957885742,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.5191,
      "step": 20
    },
    {
      "epoch": 0.7325581395348837,
      "grad_norm": 25.145450592041016,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.3758,
      "step": 21
    },
    {
      "epoch": 0.7674418604651163,
      "grad_norm": 24.684560775756836,
      "learning_rate": 1.1e-06,
      "loss": 1.2955,
      "step": 22
    },
    {
      "epoch": 0.8023255813953488,
      "grad_norm": 24.005306243896484,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.1665,
      "step": 23
    },
    {
      "epoch": 0.8372093023255814,
      "grad_norm": 24.253219604492188,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.0143,
      "step": 24
    },
    {
      "epoch": 0.872093023255814,
      "grad_norm": 24.97549057006836,
      "learning_rate": 1.25e-06,
      "loss": 0.875,
      "step": 25
    },
    {
      "epoch": 0.9069767441860465,
      "grad_norm": 22.349998474121094,
      "learning_rate": 1.3e-06,
      "loss": 0.6222,
      "step": 26
    },
    {
      "epoch": 0.9418604651162791,
      "grad_norm": 18.814115524291992,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.4715,
      "step": 27
    },
    {
      "epoch": 0.9767441860465116,
      "grad_norm": 15.833977699279785,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.374,
      "step": 28
    },
    {
      "epoch": 1.0,
      "grad_norm": 15.833977699279785,
      "learning_rate": 1.45e-06,
      "loss": 0.2818,
      "step": 29
    },
    {
      "epoch": 1.0348837209302326,
      "grad_norm": 14.85798168182373,
      "learning_rate": 1.5e-06,
      "loss": 0.2559,
      "step": 30
    },
    {
      "epoch": 1.069767441860465,
      "grad_norm": 6.723965167999268,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.2073,
      "step": 31
    },
    {
      "epoch": 1.1046511627906976,
      "grad_norm": 4.480190753936768,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.1632,
      "step": 32
    },
    {
      "epoch": 1.1395348837209303,
      "grad_norm": 3.228346824645996,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.1432,
      "step": 33
    },
    {
      "epoch": 1.1744186046511629,
      "grad_norm": 2.4646286964416504,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.1166,
      "step": 34
    },
    {
      "epoch": 1.2093023255813953,
      "grad_norm": 1.721303939819336,
      "learning_rate": 1.75e-06,
      "loss": 0.1034,
      "step": 35
    },
    {
      "epoch": 1.244186046511628,
      "grad_norm": 1.6059235334396362,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0943,
      "step": 36
    },
    {
      "epoch": 1.2790697674418605,
      "grad_norm": 1.390950322151184,
      "learning_rate": 1.85e-06,
      "loss": 0.0855,
      "step": 37
    },
    {
      "epoch": 1.3139534883720931,
      "grad_norm": 1.2893034219741821,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0797,
      "step": 38
    },
    {
      "epoch": 1.3488372093023255,
      "grad_norm": 1.0295542478561401,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.072,
      "step": 39
    },
    {
      "epoch": 1.3837209302325582,
      "grad_norm": 1.0248366594314575,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.068,
      "step": 40
    },
    {
      "epoch": 1.4186046511627908,
      "grad_norm": 0.7489507794380188,
      "learning_rate": 2.05e-06,
      "loss": 0.0613,
      "step": 41
    },
    {
      "epoch": 1.4534883720930232,
      "grad_norm": 0.7149010300636292,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0596,
      "step": 42
    },
    {
      "epoch": 1.4883720930232558,
      "grad_norm": 0.5656580328941345,
      "learning_rate": 2.15e-06,
      "loss": 0.0585,
      "step": 43
    },
    {
      "epoch": 1.5232558139534884,
      "grad_norm": 0.6951081156730652,
      "learning_rate": 2.2e-06,
      "loss": 0.066,
      "step": 44
    },
    {
      "epoch": 1.558139534883721,
      "grad_norm": 0.8682136535644531,
      "learning_rate": 2.25e-06,
      "loss": 0.0623,
      "step": 45
    },
    {
      "epoch": 1.5930232558139537,
      "grad_norm": 0.6056260466575623,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.054,
      "step": 46
    },
    {
      "epoch": 1.627906976744186,
      "grad_norm": 0.35378050804138184,
      "learning_rate": 2.35e-06,
      "loss": 0.0626,
      "step": 47
    },
    {
      "epoch": 1.6627906976744184,
      "grad_norm": 0.4003937244415283,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0524,
      "step": 48
    },
    {
      "epoch": 1.697674418604651,
      "grad_norm": 0.527799665927887,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.056,
      "step": 49
    },
    {
      "epoch": 1.7325581395348837,
      "grad_norm": 0.5358547568321228,
      "learning_rate": 2.5e-06,
      "loss": 0.0527,
      "step": 50
    },
    {
      "epoch": 1.7674418604651163,
      "grad_norm": 0.45014214515686035,
      "learning_rate": 2.55e-06,
      "loss": 0.0492,
      "step": 51
    },
    {
      "epoch": 1.802325581395349,
      "grad_norm": 0.4785670340061188,
      "learning_rate": 2.6e-06,
      "loss": 0.0525,
      "step": 52
    },
    {
      "epoch": 1.8372093023255816,
      "grad_norm": 0.4931303560733795,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0485,
      "step": 53
    },
    {
      "epoch": 1.872093023255814,
      "grad_norm": 0.2308250367641449,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0499,
      "step": 54
    },
    {
      "epoch": 1.9069767441860463,
      "grad_norm": 0.5777614712715149,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.0527,
      "step": 55
    },
    {
      "epoch": 1.941860465116279,
      "grad_norm": 0.32561731338500977,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.052,
      "step": 56
    },
    {
      "epoch": 1.9767441860465116,
      "grad_norm": 0.34972742199897766,
      "learning_rate": 2.85e-06,
      "loss": 0.0467,
      "step": 57
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.43575090169906616,
      "learning_rate": 2.9e-06,
      "loss": 0.0441,
      "step": 58
    },
    {
      "epoch": 2.0348837209302326,
      "grad_norm": 0.3564951419830322,
      "learning_rate": 2.95e-06,
      "loss": 0.0471,
      "step": 59
    },
    {
      "epoch": 2.0697674418604652,
      "grad_norm": 0.3126629889011383,
      "learning_rate": 3e-06,
      "loss": 0.0494,
      "step": 60
    },
    {
      "epoch": 2.104651162790698,
      "grad_norm": 0.4542100429534912,
      "learning_rate": 3.05e-06,
      "loss": 0.0456,
      "step": 61
    },
    {
      "epoch": 2.13953488372093,
      "grad_norm": 0.4597831666469574,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.049,
      "step": 62
    },
    {
      "epoch": 2.1744186046511627,
      "grad_norm": 0.3821645677089691,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0461,
      "step": 63
    },
    {
      "epoch": 2.2093023255813953,
      "grad_norm": 0.3955609202384949,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0472,
      "step": 64
    },
    {
      "epoch": 2.244186046511628,
      "grad_norm": 0.33170193433761597,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0433,
      "step": 65
    },
    {
      "epoch": 2.2790697674418605,
      "grad_norm": 0.389060378074646,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0438,
      "step": 66
    },
    {
      "epoch": 2.313953488372093,
      "grad_norm": 0.31669771671295166,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0403,
      "step": 67
    },
    {
      "epoch": 2.3488372093023258,
      "grad_norm": 0.36098915338516235,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0392,
      "step": 68
    },
    {
      "epoch": 2.383720930232558,
      "grad_norm": 0.4905363619327545,
      "learning_rate": 3.45e-06,
      "loss": 0.0391,
      "step": 69
    },
    {
      "epoch": 2.4186046511627906,
      "grad_norm": 0.32984688878059387,
      "learning_rate": 3.5e-06,
      "loss": 0.0432,
      "step": 70
    },
    {
      "epoch": 2.453488372093023,
      "grad_norm": 0.3890271782875061,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0411,
      "step": 71
    },
    {
      "epoch": 2.488372093023256,
      "grad_norm": 0.4199780821800232,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0404,
      "step": 72
    },
    {
      "epoch": 2.5232558139534884,
      "grad_norm": 0.5035303831100464,
      "learning_rate": 3.65e-06,
      "loss": 0.0417,
      "step": 73
    },
    {
      "epoch": 2.558139534883721,
      "grad_norm": 0.45307648181915283,
      "learning_rate": 3.7e-06,
      "loss": 0.0397,
      "step": 74
    },
    {
      "epoch": 2.5930232558139537,
      "grad_norm": 0.36111533641815186,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.0372,
      "step": 75
    },
    {
      "epoch": 2.6279069767441863,
      "grad_norm": 0.4821806252002716,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0375,
      "step": 76
    },
    {
      "epoch": 2.6627906976744184,
      "grad_norm": 0.4104216992855072,
      "learning_rate": 3.85e-06,
      "loss": 0.0363,
      "step": 77
    },
    {
      "epoch": 2.697674418604651,
      "grad_norm": 0.4832363724708557,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0406,
      "step": 78
    },
    {
      "epoch": 2.7325581395348837,
      "grad_norm": 0.2809476852416992,
      "learning_rate": 3.95e-06,
      "loss": 0.037,
      "step": 79
    },
    {
      "epoch": 2.7674418604651163,
      "grad_norm": 0.22463542222976685,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0359,
      "step": 80
    },
    {
      "epoch": 2.802325581395349,
      "grad_norm": 0.5055042505264282,
      "learning_rate": 4.05e-06,
      "loss": 0.0403,
      "step": 81
    },
    {
      "epoch": 2.8372093023255816,
      "grad_norm": 0.26646772027015686,
      "learning_rate": 4.1e-06,
      "loss": 0.0346,
      "step": 82
    },
    {
      "epoch": 2.8720930232558137,
      "grad_norm": 0.3047487437725067,
      "learning_rate": 4.15e-06,
      "loss": 0.0381,
      "step": 83
    },
    {
      "epoch": 2.9069767441860463,
      "grad_norm": 0.24949510395526886,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0312,
      "step": 84
    },
    {
      "epoch": 2.941860465116279,
      "grad_norm": 0.328715980052948,
      "learning_rate": 4.25e-06,
      "loss": 0.0305,
      "step": 85
    },
    {
      "epoch": 2.9767441860465116,
      "grad_norm": 0.3866642117500305,
      "learning_rate": 4.3e-06,
      "loss": 0.0304,
      "step": 86
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.4193498194217682,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.033,
      "step": 87
    },
    {
      "epoch": 3.0348837209302326,
      "grad_norm": 0.417618066072464,
      "learning_rate": 4.4e-06,
      "loss": 0.0276,
      "step": 88
    },
    {
      "epoch": 3.0697674418604652,
      "grad_norm": 0.29087409377098083,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0303,
      "step": 89
    },
    {
      "epoch": 3.104651162790698,
      "grad_norm": 0.2996387779712677,
      "learning_rate": 4.5e-06,
      "loss": 0.0267,
      "step": 90
    },
    {
      "epoch": 3.13953488372093,
      "grad_norm": 0.27138552069664,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.0236,
      "step": 91
    },
    {
      "epoch": 3.1744186046511627,
      "grad_norm": 0.30709055066108704,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0176,
      "step": 92
    },
    {
      "epoch": 3.2093023255813953,
      "grad_norm": 0.2744629979133606,
      "learning_rate": 4.65e-06,
      "loss": 0.023,
      "step": 93
    },
    {
      "epoch": 3.244186046511628,
      "grad_norm": 0.30749836564064026,
      "learning_rate": 4.7e-06,
      "loss": 0.0228,
      "step": 94
    },
    {
      "epoch": 3.2790697674418605,
      "grad_norm": 0.27703243494033813,
      "learning_rate": 4.75e-06,
      "loss": 0.0232,
      "step": 95
    },
    {
      "epoch": 3.313953488372093,
      "grad_norm": 0.41885659098625183,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.0205,
      "step": 96
    },
    {
      "epoch": 3.3488372093023258,
      "grad_norm": 0.32025253772735596,
      "learning_rate": 4.85e-06,
      "loss": 0.0199,
      "step": 97
    },
    {
      "epoch": 3.383720930232558,
      "grad_norm": 0.2905832529067993,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0175,
      "step": 98
    },
    {
      "epoch": 3.4186046511627906,
      "grad_norm": 0.34518706798553467,
      "learning_rate": 4.95e-06,
      "loss": 0.019,
      "step": 99
    },
    {
      "epoch": 3.453488372093023,
      "grad_norm": 0.24052713811397552,
      "learning_rate": 5e-06,
      "loss": 0.0176,
      "step": 100
    },
    {
      "epoch": 3.488372093023256,
      "grad_norm": 0.28509387373924255,
      "learning_rate": 4.997332437005932e-06,
      "loss": 0.0176,
      "step": 101
    },
    {
      "epoch": 3.5232558139534884,
      "grad_norm": 0.30950990319252014,
      "learning_rate": 4.989335440737587e-06,
      "loss": 0.0165,
      "step": 102
    },
    {
      "epoch": 3.558139534883721,
      "grad_norm": 0.2843819558620453,
      "learning_rate": 4.976026077188013e-06,
      "loss": 0.0142,
      "step": 103
    },
    {
      "epoch": 3.5930232558139537,
      "grad_norm": 0.2819436192512512,
      "learning_rate": 4.957432749209755e-06,
      "loss": 0.0136,
      "step": 104
    },
    {
      "epoch": 3.6279069767441863,
      "grad_norm": 0.2749376893043518,
      "learning_rate": 4.933595135901733e-06,
      "loss": 0.0132,
      "step": 105
    },
    {
      "epoch": 3.6627906976744184,
      "grad_norm": 0.25022268295288086,
      "learning_rate": 4.904564107932048e-06,
      "loss": 0.0102,
      "step": 106
    },
    {
      "epoch": 3.697674418604651,
      "grad_norm": 0.25575509667396545,
      "learning_rate": 4.870401618977415e-06,
      "loss": 0.0116,
      "step": 107
    },
    {
      "epoch": 3.7325581395348837,
      "grad_norm": 0.2706020176410675,
      "learning_rate": 4.83118057351089e-06,
      "loss": 0.0111,
      "step": 108
    },
    {
      "epoch": 3.7674418604651163,
      "grad_norm": 0.33668869733810425,
      "learning_rate": 4.786984671220053e-06,
      "loss": 0.0158,
      "step": 109
    },
    {
      "epoch": 3.802325581395349,
      "grad_norm": 0.2773989737033844,
      "learning_rate": 4.737908228387656e-06,
      "loss": 0.01,
      "step": 110
    },
    {
      "epoch": 3.8372093023255816,
      "grad_norm": 0.23735634982585907,
      "learning_rate": 4.684055976615924e-06,
      "loss": 0.0072,
      "step": 111
    },
    {
      "epoch": 3.8720930232558137,
      "grad_norm": 0.30508551001548767,
      "learning_rate": 4.625542839324036e-06,
      "loss": 0.0084,
      "step": 112
    },
    {
      "epoch": 3.9069767441860463,
      "grad_norm": 0.2335406094789505,
      "learning_rate": 4.562493686495756e-06,
      "loss": 0.0079,
      "step": 113
    },
    {
      "epoch": 3.941860465116279,
      "grad_norm": 0.21952728927135468,
      "learning_rate": 4.4950430682005995e-06,
      "loss": 0.0073,
      "step": 114
    },
    {
      "epoch": 3.9767441860465116,
      "grad_norm": 0.23103268444538116,
      "learning_rate": 4.423334927457198e-06,
      "loss": 0.0086,
      "step": 115
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.23103268444538116,
      "learning_rate": 4.3475222930516484e-06,
      "loss": 0.0091,
      "step": 116
    },
    {
      "epoch": 4.034883720930233,
      "grad_norm": 0.39881983399391174,
      "learning_rate": 4.267766952966369e-06,
      "loss": 0.0044,
      "step": 117
    },
    {
      "epoch": 4.069767441860465,
      "grad_norm": 0.15434899926185608,
      "learning_rate": 4.184239109116393e-06,
      "loss": 0.0034,
      "step": 118
    },
    {
      "epoch": 4.104651162790698,
      "grad_norm": 0.1445930004119873,
      "learning_rate": 4.097117014129903e-06,
      "loss": 0.0039,
      "step": 119
    },
    {
      "epoch": 4.1395348837209305,
      "grad_norm": 0.23641756176948547,
      "learning_rate": 4.006586590948141e-06,
      "loss": 0.0035,
      "step": 120
    },
    {
      "epoch": 4.174418604651163,
      "grad_norm": 0.14500728249549866,
      "learning_rate": 3.91284103605648e-06,
      "loss": 0.0069,
      "step": 121
    },
    {
      "epoch": 4.209302325581396,
      "grad_norm": 0.1661936342716217,
      "learning_rate": 3.81608040719339e-06,
      "loss": 0.0028,
      "step": 122
    },
    {
      "epoch": 4.2441860465116275,
      "grad_norm": 0.15642984211444855,
      "learning_rate": 3.7165111964171407e-06,
      "loss": 0.0035,
      "step": 123
    },
    {
      "epoch": 4.27906976744186,
      "grad_norm": 0.16263604164123535,
      "learning_rate": 3.6143458894413463e-06,
      "loss": 0.0047,
      "step": 124
    },
    {
      "epoch": 4.313953488372093,
      "grad_norm": 0.13780659437179565,
      "learning_rate": 3.5098025121797375e-06,
      "loss": 0.0027,
      "step": 125
    },
    {
      "epoch": 4.348837209302325,
      "grad_norm": 0.37652286887168884,
      "learning_rate": 3.403104165467883e-06,
      "loss": 0.0041,
      "step": 126
    },
    {
      "epoch": 4.383720930232558,
      "grad_norm": 0.19331718981266022,
      "learning_rate": 3.2944785489547544e-06,
      "loss": 0.0032,
      "step": 127
    },
    {
      "epoch": 4.4186046511627906,
      "grad_norm": 0.18035675585269928,
      "learning_rate": 3.184157475180208e-06,
      "loss": 0.003,
      "step": 128
    },
    {
      "epoch": 4.453488372093023,
      "grad_norm": 0.2259574979543686,
      "learning_rate": 3.0723763748753354e-06,
      "loss": 0.004,
      "step": 129
    },
    {
      "epoch": 4.488372093023256,
      "grad_norm": 0.31054359674453735,
      "learning_rate": 2.9593737945414264e-06,
      "loss": 0.003,
      "step": 130
    },
    {
      "epoch": 4.523255813953488,
      "grad_norm": 0.11562985181808472,
      "learning_rate": 2.845390887379706e-06,
      "loss": 0.0016,
      "step": 131
    },
    {
      "epoch": 4.558139534883721,
      "grad_norm": 0.16370315849781036,
      "learning_rate": 2.730670898658255e-06,
      "loss": 0.0049,
      "step": 132
    },
    {
      "epoch": 4.593023255813954,
      "grad_norm": 0.2251535803079605,
      "learning_rate": 2.6154586466143495e-06,
      "loss": 0.0034,
      "step": 133
    },
    {
      "epoch": 4.627906976744186,
      "grad_norm": 0.15986023843288422,
      "learning_rate": 2.5e-06,
      "loss": 0.0035,
      "step": 134
    },
    {
      "epoch": 4.662790697674419,
      "grad_norm": 0.16980388760566711,
      "learning_rate": 2.3845413533856517e-06,
      "loss": 0.0029,
      "step": 135
    },
    {
      "epoch": 4.6976744186046515,
      "grad_norm": 0.19148623943328857,
      "learning_rate": 2.269329101341745e-06,
      "loss": 0.0031,
      "step": 136
    },
    {
      "epoch": 4.732558139534884,
      "grad_norm": 0.12943695485591888,
      "learning_rate": 2.1546091126202955e-06,
      "loss": 0.0035,
      "step": 137
    },
    {
      "epoch": 4.767441860465116,
      "grad_norm": 0.11984245479106903,
      "learning_rate": 2.040626205458574e-06,
      "loss": 0.0028,
      "step": 138
    },
    {
      "epoch": 4.8023255813953485,
      "grad_norm": 0.1532817780971527,
      "learning_rate": 1.9276236251246655e-06,
      "loss": 0.0028,
      "step": 139
    },
    {
      "epoch": 4.837209302325581,
      "grad_norm": 0.5720587372779846,
      "learning_rate": 1.8158425248197931e-06,
      "loss": 0.0022,
      "step": 140
    },
    {
      "epoch": 4.872093023255814,
      "grad_norm": 0.16564592719078064,
      "learning_rate": 1.7055214510452462e-06,
      "loss": 0.0019,
      "step": 141
    },
    {
      "epoch": 4.906976744186046,
      "grad_norm": 0.12440971285104752,
      "learning_rate": 1.5968958345321178e-06,
      "loss": 0.0017,
      "step": 142
    },
    {
      "epoch": 4.941860465116279,
      "grad_norm": 0.08577684313058853,
      "learning_rate": 1.490197487820263e-06,
      "loss": 0.0017,
      "step": 143
    },
    {
      "epoch": 4.976744186046512,
      "grad_norm": 0.10730113834142685,
      "learning_rate": 1.3856541105586545e-06,
      "loss": 0.0016,
      "step": 144
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.11632312089204788,
      "learning_rate": 1.2834888035828597e-06,
      "loss": 0.002,
      "step": 145
    },
    {
      "epoch": 5.034883720930233,
      "grad_norm": 0.11552423238754272,
      "learning_rate": 1.1839195928066101e-06,
      "loss": 0.0021,
      "step": 146
    },
    {
      "epoch": 5.069767441860465,
      "grad_norm": 0.10733441263437271,
      "learning_rate": 1.0871589639435204e-06,
      "loss": 0.0016,
      "step": 147
    },
    {
      "epoch": 5.104651162790698,
      "grad_norm": 0.06744810193777084,
      "learning_rate": 9.934134090518593e-07,
      "loss": 0.0017,
      "step": 148
    },
    {
      "epoch": 5.1395348837209305,
      "grad_norm": 0.0750541016459465,
      "learning_rate": 9.028829858700974e-07,
      "loss": 0.0015,
      "step": 149
    },
    {
      "epoch": 5.174418604651163,
      "grad_norm": 0.04486292973160744,
      "learning_rate": 8.157608908836071e-07,
      "loss": 0.0011,
      "step": 150
    },
    {
      "epoch": 5.209302325581396,
      "grad_norm": 0.060016702860593796,
      "learning_rate": 7.322330470336314e-07,
      "loss": 0.0012,
      "step": 151
    },
    {
      "epoch": 5.2441860465116275,
      "grad_norm": 0.056270696222782135,
      "learning_rate": 6.524777069483526e-07,
      "loss": 0.0009,
      "step": 152
    },
    {
      "epoch": 5.27906976744186,
      "grad_norm": 0.06919720023870468,
      "learning_rate": 5.766650725428027e-07,
      "loss": 0.0023,
      "step": 153
    },
    {
      "epoch": 5.313953488372093,
      "grad_norm": 0.10715026408433914,
      "learning_rate": 5.049569317994013e-07,
      "loss": 0.0016,
      "step": 154
    },
    {
      "epoch": 5.348837209302325,
      "grad_norm": 0.07221261411905289,
      "learning_rate": 4.3750631350424456e-07,
      "loss": 0.0019,
      "step": 155
    },
    {
      "epoch": 5.383720930232558,
      "grad_norm": 0.16049650311470032,
      "learning_rate": 3.7445716067596506e-07,
      "loss": 0.0022,
      "step": 156
    },
    {
      "epoch": 5.4186046511627906,
      "grad_norm": 0.06509647518396378,
      "learning_rate": 3.1594402338407633e-07,
      "loss": 0.0017,
      "step": 157
    },
    {
      "epoch": 5.453488372093023,
      "grad_norm": 0.22881199419498444,
      "learning_rate": 2.620917716123444e-07,
      "loss": 0.002,
      "step": 158
    },
    {
      "epoch": 5.488372093023256,
      "grad_norm": 0.08407599478960037,
      "learning_rate": 2.1301532877994747e-07,
      "loss": 0.0012,
      "step": 159
    },
    {
      "epoch": 5.523255813953488,
      "grad_norm": 0.048866819590330124,
      "learning_rate": 1.6881942648911077e-07,
      "loss": 0.0012,
      "step": 160
    },
    {
      "epoch": 5.558139534883721,
      "grad_norm": 0.09164915978908539,
      "learning_rate": 1.2959838102258537e-07,
      "loss": 0.0023,
      "step": 161
    },
    {
      "epoch": 5.593023255813954,
      "grad_norm": 0.07083190977573395,
      "learning_rate": 9.54358920679524e-08,
      "loss": 0.0009,
      "step": 162
    },
    {
      "epoch": 5.627906976744186,
      "grad_norm": 0.054843056946992874,
      "learning_rate": 6.640486409826785e-08,
      "loss": 0.0007,
      "step": 163
    },
    {
      "epoch": 5.662790697674419,
      "grad_norm": 0.06817606836557388,
      "learning_rate": 4.256725079024554e-08,
      "loss": 0.0015,
      "step": 164
    },
    {
      "epoch": 5.6976744186046515,
      "grad_norm": 0.049567993730306625,
      "learning_rate": 2.3973922811987295e-08,
      "loss": 0.0014,
      "step": 165
    },
    {
      "epoch": 5.732558139534884,
      "grad_norm": 0.07678244262933731,
      "learning_rate": 1.0664559262413831e-08,
      "loss": 0.0029,
      "step": 166
    },
    {
      "epoch": 5.767441860465116,
      "grad_norm": 0.13374446332454681,
      "learning_rate": 2.6675629940689508e-09,
      "loss": 0.0015,
      "step": 167
    },
    {
      "epoch": 5.8023255813953485,
      "grad_norm": 0.0840042233467102,
      "learning_rate": 0.0,
      "loss": 0.0019,
      "step": 168
    }
  ],
  "logging_steps": 1,
  "max_steps": 168,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 28,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4267371372895273e+19,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}